opencv on mbed

Dependencies: mbed

Committer: joeverbout
Date: Thu Mar 31 21:16:38 2016 +0000
Revision: 0:ea44dc9ed014
OpenCV on mbed attempt

/*M///////////////////////////////////////////////////////////////////////////////////////
//
//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
//  By downloading, copying, installing or using the software you agree to this license.
//  If you do not agree to this license, do not download, install,
//  copy or use the software.
//
//
//                          License Agreement
//                For Open Source Computer Vision Library
//
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2009-2011, Willow Garage Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
//   * Redistribution's of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//
//   * Redistribution's in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//
//   * The name of the copyright holders may not be used to endorse or promote products
//     derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/

#ifndef __OPENCV_VIDEOSTAB_GLOBAL_MOTION_HPP__
#define __OPENCV_VIDEOSTAB_GLOBAL_MOTION_HPP__

#include <vector>
#include <fstream>
#include "opencv2/core.hpp"
#include "opencv2/features2d.hpp"
#include "opencv2/opencv_modules.hpp"
#include "opencv2/videostab/optical_flow.hpp"
#include "opencv2/videostab/motion_core.hpp"
#include "opencv2/videostab/outlier_rejection.hpp"

#ifdef HAVE_OPENCV_CUDAIMGPROC
#  include "opencv2/cudaimgproc.hpp"
#endif

namespace cv
{
namespace videostab
{

//! @addtogroup videostab_motion
//! @{

/** @brief Estimates best global motion between two 2D point clouds in the least-squares sense.

@note Works in-place and changes input point arrays.

@param points0 Source set of 2D points (32F).
@param points1 Destination set of 2D points (32F).
@param model Motion model (up to MM_AFFINE).
@param rmse Final root-mean-square error.
@return 3x3 2D transformation matrix (32F).
*/
CV_EXPORTS Mat estimateGlobalMotionLeastSquares(
        InputOutputArray points0, InputOutputArray points1, int model = MM_AFFINE,
        float *rmse = 0);
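
// A minimal usage sketch for estimateGlobalMotionLeastSquares; the point data and
// variable names below are illustrative only. Note that the call works in-place and
// may modify the wrapped point arrays.
//
//     std::vector<Point2f> src = { {0,0}, {1,0}, {0,1}, {1,1} };
//     std::vector<Point2f> dst = { {2,1}, {3,1}, {2,2}, {3,2} };  // src shifted by (2,1)
//     Mat p0(src), p1(dst);                                       // 32F point arrays
//     float rmse = 0.f;
//     Mat M = estimateGlobalMotionLeastSquares(p0, p1, MM_TRANSLATION, &rmse);
//     // M is a 3x3 CV_32F matrix; for this data it encodes the (2,1) translation.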

/** @brief Estimates best global motion between two 2D point clouds robustly (using RANSAC method).

@param points0 Source set of 2D points (32F).
@param points1 Destination set of 2D points (32F).
@param model Motion model. See cv::videostab::MotionModel.
@param params RANSAC method parameters. See videostab::RansacParams.
@param rmse Final root-mean-square error.
@param ninliers Final number of inliers.
*/
CV_EXPORTS Mat estimateGlobalMotionRansac(
        InputArray points0, InputArray points1, int model = MM_AFFINE,
        const RansacParams &params = RansacParams::default2dMotion(MM_AFFINE),
        float *rmse = 0, int *ninliers = 0);
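
// A minimal usage sketch for estimateGlobalMotionRansac, reusing the illustrative point
// arrays p0 and p1 from the sketch above. The default RANSAC parameters for the chosen
// model come from RansacParams::default2dMotion.
//
//     float rmse = 0.f;
//     int ninliers = 0;
//     Mat M = estimateGlobalMotionRansac(
//             p0, p1, MM_AFFINE, RansacParams::default2dMotion(MM_AFFINE),
//             &rmse, &ninliers);
//     // ninliers reports how many point pairs agreed with the estimated affine model.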

/** @brief Base class for all global motion estimation methods.
*/
class CV_EXPORTS MotionEstimatorBase
{
public:
    virtual ~MotionEstimatorBase() {}

    /** @brief Sets motion model.

    @param val Motion model. See cv::videostab::MotionModel.
    */
    virtual void setMotionModel(MotionModel val) { motionModel_ = val; }

    /**
    @return Motion model. See cv::videostab::MotionModel.
    */
    virtual MotionModel motionModel() const { return motionModel_; }

    /** @brief Estimates global motion between two 2D point clouds.

    @param points0 Source set of 2D points (32F).
    @param points1 Destination set of 2D points (32F).
    @param ok Indicates whether motion was estimated successfully.
    @return 3x3 2D transformation matrix (32F).
    */
    virtual Mat estimate(InputArray points0, InputArray points1, bool *ok = 0) = 0;

protected:
    MotionEstimatorBase(MotionModel model) { setMotionModel(model); }

private:
    MotionModel motionModel_;
};

/** @brief Describes a robust RANSAC-based global 2D motion estimation method which minimizes L2 error.
*/
class CV_EXPORTS MotionEstimatorRansacL2 : public MotionEstimatorBase
{
public:
    MotionEstimatorRansacL2(MotionModel model = MM_AFFINE);

    void setRansacParams(const RansacParams &val) { ransacParams_ = val; }
    RansacParams ransacParams() const { return ransacParams_; }

    void setMinInlierRatio(float val) { minInlierRatio_ = val; }
    float minInlierRatio() const { return minInlierRatio_; }

    virtual Mat estimate(InputArray points0, InputArray points1, bool *ok = 0);

private:
    RansacParams ransacParams_;
    float minInlierRatio_;
};
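
// A minimal usage sketch: MotionEstimatorRansacL2 is normally used through the
// MotionEstimatorBase interface. The fallback shown on failure is only a suggestion.
//
//     Ptr<MotionEstimatorBase> est = makePtr<MotionEstimatorRansacL2>(MM_HOMOGRAPHY);
//     bool ok = false;
//     Mat M = est->estimate(p0, p1, &ok);
//     if (!ok)
//         M = Mat::eye(3, 3, CV_32F);  // e.g. fall back to the identity motion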

/** @brief Describes a global 2D motion estimation method which minimizes L1 error.

@note To be able to use this method you must build OpenCV with CLP library support.
*/
class CV_EXPORTS MotionEstimatorL1 : public MotionEstimatorBase
{
public:
    MotionEstimatorL1(MotionModel model = MM_AFFINE);

    virtual Mat estimate(InputArray points0, InputArray points1, bool *ok = 0);

private:
    std::vector<double> obj_, collb_, colub_;
    std::vector<double> elems_, rowlb_, rowub_;
    std::vector<int> rows_, cols_;

    void set(int row, int col, double coef)
    {
        rows_.push_back(row);
        cols_.push_back(col);
        elems_.push_back(coef);
    }
};

/** @brief Base class for global 2D motion estimation methods which take frames as input.
*/
class CV_EXPORTS ImageMotionEstimatorBase
{
public:
    virtual ~ImageMotionEstimatorBase() {}

    virtual void setMotionModel(MotionModel val) { motionModel_ = val; }
    virtual MotionModel motionModel() const { return motionModel_; }

    virtual Mat estimate(const Mat &frame0, const Mat &frame1, bool *ok = 0) = 0;

protected:
    ImageMotionEstimatorBase(MotionModel model) { setMotionModel(model); }

private:
    MotionModel motionModel_;
};

class CV_EXPORTS FromFileMotionReader : public ImageMotionEstimatorBase
{
public:
    FromFileMotionReader(const String &path);

    virtual Mat estimate(const Mat &frame0, const Mat &frame1, bool *ok = 0);

private:
    std::ifstream file_;
};

class CV_EXPORTS ToFileMotionWriter : public ImageMotionEstimatorBase
{
public:
    ToFileMotionWriter(const String &path, Ptr<ImageMotionEstimatorBase> estimator);

    virtual void setMotionModel(MotionModel val) { motionEstimator_->setMotionModel(val); }
    virtual MotionModel motionModel() const { return motionEstimator_->motionModel(); }

    virtual Mat estimate(const Mat &frame0, const Mat &frame1, bool *ok = 0);

private:
    std::ofstream file_;
    Ptr<ImageMotionEstimatorBase> motionEstimator_;
};
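
// A minimal usage sketch: ToFileMotionWriter decorates another image-motion estimator and
// records each estimated motion to the given file, which FromFileMotionReader can replay
// later without recomputing. The file name and the wrapped estimator (declared below) are
// illustrative choices, not requirements of this header.
//
//     Ptr<ImageMotionEstimatorBase> est = makePtr<KeypointBasedMotionEstimator>(
//             makePtr<MotionEstimatorRansacL2>(MM_AFFINE));
//     Ptr<ImageMotionEstimatorBase> logging = makePtr<ToFileMotionWriter>("motions.txt", est);
//     // first pass: use `logging` to estimate motions (they are also written to motions.txt)
//     Ptr<ImageMotionEstimatorBase> replay = makePtr<FromFileMotionReader>("motions.txt");
//     // second pass: `replay` returns the recorded motions instead of re-estimating them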

/** @brief Describes a global 2D motion estimation method which uses keypoint detection and optical flow for matching.
*/
class CV_EXPORTS KeypointBasedMotionEstimator : public ImageMotionEstimatorBase
{
public:
    KeypointBasedMotionEstimator(Ptr<MotionEstimatorBase> estimator);

    virtual void setMotionModel(MotionModel val) { motionEstimator_->setMotionModel(val); }
    virtual MotionModel motionModel() const { return motionEstimator_->motionModel(); }

    void setDetector(Ptr<FeatureDetector> val) { detector_ = val; }
    Ptr<FeatureDetector> detector() const { return detector_; }

    void setOpticalFlowEstimator(Ptr<ISparseOptFlowEstimator> val) { optFlowEstimator_ = val; }
    Ptr<ISparseOptFlowEstimator> opticalFlowEstimator() const { return optFlowEstimator_; }

    void setOutlierRejector(Ptr<IOutlierRejector> val) { outlierRejector_ = val; }
    Ptr<IOutlierRejector> outlierRejector() const { return outlierRejector_; }

    virtual Mat estimate(const Mat &frame0, const Mat &frame1, bool *ok = 0);

private:
    Ptr<MotionEstimatorBase> motionEstimator_;
    Ptr<FeatureDetector> detector_;
    Ptr<ISparseOptFlowEstimator> optFlowEstimator_;
    Ptr<IOutlierRejector> outlierRejector_;

    std::vector<uchar> status_;
    std::vector<KeyPoint> keypointsPrev_;
    std::vector<Point2f> pointsPrev_, points_;
    std::vector<Point2f> pointsPrevGood_, pointsGood_;
};
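
// A minimal usage sketch: a typical frame-to-frame setup pairing a feature detector with
// the robust L2 point-cloud estimator. GFTTDetector is one reasonable detector choice,
// not one mandated by this header; `prevFrame` and `frame` are illustrative names.
//
//     Ptr<KeypointBasedMotionEstimator> motionEst = makePtr<KeypointBasedMotionEstimator>(
//             makePtr<MotionEstimatorRansacL2>(MM_AFFINE));
//     motionEst->setDetector(GFTTDetector::create());
//     bool ok = false;
//     Mat M = motionEst->estimate(prevFrame, frame, &ok);  // motion from prevFrame to frame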

#if defined(HAVE_OPENCV_CUDAIMGPROC) && defined(HAVE_OPENCV_CUDAOPTFLOW)

class CV_EXPORTS KeypointBasedMotionEstimatorGpu : public ImageMotionEstimatorBase
{
public:
    KeypointBasedMotionEstimatorGpu(Ptr<MotionEstimatorBase> estimator);

    virtual void setMotionModel(MotionModel val) { motionEstimator_->setMotionModel(val); }
    virtual MotionModel motionModel() const { return motionEstimator_->motionModel(); }

    void setOutlierRejector(Ptr<IOutlierRejector> val) { outlierRejector_ = val; }
    Ptr<IOutlierRejector> outlierRejector() const { return outlierRejector_; }

    virtual Mat estimate(const Mat &frame0, const Mat &frame1, bool *ok = 0);
    Mat estimate(const cuda::GpuMat &frame0, const cuda::GpuMat &frame1, bool *ok = 0);

private:
    Ptr<MotionEstimatorBase> motionEstimator_;
    Ptr<cuda::CornersDetector> detector_;
    SparsePyrLkOptFlowEstimatorGpu optFlowEstimator_;
    Ptr<IOutlierRejector> outlierRejector_;

    cuda::GpuMat frame0_, grayFrame0_, frame1_;
    cuda::GpuMat pointsPrev_, points_;
    cuda::GpuMat status_;

    Mat hostPointsPrev_, hostPoints_;
    std::vector<Point2f> hostPointsPrevTmp_, hostPointsTmp_;
    std::vector<uchar> rejectionStatus_;
};

#endif // defined(HAVE_OPENCV_CUDAIMGPROC) && defined(HAVE_OPENCV_CUDAOPTFLOW)

/** @brief Computes motion between two frames assuming that all the intermediate motions are known.

@param from Source frame index.
@param to Destination frame index.
@param motions Pair-wise motions. motions[i] denotes the motion from frame i to frame i+1.
@return Motion from frame `from` to frame `to`.
*/
CV_EXPORTS Mat getMotion(int from, int to, const std::vector<Mat> &motions);
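
// A minimal usage sketch for composing pair-wise motions. Assuming motions[i] maps frame i
// to frame i+1, getMotion accumulates them over the requested range; the frame indices and
// variable names below are illustrative.
//
//     std::vector<Mat> motions;  // one 3x3 motion per consecutive frame pair
//     // motions.push_back(motionEst->estimate(prevFrame, frame, &ok));  // per frame pair
//     Mat M0to10 = getMotion(0, 10, motions);  // overall motion from frame 0 to frame 10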

//! @}

} // namespace videostab
} // namespace cv

#endif