OpenCV library for Renesas RZ/A

Dependents:   RZ_A2M_Mbed_samples

Committer: RyoheiHagimoto
Date: Fri Jan 29 04:53:38 2021 +0000
Revision: 0:0e0631af0305
Commit message: copied from https://github.com/d-kato/opencv-lib.

/*M///////////////////////////////////////////////////////////////////////////////////////
//
//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
//  By downloading, copying, installing or using the software you agree to this license.
//  If you do not agree to this license, do not download, install,
//  copy or use the software.
//
//
//                          License Agreement
//                For Open Source Computer Vision Library
//
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2009-2011, Willow Garage Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
//   * Redistribution's of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//
//   * Redistribution's in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//
//   * The name of the copyright holders may not be used to endorse or promote products
//     derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/

#ifndef OPENCV_VIDEOSTAB_GLOBAL_MOTION_HPP
#define OPENCV_VIDEOSTAB_GLOBAL_MOTION_HPP

#include <vector>
#include <fstream>
#include "opencv2/core.hpp"
#include "opencv2/features2d.hpp"
#include "opencv2/opencv_modules.hpp"
#include "opencv2/videostab/optical_flow.hpp"
#include "opencv2/videostab/motion_core.hpp"
#include "opencv2/videostab/outlier_rejection.hpp"

#ifdef HAVE_OPENCV_CUDAIMGPROC
#  include "opencv2/cudaimgproc.hpp"
#endif

namespace cv
{
namespace videostab
{

//! @addtogroup videostab_motion
//! @{

/** @brief Estimates best global motion between two 2D point clouds in the least-squares sense.

@note Works in-place and changes input point arrays.

@param points0 Source set of 2D points (32F).
@param points1 Destination set of 2D points (32F).
@param model Motion model (up to MM_AFFINE).
@param rmse Final root-mean-square error.
@return 3x3 2D transformation matrix (32F).
*/
CV_EXPORTS Mat estimateGlobalMotionLeastSquares(
        InputOutputArray points0, InputOutputArray points1, int model = MM_AFFINE,
        float *rmse = 0);
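
/* Usage sketch (editorial illustration, not part of the original header). It assumes two
   already-matched point sets `pts0` and `pts1`; note that the function works in-place and
   may modify both arrays:

       std::vector<cv::Point2f> pts0, pts1;      // matched point pairs, filled elsewhere
       float rmse = 0.f;
       cv::Mat M = cv::videostab::estimateGlobalMotionLeastSquares(
               pts0, pts1, cv::videostab::MM_AFFINE, &rmse);
       // M is a 3x3 CV_32F transform mapping pts0 onto pts1; rmse holds the final fit error.
*/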

/** @brief Estimates best global motion between two 2D point clouds robustly (using RANSAC method).

@param points0 Source set of 2D points (32F).
@param points1 Destination set of 2D points (32F).
@param model Motion model. See cv::videostab::MotionModel.
@param params RANSAC method parameters. See videostab::RansacParams.
@param rmse Final root-mean-square error.
@param ninliers Final number of inliers.
*/
CV_EXPORTS Mat estimateGlobalMotionRansac(
        InputArray points0, InputArray points1, int model = MM_AFFINE,
        const RansacParams &params = RansacParams::default2dMotion(MM_AFFINE),
        float *rmse = 0, int *ninliers = 0);
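
/* Usage sketch (editorial illustration, not part of the original header). It assumes the
   same kind of matched point sets `pts0`/`pts1` as in the sketch above;
   RansacParams::default2dMotion() gives per-model defaults that can then be tuned:

       cv::videostab::RansacParams rp =
               cv::videostab::RansacParams::default2dMotion(cv::videostab::MM_AFFINE);
       rp.thresh = 2.f;                          // inlier threshold in pixels
       float rmse = 0.f;
       int ninliers = 0;
       cv::Mat M = cv::videostab::estimateGlobalMotionRansac(
               pts0, pts1, cv::videostab::MM_AFFINE, rp, &rmse, &ninliers);
*/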

/** @brief Base class for all global motion estimation methods.
*/
class CV_EXPORTS MotionEstimatorBase
{
public:
    virtual ~MotionEstimatorBase() {}

    /** @brief Sets motion model.

    @param val Motion model. See cv::videostab::MotionModel.
    */
    virtual void setMotionModel(MotionModel val) { motionModel_ = val; }

    /**
    @return Motion model. See cv::videostab::MotionModel.
    */
    virtual MotionModel motionModel() const { return motionModel_; }

    /** @brief Estimates global motion between two 2D point clouds.

    @param points0 Source set of 2D points (32F).
    @param points1 Destination set of 2D points (32F).
    @param ok Indicates whether motion was estimated successfully.
    @return 3x3 2D transformation matrix (32F).
    */
    virtual Mat estimate(InputArray points0, InputArray points1, bool *ok = 0) = 0;

protected:
    MotionEstimatorBase(MotionModel model) { setMotionModel(model); }

private:
    MotionModel motionModel_;
};

/** @brief Describes a robust RANSAC-based global 2D motion estimation method which minimizes L2 error.
*/
class CV_EXPORTS MotionEstimatorRansacL2 : public MotionEstimatorBase
{
public:
    MotionEstimatorRansacL2(MotionModel model = MM_AFFINE);

    void setRansacParams(const RansacParams &val) { ransacParams_ = val; }
    RansacParams ransacParams() const { return ransacParams_; }

    void setMinInlierRatio(float val) { minInlierRatio_ = val; }
    float minInlierRatio() const { return minInlierRatio_; }

    virtual Mat estimate(InputArray points0, InputArray points1, bool *ok = 0);

private:
    RansacParams ransacParams_;
    float minInlierRatio_;
};
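
/* Usage sketch (editorial illustration, not part of the original header). The estimator
   wraps the RANSAC routine behind the MotionEstimatorBase interface, so it can be used
   directly on matched point sets or handed to KeypointBasedMotionEstimator further below:

       cv::Ptr<cv::videostab::MotionEstimatorRansacL2> est =
               cv::makePtr<cv::videostab::MotionEstimatorRansacL2>(cv::videostab::MM_HOMOGRAPHY);
       est->setMinInlierRatio(0.2f);             // reject estimates supported by too few inliers
       bool ok = false;
       cv::Mat M = est->estimate(pts0, pts1, &ok);
       if (!ok)
           M = cv::Mat::eye(3, 3, CV_32F);       // fall back to identity on failure
*/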

/** @brief Describes a global 2D motion estimation method which minimizes L1 error.

@note To be able to use this method you must build OpenCV with CLP library support.
*/
class CV_EXPORTS MotionEstimatorL1 : public MotionEstimatorBase
{
public:
    MotionEstimatorL1(MotionModel model = MM_AFFINE);

    virtual Mat estimate(InputArray points0, InputArray points1, bool *ok = 0);

private:
    std::vector<double> obj_, collb_, colub_;
    std::vector<double> elems_, rowlb_, rowub_;
    std::vector<int> rows_, cols_;

    void set(int row, int col, double coef)
    {
        rows_.push_back(row);
        cols_.push_back(col);
        elems_.push_back(coef);
    }
};

/** @brief Base class for global 2D motion estimation methods which take frames as input.
*/
class CV_EXPORTS ImageMotionEstimatorBase
{
public:
    virtual ~ImageMotionEstimatorBase() {}

    virtual void setMotionModel(MotionModel val) { motionModel_ = val; }
    virtual MotionModel motionModel() const { return motionModel_; }

    virtual Mat estimate(const Mat &frame0, const Mat &frame1, bool *ok = 0) = 0;

protected:
    ImageMotionEstimatorBase(MotionModel model) { setMotionModel(model); }

private:
    MotionModel motionModel_;
};

class CV_EXPORTS FromFileMotionReader : public ImageMotionEstimatorBase
{
public:
    FromFileMotionReader(const String &path);

    virtual Mat estimate(const Mat &frame0, const Mat &frame1, bool *ok = 0);

private:
    std::ifstream file_;
};

class CV_EXPORTS ToFileMotionWriter : public ImageMotionEstimatorBase
{
public:
    ToFileMotionWriter(const String &path, Ptr<ImageMotionEstimatorBase> estimator);

    virtual void setMotionModel(MotionModel val) { motionEstimator_->setMotionModel(val); }
    virtual MotionModel motionModel() const { return motionEstimator_->motionModel(); }

    virtual Mat estimate(const Mat &frame0, const Mat &frame1, bool *ok = 0);

private:
    std::ofstream file_;
    Ptr<ImageMotionEstimatorBase> motionEstimator_;
};
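
/* Usage sketch (editorial illustration, not part of the original header). ToFileMotionWriter
   decorates another frame-based estimator and logs each estimated motion to the file at
   `path`, while FromFileMotionReader replays motions from such a file instead of recomputing
   them; "motions.txt" is a placeholder path, and `frame0`/`frame1` are assumed cv::Mat frames:

       cv::Ptr<cv::videostab::ImageMotionEstimatorBase> base =
               cv::makePtr<cv::videostab::KeypointBasedMotionEstimator>(
                       cv::makePtr<cv::videostab::MotionEstimatorRansacL2>());
       cv::videostab::ToFileMotionWriter writer("motions.txt", base);
       cv::Mat M = writer.estimate(frame0, frame1);       // estimated by `base`, then logged

       cv::videostab::FromFileMotionReader reader("motions.txt");
       cv::Mat Mreplayed = reader.estimate(frame0, frame1);   // next motion read back from the file
*/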

/** @brief Describes a global 2D motion estimation method which uses keypoint detection and optical flow for
matching.
*/
class CV_EXPORTS KeypointBasedMotionEstimator : public ImageMotionEstimatorBase
{
public:
    KeypointBasedMotionEstimator(Ptr<MotionEstimatorBase> estimator);

    virtual void setMotionModel(MotionModel val) { motionEstimator_->setMotionModel(val); }
    virtual MotionModel motionModel() const { return motionEstimator_->motionModel(); }

    void setDetector(Ptr<FeatureDetector> val) { detector_ = val; }
    Ptr<FeatureDetector> detector() const { return detector_; }

    void setOpticalFlowEstimator(Ptr<ISparseOptFlowEstimator> val) { optFlowEstimator_ = val; }
    Ptr<ISparseOptFlowEstimator> opticalFlowEstimator() const { return optFlowEstimator_; }

    void setOutlierRejector(Ptr<IOutlierRejector> val) { outlierRejector_ = val; }
    Ptr<IOutlierRejector> outlierRejector() const { return outlierRejector_; }

    virtual Mat estimate(const Mat &frame0, const Mat &frame1, bool *ok = 0);

private:
    Ptr<MotionEstimatorBase> motionEstimator_;
    Ptr<FeatureDetector> detector_;
    Ptr<ISparseOptFlowEstimator> optFlowEstimator_;
    Ptr<IOutlierRejector> outlierRejector_;

    std::vector<uchar> status_;
    std::vector<KeyPoint> keypointsPrev_;
    std::vector<Point2f> pointsPrev_, points_;
    std::vector<Point2f> pointsPrevGood_, pointsGood_;
};
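
/* Usage sketch (editorial illustration, not part of the original header). A typical
   frame-to-frame pipeline: detect keypoints in frame0, track them into frame1 with sparse
   optical flow, reject outliers, then fit the global motion with the wrapped estimator.
   `frame0` and `frame1` are assumed to be consecutive 8-bit frames:

       cv::Ptr<cv::videostab::MotionEstimatorRansacL2> motionEst =
               cv::makePtr<cv::videostab::MotionEstimatorRansacL2>(cv::videostab::MM_AFFINE);
       cv::Ptr<cv::videostab::KeypointBasedMotionEstimator> kbEst =
               cv::makePtr<cv::videostab::KeypointBasedMotionEstimator>(motionEst);
       kbEst->setDetector(cv::GFTTDetector::create(1000));   // optional: override the default detector
       kbEst->setOutlierRejector(
               cv::makePtr<cv::videostab::TranslationBasedLocalOutlierRejector>());
       bool ok = false;
       cv::Mat M = kbEst->estimate(frame0, frame1, &ok);     // 3x3 CV_32F global motion
*/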

#if defined(HAVE_OPENCV_CUDAIMGPROC) && defined(HAVE_OPENCV_CUDAOPTFLOW)

class CV_EXPORTS KeypointBasedMotionEstimatorGpu : public ImageMotionEstimatorBase
{
public:
    KeypointBasedMotionEstimatorGpu(Ptr<MotionEstimatorBase> estimator);

    virtual void setMotionModel(MotionModel val) { motionEstimator_->setMotionModel(val); }
    virtual MotionModel motionModel() const { return motionEstimator_->motionModel(); }

    void setOutlierRejector(Ptr<IOutlierRejector> val) { outlierRejector_ = val; }
    Ptr<IOutlierRejector> outlierRejector() const { return outlierRejector_; }

    virtual Mat estimate(const Mat &frame0, const Mat &frame1, bool *ok = 0);
    Mat estimate(const cuda::GpuMat &frame0, const cuda::GpuMat &frame1, bool *ok = 0);

private:
    Ptr<MotionEstimatorBase> motionEstimator_;
    Ptr<cuda::CornersDetector> detector_;
    SparsePyrLkOptFlowEstimatorGpu optFlowEstimator_;
    Ptr<IOutlierRejector> outlierRejector_;

    cuda::GpuMat frame0_, grayFrame0_, frame1_;
    cuda::GpuMat pointsPrev_, points_;
    cuda::GpuMat status_;

    Mat hostPointsPrev_, hostPoints_;
    std::vector<Point2f> hostPointsPrevTmp_, hostPointsTmp_;
    std::vector<uchar> rejectionStatus_;
};

#endif // defined(HAVE_OPENCV_CUDAIMGPROC) && defined(HAVE_OPENCV_CUDAOPTFLOW)

/** @brief Computes motion between two frames assuming that all the intermediate motions are known.

@param from Source frame index.
@param to Destination frame index.
@param motions Pair-wise motions. motions[i] denotes motion from frame i to frame i+1.
@return Motion from frame `from` to frame `to`.
*/
CV_EXPORTS Mat getMotion(int from, int to, const std::vector<Mat> &motions);
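
/* Usage sketch (editorial illustration, not part of the original header). Given pairwise
   motions where motions[i] maps frame i to frame i+1, getMotion() chains them to obtain the
   motion between two arbitrary frames:

       std::vector<cv::Mat> motions;             // one 3x3 motion per consecutive frame pair
       cv::Mat M = cv::videostab::getMotion(2, 7, motions);
       // conceptually M = motions[6] * motions[5] * ... * motions[2]
*/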

//! @}

} // namespace videostab
} // namespace cv

#endif