Fork of gr-peach-opencv-project
cascadedetect.hpp
#pragma once

#include "opencv2/core/ocl.hpp"

namespace cv
{

void clipObjects(Size sz, std::vector<Rect>& objects,
                 std::vector<int>* a, std::vector<double>* b);

class FeatureEvaluator
{
public:
    enum
    {
        HAAR = 0,
        LBP  = 1,
        HOG  = 2
    };

    struct ScaleData
    {
        ScaleData() { scale = 0.f; layer_ofs = ystep = 0; }
        Size getWorkingSize(Size winSize) const
        {
            return Size(std::max(szi.width - winSize.width, 0),
                        std::max(szi.height - winSize.height, 0));
        }

        float scale;
        Size szi;
        int layer_ofs, ystep;
    };

    virtual ~FeatureEvaluator();

    virtual bool read(const FileNode& node, Size origWinSize);
    virtual Ptr<FeatureEvaluator> clone() const;
    virtual int getFeatureType() const;
    int getNumChannels() const { return nchannels; }

    virtual bool setImage(InputArray img, const std::vector<float>& scales);
    virtual bool setWindow(Point p, int scaleIdx);
    const ScaleData& getScaleData(int scaleIdx) const
    {
        CV_Assert( 0 <= scaleIdx && scaleIdx < (int)scaleData->size());
        return scaleData->at(scaleIdx);
    }
    virtual void getUMats(std::vector<UMat>& bufs);
    virtual void getMats();

    Size getLocalSize() const { return localSize; }
    Size getLocalBufSize() const { return lbufSize; }

    virtual float calcOrd(int featureIdx) const;
    virtual int calcCat(int featureIdx) const;

    static Ptr<FeatureEvaluator> create(int type);

protected:
    enum { SBUF_VALID=1, USBUF_VALID=2 };
    int sbufFlag;

    bool updateScaleData( Size imgsz, const std::vector<float>& _scales );
    virtual void computeChannels( int, InputArray ) {}
    virtual void computeOptFeatures() {}

    Size origWinSize, sbufSize, localSize, lbufSize;
    int nchannels;
    Mat sbuf, rbuf;
    UMat urbuf, usbuf, ufbuf, uscaleData;

    Ptr<std::vector<ScaleData> > scaleData;
};


class CascadeClassifierImpl : public BaseCascadeClassifier
{
public:
    CascadeClassifierImpl();
    virtual ~CascadeClassifierImpl();

    bool empty() const;
    bool load( const String& filename );
    void read( const FileNode& node );
    bool read_( const FileNode& node );
    void detectMultiScale( InputArray image,
                           CV_OUT std::vector<Rect>& objects,
                           double scaleFactor = 1.1,
                           int minNeighbors = 3, int flags = 0,
                           Size minSize = Size(),
                           Size maxSize = Size() );

    void detectMultiScale( InputArray image,
                           CV_OUT std::vector<Rect>& objects,
                           CV_OUT std::vector<int>& numDetections,
                           double scaleFactor=1.1,
                           int minNeighbors=3, int flags=0,
                           Size minSize=Size(),
                           Size maxSize=Size() );

    void detectMultiScale( InputArray image,
                           CV_OUT std::vector<Rect>& objects,
                           CV_OUT std::vector<int>& rejectLevels,
                           CV_OUT std::vector<double>& levelWeights,
                           double scaleFactor = 1.1,
                           int minNeighbors = 3, int flags = 0,
                           Size minSize = Size(),
                           Size maxSize = Size(),
                           bool outputRejectLevels = false );


    bool isOldFormatCascade() const;
    Size getOriginalWindowSize() const;
    int getFeatureType() const;
    void* getOldCascade();

    void setMaskGenerator(const Ptr<MaskGenerator>& maskGenerator);
    Ptr<MaskGenerator> getMaskGenerator();

protected:
    enum { SUM_ALIGN = 64 };

    bool detectSingleScale( InputArray image, Size processingRectSize,
                            int yStep, double factor, std::vector<Rect>& candidates,
                            std::vector<int>& rejectLevels, std::vector<double>& levelWeights,
                            Size sumSize0, bool outputRejectLevels = false );
#ifdef HAVE_OPENCL
    bool ocl_detectMultiScaleNoGrouping( const std::vector<float>& scales,
                                         std::vector<Rect>& candidates );
#endif
    void detectMultiScaleNoGrouping( InputArray image, std::vector<Rect>& candidates,
                                     std::vector<int>& rejectLevels, std::vector<double>& levelWeights,
                                     double scaleFactor, Size minObjectSize, Size maxObjectSize,
                                     bool outputRejectLevels = false );

    enum { MAX_FACES = 10000 };
    enum { BOOST = 0 };
    enum { DO_CANNY_PRUNING    = CASCADE_DO_CANNY_PRUNING,
           SCALE_IMAGE         = CASCADE_SCALE_IMAGE,
           FIND_BIGGEST_OBJECT = CASCADE_FIND_BIGGEST_OBJECT,
           DO_ROUGH_SEARCH     = CASCADE_DO_ROUGH_SEARCH
         };

    friend class CascadeClassifierInvoker;
    friend class SparseCascadeClassifierInvoker;

    template<class FEval>
    friend int predictOrdered( CascadeClassifierImpl& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);

    template<class FEval>
    friend int predictCategorical( CascadeClassifierImpl& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);

    template<class FEval>
    friend int predictOrderedStump( CascadeClassifierImpl& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);

    template<class FEval>
    friend int predictCategoricalStump( CascadeClassifierImpl& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);

    int runAt( Ptr<FeatureEvaluator>& feval, Point pt, int scaleIdx, double& weight );

    class Data
    {
    public:
        struct DTreeNode
        {
            int featureIdx;
            float threshold; // for ordered features only
            int left;
            int right;
        };

        struct DTree
        {
            int nodeCount;
        };

        struct Stage
        {
            int first;
            int ntrees;
            float threshold;
        };

        struct Stump
        {
            Stump() { }
            Stump(int _featureIdx, float _threshold, float _left, float _right)
                : featureIdx(_featureIdx), threshold(_threshold), left(_left), right(_right) {}

            int featureIdx;
            float threshold;
            float left;
            float right;
        };

        Data();

        bool read(const FileNode &node);

        int stageType;
        int featureType;
        int ncategories;
        int minNodesPerTree, maxNodesPerTree;
        Size origWinSize;

        std::vector<Stage> stages;
        std::vector<DTree> classifiers;
        std::vector<DTreeNode> nodes;
        std::vector<float> leaves;
        std::vector<int> subsets;
        std::vector<Stump> stumps;
    };

    Data data;
    Ptr<FeatureEvaluator> featureEvaluator;
    Ptr<CvHaarClassifierCascade> oldCascade;

    Ptr<MaskGenerator> maskGenerator;
    UMat ugrayImage;
    UMat ufacepos, ustages, unodes, uleaves, usubsets;
#ifdef HAVE_OPENCL
    ocl::Kernel haarKernel, lbpKernel;
    bool tryOpenCL;
#endif

    Mutex mtx;
};

#define CC_CASCADE_PARAMS "cascadeParams"
#define CC_STAGE_TYPE     "stageType"
#define CC_FEATURE_TYPE   "featureType"
#define CC_HEIGHT         "height"
#define CC_WIDTH          "width"

#define CC_STAGE_NUM    "stageNum"
#define CC_STAGES       "stages"
#define CC_STAGE_PARAMS "stageParams"

#define CC_BOOST            "BOOST"
#define CC_MAX_DEPTH        "maxDepth"
#define CC_WEAK_COUNT       "maxWeakCount"
#define CC_STAGE_THRESHOLD  "stageThreshold"
#define CC_WEAK_CLASSIFIERS "weakClassifiers"
#define CC_INTERNAL_NODES   "internalNodes"
#define CC_LEAF_VALUES      "leafValues"

#define CC_FEATURES       "features"
#define CC_FEATURE_PARAMS "featureParams"
#define CC_MAX_CAT_COUNT  "maxCatCount"

#define CC_HAAR   "HAAR"
#define CC_RECTS  "rects"
#define CC_TILTED "tilted"

#define CC_LBP  "LBP"
#define CC_RECT "rect"

#define CC_HOG  "HOG"

#define CV_SUM_PTRS( p0, p1, p2, p3, sum, rect, step )                    \
    /* (x, y) */                                                          \
    (p0) = sum + (rect).x + (step) * (rect).y,                            \
    /* (x + w, y) */                                                      \
    (p1) = sum + (rect).x + (rect).width + (step) * (rect).y,             \
    /* (x, y + h) */                                                      \
    (p2) = sum + (rect).x + (step) * ((rect).y + (rect).height),          \
    /* (x + w, y + h) */                                                  \
    (p3) = sum + (rect).x + (rect).width + (step) * ((rect).y + (rect).height)

#define CV_TILTED_PTRS( p0, p1, p2, p3, tilted, rect, step )                                    \
    /* (x, y) */                                                                                \
    (p0) = tilted + (rect).x + (step) * (rect).y,                                               \
    /* (x - h, y + h) */                                                                        \
    (p1) = tilted + (rect).x - (rect).height + (step) * ((rect).y + (rect).height),             \
    /* (x + w, y + w) */                                                                        \
    (p2) = tilted + (rect).x + (rect).width + (step) * ((rect).y + (rect).width),               \
    /* (x + w - h, y + w + h) */                                                                \
    (p3) = tilted + (rect).x + (rect).width - (rect).height                                     \
           + (step) * ((rect).y + (rect).width + (rect).height)

#define CALC_SUM_(p0, p1, p2, p3, offset) \
    ((p0)[offset] - (p1)[offset] - (p2)[offset] + (p3)[offset])

#define CALC_SUM(rect,offset) CALC_SUM_((rect)[0], (rect)[1], (rect)[2], (rect)[3], offset)

#define CV_SUM_OFS( p0, p1, p2, p3, sum, rect, step )                     \
    /* (x, y) */                                                          \
    (p0) = sum + (rect).x + (step) * (rect).y,                            \
    /* (x + w, y) */                                                      \
    (p1) = sum + (rect).x + (rect).width + (step) * (rect).y,             \
    /* (x, y + h) */                                                      \
    (p2) = sum + (rect).x + (step) * ((rect).y + (rect).height),          \
    /* (x + w, y + h) */                                                  \
    (p3) = sum + (rect).x + (rect).width + (step) * ((rect).y + (rect).height)

#define CV_TILTED_OFS( p0, p1, p2, p3, tilted, rect, step )                                     \
    /* (x, y) */                                                                                \
    (p0) = tilted + (rect).x + (step) * (rect).y,                                               \
    /* (x - h, y + h) */                                                                        \
    (p1) = tilted + (rect).x - (rect).height + (step) * ((rect).y + (rect).height),             \
    /* (x + w, y + w) */                                                                        \
    (p2) = tilted + (rect).x + (rect).width + (step) * ((rect).y + (rect).width),               \
    /* (x + w - h, y + w + h) */                                                                \
    (p3) = tilted + (rect).x + (rect).width - (rect).height                                     \
           + (step) * ((rect).y + (rect).width + (rect).height)

#define CALC_SUM_(p0, p1, p2, p3, offset) \
    ((p0)[offset] - (p1)[offset] - (p2)[offset] + (p3)[offset])

#define CALC_SUM(rect,offset) CALC_SUM_((rect)[0], (rect)[1], (rect)[2], (rect)[3], offset)

#define CALC_SUM_OFS_(p0, p1, p2, p3, ptr) \
    ((ptr)[p0] - (ptr)[p1] - (ptr)[p2] + (ptr)[p3])

#define CALC_SUM_OFS(rect, ptr) CALC_SUM_OFS_((rect)[0], (rect)[1], (rect)[2], (rect)[3], ptr)

//---------------------------------------------- HaarEvaluator ---------------------------------------
class HaarEvaluator : public FeatureEvaluator
{
public:
    struct Feature
    {
        Feature();
        bool read( const FileNode& node );

        bool tilted;

        enum { RECT_NUM = 3 };
        struct
        {
            Rect r;
            float weight;
        } rect[RECT_NUM];
    };

    struct OptFeature
    {
        OptFeature();

        enum { RECT_NUM = Feature::RECT_NUM };
        float calc( const int* pwin ) const;
        void setOffsets( const Feature& _f, int step, int tofs );

        int ofs[RECT_NUM][4];
        float weight[4];
    };

    HaarEvaluator();
    virtual ~HaarEvaluator();

    virtual bool read( const FileNode& node, Size origWinSize);
    virtual Ptr<FeatureEvaluator> clone() const;
    virtual int getFeatureType() const { return FeatureEvaluator::HAAR; }

    virtual bool setWindow(Point p, int scaleIdx);
    Rect getNormRect() const;
    int getSquaresOffset() const;

    float operator()(int featureIdx) const
    { return optfeaturesPtr[featureIdx].calc(pwin) * varianceNormFactor; }
    virtual float calcOrd(int featureIdx) const
    { return (*this)(featureIdx); }

protected:
    virtual void computeChannels( int i, InputArray img );
    virtual void computeOptFeatures();

    Ptr<std::vector<Feature> > features;
    Ptr<std::vector<OptFeature> > optfeatures;
    Ptr<std::vector<OptFeature> > optfeatures_lbuf;
    bool hasTiltedFeatures;

    int tofs, sqofs;
    Vec4i nofs;
    Rect normrect;
    const int* pwin;
    OptFeature* optfeaturesPtr; // optimization
    float varianceNormFactor;
};

inline HaarEvaluator::Feature :: Feature()
{
    tilted = false;
    rect[0].r = rect[1].r = rect[2].r = Rect();
    rect[0].weight = rect[1].weight = rect[2].weight = 0;
}

inline HaarEvaluator::OptFeature :: OptFeature()
{
    weight[0] = weight[1] = weight[2] = 0.f;

    ofs[0][0] = ofs[0][1] = ofs[0][2] = ofs[0][3] =
    ofs[1][0] = ofs[1][1] = ofs[1][2] = ofs[1][3] =
    ofs[2][0] = ofs[2][1] = ofs[2][2] = ofs[2][3] = 0;
}

inline float HaarEvaluator::OptFeature :: calc( const int* ptr ) const
{
    float ret = weight[0] * CALC_SUM_OFS(ofs[0], ptr) +
                weight[1] * CALC_SUM_OFS(ofs[1], ptr);

    if( weight[2] != 0.0f )
        ret += weight[2] * CALC_SUM_OFS(ofs[2], ptr);

    return ret;
}

//---------------------------------------------- LBPEvaluator -------------------------------------

class LBPEvaluator : public FeatureEvaluator
{
public:
    struct Feature
    {
        Feature();
        Feature( int x, int y, int _block_w, int _block_h ) :
            rect(x, y, _block_w, _block_h) {}

        bool read(const FileNode& node );

        Rect rect; // width and height for block
    };

    struct OptFeature
    {
        OptFeature();

        int calc( const int* pwin ) const;
        void setOffsets( const Feature& _f, int step );
        int ofs[16];
    };

    LBPEvaluator();
    virtual ~LBPEvaluator();

    virtual bool read( const FileNode& node, Size origWinSize );
    virtual Ptr<FeatureEvaluator> clone() const;
    virtual int getFeatureType() const { return FeatureEvaluator::LBP; }

    virtual bool setWindow(Point p, int scaleIdx);

    int operator()(int featureIdx) const
    { return optfeaturesPtr[featureIdx].calc(pwin); }
    virtual int calcCat(int featureIdx) const
    { return (*this)(featureIdx); }

protected:
    virtual void computeChannels( int i, InputArray img );
    virtual void computeOptFeatures();

    Ptr<std::vector<Feature> > features;
    Ptr<std::vector<OptFeature> > optfeatures;
    Ptr<std::vector<OptFeature> > optfeatures_lbuf;
    OptFeature* optfeaturesPtr; // optimization

    const int* pwin;
};


inline LBPEvaluator::Feature :: Feature()
{
    rect = Rect();
}

inline LBPEvaluator::OptFeature :: OptFeature()
{
    for( int i = 0; i < 16; i++ )
        ofs[i] = 0;
}

inline int LBPEvaluator::OptFeature :: calc( const int* p ) const
{
    int cval = CALC_SUM_OFS_( ofs[5], ofs[6], ofs[9], ofs[10], p );

    return (CALC_SUM_OFS_( ofs[0], ofs[1], ofs[4], ofs[5], p ) >= cval ? 128 : 0) |   // 0
           (CALC_SUM_OFS_( ofs[1], ofs[2], ofs[5], ofs[6], p ) >= cval ? 64 : 0) |    // 1
           (CALC_SUM_OFS_( ofs[2], ofs[3], ofs[6], ofs[7], p ) >= cval ? 32 : 0) |    // 2
           (CALC_SUM_OFS_( ofs[6], ofs[7], ofs[10], ofs[11], p ) >= cval ? 16 : 0) |  // 5
           (CALC_SUM_OFS_( ofs[10], ofs[11], ofs[14], ofs[15], p ) >= cval ? 8 : 0) | // 8
           (CALC_SUM_OFS_( ofs[9], ofs[10], ofs[13], ofs[14], p ) >= cval ? 4 : 0) |  // 7
           (CALC_SUM_OFS_( ofs[8], ofs[9], ofs[12], ofs[13], p ) >= cval ? 2 : 0) |   // 6
           (CALC_SUM_OFS_( ofs[4], ofs[5], ofs[8], ofs[9], p ) >= cval ? 1 : 0);
}


//---------------------------------------------- predictor functions -------------------------------------

template<class FEval>
inline int predictOrdered( CascadeClassifierImpl& cascade,
                           Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
{
    int nstages = (int)cascade.data.stages.size();
    int nodeOfs = 0, leafOfs = 0;
    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
    float* cascadeLeaves = &cascade.data.leaves[0];
    CascadeClassifierImpl::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
    CascadeClassifierImpl::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
    CascadeClassifierImpl::Data::Stage* cascadeStages = &cascade.data.stages[0];

    for( int si = 0; si < nstages; si++ )
    {
        CascadeClassifierImpl::Data::Stage& stage = cascadeStages[si];
        int wi, ntrees = stage.ntrees;
        sum = 0;

        for( wi = 0; wi < ntrees; wi++ )
        {
            CascadeClassifierImpl::Data::DTree& weak = cascadeWeaks[stage.first + wi];
            int idx = 0, root = nodeOfs;

            do
            {
                CascadeClassifierImpl::Data::DTreeNode& node = cascadeNodes[root + idx];
                double val = featureEvaluator(node.featureIdx);
                idx = val < node.threshold ? node.left : node.right;
            }
            while( idx > 0 );
            sum += cascadeLeaves[leafOfs - idx];
            nodeOfs += weak.nodeCount;
            leafOfs += weak.nodeCount + 1;
        }
        if( sum < stage.threshold )
            return -si;
    }
    return 1;
}

template<class FEval>
inline int predictCategorical( CascadeClassifierImpl& cascade,
                               Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
{
    int nstages = (int)cascade.data.stages.size();
    int nodeOfs = 0, leafOfs = 0;
    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
    size_t subsetSize = (cascade.data.ncategories + 31)/32;
    int* cascadeSubsets = &cascade.data.subsets[0];
    float* cascadeLeaves = &cascade.data.leaves[0];
    CascadeClassifierImpl::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
    CascadeClassifierImpl::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
    CascadeClassifierImpl::Data::Stage* cascadeStages = &cascade.data.stages[0];

    for(int si = 0; si < nstages; si++ )
    {
        CascadeClassifierImpl::Data::Stage& stage = cascadeStages[si];
        int wi, ntrees = stage.ntrees;
        sum = 0;

        for( wi = 0; wi < ntrees; wi++ )
        {
            CascadeClassifierImpl::Data::DTree& weak = cascadeWeaks[stage.first + wi];
            int idx = 0, root = nodeOfs;
            do
            {
                CascadeClassifierImpl::Data::DTreeNode& node = cascadeNodes[root + idx];
                int c = featureEvaluator(node.featureIdx);
                const int* subset = &cascadeSubsets[(root + idx)*subsetSize];
                idx = (subset[c>>5] & (1 << (c & 31))) ? node.left : node.right;
            }
            while( idx > 0 );
            sum += cascadeLeaves[leafOfs - idx];
            nodeOfs += weak.nodeCount;
            leafOfs += weak.nodeCount + 1;
        }
        if( sum < stage.threshold )
            return -si;
    }
    return 1;
}

template<class FEval>
inline int predictOrderedStump( CascadeClassifierImpl& cascade,
                                Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
{
    CV_Assert(!cascade.data.stumps.empty());
    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
    const CascadeClassifierImpl::Data::Stump* cascadeStumps = &cascade.data.stumps[0];
    const CascadeClassifierImpl::Data::Stage* cascadeStages = &cascade.data.stages[0];

    int nstages = (int)cascade.data.stages.size();
    double tmp = 0;

    for( int stageIdx = 0; stageIdx < nstages; stageIdx++ )
    {
        const CascadeClassifierImpl::Data::Stage& stage = cascadeStages[stageIdx];
        tmp = 0;

        int ntrees = stage.ntrees;
        for( int i = 0; i < ntrees; i++ )
        {
            const CascadeClassifierImpl::Data::Stump& stump = cascadeStumps[i];
            double value = featureEvaluator(stump.featureIdx);
            tmp += value < stump.threshold ? stump.left : stump.right;
        }

        if( tmp < stage.threshold )
        {
            sum = (double)tmp;
            return -stageIdx;
        }
        cascadeStumps += ntrees;
    }

    sum = (double)tmp;
    return 1;
}

template<class FEval>
inline int predictCategoricalStump( CascadeClassifierImpl& cascade,
                                    Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
{
    CV_Assert(!cascade.data.stumps.empty());
    int nstages = (int)cascade.data.stages.size();
    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
    size_t subsetSize = (cascade.data.ncategories + 31)/32;
    const int* cascadeSubsets = &cascade.data.subsets[0];
    const CascadeClassifierImpl::Data::Stump* cascadeStumps = &cascade.data.stumps[0];
    const CascadeClassifierImpl::Data::Stage* cascadeStages = &cascade.data.stages[0];

    double tmp = 0;
    for( int si = 0; si < nstages; si++ )
    {
        const CascadeClassifierImpl::Data::Stage& stage = cascadeStages[si];
        int wi, ntrees = stage.ntrees;
        tmp = 0;

        for( wi = 0; wi < ntrees; wi++ )
        {
            const CascadeClassifierImpl::Data::Stump& stump = cascadeStumps[wi];
            int c = featureEvaluator(stump.featureIdx);
            const int* subset = &cascadeSubsets[wi*subsetSize];
            tmp += (subset[c>>5] & (1 << (c & 31))) ? stump.left : stump.right;
        }

        if( tmp < stage.threshold )
        {
            sum = tmp;
            return -si;
        }

        cascadeStumps += ntrees;
        cascadeSubsets += ntrees*subsetSize;
    }

    sum = (double)tmp;
    return 1;
}

} // namespace cv
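The declarations above are the private implementation behind OpenCV's public cv::CascadeClassifier interface; the public detectMultiScale call is dispatched to CascadeClassifierImpl::detectMultiScale declared here. As a rough orientation only, the sketch below drives that public API with the same default parameters declared in this header (scaleFactor = 1.1, minNeighbors = 3). The cascade XML file name and input image path are placeholders, not files shipped with this repository.

    #include <opencv2/objdetect.hpp>
    #include <opencv2/imgproc.hpp>
    #include <opencv2/imgcodecs.hpp>
    #include <vector>
    #include <cstdio>

    int main()
    {
        // Load a trained cascade; the XML path is a placeholder for your own model file.
        cv::CascadeClassifier cascade;
        if (!cascade.load("haarcascade_frontalface_alt.xml"))
            return 1;

        // Placeholder input image.
        cv::Mat img = cv::imread("input.jpg");
        if (img.empty())
            return 1;

        // The detector works on a single-channel image; equalization is a common preprocessing step.
        cv::Mat gray;
        cv::cvtColor(img, gray, cv::COLOR_BGR2GRAY);
        cv::equalizeHist(gray, gray);

        // scaleFactor, minNeighbors, flags and minSize mirror the parameters of
        // CascadeClassifierImpl::detectMultiScale declared above.
        std::vector<cv::Rect> objects;
        cascade.detectMultiScale(gray, objects, 1.1, 3, 0, cv::Size(30, 30));

        std::printf("detected %zu objects\n", objects.size());
        return 0;
    }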
