Renesas GR-PEACH OpenCV Development / gr-peach-opencv-project-sd-card_update

Fork of gr-peach-opencv-project-sd-card by the do

Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers cascadedetect.cpp Source File

cascadedetect.cpp

00001 /*M///////////////////////////////////////////////////////////////////////////////////////
00002 //
00003 //  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
00004 //
00005 //  By downloading, copying, installing or using the software you agree to this license.
00006 //  If you do not agree to this license, do not download, install,
00007 //  copy or use the software.
00008 //
00009 //
00010 //                           License Agreement
00011 //                For Open Source Computer Vision Library
00012 //
00013 // Copyright (C) 2008-2013, Itseez Inc., all rights reserved.
00014 // Third party copyrights are property of their respective owners.
00015 //
00016 // Redistribution and use in source and binary forms, with or without modification,
00017 // are permitted provided that the following conditions are met:
00018 //
00019 //   * Redistribution's of source code must retain the above copyright notice,
00020 //     this list of conditions and the following disclaimer.
00021 //
00022 //   * Redistribution's in binary form must reproduce the above copyright notice,
00023 //     this list of conditions and the following disclaimer in the documentation
00024 //     and/or other materials provided with the distribution.
00025 //
00026 //   * The name of Itseez Inc. may not be used to endorse or promote products
00027 //     derived from this software without specific prior written permission.
00028 //
00029 // This software is provided by the copyright holders and contributors "as is" and
00030 // any express or implied warranties, including, but not limited to, the implied
00031 // warranties of merchantability and fitness for a particular purpose are disclaimed.
00032 // In no event shall the copyright holders or contributors be liable for any direct,
00033 // indirect, incidental, special, exemplary, or consequential damages
00034 // (including, but not limited to, procurement of substitute goods or services;
00035 // loss of use, data, or profits; or business interruption) however caused
00036 // and on any theory of liability, whether in contract, strict liability,
00037 // or tort (including negligence or otherwise) arising in any way out of
00038 // the use of this software, even if advised of the possibility of such damage.
00039 //
00040 //M*/
00041 
00042 #include "precomp.hpp"
00043 #include <cstdio>
00044 
00045 #include "cascadedetect.hpp"
00046 #include "opencv2/objdetect/objdetect_c.h"
00047 //#include "opencl_kernels_objdetect.hpp"
00048 
00049 namespace cv
00050 {
00051 
00052 template<typename _Tp> void copyVectorToUMat(const std::vector<_Tp>& v, UMat& um)
00053 {
00054     if(v.empty())
00055         um.release();
00056     Mat(1, (int)(v.size()*sizeof(v[0])), CV_8U, (void*)&v[0]).copyTo(um);
00057 }
00058 
// Cluster similar rectangles (within 'eps' relative difference, as decided by
// SimilarRects) and replace each cluster of more than 'groupThreshold'
// members by its average rectangle; smaller clusters are discarded. A
// surviving rectangle is additionally dropped when it lies inside another,
// slightly enlarged, sufficiently strong cluster.
//
// weights / levelWeights are optional and parallel to rectList. When both
// are supplied non-empty, 'weights' carries per-rectangle reject levels and
// 'levelWeights' the matching stage scores (see the ROC overload below), and
// the best level/score of each cluster is reported; otherwise 'weights'
// receives the cluster sizes.
void groupRectangles (std::vector<Rect>& rectList, int groupThreshold, double eps,
                     std::vector<int>* weights, std::vector<double>* levelWeights)
{
    if( groupThreshold <= 0 || rectList.empty() )
    {
        // Nothing to group: give every input rectangle a weight of 1.
        if( weights )
        {
            size_t i, sz = rectList.size();
            weights->resize(sz);
            for( i = 0; i < sz; i++ )
                (*weights)[i] = 1;
        }
        return;
    }

    // Label each rectangle so similar rectangles share one class.
    std::vector<int> labels;
    int nclasses = partition(rectList, labels, SimilarRects(eps));

    std::vector<Rect> rrects(nclasses);                    // per-class coordinate sums, later averages
    std::vector<int> rweights(nclasses, 0);                // per-class member counts
    std::vector<int> rejectLevels(nclasses, 0);            // best reject level per class
    std::vector<double> rejectWeights(nclasses, DBL_MIN);  // best level weight per class
    int i, j, nlabels = (int)labels.size();
    for( i = 0; i < nlabels; i++ )
    {
        int cls = labels[i];
        rrects[cls].x += rectList[i].x;
        rrects[cls].y += rectList[i].y;
        rrects[cls].width += rectList[i].width;
        rrects[cls].height += rectList[i].height;
        rweights[cls]++;
    }

    bool useDefaultWeights = false;

    if ( levelWeights && weights && !weights->empty() && !levelWeights->empty() )
    {
        // Keep, per class, the highest reject level and — among entries with
        // an equal level — the highest level weight.
        for( i = 0; i < nlabels; i++ )
        {
            int cls = labels[i];
            if( (*weights)[i] > rejectLevels[cls] )
            {
                rejectLevels[cls] = (*weights)[i];
                rejectWeights[cls] = (*levelWeights)[i];
            }
            else if( ( (*weights)[i] == rejectLevels[cls] ) && ( (*levelWeights)[i] > rejectWeights[cls] ) )
                rejectWeights[cls] = (*levelWeights)[i];
        }
    }
    else
        useDefaultWeights = true;  // no level info: report cluster sizes instead

    // Turn the per-class coordinate sums into average rectangles.
    for( i = 0; i < nclasses; i++ )
    {
        Rect r = rrects[i];
        float s = 1.f/rweights[i];
        rrects[i] = Rect(saturate_cast<int>(r.x*s),
             saturate_cast<int>(r.y*s),
             saturate_cast<int>(r.width*s),
             saturate_cast<int>(r.height*s));
    }

    rectList.clear();
    if( weights )
        weights->clear();
    if( levelWeights )
        levelWeights->clear();

    for( i = 0; i < nclasses; i++ )
    {
        Rect r1 = rrects[i];
        int n1 = rweights[i];
        double w1 = rejectWeights[i];
        int l1 = rejectLevels[i];

        // filter out rectangles which don't have enough similar rectangles
        if( n1 <= groupThreshold )
            continue;
        // filter out small face rectangles inside large rectangles
        for( j = 0; j < nclasses; j++ )
        {
            int n2 = rweights[j];

            if( j == i || n2 <= groupThreshold )
                continue;
            Rect r2 = rrects[j];

            // r2 grown by eps of its own size in each direction
            int dx = saturate_cast<int>( r2.width * eps );
            int dy = saturate_cast<int>( r2.height * eps );

            // NOTE(review): the leading i != j test is redundant — j == i was
            // already skipped above.
            if( i != j &&
                r1.x >= r2.x - dx &&
                r1.y >= r2.y - dy &&
                r1.x + r1.width <= r2.x + r2.width + dx &&
                r1.y + r1.height <= r2.y + r2.height + dy &&
                (n2 > std::max(3, n1) || n1 < 3) )
                break;
        }

        // Keep r1 only when no dominating cluster was found.
        if( j == nclasses )
        {
            rectList.push_back(r1);
            if( weights )
                weights->push_back(useDefaultWeights ? n1 : l1);
            if( levelWeights )
                levelWeights->push_back(w1);
        }
    }
}
00168 
// Weighted mean-shift mode seeking in (x, y, log(scale)) space, used by
// groupRectangles_meanshift() below to merge HOG detections. The kernel
// bandwidth along x and y is multiplied by exp(z) at each sample, so
// detections at larger scales are smoothed over proportionally larger
// neighborhoods.
class MeanshiftGrouping
{
public:
    // densKer: per-axis kernel bandwidths; posV: sample points; wV: sample
    // weights (parallel to posV); eps: convergence threshold for one
    // mean-shift trajectory; maxIter: iteration cap per trajectory.
    // Runs a full mean-shift trajectory from every sample.
    MeanshiftGrouping(const Point3d& densKer, const std::vector<Point3d>& posV,
        const std::vector<double>& wV, double eps, int maxIter = 20)
    {
        densityKernel = densKer;
        weightsV = wV;
        positionsV = posV;
        positionsCount = (int)posV.size();
        meanshiftV.resize(positionsCount);
        distanceV.resize(positionsCount);
        iterMax = maxIter;
        modeEps = eps;

        for (unsigned i = 0; i<positionsV.size(); i++)
        {
            meanshiftV[i] = getNewValue(positionsV[i]);
            distanceV[i] = moveToMode(meanshiftV[i]);   // converged mode for sample i
            meanshiftV[i] -= positionsV[i];             // first-step shift vector
        }
    }

    // Collect the distinct converged modes (two modes closer than 'eps' are
    // considered the same) into modesV, and estimate the density weight of
    // each mode into resWeightsV.
    void getModes(std::vector<Point3d>& modesV, std::vector<double>& resWeightsV, const double eps)
    {
        for (size_t i=0; i <distanceV.size(); i++)
        {
            bool is_found = false;
            for(size_t j=0; j<modesV.size(); j++)
            {
                if ( getDistance(distanceV[i], modesV[j]) < eps)
                {
                    is_found=true;
                    break;
                }
            }
            if (!is_found)
            {
                modesV.push_back(distanceV[i]);
            }
        }

        resWeightsV.resize(modesV.size());

        for (size_t i=0; i<modesV.size(); i++)
        {
            resWeightsV[i] = getResultWeight(modesV[i]);
        }
    }

protected:
    std::vector<Point3d> positionsV;   // input sample points
    std::vector<double> weightsV;      // input sample weights

    Point3d densityKernel;             // per-axis base bandwidths
    int positionsCount;

    std::vector<Point3d> meanshiftV;   // first-step shift vector per sample
    std::vector<Point3d> distanceV;    // converged mode per sample
    int iterMax;                       // iteration cap for moveToMode()
    double modeEps;                    // convergence threshold for moveToMode()

    // One weighted mean-shift step: the kernel-weighted mean of all samples
    // around inPt, computed in coordinates normalized by the (scale-dependent)
    // kernel bandwidth.
    Point3d getNewValue(const Point3d& inPt) const
    {
        Point3d resPoint(.0);
        Point3d ratPoint(.0);
        for (size_t i=0; i<positionsV.size(); i++)
        {
            Point3d aPt= positionsV[i];
            Point3d bPt = inPt;
            Point3d sPt = densityKernel;

            // widen the x/y bandwidth with the sample's scale
            sPt.x *= std::exp(aPt.z);
            sPt.y *= std::exp(aPt.z);

            aPt.x /= sPt.x;
            aPt.y /= sPt.y;
            aPt.z /= sPt.z;

            bPt.x /= sPt.x;
            bPt.y /= sPt.y;
            bPt.z /= sPt.z;

            // Gaussian kernel weight, normalized by the bandwidth magnitude.
            double w = (weightsV[i])*std::exp(-((aPt-bPt).dot(aPt-bPt))/2)/std::sqrt(sPt.dot(Point3d(1,1,1)));

            resPoint += w*aPt;

            ratPoint.x += w/sPt.x;
            ratPoint.y += w/sPt.y;
            ratPoint.z += w/sPt.z;
        }
        resPoint.x /= ratPoint.x;
        resPoint.y /= ratPoint.y;
        resPoint.z /= ratPoint.z;
        return resPoint;
    }

    // Weighted kernel-density estimate at inPt (used as the mode's weight).
    double getResultWeight(const Point3d& inPt) const
    {
        double sumW=0;
        for (size_t i=0; i<positionsV.size(); i++)
        {
            Point3d aPt = positionsV[i];
            Point3d sPt = densityKernel;

            sPt.x *= std::exp(aPt.z);
            sPt.y *= std::exp(aPt.z);

            aPt -= inPt;

            aPt.x /= sPt.x;
            aPt.y /= sPt.y;
            aPt.z /= sPt.z;

            sumW+=(weightsV[i])*std::exp(-(aPt.dot(aPt))/2)/std::sqrt(sPt.dot(Point3d(1,1,1)));
        }
        return sumW;
    }

    // Iterate mean-shift steps from aPt until the move is <= modeEps or the
    // iteration cap is reached; returns the (approximate) mode.
    Point3d moveToMode(Point3d aPt) const
    {
        Point3d bPt;
        for (int i = 0; i<iterMax; i++)
        {
            bPt = aPt;
            aPt = getNewValue(bPt);
            if ( getDistance(aPt, bPt) <= modeEps )
            {
                break;
            }
        }
        return aPt;
    }

    // Squared distance between p1 and p2, normalized by the kernel bandwidth
    // evaluated at p2's scale.
    double getDistance(Point3d p1, Point3d p2) const
    {
        Point3d ns = densityKernel;
        ns.x *= std::exp(p2.z);
        ns.y *= std::exp(p2.z);
        p2 -= p1;
        p2.x /= ns.x;
        p2.y /= ns.y;
        p2.z /= ns.z;
        return p2.dot(p2);
    }
};
00315 //new grouping function with using meanshift
00316 static void groupRectangles_meanshift(std::vector<Rect>& rectList, double detectThreshold, std::vector<double>* foundWeights,
00317                                       std::vector<double>& scales, Size winDetSize)
00318 {
00319     int detectionCount = (int)rectList.size();
00320     std::vector<Point3d> hits(detectionCount), resultHits;
00321     std::vector<double> hitWeights(detectionCount), resultWeights;
00322     Point2d hitCenter;
00323 
00324     for (int i=0; i < detectionCount; i++)
00325     {
00326         hitWeights[i] = (*foundWeights)[i];
00327         hitCenter = (rectList[i].tl() + rectList[i].br())*(0.5); //center of rectangles
00328         hits[i] = Point3d(hitCenter.x, hitCenter.y, std::log(scales[i]));
00329     }
00330 
00331     rectList.clear();
00332     if (foundWeights)
00333         foundWeights->clear();
00334 
00335     double logZ = std::log(1.3);
00336     Point3d smothing(8, 16, logZ);
00337 
00338     MeanshiftGrouping msGrouping(smothing, hits, hitWeights, 1e-5, 100);
00339 
00340     msGrouping.getModes(resultHits, resultWeights, 1);
00341 
00342     for (unsigned i=0; i < resultHits.size(); ++i)
00343     {
00344 
00345         double scale = std::exp(resultHits[i].z);
00346         hitCenter.x = resultHits[i].x;
00347         hitCenter.y = resultHits[i].y;
00348         Size s( int(winDetSize.width * scale), int(winDetSize.height * scale) );
00349         Rect resultRect( int(hitCenter.x-s.width/2), int(hitCenter.y-s.height/2),
00350             int(s.width), int(s.height) );
00351 
00352         if (resultWeights[i] > detectThreshold)
00353         {
00354             rectList.push_back(resultRect);
00355             foundWeights->push_back(resultWeights[i]);
00356         }
00357     }
00358 }
00359 
00360 void groupRectangles (std::vector<Rect>& rectList, int groupThreshold, double eps)
00361 {
00362     groupRectangles (rectList, groupThreshold, eps, 0, 0);
00363 }
00364 
00365 void groupRectangles (std::vector<Rect>& rectList, std::vector<int>& weights, int groupThreshold, double eps)
00366 {
00367     groupRectangles (rectList, groupThreshold, eps, &weights, 0);
00368 }
00369 //used for cascade detection algorithm for ROC-curve calculating
00370 void groupRectangles (std::vector<Rect>& rectList, std::vector<int>& rejectLevels,
00371                      std::vector<double>& levelWeights, int groupThreshold, double eps)
00372 {
00373     groupRectangles (rectList, groupThreshold, eps, &rejectLevels, &levelWeights);
00374 }
00375 //can be used for HOG detection algorithm only
00376 void groupRectangles_meanshift(std::vector<Rect>& rectList, std::vector<double>& foundWeights,
00377                                std::vector<double>& foundScales, double detectThreshold, Size winDetSize)
00378 {
00379     groupRectangles_meanshift(rectList, detectThreshold, &foundWeights, foundScales, winDetSize);
00380 }
00381 
00382 
FeatureEvaluator::~FeatureEvaluator() {}

// Base implementation: remember the detection window size and reset the
// per-scale table. Derived classes parse 'node' for their feature lists.
bool FeatureEvaluator::read(const FileNode&, Size _origWinSize)
{
    origWinSize = _origWinSize;
    localSize = lbufSize = Size(0, 0);
    if (scaleData.empty())
        scaleData = makePtr<std::vector<ScaleData> >();
    else
        scaleData->clear();
    return true;
}
00395 
Ptr<FeatureEvaluator> FeatureEvaluator::clone() const { return Ptr<FeatureEvaluator>(); }  // base: empty clone
int FeatureEvaluator::getFeatureType() const {return -1;}  // base: no concrete feature type
bool FeatureEvaluator::setWindow(Point, int) { return true; }  // base: accept any window
00399 void FeatureEvaluator::getUMats(std::vector<UMat>& bufs)
00400 {
00401     if (!(sbufFlag & USBUF_VALID))
00402     {
00403         sbuf.copyTo(usbuf);
00404         sbufFlag |= USBUF_VALID;
00405     }
00406 
00407     bufs.clear();
00408     bufs.push_back(uscaleData);
00409     bufs.push_back(usbuf);
00410     bufs.push_back(ufbuf);
00411 }
00412 
// Download the integral buffer from the device when only the UMat copy is
// up to date.
void FeatureEvaluator::getMats()
{
    if (!(sbufFlag & SBUF_VALID))
    {
        usbuf.copyTo(sbuf);
        sbufFlag |= SBUF_VALID;
    }
}

float FeatureEvaluator::calcOrd(int) const { return 0.; }  // base: ordered feature value
int FeatureEvaluator::calcCat(int) const { return 0; }     // base: categorical feature value
00424 
// Rebuild the per-scale layout table (scaleData) for the given image size and
// scale factors, packing all scaled integral images row-by-row into one
// shared buffer (sbuf/usbuf). Returns true when the layout changed, meaning
// feature offsets must be recomputed.
bool FeatureEvaluator::updateScaleData( Size imgsz, const std::vector<float>& _scales )
{
    if( scaleData.empty() )
        scaleData = makePtr<std::vector<ScaleData> >();

    size_t i, nscales = _scales.size();
    bool recalcOptFeatures = nscales != scaleData->size();
    scaleData->resize(nscales);

    int layer_dy = 0;          // height of the current row of packed layers
    Point layer_ofs(0,0);      // placement cursor inside the buffer
    Size prevBufSize = sbufSize;
    // Buffer width: the widest (first) scale, rounded up to a multiple of 32.
    sbufSize.width = std::max(sbufSize.width, (int)alignSize(cvRound(imgsz.width/_scales[0]) + 31, 32));
    recalcOptFeatures = recalcOptFeatures || sbufSize.width != prevBufSize.width;

    for( i = 0; i < nscales; i++ )
    {
        FeatureEvaluator::ScaleData& s = scaleData->at(i);
        // A noticeable change in any scale factor also invalidates the layout.
        if( !recalcOptFeatures && fabs(s.scale - _scales[i]) > FLT_EPSILON*100*_scales[i] )
            recalcOptFeatures = true;
        float sc = _scales[i];
        Size sz;
        sz.width = cvRound(imgsz.width/sc);
        sz.height = cvRound(imgsz.height/sc);
        s.ystep = sc >= 2 ? 1 : 2;  // dense scan at coarse scales, 2-px step otherwise
        s.scale = sc;
        s.szi = Size(sz.width+1, sz.height+1);  // integral image is 1 px larger each way

        if( i == 0 )
        {
            layer_dy = s.szi.height;
        }

        // Start a new packing row when this layer would overflow the width.
        if( layer_ofs.x + s.szi.width > sbufSize.width )
        {
            layer_ofs = Point(0, layer_ofs.y + layer_dy);
            layer_dy = s.szi.height;
        }
        s.layer_ofs = layer_ofs.y*sbufSize.width + layer_ofs.x;  // linear offset (in ints)
        layer_ofs.x += s.szi.width;
    }

    layer_ofs.y += layer_dy;
    sbufSize.height = std::max(sbufSize.height, layer_ofs.y);
    recalcOptFeatures = recalcOptFeatures || sbufSize.height != prevBufSize.height;
    return recalcOptFeatures;
}
00472 
00473 
// Resize the input image to every registered scale and compute the per-scale
// feature channels (integral images) into the shared buffer. Returns false
// when there are no scales to process.
bool FeatureEvaluator::setImage( InputArray _image, const std::vector<float>& _scales )
{
    Size imgsz = _image.size();
    bool recalcOptFeatures = updateScaleData(imgsz, _scales);

    size_t i, nscales = scaleData->size();
    if (nscales == 0)
    {
        return false;
    }
    // Scratch buffer for the resized image: big enough for the largest
    // (first) scale, width aligned to 16.
    Size sz0 = scaleData->at(0).szi;
    sz0 = Size(std::max(rbuf.cols, (int)alignSize(sz0.width, 16)), std::max(rbuf.rows, sz0.height));

    if (recalcOptFeatures)
    {
        computeOptFeatures();
        copyVectorToUMat(*scaleData, uscaleData);
    }

    if (_image.isUMat() && localSize.area() > 0)
    {
        // OpenCL path: resize and integrate entirely on the device.
        usbuf.create(sbufSize.height*nchannels, sbufSize.width, CV_32S);
        urbuf.create(sz0, CV_8U);

        for (i = 0; i < nscales; i++)
        {
            const ScaleData& s = scaleData->at(i);
            // szi includes the +1 integral border; the resized image itself
            // is one pixel smaller in each dimension.
            UMat dst(urbuf, Rect(0, 0, s.szi.width - 1, s.szi.height - 1));
            resize(_image, dst, dst.size(), 1. / s.scale, 1. / s.scale, INTER_LINEAR);
            computeChannels((int)i, dst);
        }
        sbufFlag = USBUF_VALID;
    }
    else
    {
        // CPU path.
        Mat image = _image.getMat();
        sbuf.create(sbufSize.height*nchannels, sbufSize.width, CV_32S);
        rbuf.create(sz0, CV_8U);

        for (i = 0; i < nscales; i++)
        {
            const ScaleData& s = scaleData->at(i);
            Mat dst(s.szi.height - 1, s.szi.width - 1, CV_8U, rbuf.ptr());
            resize(image, dst, dst.size(), 1. / s.scale, 1. / s.scale, INTER_LINEAR);
            computeChannels((int)i, dst);
        }
        sbufFlag = SBUF_VALID;
    }

    return true;
}
00525 
00526 //----------------------------------------------  HaarEvaluator ---------------------------------------
00527 
00528 bool HaarEvaluator::Feature :: read( const FileNode& node )
00529 {
00530     FileNode rnode = node[CC_RECTS];
00531     FileNodeIterator it = rnode.begin(), it_end = rnode.end();
00532 
00533     int ri;
00534     for( ri = 0; ri < RECT_NUM; ri++ )
00535     {
00536         rect[ri].r = Rect();
00537         rect[ri].weight = 0.f;
00538     }
00539 
00540     for(ri = 0; it != it_end; ++it, ri++)
00541     {
00542         FileNodeIterator it2 = (*it).begin();
00543         it2 >> rect[ri].r.x >> rect[ri].r.y >>
00544             rect[ri].r.width >> rect[ri].r.height >> rect[ri].weight;
00545     }
00546 
00547     tilted = (int)node[CC_TILTED] != 0;
00548     return true;
00549 }
00550 
HaarEvaluator::HaarEvaluator()
{
    // Plain-value members only; the Ptr containers are created in read().
    optfeaturesPtr = 0;
    pwin = 0;
    localSize = Size(4, 2);
    lbufSize = Size(0, 0);
    nchannels = 0;
    tofs = 0;
}

HaarEvaluator::~HaarEvaluator()
{
}
00564 
// Parse the Haar feature list from the cascade file node; also picks the
// OpenCL work-group / local-buffer configuration for supported devices.
bool HaarEvaluator::read(const FileNode& node, Size _origWinSize)
{
    if (!FeatureEvaluator::read(node, _origWinSize))
        return false;
    size_t i, n = node.size();
    CV_Assert(n > 0);
    if(features.empty())
        features = makePtr<std::vector<Feature> >();
    if(optfeatures.empty())
        optfeatures = makePtr<std::vector<OptFeature> >();
    if (optfeatures_lbuf.empty())
        optfeatures_lbuf = makePtr<std::vector<OptFeature> >();
    features->resize(n);
    FileNodeIterator it = node.begin();
    hasTiltedFeatures = false;
    std::vector<Feature>& ff = *features;
    sbufSize = Size();  // force layout (and feature offset) recomputation
    ufbuf.release();

    for(i = 0; i < n; i++, ++it)
    {
        if(!ff[i].read(*it))
            return false;
        if( ff[i].tilted )
            hasTiltedFeatures = true;  // a tilted-integral channel will be needed
    }
    nchannels = hasTiltedFeatures ? 3 : 2;  // sum + sqsum (+ tilted)
    // Variance-normalization window: detection window minus a 1-px border.
    normrect = Rect(1, 1, origWinSize.width - 2, origWinSize.height - 2);

    localSize = lbufSize = Size(0, 0);
    if (ocl::haveOpenCL())
    {
        if (ocl::Device::getDefault().isAMD() || ocl::Device::getDefault().isIntel())
        {
            localSize = Size(8, 8);
            lbufSize = Size(origWinSize.width + localSize.width,
                            origWinSize.height + localSize.height);
            if (lbufSize.area() > 1024)
                lbufSize = Size(0, 0);  // too large for local memory: disable
        }
    }

    return true;
}
00609 
00610 Ptr<FeatureEvaluator> HaarEvaluator::clone() const
00611 {
00612     Ptr<HaarEvaluator> ret = makePtr<HaarEvaluator>();
00613     *ret = *this;
00614     return ret;
00615 }
00616 
00617 
// Compute the integral channels (sum, squared sum and, when tilted features
// exist, the tilted sum) for one scale directly into the shared buffer.
void HaarEvaluator::computeChannels(int scaleIdx, InputArray img)
{
    const ScaleData& s = scaleData->at(scaleIdx);
    // Squared sums live one channel plane below the sums when there is no
    // tilted plane, two planes below otherwise (the tilted plane in between).
    sqofs = hasTiltedFeatures ? sbufSize.area() * 2 : sbufSize.area();

    if (img.isUMat())
    {
        int sx = s.layer_ofs % sbufSize.width;
        int sy = s.layer_ofs / sbufSize.width;
        int sqy = sy + (sqofs / sbufSize.width);
        UMat sum(usbuf, Rect(sx, sy, s.szi.width, s.szi.height));
        UMat sqsum(usbuf, Rect(sx, sqy, s.szi.width, s.szi.height));
        // Force the view's depth to CV_32S so integral() writes the squared
        // sums in place into the shared CV_32S buffer.
        sqsum.flags = (sqsum.flags & ~UMat::DEPTH_MASK) | CV_32S;

        if (hasTiltedFeatures)
        {
            int sty = sy + (tofs / sbufSize.width);
            UMat tilted(usbuf, Rect(sx, sty, s.szi.width, s.szi.height));
            integral (img, sum, sqsum, tilted, CV_32S, CV_32S);
        }
        else
        {
            UMatData* u = sqsum.u;
            integral (img, sum, sqsum, noArray(), CV_32S, CV_32S);
            // Verify integral() did not reallocate the in-place sqsum view.
            CV_Assert(sqsum.u == u && sqsum.size() == s.szi && sqsum.type()==CV_32S);
        }
    }
    else
    {
        Mat sum(s.szi, CV_32S, sbuf.ptr<int>() + s.layer_ofs, sbuf.step);
        Mat sqsum(s.szi, CV_32S, sum.ptr<int>() + sqofs, sbuf.step);

        if (hasTiltedFeatures)
        {
            Mat tilted(s.szi, CV_32S, sum.ptr<int>() + tofs, sbuf.step);
            integral (img, sum, sqsum, tilted, CV_32S, CV_32S);
        }
        else
            integral (img, sum, sqsum, noArray(), CV_32S, CV_32S);
    }
}
00659 
// Precompute, for every feature, the buffer offsets of its rectangle corners
// so that evaluation becomes a handful of pointer additions.
void HaarEvaluator::computeOptFeatures()
{
    if (hasTiltedFeatures)
        tofs = sbufSize.area();  // tilted plane starts one channel plane down

    int sstep = sbufSize.width;
    // Corner offsets of the variance-normalization rectangle.
    CV_SUM_OFS( nofs[0], nofs[1], nofs[2], nofs[3], 0, normrect, sstep );

    size_t fi, nfeatures = features->size();
    const std::vector<Feature>& ff = *features;
    optfeatures->resize(nfeatures);
    optfeaturesPtr = &(*optfeatures)[0];
    for( fi = 0; fi < nfeatures; fi++ )
        optfeaturesPtr[fi].setOffsets( ff[fi], sstep, tofs );
    optfeatures_lbuf->resize(nfeatures);

    // Second copy with offsets relative to the OpenCL local-buffer row pitch
    // (falls back to the global pitch when no local buffer is used).
    for( fi = 0; fi < nfeatures; fi++ )
        optfeatures_lbuf->at(fi).setOffsets(ff[fi], lbufSize.width > 0 ? lbufSize.width : sstep, tofs);

    copyVectorToUMat(*optfeatures_lbuf, ufbuf);
}
00681 
// Position the evaluation window at pt for the given scale and compute the
// variance-normalization factor. Returns false when the window is out of
// bounds or its pixel variance is too low to be a useful candidate.
bool HaarEvaluator::setWindow( Point pt, int scaleIdx )
{
    const ScaleData& s = getScaleData(scaleIdx);

    if( pt.x < 0 || pt.y < 0 ||
        pt.x + origWinSize.width >= s.szi.width ||
        pt.y + origWinSize.height >= s.szi.height )
        return false;

    pwin = &sbuf.at<int>(pt) + s.layer_ofs;          // window origin in the sum plane
    const int* pq = (const int*)(pwin + sqofs);       // same origin in the sqsum plane
    int valsum = CALC_SUM_OFS(nofs, pwin);
    unsigned valsqsum = (unsigned)(CALC_SUM_OFS(nofs, pq));

    // nf = area^2 * variance of the pixels inside normrect
    double area = normrect.area();
    double nf = area * valsqsum - (double)valsum * valsum;
    if( nf > 0. )
    {
        nf = std::sqrt(nf);
        varianceNormFactor = (float)(1./nf);
        // Reject nearly flat windows: accept only when area/nf < 0.1.
        return area*varianceNormFactor < 1e-1;
    }
    else
    {
        varianceNormFactor = 1.f;
        return false;
    }
}
00710 
00711 
// Convert the feature's (up to three weighted) rectangles into corner offsets
// inside the packed integral buffer; tilted features index the rotated
// integral plane, which starts _tofs ints into the buffer.
void HaarEvaluator::OptFeature::setOffsets( const Feature& _f, int step, int _tofs )
{
    weight[0] = _f.rect[0].weight;
    weight[1] = _f.rect[1].weight;
    weight[2] = _f.rect[2].weight;

    if( _f.tilted )
    {
        CV_TILTED_OFS( ofs[0][0], ofs[0][1], ofs[0][2], ofs[0][3], _tofs, _f.rect[0].r, step );
        CV_TILTED_OFS( ofs[1][0], ofs[1][1], ofs[1][2], ofs[1][3], _tofs, _f.rect[1].r, step );
        CV_TILTED_OFS( ofs[2][0], ofs[2][1], ofs[2][2], ofs[2][3], _tofs, _f.rect[2].r, step );
    }
    else
    {
        CV_SUM_OFS( ofs[0][0], ofs[0][1], ofs[0][2], ofs[0][3], 0, _f.rect[0].r, step );
        CV_SUM_OFS( ofs[1][0], ofs[1][1], ofs[1][2], ofs[1][3], 0, _f.rect[1].r, step );
        CV_SUM_OFS( ofs[2][0], ofs[2][1], ofs[2][2], ofs[2][3], 0, _f.rect[2].r, step );
    }
}
00731 
// Rectangle used for variance normalization.
Rect HaarEvaluator::getNormRect() const
{
    return normrect;
}

// Offset (in ints) from the sum plane to the squared-sum plane.
int HaarEvaluator::getSquaresOffset() const
{
    return sqofs;
}
00741 
00742 //----------------------------------------------  LBPEvaluator -------------------------------------
// Read the base rectangle (x, y, width, height) of one LBP feature from the
// cascade file; the sampling-grid offsets are derived from it in setOffsets().
bool LBPEvaluator::Feature :: read(const FileNode& node )
{
    FileNode rnode = node[CC_RECT];
    FileNodeIterator it = rnode.begin();
    it >> rect.x >> rect.y >> rect.width >> rect.height;
    return true;
}
00750 
LBPEvaluator::LBPEvaluator()
{
    // Allocate the shared tables up front so read()/setImage() can rely on
    // them existing.
    features = makePtr<std::vector<Feature> >();
    optfeatures = makePtr<std::vector<OptFeature> >();
    scaleData = makePtr<std::vector<ScaleData> >();
}

LBPEvaluator::~LBPEvaluator()
{
}
00761 
// Parse the LBP feature list from the cascade file node.
bool LBPEvaluator::read( const FileNode& node, Size _origWinSize )
{
    if (!FeatureEvaluator::read(node, _origWinSize))
        return false;
    if(features.empty())
        features = makePtr<std::vector<Feature> >();
    if(optfeatures.empty())
        optfeatures = makePtr<std::vector<OptFeature> >();
    if (optfeatures_lbuf.empty())
        optfeatures_lbuf = makePtr<std::vector<OptFeature> >();

    features->resize(node.size());
    optfeaturesPtr = 0;  // recomputed in computeOptFeatures()
    FileNodeIterator it = node.begin(), it_end = node.end();
    std::vector<Feature>& ff = *features;
    for(int i = 0; it != it_end; ++it, i++)
    {
        if(!ff[i].read(*it))
            return false;
    }
    nchannels = 1;  // LBP only needs the plain integral image
    localSize = lbufSize = Size(0, 0);
    if (ocl::haveOpenCL())
        localSize = Size(8, 8);

    return true;
}
00789 
00790 Ptr<FeatureEvaluator> LBPEvaluator::clone() const
00791 {
00792     Ptr<LBPEvaluator> ret = makePtr<LBPEvaluator>();
00793     *ret = *this;
00794     return ret;
00795 }
00796 
// Compute the single integral-image channel for one scale into the shared
// buffer (LBP uses no squared or tilted sums).
void LBPEvaluator::computeChannels(int scaleIdx, InputArray _img)
{
    const ScaleData& s = scaleData->at(scaleIdx);

    if (_img.isUMat())
    {
        // Position of this scale's layer inside the packed buffer.
        int sx = s.layer_ofs % sbufSize.width;
        int sy = s.layer_ofs / sbufSize.width;
        UMat sum(usbuf, Rect(sx, sy, s.szi.width, s.szi.height));
        integral (_img, sum, noArray(), noArray(), CV_32S);
    }
    else
    {
        Mat sum(s.szi, CV_32S, sbuf.ptr<int>() + s.layer_ofs, sbuf.step);
        integral (_img, sum, noArray(), noArray(), CV_32S);
    }
}
00814 
// Precompute the buffer offsets of every LBP feature and upload them for the
// OpenCL path.
void LBPEvaluator::computeOptFeatures()
{
    int sstep = sbufSize.width;

    size_t fi, nfeatures = features->size();
    const std::vector<Feature>& ff = *features;
    optfeatures->resize(nfeatures);
    optfeaturesPtr = &(*optfeatures)[0];
    for( fi = 0; fi < nfeatures; fi++ )
        optfeaturesPtr[fi].setOffsets( ff[fi], sstep );
    copyVectorToUMat(*optfeatures, ufbuf);
}
00827 
00828 
// Precompute the 16 integral-image offsets of the 4x4 grid of corner points
// delimiting the LBP feature's 3x3 blocks. _f.rect is the top-left block;
// the other corners are reached by shifting it by twice its own size.
void LBPEvaluator::OptFeature::setOffsets( const Feature& _f, int step )
{
    Rect tr = _f.rect;
    int w0 = tr.width;
    int h0 = tr.height;

    // top-left corner block, then top-right, bottom-right, bottom-left
    CV_SUM_OFS( ofs[0], ofs[1], ofs[4], ofs[5], 0, tr, step );
    tr.x += 2*w0;
    CV_SUM_OFS( ofs[2], ofs[3], ofs[6], ofs[7], 0, tr, step );
    tr.y += 2*h0;
    CV_SUM_OFS( ofs[10], ofs[11], ofs[14], ofs[15], 0, tr, step );
    tr.x -= 2*w0;
    CV_SUM_OFS( ofs[8], ofs[9], ofs[12], ofs[13], 0, tr, step );
}
00843 
00844 
// Position the evaluation window at pt for the given scale. Returns false
// when the window does not fit inside the scaled image.
bool LBPEvaluator::setWindow( Point pt, int scaleIdx )
{
    CV_Assert(0 <= scaleIdx && scaleIdx < (int)scaleData->size());
    const ScaleData& s = scaleData->at(scaleIdx);

    if( pt.x < 0 || pt.y < 0 ||
        pt.x + origWinSize.width >= s.szi.width ||
        pt.y + origWinSize.height >= s.szi.height )
        return false;

    pwin = &sbuf.at<int>(pt) + s.layer_ofs;  // window origin in the sum plane
    return true;
}
00858 
00859 
00860 Ptr<FeatureEvaluator> FeatureEvaluator::create( int featureType )
00861 {
00862     return featureType == HAAR ? Ptr<FeatureEvaluator>(new HaarEvaluator) :
00863         featureType == LBP ? Ptr<FeatureEvaluator>(new LBPEvaluator) :
00864         Ptr<FeatureEvaluator>();
00865 }
00866 
00867 //---------------------------------------- Classifier Cascade --------------------------------------------
00868 
CascadeClassifierImpl::CascadeClassifierImpl()
{
}

CascadeClassifierImpl::~CascadeClassifierImpl()
{
}

// The classifier is empty when neither a new-style cascade (data) nor a
// legacy one (oldCascade) has been loaded.
bool CascadeClassifierImpl::empty() const
{
    return !oldCascade && data.stages.empty();
}
00881 
00882 bool CascadeClassifierImpl::load(const String& filename)
00883 {
00884     oldCascade.release();
00885     data = Data();
00886     featureEvaluator.release();
00887 
00888     FileStorage fs(filename, FileStorage::READ);
00889     if( !fs.isOpened() )
00890         return false;
00891 
00892     if( read_(fs.getFirstTopLevelNode()) )
00893         return true;
00894 
00895     fs.release();
00896 
00897     oldCascade.reset((CvHaarClassifierCascade*)cvLoad(filename.c_str(), 0, 0, 0));
00898     return !oldCascade.empty();
00899 }
00900 
// Read a new-format cascade from an already-opened FileNode.
// NOTE: the boolean result of read_() is discarded here; use load() or
// CascadeClassifier::read() when success/failure must be observed.
void CascadeClassifierImpl::read(const FileNode& node)
{
    read_(node);
}
00905 
00906 int CascadeClassifierImpl::runAt( Ptr<FeatureEvaluator>& evaluator, Point pt, int scaleIdx, double& weight )
00907 {
00908     assert( !oldCascade &&
00909            (data.featureType == FeatureEvaluator::HAAR ||
00910             data.featureType == FeatureEvaluator::LBP ||
00911             data.featureType == FeatureEvaluator::HOG) );
00912 
00913     if( !evaluator->setWindow(pt, scaleIdx) )
00914         return -1;
00915     if( data.maxNodesPerTree == 1 )
00916     {
00917         if( data.featureType == FeatureEvaluator::HAAR )
00918             return predictOrderedStump<HaarEvaluator>( *this, evaluator, weight );
00919         else if( data.featureType == FeatureEvaluator::LBP )
00920             return predictCategoricalStump<LBPEvaluator>( *this, evaluator, weight );
00921         else
00922             return -2;
00923     }
00924     else
00925     {
00926         if( data.featureType == FeatureEvaluator::HAAR )
00927             return predictOrdered<HaarEvaluator>( *this, evaluator, weight );
00928         else if( data.featureType == FeatureEvaluator::LBP )
00929             return predictCategorical<LBPEvaluator>( *this, evaluator, weight );
00930         else
00931             return -2;
00932     }
00933 }
00934 
// Install a mask generator consulted by detectMultiScaleNoGrouping
// (a null Ptr disables masking).
void CascadeClassifierImpl::setMaskGenerator(const Ptr<MaskGenerator>& _maskGenerator)
{
    maskGenerator=_maskGenerator;
}
// Return the currently installed mask generator (may be a null Ptr).
Ptr<CascadeClassifierImpl::MaskGenerator> CascadeClassifierImpl::getMaskGenerator()
{
    return maskGenerator;
}
00943 
// Factory for an optional face-detection mask generator.
// Only the Tegra-optimized build supplies one; all other builds return an
// empty Ptr, meaning "no masking".
Ptr<BaseCascadeClassifier::MaskGenerator> createFaceDetectionMaskGenerator()
{
#ifdef HAVE_TEGRA_OPTIMIZATION
    if (tegra::useTegra())
        return tegra::getCascadeClassifierMaskGenerator();
#endif
    return Ptr<BaseCascadeClassifier::MaskGenerator>();
}
00952 
// Parallel body that scans horizontal stripes of every pyramid layer.
// Each Range element given to operator() covers stripe indices
// [range.start, range.end) across all scales; accepted windows are appended
// to shared output vectors under mtx.
class CascadeClassifierInvoker : public ParallelLoopBody
{
public:
    CascadeClassifierInvoker( CascadeClassifierImpl& _cc, int _nscales, int _nstripes,
                              const FeatureEvaluator::ScaleData* _scaleData,
                              const int* _stripeSizes, std::vector<Rect>& _vec,
                              std::vector<int>& _levels, std::vector<double>& _weights,
                              bool outputLevels, const Mat& _mask, Mutex* _mtx)
    {
        classifier = &_cc;
        nscales = _nscales;
        nstripes = _nstripes;
        scaleData = _scaleData;
        stripeSizes = _stripeSizes;
        rectangles = &_vec;
        // When reject levels are not requested, the level/weight outputs stay null
        // and operator() takes the simpler "result > 0" branch.
        rejectLevels = outputLevels ? &_levels : 0;
        levelWeights = outputLevels ? &_weights : 0;
        mask = _mask;
        mtx = _mtx;
    }

    void operator()(const Range& range) const
    {
        // Clone the evaluator so per-window state (cached window pointer etc.)
        // is private to this worker.
        Ptr<FeatureEvaluator> evaluator = classifier->featureEvaluator->clone();
        double gypWeight = 0.;
        Size origWinSize = classifier->data.origWinSize;

        for( int scaleIdx = 0; scaleIdx < nscales; scaleIdx++ )
        {
            const FeatureEvaluator::ScaleData& s = scaleData[scaleIdx];
            float scalingFactor = s.scale;
            int yStep = s.ystep;                 // window stride within this layer
            int stripeSize = stripeSizes[scaleIdx];
            int y0 = range.start*stripeSize;     // this worker's stripe, layer coords
            Size szw = s.getWorkingSize(origWinSize);
            int y1 = std::min(range.end*stripeSize, szw.height);
            // Detection rectangle size mapped back to original-image coordinates.
            Size winSize(cvRound(origWinSize.width * scalingFactor),
                         cvRound(origWinSize.height * scalingFactor));

            for( int y = y0; y < y1; y += yStep )
            {
                for( int x = 0; x < szw.width; x += yStep )
                {
                    int result = classifier->runAt(evaluator, Point(x, y), scaleIdx, gypWeight);
                    if( rejectLevels )
                    {
                        if( result == 1 )
                            result = -(int)classifier->data.stages.size();
                        if( -result >= 0 ) // TODO: Add variable to define a specific last accepted Stage - ABI_COMPATIBILITY problem with new/changed virtual functions - PR #5362
                        {
                            // One lock spans all three pushes so the vectors
                            // stay index-aligned.
                            mtx->lock();
                            rectangles->push_back(Rect(cvRound(x*scalingFactor),
                                                       cvRound(y*scalingFactor),
                                                       winSize.width, winSize.height));
                            rejectLevels->push_back(-result);
                            levelWeights->push_back(gypWeight);
                            mtx->unlock();
                        }
                    }
                    else if( result > 0 )
                    {
                        mtx->lock();
                        rectangles->push_back(Rect(cvRound(x*scalingFactor),
                                                   cvRound(y*scalingFactor),
                                                   winSize.width, winSize.height));
                        mtx->unlock();
                    }
                    // result == 0: presumably a very early rejection — the scan
                    // advances an extra yStep in x. TODO(review): confirm against
                    // the predict* implementations.
                    if( result == 0 )
                        x += yStep;
                }
            }
        }
    }

    CascadeClassifierImpl* classifier;
    std::vector<Rect>* rectangles;       // shared output, guarded by mtx
    int nscales, nstripes;
    const FeatureEvaluator::ScaleData* scaleData;
    const int* stripeSizes;              // per-scale stripe height, in layer rows
    std::vector<int> *rejectLevels;      // null unless outputLevels requested
    std::vector<double> *levelWeights;   // null unless outputLevels requested
    std::vector<float> scales;           // NOTE(review): never used in this class
    Mat mask;                            // NOTE(review): stored but not consulted here
    Mutex* mtx;
};
01038 
01039 
// Functor for std::transform: extracts the rectangle from an old-API CvAvgComp.
struct getRect { Rect operator ()(const CvAvgComp& e) const { return e.rect; } };
// Functor for std::transform: extracts the neighbor count from a CvAvgComp.
struct getNeighbors { int operator ()(const CvAvgComp& e) const { return e.neighbors; } };
01042 
#ifdef HAVE_OPENCL
// OpenCL fast path: evaluates all scales in a single kernel launch and reads
// the detected windows back from the ufacepos buffer.
// Returns false whenever device/kernel setup is unusable so the caller can
// fall back to the CPU implementation.
bool CascadeClassifierImpl::ocl_detectMultiScaleNoGrouping( const std::vector<float>& scales,
                                                            std::vector<Rect>& candidates )
{
    int featureType = getFeatureType();
    std::vector<UMat> bufs;
    featureEvaluator->getUMats(bufs);
    Size localsz = featureEvaluator->getLocalSize();
    if( localsz.area() == 0 )
        return false;                       // evaluator has no OpenCL configuration
    Size lbufSize = featureEvaluator->getLocalBufSize();
    size_t localsize[] = { (size_t)localsz.width, (size_t)localsz.height };
    const int grp_per_CU = 12;              // work-groups launched per compute unit
    size_t globalsize[] = { grp_per_CU*ocl::Device::getDefault().maxComputeUnits()*localsize[0], localsize[1] };
    bool ok = false;

    // Output layout: ufacepos[0] = face count, then 3 ints per detection
    // (scale index, x, y) — see the readback loop at the bottom.
    ufacepos.create(1, MAX_FACES*3+1, CV_32S);
    UMat ufacepos_count(ufacepos, Rect(0, 0, 1, 1));
    ufacepos_count.setTo(Scalar::all(0));

    // Lazily upload cascade tables to the device; read_() releases ustages
    // so a reload triggers a fresh upload here.
    if( ustages.empty() )
    {
        copyVectorToUMat(data.stages, ustages);
        if (!data.stumps.empty())
            copyVectorToUMat(data.stumps, unodes);
        else
            copyVectorToUMat(data.nodes, unodes);
        copyVectorToUMat(data.leaves, uleaves);
        if( !data.subsets.empty() )
            copyVectorToUMat(data.subsets, usubsets);
    }

    int nstages = (int)data.stages.size();
    int splitstage_ocl = 1;

    if( featureType == FeatureEvaluator::HAAR )
    {
        Ptr<HaarEvaluator> haar = featureEvaluator.dynamicCast<HaarEvaluator>();
        if( haar.empty() )
            return false;

        // Compile the kernel on first use; build options bake in the geometry
        // and cascade dimensions as compile-time constants.
        if( haarKernel.empty() )
        {
            String opts;
            if (lbufSize.area())
                opts = format("-D LOCAL_SIZE_X=%d -D LOCAL_SIZE_Y=%d -D SUM_BUF_SIZE=%d -D SUM_BUF_STEP=%d -D NODE_COUNT=%d -D SPLIT_STAGE=%d -D N_STAGES=%d -D MAX_FACES=%d -D HAAR",
                              localsz.width, localsz.height, lbufSize.area(), lbufSize.width, data.maxNodesPerTree, splitstage_ocl, nstages, MAX_FACES);
            else
                opts = format("-D LOCAL_SIZE_X=%d -D LOCAL_SIZE_Y=%d -D NODE_COUNT=%d -D SPLIT_STAGE=%d -D N_STAGES=%d -D MAX_FACES=%d -D HAAR",
                              localsz.width, localsz.height, data.maxNodesPerTree, splitstage_ocl, nstages, MAX_FACES);
            haarKernel.create("runHaarClassifier", ocl::objdetect::cascadedetect_oclsrc, opts);
            if( haarKernel.empty() )
                return false;
        }

        Rect normrect = haar->getNormRect();
        int sqofs = haar->getSquaresOffset();

        haarKernel.args((int)scales.size(),
                        ocl::KernelArg::PtrReadOnly(bufs[0]), // scaleData
                        ocl::KernelArg::ReadOnlyNoSize(bufs[1]), // sum
                        ocl::KernelArg::PtrReadOnly(bufs[2]), // optfeatures

                        // cascade classifier
                        ocl::KernelArg::PtrReadOnly(ustages),
                        ocl::KernelArg::PtrReadOnly(unodes),
                        ocl::KernelArg::PtrReadOnly(uleaves),

                        ocl::KernelArg::PtrWriteOnly(ufacepos), // positions
                        normrect, sqofs, data.origWinSize);
        ok = haarKernel.run(2, globalsize, localsize, true);
    }
    else if( featureType == FeatureEvaluator::LBP )
    {
        // The LBP kernel only supports stump-only cascades.
        if (data.maxNodesPerTree > 1)
            return false;

        Ptr<LBPEvaluator> lbp = featureEvaluator.dynamicCast<LBPEvaluator>();
        if( lbp.empty() )
            return false;

        if( lbpKernel.empty() )
        {
            String opts;
            if (lbufSize.area())
                opts = format("-D LOCAL_SIZE_X=%d -D LOCAL_SIZE_Y=%d -D SUM_BUF_SIZE=%d -D SUM_BUF_STEP=%d -D SPLIT_STAGE=%d -D N_STAGES=%d -D MAX_FACES=%d -D LBP",
                              localsz.width, localsz.height, lbufSize.area(), lbufSize.width, splitstage_ocl, nstages, MAX_FACES);
            else
                opts = format("-D LOCAL_SIZE_X=%d -D LOCAL_SIZE_Y=%d -D SPLIT_STAGE=%d -D N_STAGES=%d -D MAX_FACES=%d -D LBP",
                              localsz.width, localsz.height, splitstage_ocl, nstages, MAX_FACES);
            lbpKernel.create("runLBPClassifierStumpSimple", ocl::objdetect::cascadedetect_oclsrc, opts);
            if( lbpKernel.empty() )
                return false;
        }

        // Categorical splits are stored as bitsets of 32-bit words.
        int subsetSize = (data.ncategories + 31)/32;
        lbpKernel.args((int)scales.size(),
                       ocl::KernelArg::PtrReadOnly(bufs[0]), // scaleData
                       ocl::KernelArg::ReadOnlyNoSize(bufs[1]), // sum
                       ocl::KernelArg::PtrReadOnly(bufs[2]), // optfeatures

                       // cascade classifier
                       ocl::KernelArg::PtrReadOnly(ustages),
                       ocl::KernelArg::PtrReadOnly(unodes),
                       ocl::KernelArg::PtrReadOnly(usubsets),
                       subsetSize,

                       ocl::KernelArg::PtrWriteOnly(ufacepos), // positions
                       data.origWinSize);

        ok = lbpKernel.run(2, globalsize, localsize, true);
    }

    if( ok )
    {
        // Map each (scaleIdx, x, y) record back to original-image coordinates.
        Mat facepos = ufacepos.getMat(ACCESS_READ);
        const int* fptr = facepos.ptr<int>();
        int nfaces = fptr[0];
        nfaces = std::min(nfaces, (int)MAX_FACES);   // kernel may report overflow

        for( int i = 0; i < nfaces; i++ )
        {
            const FeatureEvaluator::ScaleData& s = featureEvaluator->getScaleData(fptr[i*3 + 1]);
            candidates.push_back(Rect(cvRound(fptr[i*3 + 2]*s.scale),
                                      cvRound(fptr[i*3 + 3]*s.scale),
                                      cvRound(data.origWinSize.width*s.scale),
                                      cvRound(data.origWinSize.height*s.scale)));
        }
    }
    return ok;
}
#endif
01175 
01176 bool CascadeClassifierImpl::isOldFormatCascade() const
01177 {
01178     return !oldCascade.empty();
01179 }
01180 
// Forward to the evaluator (FeatureEvaluator::HAAR / LBP / HOG).
// Precondition: a new-format cascade is loaded (featureEvaluator non-null).
int CascadeClassifierImpl::getFeatureType() const
{
    return featureEvaluator->getFeatureType();
}
01185 
// Detection window size at scale 1.0, as read from the cascade file.
Size CascadeClassifierImpl::getOriginalWindowSize() const
{
    return data.origWinSize;
}
01190 
// Expose the legacy CvHaarClassifierCascade as an opaque pointer
// (null when the loaded model is new-format).
void* CascadeClassifierImpl::getOldCascade()
{
    return oldCascade;
}
01195 
01196 static void detectMultiScaleOldFormat( const Mat& image, Ptr<CvHaarClassifierCascade> oldCascade,
01197                                        std::vector<Rect>& objects,
01198                                        std::vector<int>& rejectLevels,
01199                                        std::vector<double>& levelWeights,
01200                                        std::vector<CvAvgComp>& vecAvgComp,
01201                                        double scaleFactor, int minNeighbors,
01202                                        int flags, Size minObjectSize, Size maxObjectSize,
01203                                        bool outputRejectLevels = false )
01204 {
01205     MemStorage storage(cvCreateMemStorage(0));
01206     CvMat _image = image;
01207     CvSeq* _objects = cvHaarDetectObjectsForROC( &_image, oldCascade, storage, rejectLevels, levelWeights, scaleFactor,
01208                                                  minNeighbors, flags, minObjectSize, maxObjectSize, outputRejectLevels );
01209     Seq<CvAvgComp>(_objects).copyTo(vecAvgComp);
01210     objects.resize(vecAvgComp.size());
01211     std::transform(vecAvgComp.begin(), vecAvgComp.end(), objects.begin(), getRect());
01212 }
01213 
01214 
// Core multiscale scan (no rectangle grouping): converts the input to gray,
// builds the list of scales, tries the OpenCL path when eligible, and
// otherwise runs the CPU scan via CascadeClassifierInvoker.
// Outputs raw candidate rectangles (plus reject levels/weights if requested).
void CascadeClassifierImpl::detectMultiScaleNoGrouping( InputArray _image, std::vector<Rect>& candidates,
                                                    std::vector<int>& rejectLevels, std::vector<double>& levelWeights,
                                                    double scaleFactor, Size minObjectSize, Size maxObjectSize,
                                                    bool outputRejectLevels )
{
    Size imgsz = _image.size();

    Mat grayImage;
    _InputArray gray;

    candidates.clear();
    rejectLevels.clear();
    levelWeights.clear();

    // A zero max size means "no upper bound" — use the whole image.
    if( maxObjectSize.height == 0 || maxObjectSize.width == 0 )
        maxObjectSize = imgsz;

#ifdef HAVE_OPENCL
    // OpenCL eligibility: device path only for new-format, unmasked cascades
    // with uniform tree sizes and no reject-level output.
    bool use_ocl = tryOpenCL && ocl::useOpenCL() &&
         featureEvaluator->getLocalSize().area() > 0 &&
         ocl::Device::getDefault().type() != ocl::Device::TYPE_CPU &&
         (data.minNodesPerTree == data.maxNodesPerTree) &&
         !isOldFormatCascade() &&
         maskGenerator.empty() &&
         !outputRejectLevels;
#endif

    /*if( use_ocl )
    {
        if (_image.channels() > 1)
            cvtColor(_image, ugrayImage, COLOR_BGR2GRAY);
        else if (_image.isUMat())
            ugrayImage = _image.getUMat();
        else
            _image.copyTo(ugrayImage);
        gray = ugrayImage;
    }
    else*/
    {
        // Normalize input to a single-channel Mat (copy only when necessary).
        if (_image.channels() > 1)
            cvtColor(_image, grayImage, COLOR_BGR2GRAY);
        else if (_image.isMat())
            grayImage = _image.getMat();
        else
            _image.copyTo(grayImage);
        gray = grayImage;
    }

    std::vector<float> scales;
    scales.reserve(1024);

    // Geometric progression of scales; stop once the scaled window no longer
    // fits the image or exceeds maxObjectSize, skip scales below minObjectSize.
    for( double factor = 1; ; factor *= scaleFactor )
    {
        Size originalWindowSize = getOriginalWindowSize();

        Size windowSize( cvRound(originalWindowSize.width*factor), cvRound(originalWindowSize.height*factor) );
        if( windowSize.width > maxObjectSize.width || windowSize.height > maxObjectSize.height ||
            windowSize.width > imgsz.width || windowSize.height > imgsz.height )
            break;
        if( windowSize.width < minObjectSize.width || windowSize.height < minObjectSize.height )
            continue;
        scales.push_back((float)factor);
    }

    if( scales.size() == 0 || !featureEvaluator->setImage(gray, scales) )
        return;

#ifdef HAVE_OPENCL
    // OpenCL code
    CV_OCL_RUN(use_ocl, ocl_detectMultiScaleNoGrouping( scales, candidates ))

    // Reaching this point means the OpenCL path did not complete;
    // disable further attempts for this model.
    tryOpenCL = false;
#endif

    // CPU code
    featureEvaluator->getMats();
    {
        Mat currentMask;
        if (maskGenerator)
            currentMask = maskGenerator->generateMask(gray.getMat());

        // Stripe sizes are computed per scale so each of the nstripes parallel
        // jobs covers roughly equal work, rounded to a multiple of ystep.
        size_t i, nscales = scales.size();
        cv::AutoBuffer<int>  stripeSizeBuf(nscales);
        int* stripeSizes = stripeSizeBuf;
        const FeatureEvaluator::ScaleData* s = &featureEvaluator->getScaleData(0);
        Size szw = s->getWorkingSize(data.origWinSize);
        int nstripes = cvCeil(szw.width/32.);
        for( i = 0; i < nscales; i++ )
        {
            szw = s[i].getWorkingSize(data.origWinSize);
            stripeSizes[i] = std::max((szw.height/s[i].ystep + nstripes-1)/nstripes, 1)*s[i].ystep;
        }

        CascadeClassifierInvoker invoker(*this, (int)nscales, nstripes, s, stripeSizes,
                                         candidates, rejectLevels, levelWeights,
                                         outputRejectLevels, currentMask, &mtx);
        parallel_for_(Range(0, nstripes), invoker);
    }
}
01314 
01315 
01316 void CascadeClassifierImpl::detectMultiScale( InputArray _image, std::vector<Rect>& objects,
01317                                           std::vector<int>& rejectLevels,
01318                                           std::vector<double>& levelWeights,
01319                                           double scaleFactor, int minNeighbors,
01320                                           int flags, Size minObjectSize, Size maxObjectSize,
01321                                           bool outputRejectLevels )
01322 {
01323     CV_Assert( scaleFactor > 1 && _image.depth() == CV_8U );
01324 
01325     if( empty() )
01326         return;
01327 
01328     if( isOldFormatCascade() )
01329     {
01330         Mat image = _image.getMat();
01331         std::vector<CvAvgComp> fakeVecAvgComp;
01332         detectMultiScaleOldFormat( image, oldCascade, objects, rejectLevels, levelWeights, fakeVecAvgComp, scaleFactor,
01333                                    minNeighbors, flags, minObjectSize, maxObjectSize, outputRejectLevels );
01334     }
01335     else
01336     {
01337         detectMultiScaleNoGrouping( _image, objects, rejectLevels, levelWeights, scaleFactor, minObjectSize, maxObjectSize,
01338                                     outputRejectLevels );
01339         const double GROUP_EPS = 0.2;
01340         if( outputRejectLevels )
01341         {
01342             groupRectangles ( objects, rejectLevels, levelWeights, minNeighbors, GROUP_EPS );
01343         }
01344         else
01345         {
01346             groupRectangles ( objects, minNeighbors, GROUP_EPS );
01347         }
01348     }
01349 }
01350 
01351 void CascadeClassifierImpl::detectMultiScale( InputArray _image, std::vector<Rect>& objects,
01352                                           double scaleFactor, int minNeighbors,
01353                                           int flags, Size minObjectSize, Size maxObjectSize)
01354 {
01355     std::vector<int> fakeLevels;
01356     std::vector<double> fakeWeights;
01357     detectMultiScale( _image, objects, fakeLevels, fakeWeights, scaleFactor,
01358         minNeighbors, flags, minObjectSize, maxObjectSize );
01359 }
01360 
01361 void CascadeClassifierImpl::detectMultiScale( InputArray _image, std::vector<Rect>& objects,
01362                                           std::vector<int>& numDetections, double scaleFactor,
01363                                           int minNeighbors, int flags, Size minObjectSize,
01364                                           Size maxObjectSize )
01365 {
01366     Mat image = _image.getMat();
01367     CV_Assert( scaleFactor > 1 && image.depth() == CV_8U );
01368 
01369     if( empty() )
01370         return;
01371 
01372     std::vector<int> fakeLevels;
01373     std::vector<double> fakeWeights;
01374     if( isOldFormatCascade() )
01375     {
01376         std::vector<CvAvgComp> vecAvgComp;
01377         detectMultiScaleOldFormat( image, oldCascade, objects, fakeLevels, fakeWeights, vecAvgComp, scaleFactor,
01378                                    minNeighbors, flags, minObjectSize, maxObjectSize );
01379         numDetections.resize(vecAvgComp.size());
01380         std::transform(vecAvgComp.begin(), vecAvgComp.end(), numDetections.begin(), getNeighbors());
01381     }
01382     else
01383     {
01384         detectMultiScaleNoGrouping( image, objects, fakeLevels, fakeWeights, scaleFactor, minObjectSize, maxObjectSize );
01385         const double GROUP_EPS = 0.2;
01386         groupRectangles ( objects, numDetections, minNeighbors, GROUP_EPS );
01387     }
01388 }
01389 
01390 
01391 CascadeClassifierImpl::Data::Data()
01392 {
01393     stageType = featureType = ncategories = maxNodesPerTree = 0;
01394 }
01395 
// Parse the new-format cascade description: stage thresholds, weak-tree
// topology, node split data (ordered thresholds or categorical subsets)
// and leaf values. Returns false on any structural problem.
// When every tree is a single node, a compact "stumps" table is also built
// for the fast prediction path.
bool CascadeClassifierImpl::Data::read(const FileNode &root)
{
    // Small slack subtracted from stage thresholds to absorb float rounding.
    static const float THRESHOLD_EPS = 1e-5f;

    // load stage params
    String stageTypeStr = (String)root[CC_STAGE_TYPE];
    if( stageTypeStr == CC_BOOST )
        stageType = BOOST;
    else
        return false;                         // only boosted cascades supported

    String featureTypeStr = (String)root[CC_FEATURE_TYPE];
    if( featureTypeStr == CC_HAAR )
        featureType = FeatureEvaluator::HAAR;
    else if( featureTypeStr == CC_LBP )
        featureType = FeatureEvaluator::LBP;
    else if( featureTypeStr == CC_HOG )
    {
        featureType = FeatureEvaluator::HOG;
        CV_Error(Error::StsNotImplemented, "HOG cascade is not supported in 3.0");
    }
    else
        return false;

    origWinSize.width = (int)root[CC_WIDTH];
    origWinSize.height = (int)root[CC_HEIGHT];
    CV_Assert( origWinSize.height > 0 && origWinSize.width > 0 );

    // load feature params
    FileNode fn = root[CC_FEATURE_PARAMS];
    if( fn.empty() )
        return false;

    // Categorical features store a bitset of subsetSize 32-bit words per node;
    // ordered features store a single float threshold instead.
    ncategories = fn[CC_MAX_CAT_COUNT];
    int subsetSize = (ncategories + 31)/32,
        nodeStep = 3 + ( ncategories>0 ? subsetSize : 1 );

    // load stages
    fn = root[CC_STAGES];
    if( fn.empty() )
        return false;

    stages.reserve(fn.size());
    classifiers.clear();
    nodes.clear();
    stumps.clear();

    FileNodeIterator it = fn.begin(), it_end = fn.end();
    minNodesPerTree = INT_MAX;
    maxNodesPerTree = 0;

    for( int si = 0; it != it_end; si++, ++it )
    {
        FileNode fns = *it;
        Stage stage;
        stage.threshold = (float)fns[CC_STAGE_THRESHOLD] - THRESHOLD_EPS;
        fns = fns[CC_WEAK_CLASSIFIERS];
        if(fns.empty())
            return false;
        stage.ntrees = (int)fns.size();
        stage.first = (int)classifiers.size();   // index of this stage's first tree
        stages.push_back(stage);
        classifiers.reserve(stages[si].first + stages[si].ntrees);

        FileNodeIterator it1 = fns.begin(), it1_end = fns.end();
        for( ; it1 != it1_end; ++it1 ) // weak trees
        {
            FileNode fnw = *it1;
            FileNode internalNodes = fnw[CC_INTERNAL_NODES];
            FileNode leafValues = fnw[CC_LEAF_VALUES];
            if( internalNodes.empty() || leafValues.empty() )
                return false;

            DTree tree;
            tree.nodeCount = (int)internalNodes.size()/nodeStep;
            minNodesPerTree = std::min(minNodesPerTree, tree.nodeCount);
            maxNodesPerTree = std::max(maxNodesPerTree, tree.nodeCount);

            classifiers.push_back(tree);

            nodes.reserve(nodes.size() + tree.nodeCount);
            leaves.reserve(leaves.size() + leafValues.size());
            if( subsetSize > 0 )
                subsets.reserve(subsets.size() + tree.nodeCount*subsetSize);

            FileNodeIterator internalNodesIter = internalNodes.begin(), internalNodesEnd = internalNodes.end();

            // Each node record is: left, right, featureIdx, then either
            // subsetSize bitset words (categorical) or one float threshold.
            for( ; internalNodesIter != internalNodesEnd; ) // nodes
            {
                DTreeNode node;
                node.left = (int)*internalNodesIter; ++internalNodesIter;
                node.right = (int)*internalNodesIter; ++internalNodesIter;
                node.featureIdx = (int)*internalNodesIter; ++internalNodesIter;
                if( subsetSize > 0 )
                {
                    for( int j = 0; j < subsetSize; j++, ++internalNodesIter )
                        subsets.push_back((int)*internalNodesIter);
                    node.threshold = 0.f;
                }
                else
                {
                    node.threshold = (float)*internalNodesIter; ++internalNodesIter;
                }
                nodes.push_back(node);
            }

            // Iterator variables reused for the leaf list.
            internalNodesIter = leafValues.begin(), internalNodesEnd = leafValues.end();

            for( ; internalNodesIter != internalNodesEnd; ++internalNodesIter ) // leaves
                leaves.push_back((float)*internalNodesIter);
        }
    }

    // Stump-only cascade: flatten (node, two leaves) triples into the compact
    // stumps table used by predictOrderedStump/predictCategoricalStump.
    if( maxNodesPerTree == 1 )
    {
        int nodeOfs = 0, leafOfs = 0;
        size_t nstages = stages.size();
        for( size_t stageIdx = 0; stageIdx < nstages; stageIdx++ )
        {
            const Stage& stage = stages[stageIdx];

            int ntrees = stage.ntrees;
            for( int i = 0; i < ntrees; i++, nodeOfs++, leafOfs+= 2 )
            {
                const DTreeNode& node = nodes[nodeOfs];
                stumps.push_back(Stump(node.featureIdx, node.threshold,
                                       leaves[leafOfs], leaves[leafOfs+1]));
            }
        }
    }

    return true;
}
01529 
01530 
01531 bool CascadeClassifierImpl::read_(const FileNode& root)
01532 {
01533 #ifdef HAVE_OPENCL
01534     tryOpenCL = true;
01535     haarKernel = ocl::Kernel();
01536     lbpKernel = ocl::Kernel();
01537 #endif
01538     ustages.release();
01539     unodes.release();
01540     uleaves.release();
01541     if( !data.read(root) )
01542         return false;
01543 
01544     // load features
01545     featureEvaluator = FeatureEvaluator::create(data.featureType);
01546     FileNode fn = root[CC_FEATURES];
01547     if( fn.empty() )
01548         return false;
01549 
01550     return featureEvaluator->read(fn, data.origWinSize);
01551 }
01552 
// Custom deleter so Ptr<CvHaarClassifierCascade> releases the legacy cascade
// through the C API instead of operator delete.
template<> void DefaultDeleter<CvHaarClassifierCascade>::operator ()(CvHaarClassifierCascade* obj) const
{ cvReleaseHaarClassifierCascade(&obj); }
01555 
01556 
// Out-of-line virtual destructor definition for the abstract base.
BaseCascadeClassifier::~BaseCascadeClassifier()
{
}
01560 
// Default constructor: creates an empty classifier; call load()/read() before use.
CascadeClassifier::CascadeClassifier() {}
// Convenience constructor: immediately load a cascade from file.
// Note: load failure is silent here; check empty() afterwards.
CascadeClassifier::CascadeClassifier(const String& filename)
{
    load(filename);
}
01566 
// The wrapped implementation (cc) is released via its Ptr.
CascadeClassifier::~CascadeClassifier()
{
}
01570 
01571 bool CascadeClassifier::empty() const
01572 {
01573     return cc.empty() || cc->empty();
01574 }
01575 
01576 bool CascadeClassifier::load( const String& filename )
01577 {
01578     cc = makePtr<CascadeClassifierImpl>();
01579     if(!cc->load(filename))
01580         cc.release();
01581     return !empty();
01582 }
01583 
01584 bool CascadeClassifier::read(const FileNode &root)
01585 {
01586     Ptr<CascadeClassifierImpl> ccimpl = makePtr<CascadeClassifierImpl>();
01587     bool ok = ccimpl->read_(root);
01588     if( ok )
01589         cc = ccimpl.staticCast<BaseCascadeClassifier>();
01590     else
01591         cc.release();
01592     return ok;
01593 }
01594 
01595 void clipObjects(Size sz, std::vector<Rect>& objects,
01596                  std::vector<int>* a, std::vector<double>* b)
01597 {
01598     size_t i, j = 0, n = objects.size();
01599     Rect win0 = Rect(0, 0, sz.width, sz.height);
01600     if(a)
01601     {
01602         CV_Assert(a->size() == n);
01603     }
01604     if(b)
01605     {
01606         CV_Assert(b->size() == n);
01607     }
01608 
01609     for( i = 0; i < n; i++ )
01610     {
01611         Rect r = win0 & objects[i];
01612         if( r.area() > 0 )
01613         {
01614             objects[j] = r;
01615             if( i > j )
01616             {
01617                 if(a) a->at(j) = a->at(i);
01618                 if(b) b->at(j) = b->at(i);
01619             }
01620             j++;
01621         }
01622     }
01623 
01624     if( j < n )
01625     {
01626         objects.resize(j);
01627         if(a) a->resize(j);
01628         if(b) b->resize(j);
01629     }
01630 }
01631 
// Public wrapper: detect objects, then clip the results to the image bounds.
// Requires a loaded model (asserts otherwise).
void CascadeClassifier::detectMultiScale( InputArray image,
                      CV_OUT std::vector<Rect>& objects,
                      double scaleFactor,
                      int minNeighbors, int flags,
                      Size minSize,
                      Size maxSize )
{
    CV_Assert(!empty());
    cc->detectMultiScale(image, objects, scaleFactor, minNeighbors, flags, minSize, maxSize);
    clipObjects(image.size(), objects, 0, 0);
}
01643 
// Public wrapper returning per-object neighbor counts; numDetections is
// compacted in lockstep with objects during clipping.
void CascadeClassifier::detectMultiScale( InputArray image,
                      CV_OUT std::vector<Rect>& objects,
                      CV_OUT std::vector<int>& numDetections,
                      double scaleFactor,
                      int minNeighbors, int flags,
                      Size minSize, Size maxSize )
{
    CV_Assert(!empty());
    cc->detectMultiScale(image, objects, numDetections,
                         scaleFactor, minNeighbors, flags, minSize, maxSize);
    clipObjects(image.size(), objects, &numDetections, 0);
}
01656 
01657 void CascadeClassifier::detectMultiScale( InputArray image,
01658                       CV_OUT std::vector<Rect>& objects,
01659                       CV_OUT std::vector<int>& rejectLevels,
01660                       CV_OUT std::vector<double>& levelWeights,
01661                       double scaleFactor,
01662                       int minNeighbors, int flags,
01663                       Size minSize, Size maxSize,
01664                       bool outputRejectLevels )
01665 {
01666     CV_Assert(!empty());
01667     cc->detectMultiScale(image, objects, rejectLevels, levelWeights,
01668                          scaleFactor, minNeighbors, flags,
01669                          minSize, maxSize, outputRejectLevels);
01670     clipObjects(image.size(), objects, &rejectLevels, &levelWeights);
01671 }
01672 
01673 bool CascadeClassifier::isOldFormatCascade() const
01674 {
01675     CV_Assert(!empty());
01676     return cc->isOldFormatCascade();
01677 }
01678 
01679 Size CascadeClassifier::getOriginalWindowSize() const
01680 {
01681     CV_Assert(!empty());
01682     return cc->getOriginalWindowSize();
01683 }
01684 
01685 int CascadeClassifier::getFeatureType() const
01686 {
01687     CV_Assert(!empty());
01688     return cc->getFeatureType();
01689 }
01690 
01691 void* CascadeClassifier::getOldCascade()
01692 {
01693     CV_Assert(!empty());
01694     return cc->getOldCascade();
01695 }
01696 
01697 void CascadeClassifier::setMaskGenerator(const Ptr<BaseCascadeClassifier::MaskGenerator>& maskGenerator)
01698 {
01699     CV_Assert(!empty());
01700     cc->setMaskGenerator(maskGenerator);
01701 }
01702 
01703 Ptr<BaseCascadeClassifier::MaskGenerator> CascadeClassifier::getMaskGenerator()
01704 {
01705     CV_Assert(!empty());
01706     return cc->getMaskGenerator();
01707 }
01708 
01709 } // namespace cv
01710