pca.cpp
/*M///////////////////////////////////////////////////////////////////////////////////////
//
//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
//  By downloading, copying, installing or using the software you agree to this license.
//  If you do not agree to this license, do not download, install,
//  copy or use the software.
//
//
//                          License Agreement
//                For Open Source Computer Vision Library
//
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2009, Willow Garage Inc., all rights reserved.
// Copyright (C) 2013, OpenCV Foundation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
//   * Redistribution's of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//
//   * Redistribution's in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//
//   * The name of the copyright holders may not be used to endorse or promote products
//     derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/

#include "precomp.hpp"

/****************************************************************************************\
*                                          PCA                                          *
\****************************************************************************************/

namespace cv
{

PCA::PCA() {}

PCA::PCA(InputArray data, InputArray _mean, int flags, int maxComponents)
{
    operator()(data, _mean, flags, maxComponents);
}

PCA::PCA(InputArray data, InputArray _mean, int flags, double retainedVariance)
{
    operator()(data, _mean, flags, retainedVariance);
}

PCA& PCA::operator()(InputArray _data, InputArray __mean, int flags, int maxComponents)
{
    Mat data = _data.getMat(), _mean = __mean.getMat();
    int covar_flags = CV_COVAR_SCALE;
    int len, in_count;
    Size mean_sz;

    CV_Assert( data.channels() == 1 );
    if( flags & CV_PCA_DATA_AS_COL )
    {
        len = data.rows;
        in_count = data.cols;
        covar_flags |= CV_COVAR_COLS;
        mean_sz = Size(1, len);
    }
    else
    {
        len = data.cols;
        in_count = data.rows;
        covar_flags |= CV_COVAR_ROWS;
        mean_sz = Size(len, 1);
    }

    int count = std::min(len, in_count), out_count = count;
    if( maxComponents > 0 )
        out_count = std::min(count, maxComponents);

    // "scrambled" way to compute PCA (when cols(A)>rows(A)):
    // B = A'A; B*x=b*x; C = AA'; C*y=c*y -> AA'*y=c*y -> A'A*(A'*y)=c*(A'*y) -> c = b, x=A'*y
    if( len <= in_count )
        covar_flags |= CV_COVAR_NORMAL;

    int ctype = std::max(CV_32F, data.depth());
    mean.create( mean_sz, ctype );

    Mat covar( count, count, ctype );

    if( !_mean.empty() )
    {
        CV_Assert( _mean.size() == mean_sz );
        _mean.convertTo(mean, ctype);
        covar_flags |= CV_COVAR_USE_AVG;
    }

    calcCovarMatrix( data, covar, mean, covar_flags, ctype );
    eigen( covar, eigenvalues, eigenvectors );

    if( !(covar_flags & CV_COVAR_NORMAL) )
    {
        // CV_PCA_DATA_AS_ROW: cols(A)>rows(A). x=A'*y -> x'=y'*A
        // CV_PCA_DATA_AS_COL: rows(A)>cols(A). x=A''*y -> x'=y'*A'
        Mat tmp_data, tmp_mean = repeat(mean, data.rows/mean.rows, data.cols/mean.cols);
        if( data.type() != ctype || tmp_mean.data == mean.data )
        {
            data.convertTo( tmp_data, ctype );
            subtract( tmp_data, tmp_mean, tmp_data );
        }
        else
        {
            subtract( data, tmp_mean, tmp_mean );
            tmp_data = tmp_mean;
        }

        Mat evects1(count, len, ctype);
        gemm( eigenvectors, tmp_data, 1, Mat(), 0, evects1,
              (flags & CV_PCA_DATA_AS_COL) ? CV_GEMM_B_T : 0);
        eigenvectors = evects1;

        // normalize eigenvectors
        int i;
        for( i = 0; i < out_count; i++ )
        {
            Mat vec = eigenvectors.row(i);
            normalize(vec, vec);
        }
    }

    if( count > out_count )
    {
        // use clone() to physically copy the data and thus deallocate the original matrices
        eigenvalues = eigenvalues.rowRange(0,out_count).clone();
        eigenvectors = eigenvectors.rowRange(0,out_count).clone();
    }
    return *this;
}

void PCA::write(FileStorage& fs ) const
{
    CV_Assert( fs.isOpened() );

    //fs << "name" << "PCA";
    // fs << "vectors" << eigenvectors;
    // fs << "values" << eigenvalues;
    // fs << "mean" << mean;
}

void PCA::read(const FileNode& fs)
{
    CV_Assert( !fs.empty() );
    String name = (String)fs["name"];
    CV_Assert( name == "PCA" );

    cv::read(fs["vectors"], eigenvectors);
    cv::read(fs["values"], eigenvalues);
    cv::read(fs["mean"], mean);
}

template <typename T>
int computeCumulativeEnergy(const Mat& eigenvalues, double retainedVariance)
{
    CV_DbgAssert( eigenvalues.type() == DataType<T>::type );

    Mat g(eigenvalues.size(), DataType<T>::type);

    for(int ig = 0; ig < g.rows; ig++)
    {
        g.at<T>(ig, 0) = 0;
        for(int im = 0; im <= ig; im++)
        {
            g.at<T>(ig,0) += eigenvalues.at<T>(im,0);
        }
    }

    int L;

    for(L = 0; L < eigenvalues.rows; L++)
    {
        double energy = g.at<T>(L, 0) / g.at<T>(g.rows - 1, 0);
        if(energy > retainedVariance)
            break;
    }

    L = std::max(2, L);

    return L;
}

PCA& PCA::operator()(InputArray _data, InputArray __mean, int flags, double retainedVariance)
{
    Mat data = _data.getMat(), _mean = __mean.getMat();
    int covar_flags = CV_COVAR_SCALE;
    int len, in_count;
    Size mean_sz;

    CV_Assert( data.channels() == 1 );
    if( flags & CV_PCA_DATA_AS_COL )
    {
        len = data.rows;
        in_count = data.cols;
        covar_flags |= CV_COVAR_COLS;
        mean_sz = Size(1, len);
    }
    else
    {
        len = data.cols;
        in_count = data.rows;
        covar_flags |= CV_COVAR_ROWS;
        mean_sz = Size(len, 1);
    }

    CV_Assert( retainedVariance > 0 && retainedVariance <= 1 );

    int count = std::min(len, in_count);

    // "scrambled" way to compute PCA (when cols(A)>rows(A)):
    // B = A'A; B*x=b*x; C = AA'; C*y=c*y -> AA'*y=c*y -> A'A*(A'*y)=c*(A'*y) -> c = b, x=A'*y
    if( len <= in_count )
        covar_flags |= CV_COVAR_NORMAL;

    int ctype = std::max(CV_32F, data.depth());
    mean.create( mean_sz, ctype );

    Mat covar( count, count, ctype );

    if( !_mean.empty() )
    {
        CV_Assert( _mean.size() == mean_sz );
        _mean.convertTo(mean, ctype);
    }

    calcCovarMatrix( data, covar, mean, covar_flags, ctype );
    eigen( covar, eigenvalues, eigenvectors );

    if( !(covar_flags & CV_COVAR_NORMAL) )
    {
        // CV_PCA_DATA_AS_ROW: cols(A)>rows(A). x=A'*y -> x'=y'*A
        // CV_PCA_DATA_AS_COL: rows(A)>cols(A). x=A''*y -> x'=y'*A'
        Mat tmp_data, tmp_mean = repeat(mean, data.rows/mean.rows, data.cols/mean.cols);
        if( data.type() != ctype || tmp_mean.data == mean.data )
        {
            data.convertTo( tmp_data, ctype );
            subtract( tmp_data, tmp_mean, tmp_data );
        }
        else
        {
            subtract( data, tmp_mean, tmp_mean );
            tmp_data = tmp_mean;
        }

        Mat evects1(count, len, ctype);
        gemm( eigenvectors, tmp_data, 1, Mat(), 0, evects1,
              (flags & CV_PCA_DATA_AS_COL) ? CV_GEMM_B_T : 0);
        eigenvectors = evects1;

        // normalize all eigenvectors
        int i;
        for( i = 0; i < eigenvectors.rows; i++ )
        {
            Mat vec = eigenvectors.row(i);
            normalize(vec, vec);
        }
    }

    // compute the cumulative energy content for each eigenvector
    int L;
    if (ctype == CV_32F)
        L = computeCumulativeEnergy<float>(eigenvalues, retainedVariance);
    else
        L = computeCumulativeEnergy<double>(eigenvalues, retainedVariance);

    // use clone() to physically copy the data and thus deallocate the original matrices
    eigenvalues = eigenvalues.rowRange(0,L).clone();
    eigenvectors = eigenvectors.rowRange(0,L).clone();

    return *this;
}

void PCA::project(InputArray _data, OutputArray result) const
{
    Mat data = _data.getMat();
    CV_Assert( !mean.empty() && !eigenvectors.empty() &&
        ((mean.rows == 1 && mean.cols == data.cols) || (mean.cols == 1 && mean.rows == data.rows)));
    Mat tmp_data, tmp_mean = repeat(mean, data.rows/mean.rows, data.cols/mean.cols);
    int ctype = mean.type();
    if( data.type() != ctype || tmp_mean.data == mean.data )
    {
        data.convertTo( tmp_data, ctype );
        subtract( tmp_data, tmp_mean, tmp_data );
    }
    else
    {
        subtract( data, tmp_mean, tmp_mean );
        tmp_data = tmp_mean;
    }
    if( mean.rows == 1 )
        gemm( tmp_data, eigenvectors, 1, Mat(), 0, result, GEMM_2_T );
    else
        gemm( eigenvectors, tmp_data, 1, Mat(), 0, result, 0 );
}

Mat PCA::project(InputArray data) const
{
    Mat result;
    project(data, result);
    return result;
}

void PCA::backProject(InputArray _data, OutputArray result) const
{
    Mat data = _data.getMat();
    CV_Assert( !mean.empty() && !eigenvectors.empty() &&
        ((mean.rows == 1 && eigenvectors.rows == data.cols) ||
         (mean.cols == 1 && eigenvectors.rows == data.rows)));

    Mat tmp_data, tmp_mean;
    data.convertTo(tmp_data, mean.type());
    if( mean.rows == 1 )
    {
        tmp_mean = repeat(mean, data.rows, 1);
        gemm( tmp_data, eigenvectors, 1, tmp_mean, 1, result, 0 );
    }
    else
    {
        tmp_mean = repeat(mean, 1, data.cols);
        gemm( eigenvectors, tmp_data, 1, tmp_mean, 1, result, GEMM_1_T );
    }
}

Mat PCA::backProject(InputArray data) const
{
    Mat result;
    backProject(data, result);
    return result;
}

}

void cv::PCACompute(InputArray data, InputOutputArray mean,
                    OutputArray eigenvectors, int maxComponents)
{
    PCA pca;
    pca(data, mean, 0, maxComponents);
    pca.mean.copyTo(mean);
    pca.eigenvectors.copyTo(eigenvectors);
}

void cv::PCACompute(InputArray data, InputOutputArray mean,
                    OutputArray eigenvectors, double retainedVariance)
{
    PCA pca;
    pca(data, mean, 0, retainedVariance);
    pca.mean.copyTo(mean);
    pca.eigenvectors.copyTo(eigenvectors);
}

void cv::PCAProject(InputArray data, InputArray mean,
                    InputArray eigenvectors, OutputArray result)
{
    PCA pca;
    pca.mean = mean.getMat();
    pca.eigenvectors = eigenvectors.getMat();
    pca.project(data, result);
}

void cv::PCABackProject(InputArray data, InputArray mean,
                        InputArray eigenvectors, OutputArray result)
{
    PCA pca;
    pca.mean = mean.getMat();
    pca.eigenvectors = eigenvectors.getMat();
    pca.backProject(data, result);
}
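
For reference, a minimal usage sketch of the PCA class and the cv::PCACompute wrapper implemented above. This example is not part of the original listing: the sample matrix, its dimensions, the retained-variance value, and the component count are made up for illustration, and it assumes the usual OpenCV 2.4-style core header is available in this project.

// Illustrative only: exercises the interfaces defined in pca.cpp above.
#include <opencv2/core/core.hpp>

static void pca_usage_example()
{
    // Hypothetical data set: 100 samples stored one per row, 32 features each.
    cv::Mat samples(100, 32, CV_32F);
    cv::randu(samples, cv::Scalar::all(0), cv::Scalar::all(1));

    // Let PCA compute the mean itself (empty mean matrix) and keep enough
    // components to retain roughly 95% of the variance.
    cv::PCA pca(samples, cv::Mat(), cv::PCA::DATA_AS_ROW, 0.95);

    // Project one sample into the reduced subspace and reconstruct it.
    cv::Mat coeffs = pca.project(samples.row(0));
    cv::Mat approx = pca.backProject(coeffs);
    double reconstructionError = cv::norm(samples.row(0), approx, cv::NORM_L2);
    (void)reconstructionError;

    // The free-function form computes the same basis without keeping a PCA object.
    cv::Mat mean, eigenvectors;
    cv::PCACompute(samples, mean, eigenvectors, 8 /* maxComponents */);
}

With DATA_AS_ROW each row of the input is one observation, which corresponds to the mean.rows == 1 branches in project() and backProject() above; DATA_AS_COL would instead treat each column as an observation.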
