Renesas GR-PEACH OpenCV Development / gr-peach-opencv-project-sd-card_update

Fork of gr-peach-opencv-project-sd-card by the original developer (author name truncated in this export).

Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers conjugate_gradient.cpp Source File

conjugate_gradient.cpp

00001 /*M///////////////////////////////////////////////////////////////////////////////////////
00002 //
00003 //  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
00004 //
00005 //  By downloading, copying, installing or using the software you agree to this license.
00006 //  If you do not agree to this license, do not download, install,
00007 //  copy or use the software.
00008 //
00009 //
00010 //                           License Agreement
00011 //                For Open Source Computer Vision Library
00012 //
00013 // Copyright (C) 2013, OpenCV Foundation, all rights reserved.
00014 // Third party copyrights are property of their respective owners.
00015 //
00016 // Redistribution and use in source and binary forms, with or without modification,
00017 // are permitted provided that the following conditions are met:
00018 //
00019 //   * Redistribution's of source code must retain the above copyright notice,
00020 //     this list of conditions and the following disclaimer.
00021 //
00022 //   * Redistribution's in binary form must reproduce the above copyright notice,
00023 //     this list of conditions and the following disclaimer in the documentation
00024 //     and/or other materials provided with the distribution.
00025 //
00026 //   * The name of the copyright holders may not be used to endorse or promote products
00027 //     derived from this software without specific prior written permission.
00028 //
00029 // This software is provided by the copyright holders and contributors "as is" and
00030 // any express or implied warranties, including, but not limited to, the implied
00031 // warranties of merchantability and fitness for a particular purpose are disclaimed.
00032 // In no event shall the OpenCV Foundation or contributors be liable for any direct,
00033 // indirect, incidental, special, exemplary, or consequential damages
00034 // (including, but not limited to, procurement of substitute goods or services;
00035 // loss of use, data, or profits; or business interruption) however caused
00036 // and on any theory of liability, whether in contract, strict liability,
00037 // or tort (including negligence or otherwise) arising in any way out of
00038 // the use of this software, even if advised of the possibility of such damage.
00039 //
00040 //M*/
00041 
00042 #include "precomp.hpp"
00043 
00044 #define dprintf(x)
00045 #define print_matrix(x)
00046 
00047 namespace cv
00048 {
00049     double MinProblemSolver::Function::getGradientEps() const { return 1e-3; }
00050     void MinProblemSolver::Function::getGradient(const double* x, double* grad)
00051     {
00052         double eps = getGradientEps();
00053         int i, n = getDims();
00054         AutoBuffer<double> x_buf(n);
00055         double* x_ = x_buf;
00056         for( i = 0; i < n; i++ )
00057             x_[i] = x[i];
00058         for( i = 0; i < n; i++ )
00059         {
00060             x_[i] = x[i] + eps;
00061             double y1 = calc(x_);
00062             x_[i] = x[i] - eps;
00063             double y0 = calc(x_);
00064             grad[i] = (y1 - y0)/(2*eps);
00065             x_[i] = x[i];
00066         }
00067     }
00068 
00069 #define SEC_METHOD_ITERATIONS 4
00070 #define INITIAL_SEC_METHOD_SIGMA 0.1
    // Nonlinear conjugate-gradient minimizer with a secant-method line search;
    // the concrete implementation behind ConjGradSolver::create().
    class ConjGradSolverImpl : public ConjGradSolver
    {
    public:
        Ptr<Function> getFunction() const;
        void setFunction(const Ptr<Function>& f);
        TermCriteria getTermCriteria() const;
        ConjGradSolverImpl();
        void setTermCriteria(const TermCriteria& termcrit);
        double minimize(InputOutputArray x);
    protected:
        // Objective being minimized; must be set before minimize() is called.
        Ptr<MinProblemSolver::Function> _Function;
        // Stopping rule: MAX_ITER, optionally combined with EPS (see setTermCriteria).
        TermCriteria _termcrit;
        // Scratch 1 x ndim rows reused across iterations: search direction,
        // residual (negative gradient), row-shaped copy of x, previous residual.
        Mat_<double> d,r,buf_x,r_old;
        // Preallocated gradient buffers handed to minimizeOnTheLine().
        Mat_<double> minimizeOnTheLine_buf1,minimizeOnTheLine_buf2;
    private:
        // In-place line search along d; buf1/buf2 are caller-owned scratch rows.
        static void minimizeOnTheLine(Ptr<MinProblemSolver::Function> _f,Mat_<double>& x,const Mat_<double>& d,Mat_<double>& buf1,Mat_<double>& buf2);
    };
00088 
    // Inexact line search: advances x (in place) along the fixed direction d.
    // Runs SEC_METHOD_ITERATIONS secant iterations on the directional
    // derivative phi'(t) = <grad f(x + t*d), d>, i.e. it hunts for a root of
    // phi' rather than bracketing a minimum. buf1/buf2 are caller-preallocated
    // 1 x ndim gradient buffers, reused to avoid per-call allocation.
    void ConjGradSolverImpl::minimizeOnTheLine(Ptr<MinProblemSolver::Function> _f,Mat_<double>& x,const Mat_<double>& d,Mat_<double>& buf1,
            Mat_<double>& buf2){
        double sigma=INITIAL_SEC_METHOD_SIGMA; // current trial step length along d
        buf1=0.0;
        buf2=0.0;

        dprintf(("before minimizeOnTheLine\n"));
        dprintf(("x:\n"));
        print_matrix(x);
        dprintf(("d:\n"));
        print_matrix(d);

        for(int i=0;i<SEC_METHOD_ITERATIONS;i++){
            // Gradient at the current point -> buf1.
            _f->getGradient((double*)x.data,(double*)buf1.data);
            dprintf(("buf1:\n"));
            print_matrix(buf1);
            x=x+sigma*d; // probe a step of length sigma
            // Gradient at the probe point -> buf2.
            _f->getGradient((double*)x.data,(double*)buf2.data);
            dprintf(("buf2:\n"));
            print_matrix(buf2);
            // Directional derivatives phi'(0) and phi'(sigma).
            double d1=buf1.dot(d), d2=buf2.dot(d);
            if((d1-d2)==0){
                break; // phi' unchanged: secant step undefined, keep current x
            }
            // Secant estimate of the root of phi' from the two samples.
            double alpha=-sigma*d1/(d2-d1);
            dprintf(("(buf2.dot(d)-buf1.dot(d))=%f\nalpha=%f\n",(buf2.dot(d)-buf1.dot(d)),alpha));
            x=x+(alpha-sigma)*d; // jump from the probe point to the estimated root
            sigma=-alpha; // with this choice the next probe lands on the previous base point
        }

        dprintf(("after minimizeOnTheLine\n"));
        print_matrix(x);
    }
00122 
    // Core conjugate-gradient loop. x is a 1 x n or n x 1 CV_64FC1 vector: the
    // initial guess on input, the located minimum on output. Returns the
    // objective value at the final point. Requires setFunction() to have been
    // called; iterates at most _termcrit.maxCount times, stopping early on the
    // EPS criterion when the criteria type includes EPS.
    double ConjGradSolverImpl::minimize(InputOutputArray x){
        CV_Assert(_Function.empty()==false); // setFunction() must have been called
        dprintf(("termcrit:\n\ttype: %d\n\tmaxCount: %d\n\tEPS: %g\n",_termcrit.type,_termcrit.maxCount,_termcrit.epsilon));

        Mat x_mat=x.getMat();
        CV_Assert(MIN(x_mat.rows,x_mat.cols)==1); // x must be a single row or column
        int ndim=MAX(x_mat.rows,x_mat.cols);
        CV_Assert(x_mat.type()==CV_64FC1);

        // (Re)allocate the scratch rows only when the problem dimension changes.
        if(d.cols!=ndim){
            d.create(1,ndim);
            r.create(1,ndim);
            r_old.create(1,ndim);
            minimizeOnTheLine_buf1.create(1,ndim);
            minimizeOnTheLine_buf2.create(1,ndim);
        }

        // Work on a row vector. A column input is copied into buf_x through a
        // column-shaped header sharing buf_x's memory; a row input is used
        // directly, so updates land straight in the caller's data.
        Mat_<double> proxy_x;
        if(x_mat.rows>1){
            buf_x.create(1,ndim);
            Mat_<double> proxy(ndim,1,buf_x.ptr<double>());
            x_mat.copyTo(proxy);
            proxy_x=buf_x;
        }else{
            proxy_x=x_mat;
        }
        // Initial search direction d = -grad f(x); r mirrors it as the residual.
        _Function->getGradient(proxy_x.ptr<double>(),d.ptr<double>());
        d*=-1.0;
        d.copyTo(r);

        //here everything goes. check that everything is set up properly
        dprintf(("proxy_x\n"));print_matrix(proxy_x);
        dprintf(("d first time\n"));print_matrix(d);
        dprintf(("r\n"));print_matrix(r);

        for(int count=0;count<_termcrit.maxCount;count++){
            minimizeOnTheLine(_Function,proxy_x,d,minimizeOnTheLine_buf1,minimizeOnTheLine_buf2);
            r.copyTo(r_old);
            _Function->getGradient(proxy_x.ptr<double>(),r.ptr<double>());
            r*=-1.0; // r = -grad f at the new point
            // NOTE: despite the name, at this point the variable holds ||r||
            // (not squared) — the EPS test below compares the plain norm.
            double r_norm_sq=norm(r);
            if(_termcrit.type==(TermCriteria::MAX_ITER+TermCriteria::EPS) && r_norm_sq<_termcrit.epsilon){
                break;
            }
            r_norm_sq=r_norm_sq*r_norm_sq; // now it really is ||r||^2
            // Nonnegative Polak-Ribiere-style coefficient; the clamp at 0
            // restarts the direction toward steepest descent when beta would
            // go negative.
            double beta=MAX(0.0,(r_norm_sq-r.dot(r_old))/r_norm_sq);
            d=r+beta*d;
        }



        // Column input: copy the result back from the row buffer into x.
        if(x_mat.rows>1){
            Mat(ndim, 1, CV_64F, proxy_x.ptr<double>()).copyTo(x);
        }
        return _Function->calc(proxy_x.ptr<double>());
    }
00179 
00180     ConjGradSolverImpl::ConjGradSolverImpl(){
00181         _Function=Ptr<Function>();
00182     }
00183     Ptr<MinProblemSolver::Function> ConjGradSolverImpl::getFunction()const{
00184         return _Function;
00185     }
00186     void ConjGradSolverImpl::setFunction(const Ptr<Function>& f){
00187         _Function=f;
00188     }
00189     TermCriteria ConjGradSolverImpl::getTermCriteria()const{
00190         return _termcrit;
00191     }
    // Validates and stores the stopping criteria. Exactly two configurations
    // are accepted: MAX_ITER combined with EPS (positive epsilon and positive
    // iteration cap), or MAX_ITER alone (positive iteration cap). Anything
    // else fails the assertion. (The assert expression is kept verbatim since
    // CV_Assert stringifies it into the thrown error message.)
    void ConjGradSolverImpl::setTermCriteria(const TermCriteria& termcrit){
        CV_Assert((termcrit.type==(TermCriteria::MAX_ITER+TermCriteria::EPS) && termcrit.epsilon>0 && termcrit.maxCount>0) ||
                ((termcrit.type==TermCriteria::MAX_ITER) && termcrit.maxCount>0));
        _termcrit=termcrit;
    }
00197     // both minRange & minError are specified by termcrit.epsilon; In addition, user may specify the number of iterations that the algorithm does.
00198     Ptr<ConjGradSolver> ConjGradSolver::create(const Ptr<MinProblemSolver::Function> & f, TermCriteria termcrit){
00199         Ptr<ConjGradSolver> CG = makePtr<ConjGradSolverImpl>();
00200         CG->setFunction(f);
00201         CG->setTermCriteria(termcrit);
00202         return CG;
00203     }
00204 }
00205