Refined interface for Conjugate Gradient
Parts of the interface were refined (most notably, the method for returning the Hessian was removed, and an optional getGradient() method was added to the base Solver::Function class), and basic setter/getter code was added. Now it is time for the real work on the algorithm.
parent eb1333d0a8
commit 581d454536
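For context, after this change a user-supplied cost function only has to implement calc(); getGradient() now has an empty default in the base class and is overridden only when a gradient is available. A minimal sketch of a function under the refined interface (the quadratic cost and the opencv2/optim.hpp header path are illustrative assumptions, not part of this commit):

#include "opencv2/optim.hpp"

//illustrative 2D quadratic f(x,y)=x^2+2*y^2 with its analytic gradient
class QuadF : public cv::optim::Solver::Function{
public:
    double calc(const double* x)const{
        return x[0]*x[0]+2.0*x[1]*x[1];
    }
    //optional override: the base-class version is a no-op
    void getGradient(const double* x,double* grad){
        grad[0]=2.0*x[0];
        grad[1]=4.0*x[1];
    }
};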
modules/optim/doc/conjugate_gradient.rst (new file, 11 lines)
@@ -0,0 +1,11 @@
Conjugate Gradient
=======================

.. highlight:: cpp

optim::ConjGradSolver
---------------------------------

.. ocv:class:: optim::ConjGradSolver

This class is used
@@ -55,6 +55,7 @@ public:
 public:
     virtual ~Function() {}
     virtual double calc(const double* x) const = 0;
+    virtual void getGradient(const double* /*x*/,double* /*grad*/) {}
 };

 virtual Ptr<Function> getFunction() const = 0;
@@ -86,17 +87,7 @@ CV_EXPORTS_W Ptr<DownhillSolver> createDownhillSolver(const Ptr<Solver::Function
         TermCriteria termcrit=TermCriteria(TermCriteria::MAX_ITER+TermCriteria::EPS,5000,0.000001));

 //! conjugate gradient method
-class CV_EXPORTS ConjGradSolver : public Solver
-{
-public:
-    class CV_EXPORTS Function : public Solver::Function
-    {
-    public:
-        //! gradient is like the first derivative for a multivariate function
-        virtual void getGradient(const double* x,double* buf)const=0;
-        //! Jacobian is like the second derivative
-        virtual void getJacobian(const double* x)const=0;
-    };
+class CV_EXPORTS ConjGradSolver : public Solver{
+};

 CV_EXPORTS_W Ptr<ConjGradSolver> createConjGradSolver(const Ptr<Solver::Function>& f=Ptr<ConjGradSolver::Function>(),
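Once minimize() does real work, using the new factory should mirror the existing DownhillSolver pattern. A hypothetical usage sketch (SphereF is the 4-dimensional test function from test_conjugate_gradient.cpp further below):

cv::Ptr<cv::optim::ConjGradSolver> solver=cv::optim::createConjGradSolver(
        cv::Ptr<cv::optim::Solver::Function>(new SphereF()),
        cv::TermCriteria(cv::TermCriteria::MAX_ITER+cv::TermCriteria::EPS,5000,0.000001));
cv::Mat x=(cv::Mat_<double>(1,4)<<1.0,1.0,1.0,1.0);
double res=solver->minimize(x); //should drive x toward the origin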
modules/optim/src/conjugate_gradient.cpp (new file, 77 lines)
@@ -0,0 +1,77 @@
#include "precomp.hpp"
#include "debug.hpp"

namespace cv{namespace optim{

class ConjGradSolverImpl : public ConjGradSolver
{
public:
    Ptr<Function> getFunction() const;
    void setFunction(const Ptr<Function>& f);
    TermCriteria getTermCriteria() const;
    ConjGradSolverImpl();
    void setTermCriteria(const TermCriteria& termcrit);
    double minimize(InputOutputArray x);
protected:
    Ptr<Solver::Function> _Function;
    TermCriteria _termcrit;
    Mat_<double> d,r,buf_x,r_old;
private:
};

double ConjGradSolverImpl::minimize(InputOutputArray x){
    CV_Assert(_Function.empty()==false);
    dprintf(("termcrit:\n\ttype: %d\n\tmaxCount: %d\n\tEPS: %g\n",_termcrit.type,_termcrit.maxCount,_termcrit.epsilon));

    Mat x_mat=x.getMat();
    CV_Assert(MIN(x_mat.rows,x_mat.cols)==1);
    int ndim=MAX(x_mat.rows,x_mat.cols);
    CV_Assert(x_mat.type()==CV_64FC1);

    d.create(1,ndim);
    r.create(1,ndim);
    r_old.create(1,ndim);

    //if the argument came in as a column vector, flatten a copy of it into
    //the row-vector buffer buf_x and work on that instead
    Mat_<double> proxy_x;
    if(x_mat.rows>1){
        buf_x.create(1,ndim);
        Mat_<double> proxy(ndim,1,(double*)buf_x.data);
        x_mat.copyTo(proxy);
        proxy_x=buf_x;
    }else{
        proxy_x=x_mat;
    }

    //the actual algorithm goes here; check that everything is set properly

    //copy the result back into the caller's column vector if needed
    if(x_mat.rows>1){
        Mat(ndim, 1, CV_64F, (double*)proxy_x.data).copyTo(x);
    }
    return 0.0;
}
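The body of minimize() is still a stub: the placeholder comment marks where the iteration will go. For orientation only, a nonlinear conjugate gradient loop with the Fletcher-Reeves update and a crude backtracking line search could look like the sketch below, reusing the stub's buffers d, r, r_old and the flattened argument proxy_x (norm() defaults to L2). This is a hypothetical sketch, not the committed algorithm:

    //HYPOTHETICAL sketch only -- not part of this commit
    _Function->getGradient((double*)proxy_x.data,(double*)r.data);
    r.copyTo(r_old);
    d=-r; //first direction: steepest descent
    for(int iter=0;iter<_termcrit.maxCount;iter++){
        //crude backtracking line search along d
        double alpha=1.0,fx=_Function->calc((double*)proxy_x.data);
        Mat_<double> trial(1,ndim);
        for(;alpha>1e-12;alpha*=0.5){
            trial=proxy_x+alpha*d;
            if(_Function->calc((double*)trial.data)<fx)
                break;
        }
        proxy_x+=alpha*d;
        _Function->getGradient((double*)proxy_x.data,(double*)r.data);
        if(norm(r)<_termcrit.epsilon) //gradient small enough -> converged
            break;
        double beta=r.dot(r)/r_old.dot(r_old); //Fletcher-Reeves coefficient
        d=beta*d-r; //conjugate direction update
        r.copyTo(r_old);
    }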
ConjGradSolverImpl::ConjGradSolverImpl(){
    _Function=Ptr<Function>();
}
Ptr<Solver::Function> ConjGradSolverImpl::getFunction()const{
    return _Function;
}
void ConjGradSolverImpl::setFunction(const Ptr<Function>& f){
    _Function=f;
}
TermCriteria ConjGradSolverImpl::getTermCriteria()const{
    return _termcrit;
}
void ConjGradSolverImpl::setTermCriteria(const TermCriteria& termcrit){
    CV_Assert((termcrit.type==(TermCriteria::MAX_ITER+TermCriteria::EPS) && termcrit.epsilon>0 && termcrit.maxCount>0) ||
        ((termcrit.type==TermCriteria::MAX_ITER) && termcrit.maxCount>0));
    _termcrit=termcrit;
}
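As the CV_Assert above enforces, exactly two shapes of criteria are accepted: MAX_ITER+EPS with both bounds positive, or MAX_ITER alone. For example (solver here stands for any Ptr<ConjGradSolver>):

//accepted: stop after 5000 iterations or once epsilon=1e-6 is reached
solver->setTermCriteria(TermCriteria(TermCriteria::MAX_ITER+TermCriteria::EPS,5000,1e-6));
//accepted: iteration cap only
solver->setTermCriteria(TermCriteria(TermCriteria::MAX_ITER,100,0.0));
//rejected by the assert: EPS alone is not a valid combination
//solver->setTermCriteria(TermCriteria(TermCriteria::EPS,0,1e-6));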
//both minRange and minError are specified by termcrit.epsilon; in addition, the user may specify the number of iterations that the algorithm does
Ptr<ConjGradSolver> createConjGradSolver(const Ptr<Solver::Function>& f, TermCriteria termcrit){
    ConjGradSolver *CG=new ConjGradSolverImpl();
    CG->setFunction(f);
    CG->setTermCriteria(termcrit);
    return Ptr<ConjGradSolver>(CG);
}
}}
@@ -19,6 +19,8 @@ namespace cv{namespace optim{
         Ptr<Solver::Function> _Function;
         TermCriteria _termcrit;
         Mat _step;
+        Mat_<double> buf_x;
+
     private:
         inline void createInitialSimplex(Mat_<double>& simplex,Mat& step);
         inline double innerDownhillSimplex(cv::Mat_<double>& p,double MinRange,double MinError,int& nfunk,
@@ -209,7 +211,10 @@ namespace cv{namespace optim{
     Mat_<double> proxy_x;

     if(x_mat.rows>1){
-        proxy_x=x_mat.t();
+        buf_x.create(1,_step.cols);
+        Mat_<double> proxy(_step.cols,1,(double*)buf_x.data);
+        x_mat.copyTo(proxy);
+        proxy_x=buf_x;
     }else{
         proxy_x=x_mat;
     }
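This mirrors the buffer handling in conjugate_gradient.cpp above: the transposed temporary produced by x_mat.t() does not share data with the caller's matrix, so a column-vector argument is now flattened into the member buffer buf_x instead. Illustratively (hypothetical calls on an already configured DownhillSolver):

cv::Mat x_row=(cv::Mat_<double>(1,2)<<1.0,1.0); //row vector: used in place
cv::Mat x_col=(cv::Mat_<double>(2,1)<<1.0,1.0); //column vector: copied into buf_x
solver->minimize(x_row);
solver->minimize(x_col); //both orientations are now handled consistently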
modules/optim/test/test_conjugate_gradient.cpp (new file, 61 lines)
@@ -0,0 +1,61 @@
#include "test_precomp.hpp"
#include <cstdlib>

static void mytest(cv::Ptr<cv::optim::ConjGradSolver> solver,cv::Ptr<cv::optim::Solver::Function> ptr_F,cv::Mat& x,
        cv::Mat& etalon_x,double etalon_res){
    solver->setFunction(ptr_F);
    double res=solver->minimize(x);
    std::cout<<"res:\n\t"<<res<<std::endl;
    std::cout<<"x:\n\t"<<x<<std::endl;
    std::cout<<"etalon_res:\n\t"<<etalon_res<<std::endl;
    std::cout<<"etalon_x:\n\t"<<etalon_x<<std::endl;
    double tol=solver->getTermCriteria().epsilon;
    ASSERT_TRUE(std::abs(res-etalon_res)<tol);
    /*for(cv::Mat_<double>::iterator it1=x.begin<double>(),it2=etalon_x.begin<double>();it1!=x.end<double>();it1++,it2++){
        ASSERT_TRUE(std::abs((*it1)-(*it2))<tol);
    }*/
    std::cout<<"--------------------------\n";
}

class SphereF:public cv::optim::Solver::Function{
public:
    double calc(const double* x)const{
        return x[0]*x[0]+x[1]*x[1]+x[2]*x[2]+x[3]*x[3];
    }
    void getGradient(const double* x,double* grad){
        for(int i=0;i<4;i++,grad++,x++){
            grad[0]=2*x[0]; //both pointers advance, so this writes grad[i]=2*x[i]
        }
    }
};
//TODO: test transp/usual x
/*class RosenbrockF:public cv::optim::Solver::Function{
    double calc(const double* x)const{
        return 100*(x[1]-x[0]*x[0])*(x[1]-x[0]*x[0])+(1-x[0])*(1-x[0]);
    }
};*/

TEST(Optim_ConjGrad, regression_basic){
    cv::Ptr<cv::optim::ConjGradSolver> solver=cv::optim::createConjGradSolver();
#if 1
    {
        cv::Ptr<cv::optim::Solver::Function> ptr_F(new SphereF());
        //SphereF is 4-dimensional, so the vectors need four components
        cv::Mat x=(cv::Mat_<double>(1,4)<<1.0,1.0,1.0,1.0),
            etalon_x=(cv::Mat_<double>(1,4)<<0.0,0.0,0.0,0.0);
        double etalon_res=0.0;
        return; //minimize() is still a stub, so the actual check is skipped for now
        mytest(solver,ptr_F,x,etalon_x,etalon_res);
    }
#endif
#if 0
    {
        cv::Ptr<cv::optim::Solver::Function> ptr_F(new RosenbrockF());
        cv::Mat x=(cv::Mat_<double>(2,1)<<0.0,0.0),
            etalon_x=(cv::Mat_<double>(2,1)<<1.0,1.0);
        double etalon_res=0.0;
        mytest(solver,ptr_F,x,etalon_x,etalon_res);
    }
#endif
}
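If the disabled Rosenbrock case is enabled later, the conjugate gradient solver will also need that function's gradient. A hypothetical completion of the commented-out class (the derivatives follow directly from f(x,y)=100*(y-x^2)^2+(1-x)^2; not part of this commit):

class RosenbrockF:public cv::optim::Solver::Function{
public:
    double calc(const double* x)const{
        return 100*(x[1]-x[0]*x[0])*(x[1]-x[0]*x[0])+(1-x[0])*(1-x[0]);
    }
    void getGradient(const double* x,double* grad){
        grad[0]=-400*x[0]*(x[1]-x[0]*x[0])-2*(1-x[0]); //df/dx
        grad[1]=200*(x[1]-x[0]*x[0]);                  //df/dy
    }
};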