made everything compile and even run somehow

Vadim Pisarevsky
2014-08-03 01:41:09 +04:00
parent 10b60f8d16
commit c20ff6ce19
31 changed files with 11910 additions and 9061 deletions

View File

@@ -135,7 +135,7 @@ public:
virtual Mat getCatMap() const = 0;
virtual void setTrainTestSplit(int count, bool shuffle=true) = 0;
-virtual void setTrainTestSplitRatio(float ratio, bool shuffle=true) = 0;
+virtual void setTrainTestSplitRatio(double ratio, bool shuffle=true) = 0;
virtual void shuffleTrainTest() = 0;
static Mat getSubVector(const Mat& vec, const Mat& idx);
@@ -156,7 +156,6 @@ class CV_EXPORTS_W StatModel : public Algorithm
{
public:
enum { UPDATE_MODEL = 1, RAW_OUTPUT=1, COMPRESSED_INPUT=2, PREPROCESSED_INPUT=4 };
-virtual ~StatModel();
virtual void clear();
virtual int getVarCount() const = 0;
@@ -164,16 +163,30 @@ public:
virtual bool isTrained() const = 0;
virtual bool isClassifier() const = 0;
-virtual bool train( const Ptr<TrainData>& trainData, int flags=0 ) = 0;
+virtual bool train( const Ptr<TrainData>& trainData, int flags=0 );
+virtual bool train( InputArray samples, int layout, InputArray responses );
virtual float calcError( const Ptr<TrainData>& data, bool test, OutputArray resp ) const;
virtual float predict( InputArray samples, OutputArray results=noArray(), int flags=0 ) const = 0;
template<typename _Tp> static Ptr<_Tp> load(const String& filename)
{
FileStorage fs(filename, FileStorage::READ);
-Ptr<_Tp> p = _Tp::create();
-p->read(fs.getFirstTopLevelNode());
-return p->isTrained() ? p : Ptr<_Tp>();
+Ptr<_Tp> model = _Tp::create();
+model->read(fs.getFirstTopLevelNode());
+return model->isTrained() ? model : Ptr<_Tp>();
}
+template<typename _Tp> static Ptr<_Tp> train(const Ptr<TrainData>& data, const typename _Tp::Params& p, int flags=0)
+{
+Ptr<_Tp> model = _Tp::create(p);
+return !model.empty() && model->train(data, flags) ? model : Ptr<_Tp>();
+}
+template<typename _Tp> static Ptr<_Tp> train(InputArray samples, int layout, InputArray responses,
+const typename _Tp::Params& p, int flags=0)
+{
+Ptr<_Tp> model = _Tp::create(p);
+return !model.empty() && model->train(TrainData::create(samples, layout, responses), flags) ? model : Ptr<_Tp>();
+}
virtual void save(const String& filename) const;
@@ -192,11 +205,17 @@ public:
class CV_EXPORTS_W NormalBayesClassifier : public StatModel
{
public:
-virtual ~NormalBayesClassifier();
+class CV_EXPORTS_W_MAP Params
+{
+public:
+Params();
+};
virtual float predictProb( InputArray inputs, OutputArray outputs,
OutputArray outputProbs, int flags=0 ) const = 0;
+virtual void setParams(const Params& params) = 0;
+virtual Params getParams() const = 0;
-static Ptr<NormalBayesClassifier> create();
+static Ptr<NormalBayesClassifier> create(const Params& params=Params());
};
/****************************************************************************************\
@@ -207,13 +226,21 @@ public:
class CV_EXPORTS_W KNearest : public StatModel
{
public:
-virtual void setDefaultK(int k) = 0;
-virtual int getDefaultK() const = 0;
+class CV_EXPORTS_W_MAP Params
+{
+public:
+Params(int defaultK=10, bool isclassifier=true);
+int defaultK;
+bool isclassifier;
+};
+virtual void setParams(const Params& p) = 0;
+virtual Params getParams() const = 0;
virtual float findNearest( InputArray samples, int k,
OutputArray results,
OutputArray neighborResponses=noArray(),
OutputArray dist=noArray() ) const = 0;
-static Ptr<KNearest> create(bool isclassifier=true);
+static Ptr<KNearest> create(const Params& params=Params());
};
/****************************************************************************************\
@@ -247,7 +274,6 @@ public:
class CV_EXPORTS Kernel : public Algorithm
{
public:
-virtual ~Kernel();
virtual int getType() const = 0;
virtual void calc( int vcount, int n, const float* vecs, const float* another, float* results ) = 0;
};
@@ -261,8 +287,6 @@ public:
// SVM params type
enum { C=0, GAMMA=1, P=2, NU=3, COEF=4, DEGREE=5 };
-virtual ~SVM();
virtual bool trainAuto( const Ptr<TrainData>& data, int kFold = 10,
ParamGrid Cgrid = SVM::getDefaultGrid(SVM::C),
ParamGrid gammaGrid = SVM::getDefaultGrid(SVM::GAMMA),
@@ -399,8 +423,6 @@ public:
int subsetOfs;
};
-virtual ~DTrees();
virtual void setDParams(const Params& p);
virtual Params getDParams() const;
@@ -464,7 +486,6 @@ public:
// Boosting type
enum { DISCRETE=0, REAL=1, LOGIT=2, GENTLE=3 };
-virtual ~Boost();
virtual Params getBParams() const = 0;
virtual void setBParams(const Params& p) = 0;
@@ -491,7 +512,6 @@ public:
};
enum {SQUARED_LOSS=0, ABSOLUTE_LOSS, HUBER_LOSS=3, DEVIANCE_LOSS};
-virtual ~GBTrees();
virtual void setK(int k) = 0;
@@ -513,10 +533,16 @@ public:
struct CV_EXPORTS_W_MAP Params
{
Params();
-Params( TermCriteria termCrit, int trainMethod, double param1, double param2=0 );
+Params( const Mat& layerSizes, int activateFunc, double fparam1, double fparam2,
+TermCriteria termCrit, int trainMethod, double param1, double param2=0 );
enum { BACKPROP=0, RPROP=1 };
+CV_PROP_RW Mat layerSizes;
+CV_PROP_RW int activateFunc;
+CV_PROP_RW double fparam1;
+CV_PROP_RW double fparam2;
CV_PROP_RW TermCriteria termCrit;
CV_PROP_RW int trainMethod;
@@ -527,23 +553,17 @@ public:
CV_PROP_RW double rpDW0, rpDWPlus, rpDWMinus, rpDWMin, rpDWMax;
};
-virtual ~ANN_MLP();
// possible activation functions
enum { IDENTITY = 0, SIGMOID_SYM = 1, GAUSSIAN = 2 };
// available training flags
enum { UPDATE_WEIGHTS = 1, NO_INPUT_SCALE = 2, NO_OUTPUT_SCALE = 4 };
virtual Mat getLayerSizes() const = 0;
virtual Mat getWeights(int layerIdx) const = 0;
virtual void setParams(const Params& p) = 0;
virtual Params getParams() const = 0;
-static Ptr<ANN_MLP> create(InputArray layerSizes=noArray(),
-const Params& params=Params(),
-int activateFunc=ANN_MLP::SIGMOID_SYM,
-double fparam1=0, double fparam2=0);
+static Ptr<ANN_MLP> create(const Params& params=Params());
};
/****************************************************************************************\

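Note on the ml.hpp hunks above: besides the float-to-double split ratio and the per-model Params structs, StatModel gains static train<_Tp> helpers that create, train, and validate a model in one call, returning an empty Ptr on failure. A minimal sketch of the new calling convention, assuming SVM::Params has a usable default constructor (only the parameterized one is visible in the svm.cpp hunk below):

    #include "opencv2/ml.hpp"
    using namespace cv;
    using namespace cv::ml;

    int main()
    {
        // 4 samples with 2 features each; integer labels, as the new
        // categorical-responses check in svm.cpp below requires
        float samplesData[] = { 0, 0,  0, 1,  1, 0,  1, 1 };
        int labelsData[] = { 0, 1, 1, 0 };
        Mat samples(4, 2, CV_32F, samplesData);
        Mat labels(4, 1, CV_32S, labelsData);

        // one-call create + train; an empty Ptr signals failure
        Ptr<SVM> svm = StatModel::train<SVM>(samples, ROW_SAMPLE, labels, SVM::Params());
        if( !svm.empty() )
        {
            Mat results;
            svm->predict(samples, results);
        }
        return 0;
    }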
View File

@@ -42,10 +42,11 @@
namespace cv { namespace ml {
-ANN_MLP::~ANN_MLP() {}
ANN_MLP::Params::Params()
{
+layerSizes = Mat();
+activateFunc = SIGMOID_SYM;
+fparam1 = fparam2 = 0;
termCrit = TermCriteria( TermCriteria::COUNT + TermCriteria::EPS, 1000, 0.01 );
trainMethod = RPROP;
bpDWScale = bpMomentScale = 0.1;
@@ -54,8 +55,13 @@ ANN_MLP::Params::Params()
}
-ANN_MLP::Params::Params( TermCriteria _termCrit, int _trainMethod, double _param1, double _param2 )
+ANN_MLP::Params::Params( const Mat& _layerSizes, int _activateFunc, double _fparam1, double _fparam2,
+TermCriteria _termCrit, int _trainMethod, double _param1, double _param2 )
{
+layerSizes = _layerSizes;
+activateFunc = _activateFunc;
+fparam1 = _fparam1;
+fparam2 = _fparam2;
termCrit = _termCrit;
trainMethod = _trainMethod;
bpDWScale = bpMomentScale = 0.1;
@@ -95,15 +101,25 @@ public:
clear();
}
-ANN_MLPImpl( const Mat& _layer_sizes, int _activ_func,
-double _f_param1, double _f_param2 )
+ANN_MLPImpl( const Params& p )
{
clear();
-create( _layer_sizes, _activ_func, _f_param1, _f_param2 );
+setParams(p);
}
virtual ~ANN_MLPImpl() {}
+void setParams(const Params& p)
+{
+params = p;
+create( params.layerSizes );
+set_activ_func( params.activateFunc, params.fparam1, params.fparam2 );
+}
+Params getParams() const
+{
+return params;
+}
void clear()
{
min_val = max_val = min_val1 = max_val1 = 0.;
@@ -183,16 +199,13 @@ public:
}
}
-void create( InputArray _layer_sizes, int _activ_func,
-double _f_param1, double _f_param2 )
+void create( InputArray _layer_sizes )
{
clear();
_layer_sizes.copyTo(layer_sizes);
int l_count = layer_count();
-set_activ_func( _activ_func, _f_param1, _f_param2 );
weights.resize(l_count + 2);
max_lsize = 0;
@@ -665,16 +678,6 @@ public:
calc_output_scale( outputs, flags );
}
-void setParams( const Params& _params )
-{
-params = _params;
-}
-Params getParams() const
-{
-return params;
-}
bool train( const Ptr<TrainData>& trainData, int flags )
{
const int MAX_ITER = 1000;
@@ -1240,7 +1243,7 @@ public:
vector<int> _layer_sizes;
fn["layer_sizes"] >> _layer_sizes;
-create( _layer_sizes, SIGMOID_SYM, 0, 0 );
+create( _layer_sizes );
int i, l_count = layer_count();
read_params(fn);
@@ -1307,15 +1310,9 @@ public:
};
-Ptr<ANN_MLP> ANN_MLP::create(InputArray _layerSizes,
-const ANN_MLP::Params& params,
-int activateFunc,
-double fparam1, double fparam2)
+Ptr<ANN_MLP> ANN_MLP::create(const ANN_MLP::Params& params)
{
-Mat layerSizes = _layerSizes.getMat();
-Ptr<ANN_MLPImpl> ann = makePtr<ANN_MLPImpl>(layerSizes, activateFunc, fparam1, fparam2);
-ann->setParams(params);
+Ptr<ANN_MLPImpl> ann = makePtr<ANN_MLPImpl>(params);
return ann;
}

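With this change the topology and activation function move out of ANN_MLP::create's argument list and into Params, and setParams now (re)creates the layers. A sketch of the new construction path, mirroring the updated test code at the end of this commit; the 2-5-1 topology is an arbitrary example, and Params::RPROP assumes the enum is scoped inside Params as the header hunk shows:

    // 2 inputs, one hidden layer of 5 neurons, 1 output
    Mat layerSizes = (Mat_<int>(1, 3) << 2, 5, 1);

    ANN_MLP::Params params(layerSizes, ANN_MLP::SIGMOID_SYM, 0, 0,
                           TermCriteria(TermCriteria::COUNT, 300, 0.01),
                           ANN_MLP::Params::RPROP, 0.1);

    // ANN_MLPImpl's constructor just calls setParams(p), which creates the
    // layers and sets the activation function
    Ptr<ANN_MLP> ann = ANN_MLP::create(params);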
View File

@@ -54,8 +54,6 @@ log_ratio( double val )
}
-Boost::~Boost() {}
Boost::Params::Params()
{
boostType = Boost::REAL;
@@ -106,6 +104,7 @@ public:
void startTraining( const Ptr<TrainData>& trainData, int flags )
{
DTreesImpl::startTraining(trainData, flags);
+sumResult.assign(w->sidx.size(), 0.);
if( bparams.boostType != Boost::DISCRETE )
{
@@ -114,14 +113,10 @@ public:
w->ord_responses.resize(n);
double a = -1, b = 1;
-if( bparams.boostType == Boost::REAL )
-a = 0;
-else if( bparams.boostType == Boost::LOGIT )
+if( bparams.boostType == Boost::LOGIT )
{
-sumResult.assign(w->sidx.size(), 0.);
a = -2, b = 2;
}
for( i = 0; i < n; i++ )
w->ord_responses[i] = w->cat_responses[i] > 0 ? b : a;
}
@@ -197,7 +192,7 @@ public:
}
else if( bparams.boostType == Boost::REAL )
{
-double p = node->value;
+double p = (node->value+1)*0.5;
node->value = 0.5*log_ratio(p);
}
}
@@ -227,7 +222,7 @@ public:
{
int i, n = (int)w->sidx.size();
int nvars = (int)varIdx.size();
-double sumw = 0.;
+double sumw = 0., C = 1.;
cv::AutoBuffer<double> buf(n*3 + nvars);
double* result = buf;
float* sbuf = (float*)(result + n*3);
@@ -261,7 +256,7 @@ public:
if( sumw != 0 )
err /= sumw;
-double C = -log_ratio( err );
+C = -log_ratio( err );
double scale = std::exp(C);
sumw = 0;
@@ -289,6 +284,7 @@ public:
for( i = 0; i < n; i++ )
{
int si = w->sidx[i];
+CV_Assert( std::abs(w->ord_responses[si]) == 1 );
double wval = w->sample_weights[si]*std::exp(-result[i]*w->ord_responses[si]);
sumw += wval;
w->sample_weights[si] = wval;
@@ -330,6 +326,20 @@ public:
}
else
CV_Error(CV_StsNotImplemented, "Unknown boosting type");
+/*if( bparams.boostType != Boost::LOGIT )
+{
+double err = 0;
+for( i = 0; i < n; i++ )
+{
+sumResult[i] += result[i]*C;
+if( bparams.boostType != Boost::DISCRETE )
+err += sumResult[i]*w->ord_responses[w->sidx[i]] < 0;
+else
+err += sumResult[i]*w->cat_responses[w->sidx[i]] < 0;
+}
+printf("%d trees. C=%.2f, training error=%.1f%%, working set size=%d (out of %d)\n", (int)roots.size(), C, err*100./n, (int)sidx.size(), n);
+}*/
// renormalize weights
if( sumw > FLT_EPSILON )

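The Boost::REAL change above deserves a note: with the a = 0 special case gone, responses stay at the {-1,+1} defaults, so a leaf value, being a weighted mean of responses, lies in [-1,1]. The new line first maps it to a class-probability estimate and then stores half the log-odds, the standard Real AdaBoost leaf output. A self-contained sketch of the transform, assuming log_ratio(p) computes log(p/(1-p)):

    #include <cmath>

    // Real AdaBoost leaf: v is a weighted mean of responses in {-1,+1}
    double realBoostLeafValue(double v)
    {
        double p = (v + 1) * 0.5;            // estimate of P(y = +1) in the leaf
        return 0.5 * std::log(p / (1 - p));  // same as 0.5*log_ratio(p) above
    }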
View File

@@ -379,7 +379,7 @@ public:
tempCatOfs.push_back(ofs);
std::copy(labels.begin(), labels.end(), std::back_inserter(tempCatMap));
}
-else if( haveMissing )
+else
{
tempCatOfs.push_back(Vec2i(0, 0));
/*Mat missing_i = layout == ROW_SAMPLE ? missing.col(i) : missing.row(i);
@@ -741,9 +741,9 @@ public:
CV_Error( CV_StsBadArg, "type of some variables is not specified" );
}
-void setTrainTestSplitRatio(float ratio, bool shuffle)
+void setTrainTestSplitRatio(double ratio, bool shuffle)
{
-CV_Assert( 0 <= ratio && ratio <= 1 );
+CV_Assert( 0. <= ratio && ratio <= 1. );
setTrainTestSplit(cvRound(getNSamples()*ratio), shuffle);
}

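The ratio-based split now takes a double and checks its range before delegating to the count-based split. A usage sketch, assuming getNTrainSamples is available on TrainData as elsewhere in the 3.0-dev API:

    Ptr<TrainData> data = TrainData::create(samples, ROW_SAMPLE, responses);
    data->setTrainTestSplitRatio(0.8, true);   // shuffle, keep 80% for training
    int ntrain = data->getNTrainSamples();     // == cvRound(getNSamples()*0.8)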
View File

@@ -50,7 +50,6 @@ ParamGrid::ParamGrid(double _minVal, double _maxVal, double _logStep)
logStep = std::max(_logStep, 1.);
}
-StatModel::~StatModel() {}
void StatModel::clear() {}
int StatModel::getVarCount() const { return 0; }
@@ -61,6 +60,11 @@ bool StatModel::train( const Ptr<TrainData>&, int )
return false;
}
+bool StatModel::train( InputArray samples, int layout, InputArray responses )
+{
+return train(TrainData::create(samples, layout, responses));
+}
float StatModel::calcError( const Ptr<TrainData>& data, bool testerr, OutputArray _resp ) const
{
Mat samples = data->getSamples();

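The new non-pure overload makes the two calls below equivalent, so callers no longer need to build a TrainData by hand (compare the test change near the end of this commit):

    // before: wrap the samples into a TrainData object explicitly
    model->train(TrainData::create(samples, ROW_SAMPLE, responses), 0);

    // after: the base-class overload does the wrapping itself
    model->train(samples, ROW_SAMPLE, responses);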
View File

@@ -49,18 +49,27 @@
namespace cv {
namespace ml {
+KNearest::Params::Params(int k, bool isclassifier_)
+{
+defaultK = k;
+isclassifier = isclassifier_;
+}
class KNearestImpl : public KNearest
{
public:
-KNearestImpl(bool __isClassifier=true)
+KNearestImpl(const Params& p)
{
-defaultK = 3;
-_isClassifier = __isClassifier;
+params = p;
}
virtual ~KNearestImpl() {}
-bool isClassifier() const { return _isClassifier; }
+Params getParams() const { return params; }
+void setParams(const Params& p) { params = p; }
+bool isClassifier() const { return params.isclassifier; }
bool isTrained() const { return !samples.empty(); }
String getDefaultModelName() const { return "opencv_ml_knn"; }
@@ -188,7 +197,7 @@ public:
if( results || testidx+range.start == 0 )
{
-if( !_isClassifier || k == 1 )
+if( !params.isclassifier || k == 1 )
{
float s = 0.f;
for( j = 0; j < k; j++ )
@@ -316,12 +325,13 @@ public:
float predict(InputArray inputs, OutputArray outputs, int) const
{
-return findNearest( inputs, defaultK, outputs, noArray(), noArray() );
+return findNearest( inputs, params.defaultK, outputs, noArray(), noArray() );
}
void write( FileStorage& fs ) const
{
-fs << "is_classifier" << (int)_isClassifier;
+fs << "is_classifier" << (int)params.isclassifier;
+fs << "default_k" << params.defaultK;
fs << "samples" << samples;
fs << "responses" << responses;
@@ -330,24 +340,21 @@ public:
void read( const FileNode& fn )
{
clear();
-_isClassifier = (int)fn["is_classifier"] != 0;
+params.isclassifier = (int)fn["is_classifier"] != 0;
+params.defaultK = (int)fn["default_k"];
fn["samples"] >> samples;
fn["responses"] >> responses;
}
-void setDefaultK(int _k) { defaultK = _k; }
-int getDefaultK() const { return defaultK; }
Mat samples;
Mat responses;
-bool _isClassifier;
-int defaultK;
+Params params;
};
-Ptr<KNearest> KNearest::create(bool isClassifier)
+Ptr<KNearest> KNearest::create(const Params& p)
{
-return makePtr<KNearestImpl>(isClassifier);
+return makePtr<KNearestImpl>(p);
}
}

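All KNearest state now lives in Params and is serialized (note the new default_k entry in write/read). A sketch of the updated usage; trainSamples/testSamples stand for any CV_32F sample matrices with matching labels:

    KNearest::Params p(4 /*defaultK*/, true /*isclassifier*/);
    Ptr<KNearest> knn = KNearest::create(p);
    knn->train(trainSamples, ROW_SAMPLE, trainLabels);

    Mat bestLabels;
    knn->findNearest(testSamples, 4, bestLabels);  // explicit k
    knn->predict(testSamples, bestLabels);         // uses params.defaultK internally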
View File

@@ -43,7 +43,7 @@
namespace cv {
namespace ml {
-NormalBayesClassifier::~NormalBayesClassifier() {}
+NormalBayesClassifier::Params::Params() {}
class NormalBayesClassifierImpl : public NormalBayesClassifier
{
@@ -53,6 +53,9 @@ public:
nallvars = 0;
}
+void setParams(const Params&) {}
+Params getParams() const { return Params(); }
bool train( const Ptr<TrainData>& trainData, int flags )
{
const float min_variation = FLT_EPSILON;
@@ -452,7 +455,7 @@ public:
};
-Ptr<NormalBayesClassifier> NormalBayesClassifier::create()
+Ptr<NormalBayesClassifier> NormalBayesClassifier::create(const Params&)
{
Ptr<NormalBayesClassifierImpl> p = makePtr<NormalBayesClassifierImpl>();
return p;

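Params is an empty placeholder here, so create(const Params&) only makes the calling convention uniform across models without changing behavior. A sketch, reusing the sample/label setup from the first example:

    Ptr<NormalBayesClassifier> nb = NormalBayesClassifier::create();  // Params() by default
    nb->train(samples, ROW_SAMPLE, labels);

    Mat outputs, outputProbs;
    nb->predictProb(samples, outputs, outputProbs);  // per-class probabilities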
View File

@@ -134,8 +134,6 @@ SVM::Params::Params( int _svmType, int _kernelType,
termCrit = _termCrit;
}
-SVM::Kernel::~Kernel() {}
/////////////////////////////////////// SVM kernel ///////////////////////////////////////
class SVMKernelImpl : public SVM::Kernel
{
@@ -358,7 +356,51 @@ static void sortSamplesByClasses( const Mat& _samples, const Mat& _responses,
//////////////////////// SVM implementation //////////////////////////////
-SVM::~SVM() {}
+ParamGrid SVM::getDefaultGrid( int param_id )
+{
+ParamGrid grid;
+if( param_id == SVM::C )
+{
+grid.minVal = 0.1;
+grid.maxVal = 500;
+grid.logStep = 5; // total iterations = 5
+}
+else if( param_id == SVM::GAMMA )
+{
+grid.minVal = 1e-5;
+grid.maxVal = 0.6;
+grid.logStep = 15; // total iterations = 4
+}
+else if( param_id == SVM::P )
+{
+grid.minVal = 0.01;
+grid.maxVal = 100;
+grid.logStep = 7; // total iterations = 4
+}
+else if( param_id == SVM::NU )
+{
+grid.minVal = 0.01;
+grid.maxVal = 0.2;
+grid.logStep = 3; // total iterations = 3
+}
+else if( param_id == SVM::COEF )
+{
+grid.minVal = 0.1;
+grid.maxVal = 300;
+grid.logStep = 14; // total iterations = 3
+}
+else if( param_id == SVM::DEGREE )
+{
+grid.minVal = 0.01;
+grid.maxVal = 4;
+grid.logStep = 7; // total iterations = 3
+}
+else
+cvError( CV_StsBadArg, "SVM::getDefaultGrid", "Invalid type of parameter "
+"(use one of SVM::C, SVM::GAMMA et al.)", __FILE__, __LINE__ );
+return grid;
+}
class SVMImpl : public SVM
{
@@ -371,52 +413,6 @@ public:
int ofs;
};
-virtual ParamGrid getDefaultGrid( int param_id ) const
-{
-ParamGrid grid;
-if( param_id == SVM::C )
-{
-grid.minVal = 0.1;
-grid.maxVal = 500;
-grid.logStep = 5; // total iterations = 5
-}
-else if( param_id == SVM::GAMMA )
-{
-grid.minVal = 1e-5;
-grid.maxVal = 0.6;
-grid.logStep = 15; // total iterations = 4
-}
-else if( param_id == SVM::P )
-{
-grid.minVal = 0.01;
-grid.maxVal = 100;
-grid.logStep = 7; // total iterations = 4
-}
-else if( param_id == SVM::NU )
-{
-grid.minVal = 0.01;
-grid.maxVal = 0.2;
-grid.logStep = 3; // total iterations = 3
-}
-else if( param_id == SVM::COEF )
-{
-grid.minVal = 0.1;
-grid.maxVal = 300;
-grid.logStep = 14; // total iterations = 3
-}
-else if( param_id == SVM::DEGREE )
-{
-grid.minVal = 0.01;
-grid.maxVal = 4;
-grid.logStep = 7; // total iterations = 3
-}
-else
-cvError( CV_StsBadArg, "SVM::getDefaultGrid", "Invalid type of parameter "
-"(use one of SVM::C, SVM::GAMMA et al.)", __FILE__, __LINE__ );
-return grid;
-}
// Generalized SMO+SVMlight algorithm
// Solves:
//
@@ -1568,6 +1564,9 @@ public:
if( svmType == C_SVC || svmType == NU_SVC )
{
responses = data->getTrainNormCatResponses();
+if( responses.empty() )
+CV_Error(CV_StsBadArg, "in the case of classification problem the responses must be categorical; "
+"either specify varType when creating TrainData, or pass integer responses");
class_labels = data->getClassLabels();
}
else
@@ -1793,7 +1792,7 @@ public:
{
int svmType = svm->params.svmType;
int sv_total = svm->sv.rows;
-int class_count = !svm->class_labels.empty() ? svm->class_labels.cols : svmType == ONE_CLASS ? 1 : 0;
+int class_count = !svm->class_labels.empty() ? (int)svm->class_labels.total() : svmType == ONE_CLASS ? 1 : 0;
AutoBuffer<float> _buffer(sv_total + (class_count+1)*2);
float* buffer = _buffer;

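Moving getDefaultGrid from a virtual method to a static function lets the grids be requested before any model exists, which is exactly what trainAuto's default arguments need. A sketch, assuming SVM::create takes a Params like the other models in this commit:

    ParamGrid Cgrid = SVM::getDefaultGrid(SVM::C);  // 0.1 .. 500, logStep 5

    Ptr<SVM> svm = SVM::create(SVM::Params());
    svm->trainAuto(TrainData::create(samples, ROW_SAMPLE, labels),
                   10,                              // k-fold cross-validation
                   Cgrid,
                   SVM::getDefaultGrid(SVM::GAMMA));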
View File

@@ -48,8 +48,6 @@ namespace ml {
using std::vector;
-DTrees::~DTrees() {}
void DTrees::setDParams(const DTrees::Params&)
{
CV_Error(CV_StsNotImplemented, "");

View File

@@ -313,7 +313,7 @@ void CV_KNearestTest::run( int /*start_from*/ )
int code = cvtest::TS::OK;
Ptr<KNearest> knearest = KNearest::create(true);
-knearest->train(TrainData::create(trainData, cv::ml::ROW_SAMPLE, trainLabels), 0);;
+knearest->train(trainData, cv::ml::ROW_SAMPLE, trainLabels);
knearest->findNearest( testData, 4, bestLabels);
float err;
if( !calcErr( bestLabels, testLabels, sizes, err, true ) )

View File

@@ -371,8 +371,9 @@ int CV_MLBaseTest::train( int testCaseIdx )
data->getVarIdx(), data->getTrainSampleIdx());
int layer_sz[] = { data->getNAllVars(), 100, 100, (int)cls_map.size() };
Mat layer_sizes( 1, (int)(sizeof(layer_sz)/sizeof(layer_sz[0])), CV_32S, layer_sz );
-model = ANN_MLP::create(layer_sizes, ANN_MLP::Params(TermCriteria(TermCriteria::COUNT,300,0.01),
-str_to_ann_train_method(train_method_str), param1, param2));
+model = ANN_MLP::create(ANN_MLP::Params(layer_sizes, ANN_MLP::SIGMOID_SYM, 0, 0,
+TermCriteria(TermCriteria::COUNT,300,0.01),
+str_to_ann_train_method(train_method_str), param1, param2));
}
else if( modelName == CV_DTREE )
{