Updated ml module interfaces and documentation
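All ml samples and tutorials are ported from the removed *::Params structures to the create()-plus-setters interface, and documentation snippet markers (//! [init], //! [train], ...) are added to the tutorial sources. The recurring migration pattern, as a schematic sketch (SomeModel and setFoo are placeholders, not real API names):

    // before: a Params struct passed to StatModel::train
    SomeModel::Params params;
    params.foo = value;
    Ptr<SomeModel> model = StatModel::train<SomeModel>(tdata, params);

    // after: setters on an instance obtained from create()
    Ptr<SomeModel> model = SomeModel::create();
    model->setFoo(value);
    model->train(tdata);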
@@ -36,9 +36,11 @@ int main( int /*argc*/, char** /*argv*/ )
     samples = samples.reshape(1, 0);

     // cluster the data
-    Ptr<EM> em_model = EM::train( samples, noArray(), labels, noArray(),
-                                  EM::Params(N, EM::COV_MAT_SPHERICAL,
-                                             TermCriteria(TermCriteria::COUNT+TermCriteria::EPS, 300, 0.1)));
+    Ptr<EM> em_model = EM::create();
+    em_model->setClustersNumber(N);
+    em_model->setCovarianceMatrixType(EM::COV_MAT_SPHERICAL);
+    em_model->setTermCriteria(TermCriteria(TermCriteria::COUNT+TermCriteria::EPS, 300, 0.1));
+    em_model->trainEM( samples, noArray(), labels, noArray() );

     // classify every image pixel
     for( i = 0; i < img.rows; i++ )
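With the trained model, the per-pixel classification inside the loop above comes down to a predict2() call; a minimal sketch, where x and y stand for the assumed pixel coordinates:

    // predict2() returns a Vec2d: [0] = likelihood logarithm, [1] = cluster index
    Mat sample( 1, 2, CV_32FC1 );
    sample.at<float>(0) = (float)x;
    sample.at<float>(1) = (float)y;
    int response = cvRound(em_model->predict2( sample, noArray() )[1]);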
@@ -178,8 +178,23 @@ build_rtrees_classifier( const string& data_filename,
 {
     // create classifier by using <data> and <responses>
     cout << "Training the classifier ...\n";
-    // Params( int maxDepth, int minSampleCount,
-    //     double regressionAccuracy, bool useSurrogates,
-    //     int maxCategories, const Mat& priors,
-    //     bool calcVarImportance, int nactiveVars,
-    //     TermCriteria termCrit );
     Ptr<TrainData> tdata = prepare_train_data(data, responses, ntrain_samples);
-    model = StatModel::train<RTrees>(tdata, RTrees::Params(10,10,0,false,15,Mat(),true,4,TC(100,0.01f)));
+    model = RTrees::create();
+    model->setMaxDepth(10);
+    model->setMinSampleCount(10);
+    model->setRegressionAccuracy(0);
+    model->setUseSurrogates(false);
+    model->setMaxCategories(15);
+    model->setPriors(Mat());
+    model->setCalculateVarImportance(true);
+    model->setActiveVarCount(4);
+    model->setTermCriteria(TC(100,0.01f));
+    model->train(tdata);
     cout << endl;
 }
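Because setCalculateVarImportance(true) is kept, the trained forest can still report per-variable importance through the new interface; a minimal sketch:

    // getVarImportance() returns an empty Mat unless importance calculation was enabled
    Mat var_importance = model->getVarImportance();
    if( !var_importance.empty() )
        cout << "computed " << var_importance.total() << " importance values\n";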
@@ -269,7 +284,14 @@ build_boost_classifier( const string& data_filename,
     priors[1] = 26;

     cout << "Training the classifier (may take a few minutes)...\n";
-    model = StatModel::train<Boost>(tdata, Boost::Params(Boost::GENTLE, 100, 0.95, 5, false, Mat(priors) ));
+    model = Boost::create();
+    model->setBoostType(Boost::GENTLE);
+    model->setWeakCount(100);
+    model->setWeightTrimRate(0.95);
+    model->setMaxDepth(5);
+    model->setUseSurrogates(false);
+    model->setPriors(Mat(priors));
+    model->train(tdata);
     cout << endl;
 }
@@ -374,11 +396,11 @@ build_mlp_classifier( const string& data_filename,
     Mat layer_sizes( 1, nlayers, CV_32S, layer_sz );

 #if 1
-    int method = ANN_MLP::Params::BACKPROP;
+    int method = ANN_MLP::BACKPROP;
     double method_param = 0.001;
     int max_iter = 300;
 #else
-    int method = ANN_MLP::Params::RPROP;
+    int method = ANN_MLP::RPROP;
     double method_param = 0.1;
     int max_iter = 1000;
 #endif
@@ -386,7 +408,12 @@ build_mlp_classifier( const string& data_filename,
     Ptr<TrainData> tdata = TrainData::create(train_data, ROW_SAMPLE, train_responses);

     cout << "Training the classifier (may take a few minutes)...\n";
-    model = StatModel::train<ANN_MLP>(tdata, ANN_MLP::Params(layer_sizes, ANN_MLP::SIGMOID_SYM, 0, 0, TC(max_iter,0), method, method_param));
+    model = ANN_MLP::create();
+    model->setLayerSizes(layer_sizes);
+    model->setActivationFunction(ANN_MLP::SIGMOID_SYM, 0, 0);
+    model->setTermCriteria(TC(max_iter,0));
+    model->setTrainMethod(method, method_param);
+    model->train(tdata);
     cout << endl;
 }
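Prediction is unchanged by this migration; for ANN_MLP the generic StatModel::predict() fills one row of output-layer activations per input sample. A minimal sketch, where sample is an assumed CV_32F feature row:

    Mat outputs;
    model->predict(sample, outputs);  // one activation per output neuron/class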
@@ -403,7 +430,6 @@ build_knearest_classifier( const string& data_filename, int K )
     if( !ok )
         return ok;

-    Ptr<KNearest> model;

     int nsamples_all = data.rows;
     int ntrain_samples = (int)(nsamples_all*0.8);
@@ -411,7 +437,10 @@ build_knearest_classifier( const string& data_filename, int K )
     // create classifier by using <data> and <responses>
     cout << "Training the classifier ...\n";
     Ptr<TrainData> tdata = prepare_train_data(data, responses, ntrain_samples);
-    model = StatModel::train<KNearest>(tdata, KNearest::Params(K, true));
+    Ptr<KNearest> model = KNearest::create();
+    model->setDefaultK(K);
+    model->setIsClassifier(true);
+    model->train(tdata);
     cout << endl;

     test_and_save_classifier(model, data, responses, ntrain_samples, 0, string());
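Besides predict(), which now uses the default K set above, the k-NN model can be queried explicitly; a minimal sketch, where sample is an assumed CV_32F feature row:

    Mat results, neighborResponses, dists;
    float r = model->findNearest(sample, K, results, neighborResponses, dists);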
@@ -435,7 +464,8 @@ build_nbayes_classifier( const string& data_filename )
     // create classifier by using <data> and <responses>
     cout << "Training the classifier ...\n";
     Ptr<TrainData> tdata = prepare_train_data(data, responses, ntrain_samples);
-    model = StatModel::train<NormalBayesClassifier>(tdata, NormalBayesClassifier::Params());
+    model = NormalBayesClassifier::create();
+    model->train(tdata);
     cout << endl;

     test_and_save_classifier(model, data, responses, ntrain_samples, 0, string());
@@ -471,13 +501,11 @@ build_svm_classifier( const string& data_filename,
     // create classifier by using <data> and <responses>
     cout << "Training the classifier ...\n";
     Ptr<TrainData> tdata = prepare_train_data(data, responses, ntrain_samples);

-    SVM::Params params;
-    params.svmType = SVM::C_SVC;
-    params.kernelType = SVM::LINEAR;
-    params.C = 1;
-
-    model = StatModel::train<SVM>(tdata, params);
+    model = SVM::create();
+    model->setType(SVM::C_SVC);
+    model->setKernel(SVM::LINEAR);
+    model->setC(1);
+    model->train(tdata);
     cout << endl;
 }
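The setter-based models keep the same save/load round trip as before; a minimal sketch (the filename is an assumption):

    model->save( "svm_classifier.yml" );
    Ptr<SVM> loaded = StatModel::load<SVM>( "svm_classifier.yml" );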
@@ -132,20 +132,16 @@ int main()
     showImage(data_train, 28, "train data");
     showImage(data_test, 28, "test data");


     // simple case with batch gradient
-    LogisticRegression::Params params = LogisticRegression::Params(
-        0.001, 10, LogisticRegression::BATCH, LogisticRegression::REG_L2, 1, 1);
-    // simple case with mini-batch gradient
-    // LogisticRegression::Params params = LogisticRegression::Params(
-    //     0.001, 10, LogisticRegression::MINI_BATCH, LogisticRegression::REG_L2, 1, 1);
-
-    // mini-batch gradient with higher accuracy
-    // LogisticRegression::Params params = LogisticRegression::Params(
-    //     0.000001, 10, LogisticRegression::MINI_BATCH, LogisticRegression::REG_L2, 1, 1);
-
     cout << "training...";
-    Ptr<StatModel> lr1 = LogisticRegression::create(params);
+    //! [init]
+    Ptr<LogisticRegression> lr1 = LogisticRegression::create();
+    lr1->setLearningRate(0.001);
+    lr1->setIterations(10);
+    lr1->setRegularization(LogisticRegression::REG_L2);
+    lr1->setTrainMethod(LogisticRegression::BATCH);
+    lr1->setMiniBatchSize(1);
+    //! [init]
     lr1->train(data_train, ROW_SAMPLE, labels_train);
     cout << "done!" << endl;
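Evaluation is unaffected by the new configuration style; a minimal sketch of scoring the held-out digits:

    Mat responses;
    lr1->predict(data_test, responses);  // one predicted label (CV_32S) per test row
    // compare 'responses' against labels_test to obtain the accuracy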
@@ -102,7 +102,7 @@ static void predict_and_paint(const Ptr<StatModel>& model, Mat& dst)
 static void find_decision_boundary_NBC()
 {
     // learn classifier
-    Ptr<NormalBayesClassifier> normalBayesClassifier = StatModel::train<NormalBayesClassifier>(prepare_train_data(), NormalBayesClassifier::Params());
+    Ptr<NormalBayesClassifier> normalBayesClassifier = StatModel::train<NormalBayesClassifier>(prepare_train_data());

     predict_and_paint(normalBayesClassifier, imgDst);
 }
@@ -112,15 +112,29 @@ static void find_decision_boundary_NBC()
 #if _KNN_
 static void find_decision_boundary_KNN( int K )
 {
-    Ptr<KNearest> knn = StatModel::train<KNearest>(prepare_train_data(), KNearest::Params(K, true));
-
+    Ptr<KNearest> knn = KNearest::create();
+    knn->setDefaultK(K);
+    knn->setIsClassifier(true);
+    knn->train(prepare_train_data());
     predict_and_paint(knn, imgDst);
 }
 #endif

 #if _SVM_
-static void find_decision_boundary_SVM( SVM::Params params )
+static void find_decision_boundary_SVM( double C )
 {
-    Ptr<SVM> svm = StatModel::train<SVM>(prepare_train_data(), params);
+    Ptr<SVM> svm = SVM::create();
+    svm->setType(SVM::C_SVC);
+    svm->setKernel(SVM::POLY); //SVM::LINEAR;
+    svm->setDegree(0.5);
+    svm->setGamma(1);
+    svm->setCoef0(1);
+    svm->setNu(0.5);
+    svm->setP(0);
+    svm->setTermCriteria(TermCriteria(TermCriteria::MAX_ITER+TermCriteria::EPS, 1000, 0.01));
+    svm->setC(C);
+    svm->train(prepare_train_data());
     predict_and_paint(svm, imgDst);

     Mat sv = svm->getSupportVectors();
@@ -135,16 +149,14 @@ static void find_decision_boundary_SVM( SVM::Params params )
 #if _DT_
 static void find_decision_boundary_DT()
 {
-    DTrees::Params params;
-    params.maxDepth = 8;
-    params.minSampleCount = 2;
-    params.useSurrogates = false;
-    params.CVFolds = 0; // the number of cross-validation folds
-    params.use1SERule = false;
-    params.truncatePrunedTree = false;
-
-    Ptr<DTrees> dtree = StatModel::train<DTrees>(prepare_train_data(), params);
-
+    Ptr<DTrees> dtree = DTrees::create();
+    dtree->setMaxDepth(8);
+    dtree->setMinSampleCount(2);
+    dtree->setUseSurrogates(false);
+    dtree->setCVFolds(0); // the number of cross-validation folds
+    dtree->setUse1SERule(false);
+    dtree->setTruncatePrunedTree(false);
+    dtree->train(prepare_train_data());
     predict_and_paint(dtree, imgDst);
 }
 #endif
@@ -152,15 +164,14 @@ static void find_decision_boundary_DT()
 #if _BT_
 static void find_decision_boundary_BT()
 {
-    Boost::Params params( Boost::DISCRETE, // boost_type
-                          100, // weak_count
-                          0.95, // weight_trim_rate
-                          2, // max_depth
-                          false, //use_surrogates
-                          Mat() // priors
-                          );
-
-    Ptr<Boost> boost = StatModel::train<Boost>(prepare_train_data(), params);
+    Ptr<Boost> boost = Boost::create();
+    boost->setBoostType(Boost::DISCRETE);
+    boost->setWeakCount(100);
+    boost->setWeightTrimRate(0.95);
+    boost->setMaxDepth(2);
+    boost->setUseSurrogates(false);
+    boost->setPriors(Mat());
+    boost->train(prepare_train_data());
     predict_and_paint(boost, imgDst);
 }
@@ -185,18 +196,17 @@ static void find_decision_boundary_GBT()
 #if _RF_
 static void find_decision_boundary_RF()
 {
-    RTrees::Params params( 4, // max_depth,
-                           2, // min_sample_count,
-                           0.f, // regression_accuracy,
-                           false, // use_surrogates,
-                           16, // max_categories,
-                           Mat(), // priors,
-                           false, // calc_var_importance,
-                           1, // nactive_vars,
-                           TermCriteria(TermCriteria::MAX_ITER, 5, 0) // max_num_of_trees_in_the_forest,
-                           );
-
-    Ptr<RTrees> rtrees = StatModel::train<RTrees>(prepare_train_data(), params);
+    Ptr<RTrees> rtrees = RTrees::create();
+    rtrees->setMaxDepth(4);
+    rtrees->setMinSampleCount(2);
+    rtrees->setRegressionAccuracy(0.f);
+    rtrees->setUseSurrogates(false);
+    rtrees->setMaxCategories(16);
+    rtrees->setPriors(Mat());
+    rtrees->setCalculateVarImportance(false);
+    rtrees->setActiveVarCount(1);
+    rtrees->setTermCriteria(TermCriteria(TermCriteria::MAX_ITER, 5, 0));
+    rtrees->train(prepare_train_data());
     predict_and_paint(rtrees, imgDst);
 }
@@ -205,9 +215,6 @@ static void find_decision_boundary_RF()
 #if _ANN_
 static void find_decision_boundary_ANN( const Mat& layer_sizes )
 {
-    ANN_MLP::Params params(layer_sizes, ANN_MLP::SIGMOID_SYM, 1, 1, TermCriteria(TermCriteria::MAX_ITER+TermCriteria::EPS, 300, FLT_EPSILON),
-                           ANN_MLP::Params::BACKPROP, 0.001);
-
     Mat trainClasses = Mat::zeros( (int)trainedPoints.size(), (int)classColors.size(), CV_32FC1 );
     for( int i = 0; i < trainClasses.rows; i++ )
     {
@@ -217,7 +224,12 @@ static void find_decision_boundary_ANN( const Mat& layer_sizes )
     Mat samples = prepare_train_samples(trainedPoints);
     Ptr<TrainData> tdata = TrainData::create(samples, ROW_SAMPLE, trainClasses);

-    Ptr<ANN_MLP> ann = StatModel::train<ANN_MLP>(tdata, params);
+    Ptr<ANN_MLP> ann = ANN_MLP::create();
+    ann->setLayerSizes(layer_sizes);
+    ann->setActivationFunction(ANN_MLP::SIGMOID_SYM, 1, 1);
+    ann->setTermCriteria(TermCriteria(TermCriteria::MAX_ITER+TermCriteria::EPS, 300, FLT_EPSILON));
+    ann->setTrainMethod(ANN_MLP::BACKPROP, 0.001);
+    ann->train(tdata);
     predict_and_paint(ann, imgDst);
 }
 #endif
@@ -247,8 +259,11 @@ static void find_decision_boundary_EM()
         // learn models
         if( !modelSamples.empty() )
         {
-            em_models[i] = EM::train(modelSamples, noArray(), noArray(), noArray(),
-                                     EM::Params(componentCount, EM::COV_MAT_DIAGONAL));
+            Ptr<EM> em = EM::create();
+            em->setClustersNumber(componentCount);
+            em->setCovarianceMatrixType(EM::COV_MAT_DIAGONAL);
+            em->trainEM(modelSamples, noArray(), noArray(), noArray());
+            em_models[i] = em;
         }
     }
@@ -332,33 +347,20 @@ int main()
     imshow( "NormalBayesClassifier", imgDst );
 #endif
 #if _KNN_
-    int K = 3;
-    find_decision_boundary_KNN( K );
+    find_decision_boundary_KNN( 3 );
     imshow( "kNN", imgDst );

-    K = 15;
-    find_decision_boundary_KNN( K );
+    find_decision_boundary_KNN( 15 );
     imshow( "kNN2", imgDst );
 #endif

 #if _SVM_
     //(1)-(2)separable and not sets
-    SVM::Params params;
-    params.svmType = SVM::C_SVC;
-    params.kernelType = SVM::POLY; //CvSVM::LINEAR;
-    params.degree = 0.5;
-    params.gamma = 1;
-    params.coef0 = 1;
-    params.C = 1;
-    params.nu = 0.5;
-    params.p = 0;
-    params.termCrit = TermCriteria(TermCriteria::MAX_ITER+TermCriteria::EPS, 1000, 0.01);
-
-    find_decision_boundary_SVM( params );
+    find_decision_boundary_SVM( 1 );
     imshow( "classificationSVM1", imgDst );

-    params.C = 10;
-    find_decision_boundary_SVM( params );
+    find_decision_boundary_SVM( 10 );
     imshow( "classificationSVM2", imgDst );
 #endif
@@ -141,7 +141,7 @@ Mat get_hogdescriptor_visu(const Mat& color_origImg, vector<float>& descriptorVa

     int cellSize = 8;
     int gradientBinSize = 9;
-    float radRangeForOneBin = (float)(CV_PI/(float)gradientBinSize); // dividing 180° into 9 bins, how large (in rad) is one bin?
+    float radRangeForOneBin = (float)(CV_PI/(float)gradientBinSize); // dividing 180 into 9 bins, how large (in rad) is one bin?

     // prepare data structure: 9 orientation / gradient strenghts for each cell
     int cells_in_x_dir = DIMX / cellSize;
@@ -313,23 +313,23 @@ void compute_hog( const vector< Mat > & img_lst, vector< Mat > & gradient_lst, c

 void train_svm( const vector< Mat > & gradient_lst, const vector< int > & labels )
 {
-    /* Default values to train SVM */
-    SVM::Params params;
-    params.coef0 = 0.0;
-    params.degree = 3;
-    params.termCrit.epsilon = 1e-3;
-    params.gamma = 0;
-    params.kernelType = SVM::LINEAR;
-    params.nu = 0.5;
-    params.p = 0.1; // for EPSILON_SVR, epsilon in loss function?
-    params.C = 0.01; // From paper, soft classifier
-    params.svmType = SVM::EPS_SVR; // C_SVC; // EPSILON_SVR; // may be also NU_SVR; // do regression task
-
     Mat train_data;
     convert_to_ml( gradient_lst, train_data );

     clog << "Start training...";
-    Ptr<SVM> svm = StatModel::train<SVM>(train_data, ROW_SAMPLE, Mat(labels), params);
+    Ptr<SVM> svm = SVM::create();
+    /* Default values to train SVM */
+    svm->setCoef0(0.0);
+    svm->setDegree(3);
+    svm->setTermCriteria(TermCriteria( CV_TERMCRIT_ITER+CV_TERMCRIT_EPS, 1000, 1e-3 ));
+    svm->setGamma(0);
+    svm->setKernel(SVM::LINEAR);
+    svm->setNu(0.5);
+    svm->setP(0.1); // for EPSILON_SVR, epsilon in loss function?
+    svm->setC(0.01); // From paper, soft classifier
+    svm->setType(SVM::EPS_SVR); // C_SVC; // EPSILON_SVR; // may be also NU_SVR; // do regression task
+    svm->train(train_data, ROW_SAMPLE, Mat(labels));
     clog << "...[done]" << endl;

     svm->save( "my_people_detector.yml" );
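A side note on the TermCriteria line above: the legacy CV_TERMCRIT_* macros are numeric aliases of the C++ constants, so an equivalent macro-free form would be:

    svm->setTermCriteria(TermCriteria(TermCriteria::MAX_ITER + TermCriteria::EPS, 1000, 1e-3));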
@@ -73,18 +73,42 @@ int main(int argc, char** argv)
     data->setTrainTestSplitRatio(train_test_split_ratio);

     printf("======DTREE=====\n");
-    Ptr<DTrees> dtree = DTrees::create(DTrees::Params( 10, 2, 0, false, 16, 0, false, false, Mat() ));
+    Ptr<DTrees> dtree = DTrees::create();
+    dtree->setMaxDepth(10);
+    dtree->setMinSampleCount(2);
+    dtree->setRegressionAccuracy(0);
+    dtree->setUseSurrogates(false);
+    dtree->setMaxCategories(16);
+    dtree->setCVFolds(0);
+    dtree->setUse1SERule(false);
+    dtree->setTruncatePrunedTree(false);
+    dtree->setPriors(Mat());
     train_and_print_errs(dtree, data);

     if( (int)data->getClassLabels().total() <= 2 ) // regression or 2-class classification problem
     {
         printf("======BOOST=====\n");
-        Ptr<Boost> boost = Boost::create(Boost::Params(Boost::GENTLE, 100, 0.95, 2, false, Mat()));
+        Ptr<Boost> boost = Boost::create();
+        boost->setBoostType(Boost::GENTLE);
+        boost->setWeakCount(100);
+        boost->setWeightTrimRate(0.95);
+        boost->setMaxDepth(2);
+        boost->setUseSurrogates(false);
+        boost->setPriors(Mat());
         train_and_print_errs(boost, data);
     }

     printf("======RTREES=====\n");
-    Ptr<RTrees> rtrees = RTrees::create(RTrees::Params(10, 2, 0, false, 16, Mat(), false, 0, TermCriteria(TermCriteria::MAX_ITER, 100, 0)));
+    Ptr<RTrees> rtrees = RTrees::create();
+    rtrees->setMaxDepth(10);
+    rtrees->setMinSampleCount(2);
+    rtrees->setRegressionAccuracy(0);
+    rtrees->setUseSurrogates(false);
+    rtrees->setMaxCategories(16);
+    rtrees->setPriors(Mat());
+    rtrees->setCalculateVarImportance(false);
+    rtrees->setActiveVarCount(0);
+    rtrees->setTermCriteria(TermCriteria(TermCriteria::MAX_ITER, 100, 0));
    train_and_print_errs(rtrees, data);

     return 0;
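train_and_print_errs() itself is not shown in this hunk; with the new API it could be as small as the following sketch (an assumption about the helper, built on the real StatModel::calcError()):

    static void train_and_print_errs(Ptr<StatModel> model, const Ptr<TrainData>& data)
    {
        model->train(data);
        printf("train error: %f\n", model->calcError(data, false, noArray()));
        printf("test error: %f\n\n", model->calcError(data, true, noArray()));
    }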
@@ -14,23 +14,30 @@ int main(int, char**)
     Mat image = Mat::zeros(height, width, CV_8UC3);

     // Set up training data
+    //! [setup1]
     int labels[4] = {1, -1, -1, -1};
-    Mat labelsMat(4, 1, CV_32SC1, labels);

     float trainingData[4][2] = { {501, 10}, {255, 10}, {501, 255}, {10, 501} };
+    //! [setup1]
+    //! [setup2]
     Mat trainingDataMat(4, 2, CV_32FC1, trainingData);
+    Mat labelsMat(4, 1, CV_32SC1, labels);
+    //! [setup2]

-    // Set up SVM's parameters
-    SVM::Params params;
-    params.svmType = SVM::C_SVC;
-    params.kernelType = SVM::LINEAR;
-    params.termCrit = TermCriteria(TermCriteria::MAX_ITER, 100, 1e-6);
-
     // Train the SVM
-    Ptr<SVM> svm = StatModel::train<SVM>(trainingDataMat, ROW_SAMPLE, labelsMat, params);
+    //! [init]
+    Ptr<SVM> svm = SVM::create();
+    svm->setType(SVM::C_SVC);
+    svm->setKernel(SVM::LINEAR);
+    svm->setTermCriteria(TermCriteria(TermCriteria::MAX_ITER, 100, 1e-6));
+    //! [init]
+    //! [train]
+    svm->train(trainingDataMat, ROW_SAMPLE, labelsMat);
+    //! [train]

-    Vec3b green(0,255,0), blue (255,0,0);
     // Show the decision regions given by the SVM
+    //! [show]
+    Vec3b green(0,255,0), blue (255,0,0);
     for (int i = 0; i < image.rows; ++i)
         for (int j = 0; j < image.cols; ++j)
         {
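Between this hunk and the next, the sample classifies every image point with the trained SVM; a minimal sketch of one query inside the loop:

    Mat sampleMat = (Mat_<float>(1,2) << j, i);
    float response = svm->predict(sampleMat);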
@@ -42,16 +49,20 @@ int main(int, char**)
             else if (response == -1)
                 image.at<Vec3b>(i,j) = blue;
         }
+    //! [show]

     // Show the training data
+    //! [show_data]
     int thickness = -1;
     int lineType = 8;
     circle( image, Point(501, 10), 5, Scalar( 0, 0, 0), thickness, lineType );
     circle( image, Point(255, 10), 5, Scalar(255, 255, 255), thickness, lineType );
     circle( image, Point(501, 255), 5, Scalar(255, 255, 255), thickness, lineType );
     circle( image, Point( 10, 501), 5, Scalar(255, 255, 255), thickness, lineType );
+    //! [show_data]

     // Show support vectors
+    //! [show_vectors]
     thickness = 2;
     lineType = 8;
     Mat sv = svm->getSupportVectors();
@@ -61,6 +72,7 @@ int main(int, char**)
         const float* v = sv.ptr<float>(i);
         circle( image, Point( (int) v[0], (int) v[1]), 6, Scalar(128, 128, 128), thickness, lineType);
     }
+    //! [show_vectors]

     imwrite("result.png", image); // save the image
@@ -39,6 +39,7 @@ int main()
     // Set up the linearly separable part of the training data
     int nLinearSamples = (int) (FRAC_LINEAR_SEP * NTRAINING_SAMPLES);

+    //! [setup1]
     // Generate random points for the class 1
     Mat trainClass = trainData.rowRange(0, nLinearSamples);
     // The x coordinate of the points is in [0, 0.4)
@@ -56,9 +57,10 @@ int main()
     // The y coordinate of the points is in [0, 1)
     c = trainClass.colRange(1,2);
     rng.fill(c, RNG::UNIFORM, Scalar(1), Scalar(HEIGHT));
+    //! [setup1]

     //------------------ Set up the non-linearly separable part of the training data ---------------
-
+    //! [setup2]
     // Generate random points for the classes 1 and 2
     trainClass = trainData.rowRange( nLinearSamples, 2*NTRAINING_SAMPLES-nLinearSamples);
     // The x coordinate of the points is in [0.4, 0.6)
@@ -67,24 +69,28 @@ int main()
     // The y coordinate of the points is in [0, 1)
     c = trainClass.colRange(1,2);
     rng.fill(c, RNG::UNIFORM, Scalar(1), Scalar(HEIGHT));

+    //! [setup2]
     //------------------------- Set up the labels for the classes ---------------------------------
     labels.rowRange( 0, NTRAINING_SAMPLES).setTo(1); // Class 1
     labels.rowRange(NTRAINING_SAMPLES, 2*NTRAINING_SAMPLES).setTo(2); // Class 2

     //------------------------ 2. Set up the support vector machines parameters --------------------
-    SVM::Params params;
-    params.svmType = SVM::C_SVC;
-    params.C = 0.1;
-    params.kernelType = SVM::LINEAR;
-    params.termCrit = TermCriteria(TermCriteria::MAX_ITER, (int)1e7, 1e-6);
-
     //------------------------ 3. Train the svm ----------------------------------------------------
     cout << "Starting training process" << endl;
-    Ptr<SVM> svm = StatModel::train<SVM>(trainData, ROW_SAMPLE, labels, params);
+    //! [init]
+    Ptr<SVM> svm = SVM::create();
+    svm->setType(SVM::C_SVC);
+    svm->setC(0.1);
+    svm->setKernel(SVM::LINEAR);
+    svm->setTermCriteria(TermCriteria(TermCriteria::MAX_ITER, (int)1e7, 1e-6));
+    //! [init]
+    //! [train]
+    svm->train(trainData, ROW_SAMPLE, labels);
+    //! [train]
     cout << "Finished training process" << endl;

     //------------------------ 4. Show the decision regions ----------------------------------------
+    //! [show]
     Vec3b green(0,100,0), blue (100,0,0);
     for (int i = 0; i < I.rows; ++i)
         for (int j = 0; j < I.cols; ++j)
@@ -95,8 +101,10 @@ int main()
             if (response == 1) I.at<Vec3b>(j, i) = green;
             else if (response == 2) I.at<Vec3b>(j, i) = blue;
         }
+    //! [show]

     //----------------------- 5. Show the training data --------------------------------------------
+    //! [show_data]
     int thick = -1;
     int lineType = 8;
     float px, py;
@@ -114,8 +122,10 @@ int main()
         py = trainData.at<float>(i,1);
         circle(I, Point( (int) px, (int) py ), 3, Scalar(255, 0, 0), thick, lineType);
     }
+    //! [show_data]

     //------------------------- 6. Show support vectors --------------------------------------------
+    //! [show_vectors]
     thick = 2;
     lineType = 8;
     Mat sv = svm->getSupportVectors();
@@ -125,6 +135,7 @@ int main()
        const float* v = sv.ptr<float>(i);
        circle( I, Point( (int) v[0], (int) v[1]), 6, Scalar(128, 128, 128), thick, lineType);
    }
+    //! [show_vectors]

    imwrite("result.png", I); // save the Image
    imshow("SVM for Non-Linear Training Data", I); // show it to the user