made everything compile and even run somehow
File diff suppressed because it is too large
@@ -2326,14 +2326,14 @@ static void removeBowImageDescriptorsByCount( vector<ObdImage>& images, vector<M
     CV_Assert( bowImageDescriptors.size() == objectPresent.size() );
 }

-static void setSVMParams( const SVM::Params& svmParams, Mat& class_wts_cv, const Mat& responses, bool balanceClasses )
+static void setSVMParams( SVM::Params& svmParams, Mat& class_wts_cv, const Mat& responses, bool balanceClasses )
 {
     int pos_ex = countNonZero(responses == 1);
     int neg_ex = countNonZero(responses == -1);
     cout << pos_ex << " positive training samples; " << neg_ex << " negative training samples" << endl;

-    svmParams.svm_type = CvSVM::C_SVC;
-    svmParams.kernel_type = CvSVM::RBF;
+    svmParams.svmType = SVM::C_SVC;
+    svmParams.kernelType = SVM::RBF;
     if( balanceClasses )
     {
         Mat class_wts( 2, 1, CV_32FC1 );
@@ -2351,43 +2351,44 @@ static void setSVMParams( const SVM::Params& svmParams, Mat& class_wts_cv, const
             class_wts.at<float>(1) = static_cast<float>(pos_ex)/static_cast<float>(pos_ex+neg_ex);
         }
         class_wts_cv = class_wts;
-        svmParams.class_weights = &class_wts_cv;
+        svmParams.classWeights = class_wts_cv;
     }
 }

-static void setSVMTrainAutoParams( CvParamGrid& c_grid, CvParamGrid& gamma_grid,
-                                   CvParamGrid& p_grid, CvParamGrid& nu_grid,
-                                   CvParamGrid& coef_grid, CvParamGrid& degree_grid )
+static void setSVMTrainAutoParams( ParamGrid& c_grid, ParamGrid& gamma_grid,
+                                   ParamGrid& p_grid, ParamGrid& nu_grid,
+                                   ParamGrid& coef_grid, ParamGrid& degree_grid )
 {
-    c_grid = CvSVM::get_default_grid(CvSVM::C);
+    c_grid = SVM::getDefaultGrid(SVM::C);

-    gamma_grid = CvSVM::get_default_grid(CvSVM::GAMMA);
+    gamma_grid = SVM::getDefaultGrid(SVM::GAMMA);

-    p_grid = CvSVM::get_default_grid(CvSVM::P);
-    p_grid.step = 0;
+    p_grid = SVM::getDefaultGrid(SVM::P);
+    p_grid.logStep = 0;

-    nu_grid = CvSVM::get_default_grid(CvSVM::NU);
-    nu_grid.step = 0;
+    nu_grid = SVM::getDefaultGrid(SVM::NU);
+    nu_grid.logStep = 0;

-    coef_grid = CvSVM::get_default_grid(CvSVM::COEF);
-    coef_grid.step = 0;
+    coef_grid = SVM::getDefaultGrid(SVM::COEF);
+    coef_grid.logStep = 0;

-    degree_grid = CvSVM::get_default_grid(CvSVM::DEGREE);
-    degree_grid.step = 0;
+    degree_grid = SVM::getDefaultGrid(SVM::DEGREE);
+    degree_grid.logStep = 0;
 }

-static void trainSVMClassifier( CvSVM& svm, const SVMTrainParamsExt& svmParamsExt, const string& objClassName, VocData& vocData,
+static Ptr<SVM> trainSVMClassifier( const SVMTrainParamsExt& svmParamsExt, const string& objClassName, VocData& vocData,
                                 Ptr<BOWImgDescriptorExtractor>& bowExtractor, const Ptr<FeatureDetector>& fdetector,
                                 const string& resPath )
 {
     /* first check if a previously trained svm for the current class has been saved to file */
     string svmFilename = resPath + svmsDir + "/" + objClassName + ".xml.gz";
+    Ptr<SVM> svm;

     FileStorage fs( svmFilename, FileStorage::READ);
     if( fs.isOpened() )
     {
         cout << "*** LOADING SVM CLASSIFIER FOR CLASS " << objClassName << " ***" << endl;
-        svm.load( svmFilename.c_str() );
+        svm = StatModel::load<SVM>( svmFilename );
     }
     else
     {
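For context, the renames above follow one consistent pattern: 2.4's C-style identifiers (CvSVM::*, svm_type, kernel_type, class_weights as a CvMat*, CvParamGrid::step) become camel-cased members on the cv::ml types (SVM::*, svmType, kernelType, classWeights as a cv::Mat, ParamGrid::logStep). A minimal self-contained sketch, assuming the cv::ml API as used in this commit (the helper name makeRbfParams is illustrative only):

#include "opencv2/ml/ml.hpp"
using namespace cv;
using namespace cv::ml;

// Sketch: build RBF C-SVC parameters with optional class weighting,
// using the post-rename field names from this commit.
static SVM::Params makeRbfParams( const Mat& classWeights )
{
    SVM::Params p;
    p.svmType = SVM::C_SVC;        // was svm_type = CvSVM::C_SVC
    p.kernelType = SVM::RBF;       // was kernel_type = CvSVM::RBF
    p.classWeights = classWeights; // was a CvMat* (class_weights)
    return p;
}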
@@ -2438,20 +2439,24 @@ static void trainSVMClassifier( CvSVM& svm, const SVMTrainParamsExt& svmParamsEx
         }

         cout << "TRAINING SVM FOR CLASS ..." << objClassName << "..." << endl;
-        CvSVMParams svmParams;
-        CvMat class_wts_cv;
+        SVM::Params svmParams;
+        Mat class_wts_cv;
         setSVMParams( svmParams, class_wts_cv, responses, svmParamsExt.balanceClasses );
-        CvParamGrid c_grid, gamma_grid, p_grid, nu_grid, coef_grid, degree_grid;
+        svm = SVM::create(svmParams);
+        ParamGrid c_grid, gamma_grid, p_grid, nu_grid, coef_grid, degree_grid;
         setSVMTrainAutoParams( c_grid, gamma_grid, p_grid, nu_grid, coef_grid, degree_grid );
-        svm.train_auto( trainData, responses, Mat(), Mat(), svmParams, 10, c_grid, gamma_grid, p_grid, nu_grid, coef_grid, degree_grid );
+
+        svm->trainAuto(TrainData::create(trainData, ROW_SAMPLE, responses), 10,
+                       c_grid, gamma_grid, p_grid, nu_grid, coef_grid, degree_grid);
         cout << "SVM TRAINING FOR CLASS " << objClassName << " COMPLETED" << endl;

-        svm.save( svmFilename.c_str() );
+        svm->save( svmFilename );
         cout << "SAVED CLASSIFIER TO FILE" << endl;
     }
+    return svm;
 }

-static void computeConfidences( CvSVM& svm, const string& objClassName, VocData& vocData,
+static void computeConfidences( const Ptr<SVM>& svm, const string& objClassName, VocData& vocData,
                                 Ptr<BOWImgDescriptorExtractor>& bowExtractor, const Ptr<FeatureDetector>& fdetector,
                                 const string& resPath )
 {
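A standalone sketch of the new create-then-trainAuto flow used above, assuming the same cv::ml API as this commit (trainAutoSketch is an illustrative name; samples/responses are placeholder Mats, one row per descriptor):

#include "opencv2/ml/ml.hpp"
using namespace cv;
using namespace cv::ml;

static Ptr<SVM> trainAutoSketch( const Mat& samples, const Mat& responses,
                                 const SVM::Params& params )
{
    Ptr<SVM> svm = SVM::create(params);
    ParamGrid c  = SVM::getDefaultGrid(SVM::C),  g = SVM::getDefaultGrid(SVM::GAMMA);
    ParamGrid p  = SVM::getDefaultGrid(SVM::P),  n = SVM::getDefaultGrid(SVM::NU);
    ParamGrid co = SVM::getDefaultGrid(SVM::COEF), d = SVM::getDefaultGrid(SVM::DEGREE);
    p.logStep = n.logStep = co.logStep = d.logStep = 0; // fix these; tune only C and gamma
    svm->trainAuto(TrainData::create(samples, ROW_SAMPLE, responses),
                   10, c, g, p, n, co, d); // 10-fold cross-validation
    return svm;
}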
@@ -2477,12 +2482,12 @@ static void computeConfidences( CvSVM& svm, const string& objClassName, VocData&
         if( imageIdx == 0 )
         {
             // In the first iteration, determine the sign of the positive class
-            float classVal = confidences[imageIdx] = svm.predict( bowImageDescriptors[imageIdx], false );
-            float scoreVal = confidences[imageIdx] = svm.predict( bowImageDescriptors[imageIdx], true );
+            float classVal = confidences[imageIdx] = svm->predict( bowImageDescriptors[imageIdx], noArray(), 0 );
+            float scoreVal = confidences[imageIdx] = svm->predict( bowImageDescriptors[imageIdx], noArray(), StatModel::RAW_OUTPUT );
             signMul = (classVal < 0) == (scoreVal < 0) ? 1.f : -1.f;
         }
         // svm output of decision function
-        confidences[imageIdx] = signMul * svm.predict( bowImageDescriptors[imageIdx], true );
+        confidences[imageIdx] = signMul * svm->predict( bowImageDescriptors[imageIdx], noArray(), StatModel::RAW_OUTPUT );
     }

     cout << "WRITING QUERY RESULTS TO VOC RESULTS FILE FOR CLASS " << objClassName << "..." << endl;
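The two predict calls above differ only in the flags argument: with default flags the new API returns the class label, and with StatModel::RAW_OUTPUT it returns the raw decision-function value, whose sign is reconciled with the label once via signMul. A hedged sketch of that convention:

#include "opencv2/ml/ml.hpp"
using namespace cv;
using namespace cv::ml;

// Sketch: turn the raw SVM decision value into a confidence whose sign
// agrees with the predicted label (signMul is derived once, as above).
static float confidenceSketch( const Ptr<SVM>& svm, const Mat& sample, float signMul )
{
    float score = svm->predict( sample, noArray(), StatModel::RAW_OUTPUT );
    return signMul * score;
}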
@@ -2592,9 +2597,8 @@ int main(int argc, char** argv)
     for( size_t classIdx = 0; classIdx < objClasses.size(); ++classIdx )
     {
         // Train a classifier on train dataset
-        CvSVM svm;
-        trainSVMClassifier( svm, svmTrainParamsExt, objClasses[classIdx], vocData,
-                            bowExtractor, featureDetector, resPath );
+        Ptr<SVM> svm = trainSVMClassifier( svmTrainParamsExt, objClasses[classIdx], vocData,
+                                           bowExtractor, featureDetector, resPath );

         // Now use the classifier over all images on the test dataset and rank according to score order
         // also calculating precision-recall etc.
@@ -179,10 +179,7 @@ build_rtrees_classifier( const string& data_filename,
     // create classifier by using <data> and <responses>
     cout << "Training the classifier ...\n";
     Ptr<TrainData> tdata = prepare_train_data(data, responses, ntrain_samples);

-    // 3. train classifier
-    model = RTrees::create(RTrees::Params(10,10,0,false,15,Mat(),true,4,TC(100,0.01f)));
-    model->train( tdata );
+    model = StatModel::train<RTrees>(tdata, RTrees::Params(10,10,0,false,15,Mat(),true,4,TC(100,0.01f)));
     cout << endl;
 }

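The one-liner above is the pattern this commit applies across the samples: StatModel::train<T>(data, params) constructs and trains in a single call. A sketch, assuming TC(iters, eps) is this sample's local TermCriteria shorthand (trainForestSketch is an illustrative name):

#include "opencv2/ml/ml.hpp"
using namespace cv;
using namespace cv::ml;

static Ptr<RTrees> trainForestSketch( const Ptr<TrainData>& tdata )
{
    // Equivalent of TC(100, 0.01f) under the assumed definition of TC.
    TermCriteria tc( TermCriteria::MAX_ITER + TermCriteria::EPS, 100, 0.01 );
    // (maxDepth, minSampleCount, regressionAccuracy, useSurrogates, maxCategories,
    //  priors, calcVarImportance, nactiveVars, termCrit)
    RTrees::Params p( 10, 10, 0, false, 15, Mat(), true, 4, tc );
    return StatModel::train<RTrees>( tdata, p ); // create + train in one step
}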
@@ -267,10 +264,12 @@ build_boost_classifier( const string& data_filename,

     Ptr<TrainData> tdata = TrainData::create(new_data, ROW_SAMPLE, new_responses,
                                              noArray(), noArray(), noArray(), var_type);
-    model = Boost::create(Boost::Params(Boost::REAL, 100, 0.95, 5, false, Mat() ));
+    vector<double> priors(2);
+    priors[0] = 1;
+    priors[1] = 26;

     cout << "Training the classifier (may take a few minutes)...\n";
-    model->train(tdata);
+    model = StatModel::train<Boost>(tdata, Boost::Params(Boost::GENTLE, 100, 0.95, 5, false, Mat(priors) ));
     cout << endl;
 }

@@ -333,7 +332,6 @@ build_mlp_classifier( const string& data_filename,
     if( !ok )
         return ok;

-    int i, j;
     Ptr<ANN_MLP> model;

     int nsamples_all = data.rows;
@@ -360,14 +358,14 @@ build_mlp_classifier( const string& data_filename,
     // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

     Mat train_data = data.rowRange(0, ntrain_samples);
-    Mat new_responses = Mat::zeros( ntrain_samples, class_count, CV_32F );
+    Mat train_responses = Mat::zeros( ntrain_samples, class_count, CV_32F );

     // 1. unroll the responses
     cout << "Unrolling the responses...\n";
-    for( i = 0; i < ntrain_samples; i++ )
+    for( int i = 0; i < ntrain_samples; i++ )
     {
-        int cls_label = responses.at<int>(i) - 'A'
-        new_responses.at<float>(i, cls_label) = 1.f;
+        int cls_label = responses.at<int>(i) - 'A';
+        train_responses.at<float>(i, cls_label) = 1.f;
     }

     // 2. train classifier
@@ -385,180 +383,63 @@ build_mlp_classifier( const string& data_filename,
     int max_iter = 1000;
 #endif

-    mlp.train( &train_data, new_responses, 0, 0,
-               ANN_MLP::Params(TC(max_iter,0), method, method_param));
+    Ptr<TrainData> tdata = TrainData::create(train_data, ROW_SAMPLE, train_responses);

-    model = ANN_MLP::create() mlp.create( &layer_sizes );
-    printf( "Training the classifier (may take a few minutes)...\n");
-
-    cvReleaseMat( &new_responses );
-    printf("\n");
+    cout << "Training the classifier (may take a few minutes)...\n";
+    model = StatModel::train<ANN_MLP>(tdata, ANN_MLP::Params(layer_sizes, ANN_MLP::SIGMOID_SYM, 0, 0, TC(max_iter,0), method, method_param));
+    cout << endl;
 }

-    Mat mlp_response;
-
-    // compute prediction error on train and test data
-    for( i = 0; i < nsamples_all; i++ )
-    {
-        int best_class;
-        CvMat sample;
-        cvGetRow( data, &sample, i );
-        CvPoint max_loc;
-        mlp.predict( &sample, mlp_response );
-        cvMinMaxLoc( mlp_response, 0, 0, 0, &max_loc, 0 );
-        best_class = max_loc.x + 'A';
-
-        int r = fabs((double)best_class - responses->data.fl[i]) < FLT_EPSILON ? 1 : 0;
-
-        if( i < ntrain_samples )
-            train_hr += r;
-        else
-            test_hr += r;
-    }
-
-    test_hr /= (double)(nsamples_all-ntrain_samples);
-    train_hr /= (double)ntrain_samples;
-    printf( "Recognition rate: train = %.1f%%, test = %.1f%%\n",
-            train_hr*100., test_hr*100. );
-
-    if( !filename_to_save.empty() )
-        model->save( filename_to_save );
-
+    test_and_save_classifier(model, data, responses, ntrain_samples, 'A', filename_to_save);
     return true;
 }

 static bool
 build_knearest_classifier( const string& data_filename, int K )
 {
-    const int var_count = 16;
     Mat data;
-    CvMat train_data;
     Mat responses;

     bool ok = read_num_class_data( data_filename, 16, &data, &responses );
     if( !ok )
         return ok;

-    int nsamples_all = 0, ntrain_samples = 0;
+    Ptr<KNearest> model;

-    nsamples_all = data->rows;
-    ntrain_samples = (int)(nsamples_all*0.8);
+    int nsamples_all = data.rows;
+    int ntrain_samples = (int)(nsamples_all*0.8);

-    // 1. unroll the responses
-    printf( "Unrolling the responses...\n");
-    cvGetRows( data, &train_data, 0, ntrain_samples );
+    // create classifier by using <data> and <responses>
+    cout << "Training the classifier ...\n";
+    Ptr<TrainData> tdata = prepare_train_data(data, responses, ntrain_samples);
+    model = StatModel::train<KNearest>(tdata, KNearest::Params(K, true));
+    cout << endl;

-    // 2. train classifier
-    Mat train_resp = cvCreateMat( ntrain_samples, 1, CV_32FC1);
-    for (int i = 0; i < ntrain_samples; i++)
-        train_resp->data.fl[i] = responses->data.fl[i];
-    Ptr<KNearest> model = KNearest::create(true);
-    model->train(train_data, train_resp);
-
-    Mat nearests = cvCreateMat( (nsamples_all - ntrain_samples), K, CV_32FC1);
-    float* _sample = new float[var_count * (nsamples_all - ntrain_samples)];
-    CvMat sample = cvMat( nsamples_all - ntrain_samples, 16, CV_32FC1, _sample );
-    float* true_results = new float[nsamples_all - ntrain_samples];
-    for (int j = ntrain_samples; j < nsamples_all; j++)
-    {
-        float *s = data->data.fl + j * var_count;
-
-        for (int i = 0; i < var_count; i++)
-        {
-            sample.data.fl[(j - ntrain_samples) * var_count + i] = s[i];
-        }
-        true_results[j - ntrain_samples] = responses->data.fl[j];
-    }
-    CvMat *result = cvCreateMat(1, nsamples_all - ntrain_samples, CV_32FC1);
-    knearest.find_nearest(&sample, K, result, 0, nearests, 0);
-    int true_resp = 0;
-    int accuracy = 0;
-    for (int i = 0; i < nsamples_all - ntrain_samples; i++)
-    {
-        if (result->data.fl[i] == true_results[i])
-            true_resp++;
-        for(int k = 0; k < K; k++ )
-        {
-            if( nearests->data.fl[i * K + k] == true_results[i])
-                accuracy++;
-        }
-    }
-
-    printf("true_resp = %f%%\tavg accuracy = %f%%\n", (float)true_resp / (nsamples_all - ntrain_samples) * 100,
-           (float)accuracy / (nsamples_all - ntrain_samples) / K * 100);
-
-    delete[] true_results;
-    delete[] _sample;
-    cvReleaseMat( &train_resp );
-    cvReleaseMat( &nearests );
-    cvReleaseMat( &result );
-    cvReleaseMat( &data );
-    cvReleaseMat( &responses );
-
-    return 0;
+    test_and_save_classifier(model, data, responses, ntrain_samples, 0, string());
+    return true;
 }

 static bool
 build_nbayes_classifier( const string& data_filename )
 {
-    const int var_count = 16;
     Mat data;
-    CvMat train_data;
     Mat responses;

     bool ok = read_num_class_data( data_filename, 16, &data, &responses );
     if( !ok )
         return ok;

-    int nsamples_all = 0, ntrain_samples = 0;
+    Ptr<NormalBayesClassifier> model;

-    nsamples_all = data->rows;
-    ntrain_samples = (int)(nsamples_all*0.5);
+    int nsamples_all = data.rows;
+    int ntrain_samples = (int)(nsamples_all*0.8);

-    // 1. unroll the responses
-    printf( "Unrolling the responses...\n");
-    cvGetRows( data, &train_data, 0, ntrain_samples );
+    // create classifier by using <data> and <responses>
+    cout << "Training the classifier ...\n";
+    Ptr<TrainData> tdata = prepare_train_data(data, responses, ntrain_samples);
+    model = StatModel::train<NormalBayesClassifier>(tdata, NormalBayesClassifier::Params());
+    cout << endl;

-    // 2. train classifier
-    Mat train_resp = cvCreateMat( ntrain_samples, 1, CV_32FC1);
-    for (int i = 0; i < ntrain_samples; i++)
-        train_resp->data.fl[i] = responses->data.fl[i];
-    CvNormalBayesClassifier nbayes(&train_data, train_resp);
-
-    float* _sample = new float[var_count * (nsamples_all - ntrain_samples)];
-    CvMat sample = cvMat( nsamples_all - ntrain_samples, 16, CV_32FC1, _sample );
-    float* true_results = new float[nsamples_all - ntrain_samples];
-    for (int j = ntrain_samples; j < nsamples_all; j++)
-    {
-        float *s = data->data.fl + j * var_count;
-
-        for (int i = 0; i < var_count; i++)
-        {
-            sample.data.fl[(j - ntrain_samples) * var_count + i] = s[i];
-        }
-        true_results[j - ntrain_samples] = responses->data.fl[j];
-    }
-    CvMat *result = cvCreateMat(1, nsamples_all - ntrain_samples, CV_32FC1);
-    nbayes.predict(&sample, result);
-    int true_resp = 0;
-    //int accuracy = 0;
-    for (int i = 0; i < nsamples_all - ntrain_samples; i++)
-    {
-        if (result->data.fl[i] == true_results[i])
-            true_resp++;
-    }
-
-    printf("true_resp = %f%%\n", (float)true_resp / (nsamples_all - ntrain_samples) * 100);
-
-    delete[] true_results;
-    delete[] _sample;
-    cvReleaseMat( &train_resp );
-    cvReleaseMat( &result );
-    cvReleaseMat( &data );
-    cvReleaseMat( &responses );
-
-    return 0;
+    test_and_save_classifier(model, data, responses, ntrain_samples, 0, string());
+    return true;
 }

 static bool
@@ -568,95 +449,47 @@ build_svm_classifier( const string& data_filename,
 {
     Mat data;
     Mat responses;
-    Mat train_resp;
-    CvMat train_data;
-    int nsamples_all = 0, ntrain_samples = 0;
-    int var_count;

     bool ok = read_num_class_data( data_filename, 16, &data, &responses );
     if( !ok )
         return ok;

-    ////////// SVM parameters ///////////////////////////////
-    CvSVMParams param;
-    param.kernel_type=CvSVM::LINEAR;
-    param.svm_type=CvSVM::C_SVC;
-    param.C=1;
-    ///////////////////////////////////////////////////////////
+    Ptr<SVM> model;

-    printf( "The database %s is loaded.\n", data_filename );
-    nsamples_all = data->rows;
-    ntrain_samples = (int)(nsamples_all*0.1);
-    var_count = data->cols;
+    int nsamples_all = data.rows;
+    int ntrain_samples = (int)(nsamples_all*0.8);

     // Create or load Random Trees classifier
-    if( filename_to_load )
+    if( !filename_to_load.empty() )
     {
         // load classifier from the specified file
-        svm.load( filename_to_load );
+        model = load_classifier<SVM>(filename_to_load);
+        if( model.empty() )
+            return false;
         ntrain_samples = 0;
-        if( svm.get_var_count() == 0 )
-        {
-            printf( "Could not read the classifier %s\n", filename_to_load );
-            return -1;
-        }
-        printf( "The classifier %s is loaded.\n", filename_to_load );
     }
     else
     {
-        // train classifier
-        printf( "Training the classifier (may take a few minutes)...\n");
-        cvGetRows( data, &train_data, 0, ntrain_samples );
-        train_resp = cvCreateMat( ntrain_samples, 1, CV_32FC1);
-        for (int i = 0; i < ntrain_samples; i++)
-            train_resp->data.fl[i] = responses->data.fl[i];
-        svm.train(&train_data, train_resp, 0, 0, param);
+        // create classifier by using <data> and <responses>
+        cout << "Training the classifier ...\n";
+        Ptr<TrainData> tdata = prepare_train_data(data, responses, ntrain_samples);
+
+        SVM::Params params;
+        params.svmType = SVM::C_SVC;
+        params.kernelType = SVM::LINEAR;
+        params.C = 1;
+
+        model = StatModel::train<SVM>(tdata, params);
+        cout << endl;
     }

-    // classification
-    std::vector<float> _sample(var_count * (nsamples_all - ntrain_samples));
-    CvMat sample = cvMat( nsamples_all - ntrain_samples, 16, CV_32FC1, &_sample[0] );
-    std::vector<float> true_results(nsamples_all - ntrain_samples);
-    for (int j = ntrain_samples; j < nsamples_all; j++)
-    {
-        float *s = data->data.fl + j * var_count;
-
-        for (int i = 0; i < var_count; i++)
-        {
-            sample.data.fl[(j - ntrain_samples) * var_count + i] = s[i];
-        }
-        true_results[j - ntrain_samples] = responses->data.fl[j];
-    }
-    CvMat *result = cvCreateMat(1, nsamples_all - ntrain_samples, CV_32FC1);
-
-    printf("Classification (may take a few minutes)...\n");
-    double t = (double)cvGetTickCount();
-    svm.predict(&sample, result);
-    t = (double)cvGetTickCount() - t;
-    printf("Prediction type: %gms\n", t/(cvGetTickFrequency()*1000.));
-
-    int true_resp = 0;
-    for (int i = 0; i < nsamples_all - ntrain_samples; i++)
-    {
-        if (result->data.fl[i] == true_results[i])
-            true_resp++;
-    }
-
-    printf("true_resp = %f%%\n", (float)true_resp / (nsamples_all - ntrain_samples) * 100);
-
-    if( !filename_to_save.empty() )
-        model->save( filename_to_save );
-
+    test_and_save_classifier(model, data, responses, ntrain_samples, 0, filename_to_save);
     return true;
 }

 int main( int argc, char *argv[] )
 {
-    char* filename_to_save = 0;
-    char* filename_to_load = 0;
-    char default_data_filename[] = "./letter-recognition.data";
-    char* data_filename = default_data_filename;
+    string filename_to_save = "";
+    string filename_to_load = "";
+    string data_filename = "./letter-recognition.data";
     int method = 0;

     int i;
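load_classifier<T> is a helper defined elsewhere in this sample (its body is not part of this hunk); a hedged sketch of what it presumably wraps, given the StatModel::load<SVM> calls seen elsewhere in the commit:

#include "opencv2/ml/ml.hpp"
#include <string>
#include <stdio.h>
using namespace cv;
using namespace cv::ml;

template<typename T>
static Ptr<T> load_classifier_sketch( const std::string& filename_to_load )
{
    // StatModel::load reads the model back from XML/YAML (optionally .gz).
    Ptr<T> model = StatModel::load<T>( filename_to_load );
    if( model.empty() )
        printf( "Could not read the classifier %s\n", filename_to_load.c_str() );
    else
        printf( "The classifier %s is loaded.\n", filename_to_load.c_str() );
    return model;
}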
@@ -685,15 +518,15 @@ int main( int argc, char *argv[] )
         {
             method = 2;
         }
-        else if ( strcmp(argv[i], "-knearest") == 0)
+        else if( strcmp(argv[i], "-knearest") == 0 || strcmp(argv[i], "-knn") == 0 )
         {
             method = 3;
         }
-        else if ( strcmp(argv[i], "-nbayes") == 0)
+        else if( strcmp(argv[i], "-nbayes") == 0)
         {
             method = 4;
         }
-        else if ( strcmp(argv[i], "-svm") == 0)
+        else if( strcmp(argv[i], "-svm") == 0)
         {
             method = 5;
         }

@@ -1,322 +0,0 @@
-#include "opencv2/core/core_c.h"
-#include "opencv2/ml/ml.hpp"
-#include <stdio.h>
-
-static void help()
-{
-    printf("\nThis program demonstrated the use of OpenCV's decision tree function for learning and predicting data\n"
-           "Usage :\n"
-           "./mushroom <path to agaricus-lepiota.data>\n"
-           "\n"
-           "The sample demonstrates how to build a decision tree for classifying mushrooms.\n"
-           "It uses the sample base agaricus-lepiota.data from UCI Repository, here is the link:\n"
-           "\n"
-           "Newman, D.J. & Hettich, S. & Blake, C.L. & Merz, C.J. (1998).\n"
-           "UCI Repository of machine learning databases\n"
-           "[http://www.ics.uci.edu/~mlearn/MLRepository.html].\n"
-           "Irvine, CA: University of California, Department of Information and Computer Science.\n"
-           "\n"
-           "// loads the mushroom database, which is a text file, containing\n"
-           "// one training sample per row, all the input variables and the output variable are categorical,\n"
-           "// the values are encoded by characters.\n\n");
-}
-
-static int mushroom_read_database( const char* filename, CvMat** data, CvMat** missing, CvMat** responses )
-{
-    const int M = 1024;
-    FILE* f = fopen( filename, "rt" );
-    CvMemStorage* storage;
-    CvSeq* seq;
-    char buf[M+2], *ptr;
-    float* el_ptr;
-    CvSeqReader reader;
-    int i, j, var_count = 0;
-
-    if( !f )
-        return 0;
-
-    // read the first line and determine the number of variables
-    if( !fgets( buf, M, f ))
-    {
-        fclose(f);
-        return 0;
-    }
-
-    for( ptr = buf; *ptr != '\0'; ptr++ )
-        var_count += *ptr == ',';
-    assert( ptr - buf == (var_count+1)*2 );
-
-    // create temporary memory storage to store the whole database
-    el_ptr = new float[var_count+1];
-    storage = cvCreateMemStorage();
-    seq = cvCreateSeq( 0, sizeof(*seq), (var_count+1)*sizeof(float), storage );
-
-    for(;;)
-    {
-        for( i = 0; i <= var_count; i++ )
-        {
-            int c = buf[i*2];
-            el_ptr[i] = c == '?' ? -1.f : (float)c;
-        }
-        if( i != var_count+1 )
-            break;
-        cvSeqPush( seq, el_ptr );
-        if( !fgets( buf, M, f ) || !strchr( buf, ',' ) )
-            break;
-    }
-    fclose(f);
-
-    // allocate the output matrices and copy the base there
-    *data = cvCreateMat( seq->total, var_count, CV_32F );
-    *missing = cvCreateMat( seq->total, var_count, CV_8U );
-    *responses = cvCreateMat( seq->total, 1, CV_32F );
-
-    cvStartReadSeq( seq, &reader );
-
-    for( i = 0; i < seq->total; i++ )
-    {
-        const float* sdata = (float*)reader.ptr + 1;
-        float* ddata = data[0]->data.fl + var_count*i;
-        float* dr = responses[0]->data.fl + i;
-        uchar* dm = missing[0]->data.ptr + var_count*i;
-
-        for( j = 0; j < var_count; j++ )
-        {
-            ddata[j] = sdata[j];
-            dm[j] = sdata[j] < 0;
-        }
-        *dr = sdata[-1];
-        CV_NEXT_SEQ_ELEM( seq->elem_size, reader );
-    }
-
-    cvReleaseMemStorage( &storage );
-    delete [] el_ptr;
-    return 1;
-}
-
-
-static CvDTree* mushroom_create_dtree( const CvMat* data, const CvMat* missing,
-                                       const CvMat* responses, float p_weight )
-{
-    CvDTree* dtree;
-    CvMat* var_type;
-    int i, hr1 = 0, hr2 = 0, p_total = 0;
-    float priors[] = { 1, p_weight };
-
-    var_type = cvCreateMat( data->cols + 1, 1, CV_8U );
-    cvSet( var_type, cvScalarAll(CV_VAR_CATEGORICAL) ); // all the variables are categorical
-
-    dtree = new CvDTree;
-
-    dtree->train( data, CV_ROW_SAMPLE, responses, 0, 0, var_type, missing,
-                  CvDTreeParams( 8, // max depth
-                                 10, // min sample count
-                                 0, // regression accuracy: N/A here
-                                 true, // compute surrogate split, as we have missing data
-                                 15, // max number of categories (use sub-optimal algorithm for larger numbers)
-                                 10, // the number of cross-validation folds
-                                 true, // use 1SE rule => smaller tree
-                                 true, // throw away the pruned tree branches
-                                 priors // the array of priors, the bigger p_weight, the more attention
-                                        // to the poisonous mushrooms
-                                        // (a mushroom will be judjed to be poisonous with bigger chance)
-                                 ));
-
-    // compute hit-rate on the training database, demonstrates predict usage.
-    for( i = 0; i < data->rows; i++ )
-    {
-        CvMat sample, mask;
-        cvGetRow( data, &sample, i );
-        cvGetRow( missing, &mask, i );
-        double r = dtree->predict( &sample, &mask )->value;
-        int d = fabs(r - responses->data.fl[i]) >= FLT_EPSILON;
-        if( d )
-        {
-            if( r != 'p' )
-                hr1++;
-            else
-                hr2++;
-        }
-        p_total += responses->data.fl[i] == 'p';
-    }
-
-    printf( "Results on the training database:\n"
-            "\tPoisonous mushrooms mis-predicted: %d (%g%%)\n"
-            "\tFalse-alarms: %d (%g%%)\n", hr1, (double)hr1*100/p_total,
-            hr2, (double)hr2*100/(data->rows - p_total) );
-
-    cvReleaseMat( &var_type );
-
-    return dtree;
-}
-
-
-static const char* var_desc[] =
-{
-    "cap shape (bell=b,conical=c,convex=x,flat=f)",
-    "cap surface (fibrous=f,grooves=g,scaly=y,smooth=s)",
-    "cap color (brown=n,buff=b,cinnamon=c,gray=g,green=r,\n\tpink=p,purple=u,red=e,white=w,yellow=y)",
-    "bruises? (bruises=t,no=f)",
-    "odor (almond=a,anise=l,creosote=c,fishy=y,foul=f,\n\tmusty=m,none=n,pungent=p,spicy=s)",
-    "gill attachment (attached=a,descending=d,free=f,notched=n)",
-    "gill spacing (close=c,crowded=w,distant=d)",
-    "gill size (broad=b,narrow=n)",
-    "gill color (black=k,brown=n,buff=b,chocolate=h,gray=g,\n\tgreen=r,orange=o,pink=p,purple=u,red=e,white=w,yellow=y)",
-    "stalk shape (enlarging=e,tapering=t)",
-    "stalk root (bulbous=b,club=c,cup=u,equal=e,rhizomorphs=z,rooted=r)",
-    "stalk surface above ring (ibrous=f,scaly=y,silky=k,smooth=s)",
-    "stalk surface below ring (ibrous=f,scaly=y,silky=k,smooth=s)",
-    "stalk color above ring (brown=n,buff=b,cinnamon=c,gray=g,orange=o,\n\tpink=p,red=e,white=w,yellow=y)",
-    "stalk color below ring (brown=n,buff=b,cinnamon=c,gray=g,orange=o,\n\tpink=p,red=e,white=w,yellow=y)",
-    "veil type (partial=p,universal=u)",
-    "veil color (brown=n,orange=o,white=w,yellow=y)",
-    "ring number (none=n,one=o,two=t)",
-    "ring type (cobwebby=c,evanescent=e,flaring=f,large=l,\n\tnone=n,pendant=p,sheathing=s,zone=z)",
-    "spore print color (black=k,brown=n,buff=b,chocolate=h,green=r,\n\torange=o,purple=u,white=w,yellow=y)",
-    "population (abundant=a,clustered=c,numerous=n,\n\tscattered=s,several=v,solitary=y)",
-    "habitat (grasses=g,leaves=l,meadows=m,paths=p\n\turban=u,waste=w,woods=d)",
-    0
-};
-
-
-static void print_variable_importance( CvDTree* dtree )
-{
-    const CvMat* var_importance = dtree->get_var_importance();
-    int i;
-    char input[1000];
-
-    if( !var_importance )
-    {
-        printf( "Error: Variable importance can not be retrieved\n" );
-        return;
-    }
-
-    printf( "Print variable importance information? (y/n) " );
-    int values_read = scanf( "%1s", input );
-    CV_Assert(values_read == 1);
-
-    if( input[0] != 'y' && input[0] != 'Y' )
-        return;
-
-    for( i = 0; i < var_importance->cols*var_importance->rows; i++ )
-    {
-        double val = var_importance->data.db[i];
-        char buf[100];
-        int len = (int)(strchr( var_desc[i], '(' ) - var_desc[i] - 1);
-        strncpy( buf, var_desc[i], len );
-        buf[len] = '\0';
-        printf( "%s", buf );
-        printf( ": %g%%\n", val*100. );
-    }
-}
-
-static void interactive_classification( CvDTree* dtree )
-{
-    char input[1000];
-    const CvDTreeNode* root;
-    CvDTreeTrainData* data;
-
-    if( !dtree )
-        return;
-
-    root = dtree->get_root();
-    data = dtree->get_data();
-
-    for(;;)
-    {
-        const CvDTreeNode* node;
-
-        printf( "Start/Proceed with interactive mushroom classification (y/n): " );
-        int values_read = scanf( "%1s", input );
-        CV_Assert(values_read == 1);
-
-        if( input[0] != 'y' && input[0] != 'Y' )
-            break;
-        printf( "Enter 1-letter answers, '?' for missing/unknown value...\n" );
-
-        // custom version of predict
-        node = root;
-        for(;;)
-        {
-            CvDTreeSplit* split = node->split;
-            int dir = 0;
-
-            if( !node->left || node->Tn <= dtree->get_pruned_tree_idx() || !node->split )
-                break;
-
-            for( ; split != 0; )
-            {
-                int vi = split->var_idx, j;
-                int count = data->cat_count->data.i[vi];
-                const int* map = data->cat_map->data.i + data->cat_ofs->data.i[vi];
-
-                printf( "%s: ", var_desc[vi] );
-                values_read = scanf( "%1s", input );
-                CV_Assert(values_read == 1);
-
-                if( input[0] == '?' )
-                {
-                    split = split->next;
-                    continue;
-                }
-
-                // convert the input character to the normalized value of the variable
-                for( j = 0; j < count; j++ )
-                    if( map[j] == input[0] )
-                        break;
-                if( j < count )
-                {
-                    dir = (split->subset[j>>5] & (1 << (j&31))) ? -1 : 1;
-                    if( split->inversed )
-                        dir = -dir;
-                    break;
-                }
-                else
-                    printf( "Error: unrecognized value\n" );
-            }
-
-            if( !dir )
-            {
-                printf( "Impossible to classify the sample\n");
-                node = 0;
-                break;
-            }
-            node = dir < 0 ? node->left : node->right;
-        }
-
-        if( node )
-            printf( "Prediction result: the mushroom is %s\n",
-                    node->class_idx == 0 ? "EDIBLE" : "POISONOUS" );
-        printf( "\n-----------------------------\n" );
-    }
-}
-
-
-int main( int argc, char** argv )
-{
-    CvMat *data = 0, *missing = 0, *responses = 0;
-    CvDTree* dtree;
-    const char* base_path = argc >= 2 ? argv[1] : "agaricus-lepiota.data";
-
-    help();
-
-    if( !mushroom_read_database( base_path, &data, &missing, &responses ) )
-    {
-        printf( "\nUnable to load the training database\n\n");
-        help();
-        return -1;
-    }
-
-    dtree = mushroom_create_dtree( data, missing, responses,
-                                   10 // poisonous mushrooms will have 10x higher weight in the decision tree
-                                   );
-    cvReleaseMat( &data );
-    cvReleaseMat( &missing );
-    cvReleaseMat( &responses );
-
-    print_variable_importance( dtree );
-    interactive_classification( dtree );
-    delete dtree;
-
-    return 0;
-}
@@ -102,8 +102,7 @@ static void predict_and_paint(const Ptr<StatModel>& model, Mat& dst)
 static void find_decision_boundary_NBC()
 {
     // learn classifier
-    Ptr<NormalBayesClassifier> normalBayesClassifier = NormalBayesClassifier::create();
-    normalBayesClassifier->train(prepare_train_data());
+    Ptr<NormalBayesClassifier> normalBayesClassifier = StatModel::train<NormalBayesClassifier>(prepare_train_data(), NormalBayesClassifier::Params());

     predict_and_paint(normalBayesClassifier, imgDst);
 }
@@ -113,10 +112,7 @@ static void find_decision_boundary_NBC()
 #if _KNN_
 static void find_decision_boundary_KNN( int K )
 {
-    Ptr<KNearest> knn = KNearest::create(true);
-    knn->setDefaultK(K);
-    knn->train(prepare_train_data());
-
+    Ptr<KNearest> knn = StatModel::train<KNearest>(prepare_train_data(), KNearest::Params(K, true));
     predict_and_paint(knn, imgDst);
 }
 #endif
@@ -124,9 +120,7 @@ static void find_decision_boundary_KNN( int K )
 #if _SVM_
 static void find_decision_boundary_SVM( SVM::Params params )
 {
-    Ptr<SVM> svm = SVM::create(params);
-    svm->train(prepare_train_data());
-
+    Ptr<SVM> svm = StatModel::train<SVM>(prepare_train_data(), params);
     predict_and_paint(svm, imgDst);

     Mat sv = svm->getSupportVectors();
@@ -149,8 +143,7 @@ static void find_decision_boundary_DT()
     params.use1SERule = false;
     params.truncatePrunedTree = false;

-    Ptr<DTrees> dtree = DTrees::create(params);
-    dtree->train(prepare_train_data());
+    Ptr<DTrees> dtree = StatModel::train<DTrees>(prepare_train_data(), params);

     predict_and_paint(dtree, imgDst);
 }
@@ -167,8 +160,7 @@ static void find_decision_boundary_BT()
                       Mat() // priors
                       );

-    Ptr<Boost> boost = Boost::create(params);
-    boost->train(prepare_train_data());
+    Ptr<Boost> boost = StatModel::train<Boost>(prepare_train_data(), params);
     predict_and_paint(boost, imgDst);
 }

@@ -185,8 +177,7 @@ static void find_decision_boundary_GBT()
                        false // use_surrogates )
                        );

-    Ptr<GBTrees> gbtrees = GBTrees::create(params);
-    gbtrees->train(prepare_train_data());
+    Ptr<GBTrees> gbtrees = StatModel::train<GBTrees>(prepare_train_data(), params);
     predict_and_paint(gbtrees, imgDst);
 }
 #endif
@@ -205,8 +196,7 @@ static void find_decision_boundary_RF()
                        TermCriteria(TermCriteria::MAX_ITER, 5, 0) // max_num_of_trees_in_the_forest,
                        );

-    Ptr<RTrees> rtrees = RTrees::create(params);
-    rtrees->train(prepare_train_data());
+    Ptr<RTrees> rtrees = StatModel::train<RTrees>(prepare_train_data(), params);
     predict_and_paint(rtrees, imgDst);
 }

@@ -215,9 +205,8 @@ static void find_decision_boundary_RF()
 #if _ANN_
 static void find_decision_boundary_ANN( const Mat& layer_sizes )
 {
-    ANN_MLP::Params params(TermCriteria(TermCriteria::MAX_ITER+TermCriteria::EPS, 300, FLT_EPSILON),
+    ANN_MLP::Params params(layer_sizes, ANN_MLP::SIGMOID_SYM, 1, 1, TermCriteria(TermCriteria::MAX_ITER+TermCriteria::EPS, 300, FLT_EPSILON),
                            ANN_MLP::Params::BACKPROP, 0.001);
-    Ptr<ANN_MLP> ann = ANN_MLP::create(layer_sizes, params, ANN_MLP::SIGMOID_SYM, 1, 1 );

     Mat trainClasses = Mat::zeros( trainedPoints.size(), classColors.size(), CV_32FC1 );
     for( int i = 0; i < trainClasses.rows; i++ )
@@ -228,7 +217,7 @@ static void find_decision_boundary_ANN( const Mat& layer_sizes )
     Mat samples = prepare_train_samples(trainedPoints);
     Ptr<TrainData> tdata = TrainData::create(samples, ROW_SAMPLE, trainClasses);

-    ann->train(tdata);
+    Ptr<ANN_MLP> ann = StatModel::train<ANN_MLP>(tdata, params);
     predict_and_paint(ann, imgDst);
 }
 #endif
@@ -340,18 +329,15 @@ int main()
     img.copyTo( imgDst );
 #if _NBC_
     find_decision_boundary_NBC();
-    namedWindow( "NormalBayesClassifier", WINDOW_AUTOSIZE );
     imshow( "NormalBayesClassifier", imgDst );
 #endif
 #if _KNN_
     int K = 3;
     find_decision_boundary_KNN( K );
-    namedWindow( "kNN", WINDOW_AUTOSIZE );
     imshow( "kNN", imgDst );

     K = 15;
     find_decision_boundary_KNN( K );
-    namedWindow( "kNN2", WINDOW_AUTOSIZE );
     imshow( "kNN2", imgDst );
 #endif

@@ -369,36 +355,30 @@ int main()
     params.termCrit = TermCriteria(TermCriteria::MAX_ITER+TermCriteria::EPS, 1000, 0.01);

     find_decision_boundary_SVM( params );
-    namedWindow( "classificationSVM1", WINDOW_AUTOSIZE );
     imshow( "classificationSVM1", imgDst );

     params.C = 10;
     find_decision_boundary_SVM( params );
-    namedWindow( "classificationSVM2", WINDOW_AUTOSIZE );
     imshow( "classificationSVM2", imgDst );
 #endif

 #if _DT_
     find_decision_boundary_DT();
-    namedWindow( "DT", WINDOW_AUTOSIZE );
     imshow( "DT", imgDst );
 #endif

 #if _BT_
     find_decision_boundary_BT();
-    namedWindow( "BT", WINDOW_AUTOSIZE );
     imshow( "BT", imgDst);
 #endif

 #if _GBT_
     find_decision_boundary_GBT();
-    namedWindow( "GBT", WINDOW_AUTOSIZE );
     imshow( "GBT", imgDst);
 #endif

 #if _RF_
     find_decision_boundary_RF();
-    namedWindow( "RF", WINDOW_AUTOSIZE );
     imshow( "RF", imgDst);
 #endif

@@ -408,13 +388,11 @@ int main()
     layer_sizes1.at<int>(1) = 5;
     layer_sizes1.at<int>(2) = classColors.size();
     find_decision_boundary_ANN( layer_sizes1 );
-    namedWindow( "ANN", WINDOW_AUTOSIZE );
     imshow( "ANN", imgDst );
 #endif

 #if _EM_
     find_decision_boundary_EM();
-    namedWindow( "EM", WINDOW_AUTOSIZE );
     imshow( "EM", imgDst );
 #endif
 }
@@ -8,9 +8,10 @@
 #include <time.h>

 using namespace cv;
+using namespace cv::ml;
 using namespace std;

-void get_svm_detector(const SVM& svm, vector< float > & hog_detector );
+void get_svm_detector(const Ptr<SVM>& svm, vector< float > & hog_detector );
 void convert_to_ml(const std::vector< cv::Mat > & train_samples, cv::Mat& trainData );
 void load_images( const string & prefix, const string & filename, vector< Mat > & img_lst );
 void sample_neg( const vector< Mat > & full_neg_lst, vector< Mat > & neg_lst, const Size & size );
@@ -20,49 +21,24 @@ void train_svm( const vector< Mat > & gradient_lst, const vector< int > & labels
 void draw_locations( Mat & img, const vector< Rect > & locations, const Scalar & color );
 void test_it( const Size & size );

-void get_svm_detector(const SVM& svm, vector< float > & hog_detector )
+void get_svm_detector(const Ptr<SVM>& svm, vector< float > & hog_detector )
 {
-    // get the number of variables
-    const int var_all = svm.get_var_count();
-    // get the number of support vectors
-    const int sv_total = svm.get_support_vector_count();
-    // get the decision function
-    const CvSVMDecisionFunc* decision_func = svm.get_decision_function();
-    // get the support vectors
-    const float** sv = new const float*[ sv_total ];
-    for( int i = 0 ; i < sv_total ; ++i )
-        sv[ i ] = svm.get_support_vector(i);
+    Mat sv = svm->getSupportVectors();
+    const int sv_total = sv.rows;
+    // get the decision function
+    Mat alpha, svidx;
+    double rho = svm->getDecisionFunction(0, alpha, svidx);

-    CV_Assert( var_all > 0 &&
-               sv_total > 0 &&
-               decision_func != 0 &&
-               decision_func->alpha != 0 &&
-               decision_func->sv_count == sv_total );
+    CV_Assert( alpha.total() == 1 && svidx.total() == 1 && sv_total == 1 );
+    CV_Assert( (alpha.type() == CV_64F && alpha.at<double>(0) == 1.) ||
+               (alpha.type() == CV_32F && alpha.at<float>(0) == 1.f) );
+    CV_Assert( sv.type() == CV_32F );
+    hog_detector.clear();

-    float svi = 0.f;
-
-    hog_detector.clear(); //clear stuff in vector.
-    hog_detector.reserve( var_all + 1 ); //reserve place for memory efficiency.
-
-    /**
-     * hog_detector^i = \sum_j support_vector_j^i * \alpha_j
-     * hog_detector^dim = -\rho
-     */
-    for( int i = 0 ; i < var_all ; ++i )
-    {
-        svi = 0.f;
-        for( int j = 0 ; j < sv_total ; ++j )
-        {
-            if( decision_func->sv_index != NULL ) // sometime the sv_index isn't store on YML/XML.
-                svi += (float)( sv[decision_func->sv_index[j]][i] * decision_func->alpha[ j ] );
-            else
-                svi += (float)( sv[j][i] * decision_func->alpha[ j ] );
-        }
-        hog_detector.push_back( svi );
-    }
-    hog_detector.push_back( (float)-decision_func->rho );
-
-    delete[] sv;
+    hog_detector.resize(sv.cols + 1);
+    memcpy(&hog_detector[0], sv.data, sv.cols*sizeof(hog_detector[0]));
+    hog_detector[sv.cols] = (float)-rho;
 }

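The replacement body above leans on the asserted invariant sv_total == 1: for a linear SVM the new ml module hands back a single compressed support vector (already the alpha-weighted sum), so the detector is just that row plus -rho as the bias term. A sketch of the same idea as a free function (linear_svm_to_detector is an illustrative name):

#include "opencv2/ml/ml.hpp"
#include <vector>
using namespace cv;
using namespace cv::ml;

// Sketch: extract (w, b) from a *linear* cv::ml SVM for use with HOGDescriptor.
static void linear_svm_to_detector( const Ptr<SVM>& svm, std::vector<float>& det )
{
    Mat sv = svm->getSupportVectors();               // 1 x dim for linear SVMs
    Mat alpha, svidx;
    double rho = svm->getDecisionFunction( 0, alpha, svidx );
    CV_Assert( sv.rows == 1 && sv.type() == CV_32F );
    det.assign( sv.ptr<float>(), sv.ptr<float>() + sv.cols );
    det.push_back( (float)-rho );                    // bias goes last
}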
@@ -263,7 +239,7 @@ Mat get_hogdescriptor_visu(const Mat& color_origImg, vector<float>& descriptorVa
             int mx = drawX + cellSize/2;
             int my = drawY + cellSize/2;

-            rectangle(visu, Point((int)(drawX*zoomFac), (int)(drawY*zoomFac)), Point((int)((drawX+cellSize)*zoomFac), (int)((drawY+cellSize)*zoomFac)), CV_RGB(100,100,100), 1);
+            rectangle(visu, Point((int)(drawX*zoomFac), (int)(drawY*zoomFac)), Point((int)((drawX+cellSize)*zoomFac), (int)((drawY+cellSize)*zoomFac)), Scalar(100,100,100), 1);

             // draw in each cell all 9 gradient strengths
             for (int bin=0; bin<gradientBinSize; bin++)
@@ -288,7 +264,7 @@ Mat get_hogdescriptor_visu(const Mat& color_origImg, vector<float>& descriptorVa
                 float y2 = my + dirVecY * currentGradStrength * maxVecLen * scale;

                 // draw gradient visualization
-                line(visu, Point((int)(x1*zoomFac),(int)(y1*zoomFac)), Point((int)(x2*zoomFac),(int)(y2*zoomFac)), CV_RGB(0,255,0), 1);
+                line(visu, Point((int)(x1*zoomFac),(int)(y1*zoomFac)), Point((int)(x2*zoomFac),(int)(y2*zoomFac)), Scalar(0,255,0), 1);

             } // for (all bins)

@@ -337,28 +313,26 @@ void compute_hog( const vector< Mat > & img_lst, vector< Mat > & gradient_lst, c

 void train_svm( const vector< Mat > & gradient_lst, const vector< int > & labels )
 {
-    SVM svm;
-
     /* Default values to train SVM */
-    SVMParams params;
+    SVM::Params params;
     params.coef0 = 0.0;
     params.degree = 3;
-    params.term_crit.epsilon = 1e-3;
+    params.termCrit.epsilon = 1e-3;
     params.gamma = 0;
-    params.kernel_type = SVM::LINEAR;
+    params.kernelType = SVM::LINEAR;
     params.nu = 0.5;
     params.p = 0.1; // for EPSILON_SVR, epsilon in loss function?
     params.C = 0.01; // From paper, soft classifier
-    params.svm_type = SVM::EPS_SVR; // C_SVC; // EPSILON_SVR; // may be also NU_SVR; // do regression task
+    params.svmType = SVM::EPS_SVR; // C_SVC; // EPSILON_SVR; // may be also NU_SVR; // do regression task

     Mat train_data;
     convert_to_ml( gradient_lst, train_data );

     clog << "Start training...";
-    svm.train( train_data, Mat( labels ), Mat(), Mat(), params );
+    Ptr<SVM> svm = StatModel::train<SVM>(train_data, ROW_SAMPLE, Mat(labels), params);
     clog << "...[done]" << endl;

-    svm.save( "my_people_detector.yml" );
+    svm->save( "my_people_detector.yml" );
 }

 void draw_locations( Mat & img, const vector< Rect > & locations, const Scalar & color )
@@ -380,7 +354,7 @@ void test_it( const Size & size )
     Scalar reference( 0, 255, 0 );
     Scalar trained( 0, 0, 255 );
     Mat img, draw;
-    SVM svm;
+    Ptr<SVM> svm;
     HOGDescriptor hog;
     HOGDescriptor my_hog;
     my_hog.winSize = size;
@@ -388,7 +362,7 @@ void test_it( const Size & size )
     vector< Rect > locations;

     // Load the trained SVM.
-    svm.load( "my_people_detector.yml" );
+    svm = StatModel::load<SVM>( "my_people_detector.yml" );
     // Set the trained svm to my_hog
     vector< float > hog_detector;
     get_svm_detector( svm, hog_detector );

@@ -1,63 +1,35 @@
 #include "opencv2/ml/ml.hpp"
-#include "opencv2/core/core_c.h"
+#include "opencv2/core/core.hpp"
+#include "opencv2/core/utility.hpp"
 #include <stdio.h>
 #include <string>
 #include <map>

 using namespace cv;
+using namespace cv::ml;

 static void help()
 {
     printf(
-        "\nThis sample demonstrates how to use different decision trees and forests including boosting and random trees:\n"
-        "CvDTree dtree;\n"
-        "CvBoost boost;\n"
-        "CvRTrees rtrees;\n"
-        "CvERTrees ertrees;\n"
-        "CvGBTrees gbtrees;\n"
-        "Call:\n\t./tree_engine [-r <response_column>] [-c] <csv filename>\n"
+        "\nThis sample demonstrates how to use different decision trees and forests including boosting and random trees.\n"
+        "Usage:\n\t./tree_engine [-r <response_column>] [-ts type_spec] <csv filename>\n"
         "where -r <response_column> specified the 0-based index of the response (0 by default)\n"
-        "-c specifies that the response is categorical (it's ordered by default) and\n"
+        "-ts specifies the var type spec in the form ord[n1,n2-n3,n4-n5,...]cat[m1-m2,m3,m4-m5,...]\n"
        "<csv filename> is the name of training data file in comma-separated value format\n\n");
 }

-static int count_classes(CvMLData& data)
+static void train_and_print_errs(Ptr<StatModel> model, const Ptr<TrainData>& data)
 {
-    cv::Mat r = cv::cvarrToMat(data.get_responses());
-    std::map<int, int> rmap;
-    int i, n = (int)r.total();
-    for( i = 0; i < n; i++ )
+    bool ok = model->train(data);
+    if( !ok )
     {
-        float val = r.at<float>(i);
-        int ival = cvRound(val);
-        if( ival != val )
-            return -1;
-        rmap[ival] = 1;
+        printf("Training failed\n");
     }
-    return (int)rmap.size();
-}
-
-static void print_result(float train_err, float test_err, const CvMat* _var_imp)
-{
-    printf( "train error %f\n", train_err );
-    printf( "test error %f\n\n", test_err );
-
-    if (_var_imp)
+    else
     {
-        cv::Mat var_imp = cv::cvarrToMat(_var_imp), sorted_idx;
-        cv::sortIdx(var_imp, sorted_idx, CV_SORT_EVERY_ROW + CV_SORT_DESCENDING);
-
-        printf( "variable importance:\n" );
-        int i, n = (int)var_imp.total();
-        int type = var_imp.type();
-        CV_Assert(type == CV_32F || type == CV_64F);
-
-        for( i = 0; i < n; i++)
-        {
-            int k = sorted_idx.at<int>(i);
-            printf( "%d\t%f\n", k, type == CV_32F ? var_imp.at<float>(k) : var_imp.at<double>(k));
-        }
+        printf( "train error: %f\n", model->calcError(data, false, noArray()) );
+        printf( "test error: %f\n\n", model->calcError(data, true, noArray()) );
     }
-    printf("\n");
 }

 int main(int argc, char** argv)
@@ -69,14 +41,14 @@ int main(int argc, char** argv)
     }
     const char* filename = 0;
     int response_idx = 0;
-    bool categorical_response = false;
+    std::string typespec;

     for(int i = 1; i < argc; i++)
     {
         if(strcmp(argv[i], "-r") == 0)
             sscanf(argv[++i], "%d", &response_idx);
-        else if(strcmp(argv[i], "-c") == 0)
-            categorical_response = true;
+        else if(strcmp(argv[i], "-ts") == 0)
+            typespec = argv[++i];
         else if(argv[i][0] != '-' )
             filename = argv[i];
         else
@@ -88,52 +60,32 @@ int main(int argc, char** argv)
     }

     printf("\nReading in %s...\n\n",filename);
-    CvDTree dtree;
-    CvBoost boost;
-    CvRTrees rtrees;
-    CvERTrees ertrees;
-    CvGBTrees gbtrees;
+    const double train_test_split_ratio = 0.5;

-    CvMLData data;
+    Ptr<TrainData> data = TrainData::loadFromCSV(filename, 0, response_idx, response_idx+1, typespec);

-    CvTrainTestSplit spl( 0.5f );
-
-    if ( data.read_csv( filename ) == 0)
+    if( data.empty() )
     {
-        data.set_response_idx( response_idx );
-        if(categorical_response)
-            data.change_var_type( response_idx, CV_VAR_CATEGORICAL );
-        data.set_train_test_split( &spl );
-
-        printf("======DTREE=====\n");
-        dtree.train( &data, CvDTreeParams( 10, 2, 0, false, 16, 0, false, false, 0 ));
-        print_result( dtree.calc_error( &data, CV_TRAIN_ERROR), dtree.calc_error( &data, CV_TEST_ERROR ), dtree.get_var_importance() );
-
-        if( categorical_response && count_classes(data) == 2 )
-        {
-            printf("======BOOST=====\n");
-            boost.train( &data, CvBoostParams(CvBoost::DISCRETE, 100, 0.95, 2, false, 0));
-            print_result( boost.calc_error( &data, CV_TRAIN_ERROR ), boost.calc_error( &data, CV_TEST_ERROR ), 0 ); //doesn't compute importance
-        }
-
-        printf("======RTREES=====\n");
-        rtrees.train( &data, CvRTParams( 10, 2, 0, false, 16, 0, true, 0, 100, 0, CV_TERMCRIT_ITER ));
-        print_result( rtrees.calc_error( &data, CV_TRAIN_ERROR), rtrees.calc_error( &data, CV_TEST_ERROR ), rtrees.get_var_importance() );
-
-        printf("======ERTREES=====\n");
-        ertrees.train( &data, CvRTParams( 18, 2, 0, false, 16, 0, true, 0, 100, 0, CV_TERMCRIT_ITER ));
-        print_result( ertrees.calc_error( &data, CV_TRAIN_ERROR), ertrees.calc_error( &data, CV_TEST_ERROR ), ertrees.get_var_importance() );
-
-        printf("======GBTREES=====\n");
-        if (categorical_response)
-            gbtrees.train( &data, CvGBTreesParams(CvGBTrees::DEVIANCE_LOSS, 100, 0.1f, 0.8f, 5, false));
-        else
-            gbtrees.train( &data, CvGBTreesParams(CvGBTrees::SQUARED_LOSS, 100, 0.1f, 0.8f, 5, false));
-        print_result( gbtrees.calc_error( &data, CV_TRAIN_ERROR), gbtrees.calc_error( &data, CV_TEST_ERROR ), 0 ); //doesn't compute importance
+        printf("ERROR: File %s can not be read\n", filename);
+        return 0;
     }
-    else
-        printf("File can not be read");

+    data->setTrainTestSplitRatio(train_test_split_ratio);
+
+    printf("======DTREE=====\n");
+    Ptr<DTrees> dtree = DTrees::create(DTrees::Params( 10, 2, 0, false, 16, 0, false, false, Mat() ));
+    train_and_print_errs(dtree, data);
+
+    if( (int)data->getClassLabels().total() <= 2 ) // regression or 2-class classification problem
+    {
+        printf("======BOOST=====\n");
+        Ptr<Boost> boost = Boost::create(Boost::Params(Boost::GENTLE, 100, 0.95, 2, false, Mat()));
+        train_and_print_errs(boost, data);
+    }
+
+    printf("======RTREES=====\n");
+    Ptr<RTrees> rtrees = RTrees::create(RTrees::Params(10, 2, 0, false, 16, Mat(), false, 0, TermCriteria(TermCriteria::MAX_ITER, 100, 0)));
+    train_and_print_errs(rtrees, data);
+
     return 0;
 }

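A compact sketch of the new CSV-driven setup above, assuming TrainData::loadFromCSV's arguments are, in order: filename, number of header lines to skip, first and one-past-last response columns, and the variable type spec (load_and_split_sketch is an illustrative name):

#include "opencv2/ml/ml.hpp"
#include <string>
using namespace cv;
using namespace cv::ml;

static Ptr<TrainData> load_and_split_sketch( const char* filename, int response_idx,
                                             const std::string& typespec )
{
    Ptr<TrainData> data = TrainData::loadFromCSV( filename, 0, response_idx,
                                                  response_idx+1, typespec );
    if( !data.empty() )
        data->setTrainTestSplitRatio( 0.5 ); // 50/50 train/test split
    return data;
}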
@@ -4,29 +4,29 @@
 #include <opencv2/ml/ml.hpp>

 using namespace cv;
+using namespace cv::ml;

-int main()
+int main(int, char**)
 {
     // Data for visual representation
     int width = 512, height = 512;
     Mat image = Mat::zeros(height, width, CV_8UC3);

     // Set up training data
-    float labels[4] = {1.0, -1.0, -1.0, -1.0};
-    Mat labelsMat(4, 1, CV_32FC1, labels);
+    int labels[4] = {1, -1, -1, -1};
+    Mat labelsMat(4, 1, CV_32SC1, labels);

     float trainingData[4][2] = { {501, 10}, {255, 10}, {501, 255}, {10, 501} };
     Mat trainingDataMat(4, 2, CV_32FC1, trainingData);

     // Set up SVM's parameters
-    CvSVMParams params;
-    params.svm_type = CvSVM::C_SVC;
-    params.kernel_type = CvSVM::LINEAR;
-    params.term_crit = cvTermCriteria(CV_TERMCRIT_ITER, 100, 1e-6);
+    SVM::Params params;
+    params.svmType = SVM::C_SVC;
+    params.kernelType = SVM::LINEAR;
+    params.termCrit = TermCriteria(TermCriteria::MAX_ITER, 100, 1e-6);

     // Train the SVM
-    CvSVM SVM;
-    SVM.train(trainingDataMat, labelsMat, Mat(), Mat(), params);
+    Ptr<SVM> svm = StatModel::train<SVM>(trainingDataMat, ROW_SAMPLE, labelsMat, params);

     Vec3b green(0,255,0), blue (255,0,0);
     // Show the decision regions given by the SVM
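Why the label type changed above: the new ml module distinguishes classification from regression partly by the response type, and integer (CV_32S) responses appear to be read as categorical class labels by TrainData, while float responses are treated as ordered. A tiny sketch of the new-style setup (the function name is illustrative):

#include "opencv2/core/core.hpp"
using namespace cv;

static Mat classificationLabelsSketch()
{
    static int labels[4] = { 1, -1, -1, -1 };
    return Mat( 4, 1, CV_32SC1, labels ); // was float/CV_32FC1 in the 2.4 sample
}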
@@ -34,30 +34,30 @@ int main(int, char**)
         for (int j = 0; j < image.cols; ++j)
         {
             Mat sampleMat = (Mat_<float>(1,2) << j,i);
-            float response = SVM.predict(sampleMat);
+            float response = svm->predict(sampleMat);

             if (response == 1)
                 image.at<Vec3b>(i,j) = green;
             else if (response == -1)
                 image.at<Vec3b>(i,j) = blue;
         }

     // Show the training data
     int thickness = -1;
     int lineType = 8;
-    circle( image, Point(501, 10), 5, Scalar( 0, 0, 0), thickness, lineType);
-    circle( image, Point(255, 10), 5, Scalar(255, 255, 255), thickness, lineType);
-    circle( image, Point(501, 255), 5, Scalar(255, 255, 255), thickness, lineType);
-    circle( image, Point( 10, 501), 5, Scalar(255, 255, 255), thickness, lineType);
+    circle( image, Point(501, 10), 5, Scalar( 0, 0, 0), thickness, lineType );
+    circle( image, Point(255, 10), 5, Scalar(255, 255, 255), thickness, lineType );
+    circle( image, Point(501, 255), 5, Scalar(255, 255, 255), thickness, lineType );
+    circle( image, Point( 10, 501), 5, Scalar(255, 255, 255), thickness, lineType );

     // Show support vectors
     thickness = 2;
     lineType = 8;
-    int c = SVM.get_support_vector_count();
+    Mat sv = svm->getSupportVectors();

-    for (int i = 0; i < c; ++i)
+    for (int i = 0; i < sv.rows; ++i)
     {
-        const float* v = SVM.get_support_vector(i);
+        const float* v = sv.ptr<float>(i);
         circle( image, Point( (int) v[0], (int) v[1]), 6, Scalar(128, 128, 128), thickness, lineType);
     }

@@ -8,6 +8,7 @@
 #define FRAC_LINEAR_SEP 0.9f // Fraction of samples which compose the linear separable part

 using namespace cv;
+using namespace cv::ml;
 using namespace std;

 static void help()
@@ -30,7 +31,7 @@ int main()

     //--------------------- 1. Set up training data randomly ---------------------------------------
     Mat trainData(2*NTRAINING_SAMPLES, 2, CV_32FC1);
-    Mat labels   (2*NTRAINING_SAMPLES, 1, CV_32FC1);
+    Mat labels   (2*NTRAINING_SAMPLES, 1, CV_32SC1);

     RNG rng(100); // Random value generation class

@@ -71,16 +72,15 @@ int main()
     labels.rowRange(NTRAINING_SAMPLES, 2*NTRAINING_SAMPLES).setTo(2);  // Class 2

     //------------------------ 2. Set up the support vector machines parameters --------------------
-    CvSVMParams params;
-    params.svm_type    = SVM::C_SVC;
+    SVM::Params params;
+    params.svmType     = SVM::C_SVC;
     params.C           = 0.1;
-    params.kernel_type = SVM::LINEAR;
-    params.term_crit   = TermCriteria(CV_TERMCRIT_ITER, (int)1e7, 1e-6);
+    params.kernelType  = SVM::LINEAR;
+    params.termCrit    = TermCriteria(TermCriteria::MAX_ITER, (int)1e7, 1e-6);

     //------------------------ 3. Train the svm ----------------------------------------------------
     cout << "Starting training process" << endl;
-    CvSVM svm;
-    svm.train(trainData, labels, Mat(), Mat(), params);
+    Ptr<SVM> svm = StatModel::train<SVM>(trainData, ROW_SAMPLE, labels, params);
     cout << "Finished training process" << endl;

     //------------------------ 4. Show the decision regions ----------------------------------------
@@ -89,7 +89,7 @@ int main()
         for (int j = 0; j < I.cols; ++j)
         {
             Mat sampleMat = (Mat_<float>(1,2) << i, j);
-            float response = svm.predict(sampleMat);
+            float response = svm->predict(sampleMat);

             if (response == 1) I.at<Vec3b>(j, i) = green;
             else if (response == 2) I.at<Vec3b>(j, i) = blue;
@@ -117,11 +117,11 @@ int main()
     //------------------------- 6. Show support vectors --------------------------------------------
     thick = 2;
     lineType = 8;
-    int x = svm.get_support_vector_count();
+    Mat sv = svm->getSupportVectors();

-    for (int i = 0; i < x; ++i)
+    for (int i = 0; i < sv.rows; ++i)
     {
-        const float* v = svm.get_support_vector(i);
+        const float* v = sv.ptr<float>(i);
         circle( I, Point( (int) v[0], (int) v[1]), 6, Scalar(128, 128, 128), thick, lineType);
     }
