Set stricter warning rules for gcc

This commit is contained in:
Andrey Kamaev
2012-06-07 17:21:29 +00:00
parent 0395f7c63f
commit 49a1ba6038
241 changed files with 9054 additions and 8947 deletions

View File

@@ -1,3 +1,6 @@
#include "opencv2/core/core.hpp"
#include "opencv2/core/internal.hpp"
#include "HOGfeatures.h"
#include "cascadeclassifier.h"
@@ -54,7 +57,7 @@ void CvHOGEvaluator::writeFeatures( FileStorage &fs, const Mat& featureMap ) con
features[featIdx].write( fs, componentIdx );
fs << "}";
}
fs << "]";
fs << "]";
}
void CvHOGEvaluator::generateFeatures()
@@ -85,11 +88,11 @@ void CvHOGEvaluator::generateFeatures()
}
}
w = 4*t;
h = 2*t;
h = 2*t;
for (x = 0; x <= winSize.width - w; x += blockStep.width)
{
for (y = 0; y <= winSize.height - h; y += blockStep.height)
{
{
features.push_back(Feature(offset, x, y, 2*t, t));
}
}
@@ -136,7 +139,7 @@ void CvHOGEvaluator::Feature::write(FileStorage &fs) const
// int cellIdx = featComponent / N_BINS;
// int binIdx = featComponent % N_BINS;
//
// fs << CC_RECTS << "[:" << rect[cellIdx].x << rect[cellIdx].y <<
// fs << CC_RECTS << "[:" << rect[cellIdx].x << rect[cellIdx].y <<
// rect[cellIdx].width << rect[cellIdx].height << binIdx << "]";
//}
@@ -144,7 +147,7 @@ void CvHOGEvaluator::Feature::write(FileStorage &fs) const
// All blocks are necessary for block normalization.
// Writes a single HOG feature component: the block rectangle followed by the
// component index, as a compact YAML/XML sequence under the CC_RECT key.
void CvHOGEvaluator::Feature::write(FileStorage &fs, int featComponentIdx) const
{
    fs << CC_RECT << "[:" << rect[0].x << rect[0].y <<
        rect[0].width << rect[0].height << featComponentIdx << "]";
}
@@ -228,7 +231,7 @@ void CvHOGEvaluator::integralHistogram(const Mat &img, vector<Mat> &histogram, M
memset( histBuf, 0, histSize.width * sizeof(histBuf[0]) );
histBuf += histStep + 1;
for( y = 0; y < qangle.rows; y++ )
{
{
histBuf[-1] = 0.f;
float strSum = 0.f;
for( x = 0; x < qangle.cols; x++ )

View File

@@ -1,3 +1,6 @@
#include "opencv2/core/core.hpp"
#include "opencv2/core/internal.hpp"
#include "boost.h"
#include "cascadeclassifier.h"
#include <queue>
@@ -139,7 +142,7 @@ static CvMat* cvPreprocessIndexArray( const CvMat* idx_arr, int data_arr_size, b
//----------------------------- CascadeBoostParams -------------------------------------------------
// Default boosting parameters for cascade training: gentle AdaBoost,
// 99.5% minimal per-stage hit rate, 50% maximal per-stage false alarm.
CvCascadeBoostParams::CvCascadeBoostParams() : minHitRate( 0.995F), maxFalseAlarm( 0.5F )
{
    boost_type = CvBoost::GENTLE;
    use_surrogates = use_1se_rule = truncate_pruned_tree = false;
}
@@ -157,7 +160,7 @@ CvCascadeBoostParams::CvCascadeBoostParams( int _boostType,
void CvCascadeBoostParams::write( FileStorage &fs ) const
{
String boostTypeStr = boost_type == CvBoost::DISCRETE ? CC_DISCRETE_BOOST :
String boostTypeStr = boost_type == CvBoost::DISCRETE ? CC_DISCRETE_BOOST :
boost_type == CvBoost::REAL ? CC_REAL_BOOST :
boost_type == CvBoost::LOGIT ? CC_LOGIT_BOOST :
boost_type == CvBoost::GENTLE ? CC_GENTLE_BOOST : String();
@@ -197,7 +200,7 @@ bool CvCascadeBoostParams::read( const FileNode &node )
void CvCascadeBoostParams::printDefaults() const
{
cout << "--boostParams--" << endl;
cout << " [-bt <{" << CC_DISCRETE_BOOST << ", "
cout << " [-bt <{" << CC_DISCRETE_BOOST << ", "
<< CC_REAL_BOOST << ", "
<< CC_LOGIT_BOOST ", "
<< CC_GENTLE_BOOST << "(default)}>]" << endl;
@@ -210,7 +213,7 @@ void CvCascadeBoostParams::printDefaults() const
void CvCascadeBoostParams::printAttrs() const
{
String boostTypeStr = boost_type == CvBoost::DISCRETE ? CC_DISCRETE_BOOST :
String boostTypeStr = boost_type == CvBoost::DISCRETE ? CC_DISCRETE_BOOST :
boost_type == CvBoost::REAL ? CC_REAL_BOOST :
boost_type == CvBoost::LOGIT ? CC_LOGIT_BOOST :
boost_type == CvBoost::GENTLE ? CC_GENTLE_BOOST : String();
@@ -259,7 +262,7 @@ bool CvCascadeBoostParams::scanAttr( const String prmName, const String val)
else
res = false;
return res;
return res;
}
CvDTreeNode* CvCascadeBoostTrainData::subsample_data( const CvMat* _subsample_idx )
@@ -440,7 +443,7 @@ CvCascadeBoostTrainData::CvCascadeBoostTrainData( const CvFeatureEvaluator* _fea
set_params( _params );
max_c_count = MAX( 2, featureEvaluator->getMaxCatCount() );
var_type = cvCreateMat( 1, var_count + 2, CV_32SC1 );
if ( featureEvaluator->getMaxCatCount() > 0 )
if ( featureEvaluator->getMaxCatCount() > 0 )
{
numPrecalcIdx = 0;
cat_var_count = var_count;
@@ -448,7 +451,7 @@ CvCascadeBoostTrainData::CvCascadeBoostTrainData( const CvFeatureEvaluator* _fea
for( int vi = 0; vi < var_count; vi++ )
{
var_type->data.i[vi] = vi;
}
}
}
else
{
@@ -457,8 +460,8 @@ CvCascadeBoostTrainData::CvCascadeBoostTrainData( const CvFeatureEvaluator* _fea
for( int vi = 1; vi <= var_count; vi++ )
{
var_type->data.i[vi-1] = -vi;
}
}
}
}
var_type->data.i[var_count] = cat_var_count;
var_type->data.i[var_count+1] = cat_var_count+1;
@@ -467,7 +470,7 @@ CvCascadeBoostTrainData::CvCascadeBoostTrainData( const CvFeatureEvaluator* _fea
treeBlockSize = MAX(treeBlockSize + BlockSizeDelta, MinBlockSize);
tree_storage = cvCreateMemStorage( treeBlockSize );
node_heap = cvCreateSet( 0, sizeof(node_heap[0]), sizeof(CvDTreeNode), tree_storage );
split_heap = cvCreateSet( 0, sizeof(split_heap[0]), maxSplitSize, tree_storage );
split_heap = cvCreateSet( 0, sizeof(split_heap[0]), maxSplitSize, tree_storage );
}
CvCascadeBoostTrainData::CvCascadeBoostTrainData( const CvFeatureEvaluator* _featureEvaluator,
@@ -477,15 +480,15 @@ CvCascadeBoostTrainData::CvCascadeBoostTrainData( const CvFeatureEvaluator* _fea
{
setData( _featureEvaluator, _numSamples, _precalcValBufSize, _precalcIdxBufSize, _params );
}
void CvCascadeBoostTrainData::setData( const CvFeatureEvaluator* _featureEvaluator,
int _numSamples,
int _precalcValBufSize, int _precalcIdxBufSize,
const CvDTreeParams& _params )
{
const CvDTreeParams& _params )
{
int* idst = 0;
unsigned short* udst = 0;
clear();
shared = true;
have_labels = true;
@@ -503,16 +506,16 @@ void CvCascadeBoostTrainData::setData( const CvFeatureEvaluator* _featureEvaluat
_resp = featureEvaluator->getCls();
responses = &_resp;
// TODO: check responses: elements must be 0 or 1
if( _precalcValBufSize < 0 || _precalcIdxBufSize < 0)
if( _precalcValBufSize < 0 || _precalcIdxBufSize < 0)
CV_Error( CV_StsOutOfRange, "_numPrecalcVal and _numPrecalcIdx must be positive or 0" );
var_count = var_all = featureEvaluator->getNumFeatures() * featureEvaluator->getFeatureSize();
var_count = var_all = featureEvaluator->getNumFeatures() * featureEvaluator->getFeatureSize();
sample_count = _numSamples;
is_buf_16u = false;
if (sample_count < 65536)
is_buf_16u = true;
is_buf_16u = false;
if (sample_count < 65536)
is_buf_16u = true;
numPrecalcVal = min( cvRound((double)_precalcValBufSize*1048576. / (sizeof(float)*sample_count)), var_count );
numPrecalcIdx = min( cvRound((double)_precalcIdxBufSize*1048576. /
@@ -522,8 +525,8 @@ void CvCascadeBoostTrainData::setData( const CvFeatureEvaluator* _featureEvaluat
valCache.create( numPrecalcVal, sample_count, CV_32FC1 );
var_type = cvCreateMat( 1, var_count + 2, CV_32SC1 );
if ( featureEvaluator->getMaxCatCount() > 0 )
if ( featureEvaluator->getMaxCatCount() > 0 )
{
numPrecalcIdx = 0;
cat_var_count = var_count;
@@ -531,7 +534,7 @@ void CvCascadeBoostTrainData::setData( const CvFeatureEvaluator* _featureEvaluat
for( int vi = 0; vi < var_count; vi++ )
{
var_type->data.i[vi] = vi;
}
}
}
else
{
@@ -540,14 +543,14 @@ void CvCascadeBoostTrainData::setData( const CvFeatureEvaluator* _featureEvaluat
for( int vi = 1; vi <= var_count; vi++ )
{
var_type->data.i[vi-1] = -vi;
}
}
}
var_type->data.i[var_count] = cat_var_count;
var_type->data.i[var_count+1] = cat_var_count+1;
work_var_count = ( cat_var_count ? 0 : numPrecalcIdx ) + 1/*cv_lables*/;
buf_size = (work_var_count + 1) * sample_count/*sample_indices*/;
buf_count = 2;
if ( is_buf_16u )
buf = cvCreateMat( buf_count, buf_size, CV_16UC1 );
else
@@ -556,7 +559,7 @@ void CvCascadeBoostTrainData::setData( const CvFeatureEvaluator* _featureEvaluat
cat_count = cvCreateMat( 1, cat_var_count + 1, CV_32SC1 );
// precalculate valCache and set indices in buf
precalculate();
precalculate();
// now calculate the maximum size of split,
// create memory storage that will keep nodes and splits of the decision tree
@@ -574,7 +577,7 @@ void CvCascadeBoostTrainData::setData( const CvFeatureEvaluator* _featureEvaluat
tempBlockSize = MAX( tempBlockSize + BlockSizeDelta, MinBlockSize );
temp_storage = cvCreateMemStorage( tempBlockSize );
nv_heap = cvCreateSet( 0, sizeof(*nv_heap), nvSize, temp_storage );
data_root = new_node( 0, sample_count, 0, 0 );
// set sample labels
@@ -617,7 +620,7 @@ void CvCascadeBoostTrainData::free_train_data()
const int* CvCascadeBoostTrainData::get_class_labels( CvDTreeNode* n, int* labelsBuf)
{
int nodeSampleCount = n->sample_count;
int nodeSampleCount = n->sample_count;
int rStep = CV_IS_MAT_CONT( responses->type ) ? 1 : responses->step / CV_ELEM_SIZE( responses->type );
int* sampleIndicesBuf = labelsBuf; //
@@ -626,7 +629,7 @@ const int* CvCascadeBoostTrainData::get_class_labels( CvDTreeNode* n, int* label
{
int sidx = sampleIndices[si];
labelsBuf[si] = (int)responses->data.fl[sidx*rStep];
}
}
return labelsBuf;
}
@@ -643,9 +646,9 @@ const int* CvCascadeBoostTrainData::get_cv_labels( CvDTreeNode* n, int* labels_b
void CvCascadeBoostTrainData::get_ord_var_data( CvDTreeNode* n, int vi, float* ordValuesBuf, int* sortedIndicesBuf,
const float** ordValues, const int** sortedIndices, int* sampleIndicesBuf )
{
int nodeSampleCount = n->sample_count;
int nodeSampleCount = n->sample_count;
const int* sampleIndices = get_sample_indices(n, sampleIndicesBuf);
if ( vi < numPrecalcIdx )
{
if( !is_buf_16u )
@@ -659,7 +662,7 @@ void CvCascadeBoostTrainData::get_ord_var_data( CvDTreeNode* n, int vi, float* o
*sortedIndices = sortedIndicesBuf;
}
if( vi < numPrecalcVal )
{
for( int i = 0; i < nodeSampleCount; i++ )
@@ -705,10 +708,10 @@ void CvCascadeBoostTrainData::get_ord_var_data( CvDTreeNode* n, int vi, float* o
ordValuesBuf[i] = (&sampleValues[0])[sortedIndicesBuf[i]];
*sortedIndices = sortedIndicesBuf;
}
*ordValues = ordValuesBuf;
}
const int* CvCascadeBoostTrainData::get_cat_var_data( CvDTreeNode* n, int vi, int* catValuesBuf )
{
int nodeSampleCount = n->sample_count;
@@ -739,8 +742,8 @@ const int* CvCascadeBoostTrainData::get_cat_var_data( CvDTreeNode* n, int vi, in
// Returns the value of feature vi for sample si, using the precalculated
// value cache when available and falling back to on-the-fly evaluation.
float CvCascadeBoostTrainData::getVarValue( int vi, int si )
{
    // Cache hit: the first numPrecalcVal features are stored row-wise in valCache.
    if ( vi < numPrecalcVal && !valCache.empty() )
        return valCache.at<float>( vi, si );
    return (*featureEvaluator)( vi, si );
}
@@ -858,7 +861,7 @@ CvDTreeNode* CvCascadeBoostTree::predict( int sampleIdx ) const
CvDTreeNode* node = root;
if( !node )
CV_Error( CV_StsError, "The tree has not been trained yet" );
if ( ((CvCascadeBoostTrainData*)data)->featureEvaluator->getMaxCatCount() == 0 ) // ordered
{
while( node->left )
@@ -946,7 +949,7 @@ void CvCascadeBoostTree::read( const FileNode &node, CvBoost* _ensemble,
int maxCatCount = ((CvCascadeBoostTrainData*)_data)->featureEvaluator->getMaxCatCount();
int subsetN = (maxCatCount + 31)/32;
int step = 3 + ( maxCatCount>0 ? subsetN : 1 );
queue<CvDTreeNode*> internalNodesQueue;
FileNodeIterator internalNodesIt, leafValsuesIt;
CvDTreeNode* prntNode, *cldNode;
@@ -986,11 +989,11 @@ void CvCascadeBoostTree::read( const FileNode &node, CvBoost* _ensemble,
{
prntNode->right = cldNode = data->new_node( 0, 0, 0, 0 );
*leafValsuesIt >> cldNode->value; leafValsuesIt--;
cldNode->parent = prntNode;
cldNode->parent = prntNode;
}
else
{
prntNode->right = internalNodesQueue.front();
prntNode->right = internalNodesQueue.front();
prntNode->right->parent = prntNode;
internalNodesQueue.pop();
}
@@ -999,7 +1002,7 @@ void CvCascadeBoostTree::read( const FileNode &node, CvBoost* _ensemble,
{
prntNode->left = cldNode = data->new_node( 0, 0, 0, 0 );
*leafValsuesIt >> cldNode->value; leafValsuesIt--;
cldNode->parent = prntNode;
cldNode->parent = prntNode;
}
else
{
@@ -1089,7 +1092,7 @@ void CvCascadeBoostTree::split_node_data( CvDTreeNode* node )
}
}
CV_Assert( n1 == n );
}
}
else
{
int *ldst, *rdst;
@@ -1116,7 +1119,7 @@ void CvCascadeBoostTree::split_node_data( CvDTreeNode* node )
}
}
CV_Assert( n1 == n );
}
}
}
// split cv_labels using newIdx relocation table
@@ -1171,7 +1174,7 @@ void CvCascadeBoostTree::split_node_data( CvDTreeNode* node )
}
}
}
// split sample indices
int *sampleIdx_src_buf = tempBuf + n;
const int* sampleIdx_src = data->get_sample_indices(node, sampleIdx_src_buf);
@@ -1181,9 +1184,9 @@ void CvCascadeBoostTree::split_node_data( CvDTreeNode* node )
if (data->is_buf_16u)
{
unsigned short* ldst = (unsigned short*)(buf->data.s + left->buf_idx*buf->cols +
unsigned short* ldst = (unsigned short*)(buf->data.s + left->buf_idx*buf->cols +
workVarCount*scount + left->offset);
unsigned short* rdst = (unsigned short*)(buf->data.s + right->buf_idx*buf->cols +
unsigned short* rdst = (unsigned short*)(buf->data.s + right->buf_idx*buf->cols +
workVarCount*scount + right->offset);
for (int i = 0; i < n; i++)
{
@@ -1202,9 +1205,9 @@ void CvCascadeBoostTree::split_node_data( CvDTreeNode* node )
}
else
{
int* ldst = buf->data.i + left->buf_idx*buf->cols +
int* ldst = buf->data.i + left->buf_idx*buf->cols +
workVarCount*scount + left->offset;
int* rdst = buf->data.i + right->buf_idx*buf->cols +
int* rdst = buf->data.i + right->buf_idx*buf->cols +
workVarCount*scount + right->offset;
for (int i = 0; i < n; i++)
{
@@ -1229,10 +1232,10 @@ void CvCascadeBoostTree::split_node_data( CvDTreeNode* node )
}
// deallocate the parent node data that is not needed anymore
data->free_node_data(node);
data->free_node_data(node);
}
void auxMarkFeaturesInMap( const CvDTreeNode* node, Mat& featureMap)
static void auxMarkFeaturesInMap( const CvDTreeNode* node, Mat& featureMap)
{
if ( node && node->split )
{
@@ -1265,7 +1268,7 @@ bool CvCascadeBoost::train( const CvFeatureEvaluator* _featureEvaluator,
set_params( _params );
if ( (_params.boost_type == LOGIT) || (_params.boost_type == GENTLE) )
data->do_responses_copy();
update_weights( 0 );
cout << "+----+---------+---------+" << endl;
@@ -1316,7 +1319,7 @@ bool CvCascadeBoost::set_params( const CvBoostParams& _params )
minHitRate = ((CvCascadeBoostParams&)_params).minHitRate;
maxFalseAlarm = ((CvCascadeBoostParams&)_params).maxFalseAlarm;
return ( ( minHitRate > 0 ) && ( minHitRate < 1) &&
( maxFalseAlarm > 0 ) && ( maxFalseAlarm < 1) &&
( maxFalseAlarm > 0 ) && ( maxFalseAlarm < 1) &&
CvBoost::set_params( _params ));
}
@@ -1364,7 +1367,7 @@ void CvCascadeBoost::update_weights( CvBoostTree* tree )
if (data->is_buf_16u)
{
unsigned short* labels = (unsigned short*)(buf->data.s + data->data_root->buf_idx*buf->cols +
unsigned short* labels = (unsigned short*)(buf->data.s + data->data_root->buf_idx*buf->cols +
data->data_root->offset + (data->work_var_count-1)*data->sample_count);
for( int i = 0; i < n; i++ )
{
@@ -1382,7 +1385,7 @@ void CvCascadeBoost::update_weights( CvBoostTree* tree )
}
else
{
int* labels = buf->data.i + data->data_root->buf_idx*buf->cols +
int* labels = buf->data.i + data->data_root->buf_idx*buf->cols +
data->data_root->offset + (data->work_var_count-1)*data->sample_count;
for( int i = 0; i < n; i++ )
@@ -1425,7 +1428,7 @@ void CvCascadeBoost::update_weights( CvBoostTree* tree )
{
// invert the subsample mask
cvXorS( subsample_mask, cvScalar(1.), subsample_mask );
// run tree through all the non-processed samples
for( int i = 0; i < n; i++ )
if( subsample_mask->data.ptr[i] )
@@ -1565,7 +1568,7 @@ bool CvCascadeBoost::isErrDesired()
int sCount = data->sample_count,
numPos = 0, numNeg = 0, numFalse = 0, numPosTrue = 0;
vector<float> eval(sCount);
for( int i = 0; i < sCount; i++ )
if( ((CvCascadeBoostTrainData*)data)->featureEvaluator->getCls( i ) == 1.0F )
eval[numPos++] = predict( i, true );
@@ -1625,7 +1628,7 @@ bool CvCascadeBoost::read( const FileNode &node,
set_params( _params );
node[CC_STAGE_THRESHOLD] >> threshold;
FileNode rnode = node[CC_WEAK_CLASSIFIERS];
FileNode rnode = node[CC_WEAK_CLASSIFIERS];
storage = cvCreateMemStorage();
weak = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvBoostTree*), storage );

View File

@@ -1,3 +1,6 @@
#include "opencv2/core/core.hpp"
#include "opencv2/core/internal.hpp"
#include "cascadeclassifier.h"
#include <queue>
@@ -6,14 +9,14 @@ using namespace std;
static const char* stageTypes[] = { CC_BOOST };
static const char* featureTypes[] = { CC_HAAR, CC_LBP, CC_HOG };
// Default cascade parameters: default stage/feature type, 24x24 window.
CvCascadeParams::CvCascadeParams() : stageType( defaultStageType ),
    featureType( defaultFeatureType ), winSize( cvSize(24, 24) )
{
    name = CC_CASCADE_PARAMS;
}
// Cascade parameters with explicit stage and feature type; window stays 24x24.
CvCascadeParams::CvCascadeParams( int _stageType, int _featureType ) : stageType( _stageType ),
    featureType( _featureType ), winSize( cvSize(24, 24) )
{
    name = CC_CASCADE_PARAMS;
}
@@ -25,7 +28,7 @@ void CvCascadeParams::write( FileStorage &fs ) const
CV_Assert( !stageTypeStr.empty() );
fs << CC_STAGE_TYPE << stageTypeStr;
String featureTypeStr = featureType == CvFeatureParams::HAAR ? CC_HAAR :
featureType == CvFeatureParams::LBP ? CC_LBP :
featureType == CvFeatureParams::LBP ? CC_LBP :
featureType == CvFeatureParams::HOG ? CC_HOG :
0;
CV_Assert( !stageTypeStr.empty() );
@@ -51,7 +54,7 @@ bool CvCascadeParams::read( const FileNode &node )
return false;
rnode >> featureTypeStr;
featureType = !featureTypeStr.compare( CC_HAAR ) ? CvFeatureParams::HAAR :
!featureTypeStr.compare( CC_LBP ) ? CvFeatureParams::LBP :
!featureTypeStr.compare( CC_LBP ) ? CvFeatureParams::LBP :
!featureTypeStr.compare( CC_HOG ) ? CvFeatureParams::HOG :
-1;
if (featureType == -1)
@@ -125,15 +128,15 @@ bool CvCascadeParams::scanAttr( const String prmName, const String val )
bool CvCascadeClassifier::train( const String _cascadeDirName,
const String _posFilename,
const String _negFilename,
int _numPos, int _numNeg,
const String _negFilename,
int _numPos, int _numNeg,
int _precalcValBufSize, int _precalcIdxBufSize,
int _numStages,
const CvCascadeParams& _cascadeParams,
const CvFeatureParams& _featureParams,
const CvCascadeBoostParams& _stageParams,
bool baseFormatSave )
{
{
if( _cascadeDirName.empty() || _posFilename.empty() || _negFilename.empty() )
CV_Error( CV_StsBadArg, "_cascadeDirName or _bgfileName or _vecFileName is NULL" );
@@ -181,17 +184,17 @@ bool CvCascadeClassifier::train( const String _cascadeDirName,
cout << endl << "Stages 0-" << startNumStages-1 << " are loaded" << endl;
else if ( startNumStages == 1)
cout << endl << "Stage 0 is loaded" << endl;
double requiredLeafFARate = pow( (double) stageParams->maxFalseAlarm, (double) numStages ) /
(double)stageParams->max_depth;
double tempLeafFARate;
for( int i = startNumStages; i < numStages; i++ )
{
cout << endl << "===== TRAINING " << i << "-stage =====" << endl;
cout << "<BEGIN" << endl;
if ( !updateTrainingSet( tempLeafFARate ) )
if ( !updateTrainingSet( tempLeafFARate ) )
{
cout << "Train dataset for temp stage can not be filled. "
"Branch training terminated." << endl;
@@ -211,10 +214,10 @@ bool CvCascadeClassifier::train( const String _cascadeDirName,
stageClassifiers.push_back( tempStage );
cout << "END>" << endl;
// save params
String filename;
if ( i == 0)
if ( i == 0)
{
filename = dirName + CC_PARAMS_FILENAME;
FileStorage fs( filename, FileStorage::WRITE);
@@ -289,7 +292,7 @@ int CvCascadeClassifier::fillPassedSamples( int first, int count, bool isPositiv
{
bool isGetImg = isPositive ? imgReader.getPos( img ) :
imgReader.getNeg( img );
if( !isGetImg )
if( !isGetImg )
return getcount;
consumed++;
@@ -313,14 +316,14 @@ void CvCascadeClassifier::writeParams( FileStorage &fs ) const
// Delegates feature serialization to the feature evaluator; featureMap selects
// which features are written (remapped indices for used features only).
void CvCascadeClassifier::writeFeatures( FileStorage &fs, const Mat& featureMap ) const
{
    // Cast through Ptr<> to drop constness of the shared evaluator for writing.
    ((CvFeatureEvaluator*)((Ptr<CvFeatureEvaluator>)featureEvaluator))->writeFeatures( fs, featureMap );
}
void CvCascadeClassifier::writeStages( FileStorage &fs, const Mat& featureMap ) const
{
char cmnt[30];
int i = 0;
fs << CC_STAGES << "[";
fs << CC_STAGES << "[";
for( vector< Ptr<CvCascadeBoost> >::const_iterator it = stageClassifiers.begin();
it != stageClassifiers.end(); it++, i++ )
{
@@ -337,17 +340,17 @@ bool CvCascadeClassifier::readParams( const FileNode &node )
{
if ( !node.isMap() || !cascadeParams.read( node ) )
return false;
stageParams = new CvCascadeBoostParams;
FileNode rnode = node[CC_STAGE_PARAMS];
if ( !stageParams->read( rnode ) )
return false;
featureParams = CvFeatureParams::create(cascadeParams.featureType);
rnode = node[CC_FEATURE_PARAMS];
if ( !featureParams->read( rnode ) )
return false;
return true;
return true;
}
bool CvCascadeClassifier::readStages( const FileNode &node)
@@ -396,7 +399,7 @@ void CvCascadeClassifier::save( const String filename, bool baseFormat )
fs << FileStorage::getDefaultObjectName(filename) << "{";
if ( !baseFormat )
{
Mat featureMap;
Mat featureMap;
getUsedFeaturesIdxMap( featureMap );
writeParams( fs );
fs << CC_STAGE_NUM << (int)stageClassifiers.size();
@@ -409,7 +412,7 @@ void CvCascadeClassifier::save( const String filename, bool baseFormat )
CvSeq* weak;
if ( cascadeParams.featureType != CvFeatureParams::HAAR )
CV_Error( CV_StsBadFunc, "old file format is used for Haar-like features only");
fs << ICV_HAAR_SIZE_NAME << "[:" << cascadeParams.winSize.width <<
fs << ICV_HAAR_SIZE_NAME << "[:" << cascadeParams.winSize.width <<
cascadeParams.winSize.height << "]";
fs << ICV_HAAR_STAGES_NAME << "[";
for( size_t si = 0; si < stageClassifiers.size(); si++ )
@@ -424,16 +427,16 @@ void CvCascadeClassifier::save( const String filename, bool baseFormat )
int inner_node_idx = -1, total_inner_node_idx = -1;
queue<const CvDTreeNode*> inner_nodes_queue;
CvCascadeBoostTree* tree = *((CvCascadeBoostTree**) cvGetSeqElem( weak, wi ));
fs << "[";
/*sprintf( buf, "tree %d", wi );
CV_CALL( cvWriteComment( fs, buf, 1 ) );*/
const CvDTreeNode* tempNode;
inner_nodes_queue.push( tree->get_root() );
total_inner_node_idx++;
while (!inner_nodes_queue.empty())
{
tempNode = inner_nodes_queue.front();
@@ -498,7 +501,7 @@ bool CvCascadeClassifier::load( const String cascadeDirName )
node = fs.getFirstTopLevelNode();
if ( !fs.isOpened() )
break;
CvCascadeBoost *tempStage = new CvCascadeBoost;
CvCascadeBoost *tempStage = new CvCascadeBoost;
if ( !tempStage->read( node, (CvFeatureEvaluator*)featureEvaluator, *((CvCascadeBoostParams*)stageParams )) )
{
@@ -516,11 +519,11 @@ void CvCascadeClassifier::getUsedFeaturesIdxMap( Mat& featureMap )
int varCount = featureEvaluator->getNumFeatures() * featureEvaluator->getFeatureSize();
featureMap.create( 1, varCount, CV_32SC1 );
featureMap.setTo(Scalar(-1));
for( vector< Ptr<CvCascadeBoost> >::const_iterator it = stageClassifiers.begin();
it != stageClassifiers.end(); it++ )
((CvCascadeBoost*)((Ptr<CvCascadeBoost>)(*it)))->markUsedFeaturesInMap( featureMap );
for( int fi = 0, idx = 0; fi < varCount; fi++ )
if ( featureMap.at<int>(0, fi) >= 0 )
featureMap.ptr<int>(0)[fi] = idx++;

View File

@@ -1,3 +1,6 @@
#include "opencv2/core/core.hpp"
#include "opencv2/core/internal.hpp"
#include "traincascade_features.h"
#include "cascadeclassifier.h"
@@ -28,7 +31,7 @@ bool CvParams::scanAttr( const String prmName, const String val ) { return false
// Base feature parameters: no categorical values (maxCatCount=0),
// one value per feature (featSize=1).
CvFeatureParams::CvFeatureParams() : maxCatCount( 0 ), featSize( 1 )
{
    name = CC_FEATURE_PARAMS;
}
void CvFeatureParams::init( const CvFeatureParams& fp )
@@ -55,7 +58,7 @@ bool CvFeatureParams::read( const FileNode &node )
// Factory: returns the parameter object matching featureType
// (HAAR / LBP / HOG), or an empty Ptr for an unknown type.
Ptr<CvFeatureParams> CvFeatureParams::create( int featureType )
{
    return featureType == HAAR ? Ptr<CvFeatureParams>(new CvHaarFeatureParams) :
        featureType == LBP ? Ptr<CvFeatureParams>(new CvLBPFeatureParams) :
        featureType == HOG ? Ptr<CvFeatureParams>(new CvHOGFeatureParams) :
        Ptr<CvFeatureParams>();
}
@@ -84,7 +87,7 @@ void CvFeatureEvaluator::setImage(const Mat &img, uchar clsLabel, int idx)
// Factory: returns the evaluator matching the feature type
// (HAAR / LBP / HOG), or an empty Ptr for an unknown type.
Ptr<CvFeatureEvaluator> CvFeatureEvaluator::create(int type)
{
    return type == CvFeatureParams::HAAR ? Ptr<CvFeatureEvaluator>(new CvHaarEvaluator) :
        type == CvFeatureParams::LBP ? Ptr<CvFeatureEvaluator>(new CvLBPEvaluator) :
        type == CvFeatureParams::HOG ? Ptr<CvFeatureEvaluator>(new CvHOGEvaluator) :
        Ptr<CvFeatureEvaluator>();
}

View File

@@ -1,16 +1,19 @@
#include "opencv2/core/core.hpp"
#include "opencv2/core/internal.hpp"
#include "haarfeatures.h"
#include "cascadeclassifier.h"
using namespace std;
// Default Haar feature parameters: BASIC mode (upright features only).
CvHaarFeatureParams::CvHaarFeatureParams() : mode(BASIC)
{
    name = HFP_NAME;
}
// Haar feature parameters with an explicit mode (BASIC / CORE / ALL).
CvHaarFeatureParams::CvHaarFeatureParams( int _mode ) : mode( _mode )
{
    name = HFP_NAME;
}
void CvHaarFeatureParams::init( const CvFeatureParams& fp )
@@ -22,7 +25,7 @@ void CvHaarFeatureParams::init( const CvFeatureParams& fp )
void CvHaarFeatureParams::write( FileStorage &fs ) const
{
CvFeatureParams::write( fs );
String modeStr = mode == BASIC ? CC_MODE_BASIC :
String modeStr = mode == BASIC ? CC_MODE_BASIC :
mode == CORE ? CC_MODE_CORE :
mode == ALL ? CC_MODE_ALL : String();
CV_Assert( !modeStr.empty() );
@@ -55,7 +58,7 @@ void CvHaarFeatureParams::printDefaults() const
void CvHaarFeatureParams::printAttrs() const
{
CvFeatureParams::printAttrs();
String mode_str = mode == BASIC ? CC_MODE_BASIC :
String mode_str = mode == BASIC ? CC_MODE_BASIC :
mode == CORE ? CC_MODE_CORE :
mode == ALL ? CC_MODE_ALL : 0;
cout << "mode: " << mode_str << endl;
@@ -156,7 +159,7 @@ void CvHaarEvaluator::generateFeatures()
if( mode != CvHaarFeatureParams::BASIC )
{
// haar_x4
if ( (x+dx*4 <= winSize.width) && (y+dy <= winSize.height) )
if ( (x+dx*4 <= winSize.width) && (y+dy <= winSize.height) )
{
features.push_back( Feature( offset, false,
x, y, dx*4, dy, -1,
@@ -171,61 +174,61 @@ void CvHaarEvaluator::generateFeatures()
}
}
// x2_y2
if ( (x+dx*2 <= winSize.width) && (y+dy*2 <= winSize.height) )
if ( (x+dx*2 <= winSize.width) && (y+dy*2 <= winSize.height) )
{
features.push_back( Feature( offset, false,
x, y, dx*2, dy*2, -1,
x, y, dx, dy, +2,
x+dx, y+dy, dx, dy, +2 ) );
}
if (mode != CvHaarFeatureParams::BASIC)
{
if ( (x+dx*3 <= winSize.width) && (y+dy*3 <= winSize.height) )
if (mode != CvHaarFeatureParams::BASIC)
{
if ( (x+dx*3 <= winSize.width) && (y+dy*3 <= winSize.height) )
{
features.push_back( Feature( offset, false,
x , y , dx*3, dy*3, -1,
x+dx, y+dy, dx , dy , +9) );
}
}
if (mode == CvHaarFeatureParams::ALL)
{
if (mode == CvHaarFeatureParams::ALL)
{
// tilted haar_x2
if ( (x+2*dx <= winSize.width) && (y+2*dx+dy <= winSize.height) && (x-dy>= 0) )
if ( (x+2*dx <= winSize.width) && (y+2*dx+dy <= winSize.height) && (x-dy>= 0) )
{
features.push_back( Feature( offset, true,
x, y, dx*2, dy, -1,
x, y, dx, dy, +2 ) );
}
// tilted haar_y2
if ( (x+dx <= winSize.width) && (y+dx+2*dy <= winSize.height) && (x-2*dy>= 0) )
if ( (x+dx <= winSize.width) && (y+dx+2*dy <= winSize.height) && (x-2*dy>= 0) )
{
features.push_back( Feature( offset, true,
x, y, dx, 2*dy, -1,
x, y, dx, dy, +2 ) );
}
// tilted haar_x3
if ( (x+3*dx <= winSize.width) && (y+3*dx+dy <= winSize.height) && (x-dy>= 0) )
if ( (x+3*dx <= winSize.width) && (y+3*dx+dy <= winSize.height) && (x-dy>= 0) )
{
features.push_back( Feature( offset, true,
x, y, dx*3, dy, -1,
x+dx, y+dx, dx, dy, +3 ) );
}
// tilted haar_y3
if ( (x+dx <= winSize.width) && (y+dx+3*dy <= winSize.height) && (x-3*dy>= 0) )
if ( (x+dx <= winSize.width) && (y+dx+3*dy <= winSize.height) && (x-3*dy>= 0) )
{
features.push_back( Feature( offset, true,
x, y, dx, 3*dy, -1,
x-dy, y+dy, dx, dy, +3 ) );
}
// tilted haar_x4
if ( (x+4*dx <= winSize.width) && (y+4*dx+dy <= winSize.height) && (x-dy>= 0) )
if ( (x+4*dx <= winSize.width) && (y+4*dx+dy <= winSize.height) && (x-dy>= 0) )
{
features.push_back( Feature( offset, true,
x, y, dx*4, dy, -1,
x+dx, y+dx, dx*2, dy, +2 ) );
}
// tilted haar_y4
if ( (x+dx <= winSize.width) && (y+dx+4*dy <= winSize.height) && (x-4*dy>= 0) )
if ( (x+dx <= winSize.width) && (y+dx+4*dy <= winSize.height) && (x-4*dy>= 0) )
{
features.push_back( Feature( offset, true,
x, y, dx, 4*dy, -1,
@@ -296,7 +299,7 @@ void CvHaarEvaluator::Feature::write( FileStorage &fs ) const
fs << CC_RECTS << "[";
for( int ri = 0; ri < CV_HAAR_FEATURE_MAX && rect[ri].r.width != 0; ++ri )
{
fs << "[:" << rect[ri].r.x << rect[ri].r.y <<
fs << "[:" << rect[ri].r.x << rect[ri].r.y <<
rect[ri].r.width << rect[ri].r.height << rect[ri].weight << "]";
}
fs << "]" << CC_TILTED << tilted;

View File

@@ -1,3 +1,6 @@
#include "opencv2/core/core.hpp"
#include "opencv2/core/internal.hpp"
#include "cv.h"
#include "imagestorage.h"
#include <stdio.h>
@@ -55,7 +58,7 @@ bool CvCascadeImageReader::NegReader::nextImg()
for( size_t i = 0; i < count; i++ )
{
src = imread( imgFilenames[last++], 0 );
if( src.empty() )
if( src.empty() )
continue;
round += last / count;
round = round % (winSize.width * winSize.height);
@@ -63,7 +66,7 @@ bool CvCascadeImageReader::NegReader::nextImg()
_offset.x = min( (int)round % winSize.width, src.cols - winSize.width );
_offset.y = min( (int)round / winSize.width, src.rows - winSize.height );
if( !src.empty() && src.type() == CV_8UC1
if( !src.empty() && src.type() == CV_8UC1
&& offset.x >= 0 && offset.y >= 0 )
break;
}
@@ -73,7 +76,7 @@ bool CvCascadeImageReader::NegReader::nextImg()
point = offset = _offset;
scale = max( ((float)winSize.width + point.x) / ((float)src.cols),
((float)winSize.height + point.y) / ((float)src.rows) );
Size sz( (int)(scale*src.cols + 0.5F), (int)(scale*src.rows + 0.5F) );
resize( src, img, sz );
return true;
@@ -87,7 +90,7 @@ bool CvCascadeImageReader::NegReader::get( Mat& _img )
CV_Assert( _img.rows == winSize.height );
if( img.empty() )
if ( !nextImg() )
if ( !nextImg() )
return false;
Mat mat( winSize.height, winSize.width, CV_8UC1,
@@ -109,7 +112,7 @@ bool CvCascadeImageReader::NegReader::get( Mat& _img )
resize( src, img, Size( (int)(scale*src.cols), (int)(scale*src.rows) ) );
else
{
if ( !nextImg() )
if ( !nextImg() )
return false;
}
}
@@ -131,7 +134,7 @@ bool CvCascadeImageReader::PosReader::create( const String _filename )
if( !file )
return false;
short tmp = 0;
short tmp = 0;
if( fread( &count, sizeof( count ), 1, file ) != 1 ||
fread( &vecSize, sizeof( vecSize ), 1, file ) != 1 ||
fread( &tmp, sizeof( tmp ), 1, file ) != 1 ||

View File

@@ -1,3 +1,6 @@
#include "opencv2/core/core.hpp"
#include "opencv2/core/internal.hpp"
#include "lbpfeatures.h"
#include "cascadeclassifier.h"

View File

@@ -1,3 +1,6 @@
#include "opencv2/core/core.hpp"
#include "opencv2/core/internal.hpp"
#include "cv.h"
#include "cascadeclassifier.h"
@@ -13,11 +16,11 @@ int main( int argc, char* argv[] )
int precalcValBufSize = 256,
precalcIdxBufSize = 256;
bool baseFormatSave = false;
CvCascadeParams cascadeParams;
CvCascadeBoostParams stageParams;
Ptr<CvFeatureParams> featureParams[] = { Ptr<CvFeatureParams>(new CvHaarFeatureParams),
Ptr<CvFeatureParams>(new CvLBPFeatureParams),
Ptr<CvFeatureParams>(new CvLBPFeatureParams),
Ptr<CvFeatureParams>(new CvHOGFeatureParams)
};
int fc = sizeof(featureParams)/sizeof(featureParams[0]);
@@ -85,7 +88,7 @@ int main( int argc, char* argv[] )
{
for( int fi = 0; fi < fc; fi++ )
{
set = featureParams[fi]->scanAttr(argv[i], argv[i+1]);
set = featureParams[fi]->scanAttr(argv[i], argv[i+1]);
if ( !set )
{
i++;
@@ -94,11 +97,11 @@ int main( int argc, char* argv[] )
}
}
}
classifier.train( cascadeDirName,
vecName,
bgName,
numPos, numNeg,
bgName,
numPos, numNeg,
precalcValBufSize, precalcIdxBufSize,
numStages,
cascadeParams,