CascadeClassifier refactored. Most of the members and methods are private now.
commit e7f491ae1a
parent e7cf541f5f
@@ -278,6 +278,7 @@ class CV_EXPORTS FeatureEvaluator
 public:
     enum { HAAR = 0, LBP = 1 };
     virtual ~FeatureEvaluator();
+
     virtual bool read(const FileNode& node);
     virtual Ptr<FeatureEvaluator> clone() const;
     virtual int getFeatureType() const;
@@ -296,6 +297,55 @@ template<> CV_EXPORTS void Ptr<CvHaarClassifierCascade>::delete_obj();
 class CV_EXPORTS_W CascadeClassifier
 {
 public:
+    CV_WRAP CascadeClassifier();
+    CV_WRAP CascadeClassifier( const string& filename );
+    virtual ~CascadeClassifier();
+
+    CV_WRAP virtual bool empty() const;
+    CV_WRAP bool load( const string& filename );
+    bool read( const FileNode& node );
+    CV_WRAP void detectMultiScale( const Mat& image,
+                                   CV_OUT vector<Rect>& objects,
+                                   double scaleFactor=1.1,
+                                   int minNeighbors=3, int flags=0,
+                                   Size minSize=Size(),
+                                   Size maxSize=Size() );
+
+
+    bool isOldFormatCascade() const;
+    virtual Size getOriginalWindowSize() const;
+    int getFeatureType() const;
+    bool setImage(const Mat&);
+
+protected:
+    virtual bool detectSingleScale( const Mat& image, int stripCount, Size processingRectSize,
+                                    int stripSize, int yStep, double factor, vector<Rect>& candidates );
+
+private:
+    enum { BOOST = 0 };
+    enum { DO_CANNY_PRUNING = 1, SCALE_IMAGE = 2,
+           FIND_BIGGEST_OBJECT = 4, DO_ROUGH_SEARCH = 8 };
+
+    friend class CascadeClassifierInvoker;
+
+    template<class FEval>
+    friend int predictOrdered( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator);
+
+    template<class FEval>
+    friend int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator);
+
+    template<class FEval>
+    friend int predictOrderedStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator);
+
+    template<class FEval>
+    friend int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator);
+
+    bool setImage( Ptr<FeatureEvaluator>&, const Mat& );
+    int runAt( Ptr<FeatureEvaluator>&, Point );
+
+    class Data
+    {
+    public:
         struct CV_EXPORTS DTreeNode
         {
             int featureIdx;
@@ -316,26 +366,7 @@ public:
             float threshold;
         };
 
-    enum { BOOST = 0 };
-    enum { DO_CANNY_PRUNING = 1, SCALE_IMAGE = 2,
-           FIND_BIGGEST_OBJECT = 4, DO_ROUGH_SEARCH = 8 };
-
-    CV_WRAP CascadeClassifier();
-    CV_WRAP CascadeClassifier(const string& filename);
-    ~CascadeClassifier();
-
-    CV_WRAP bool empty() const;
-    CV_WRAP bool load(const string& filename);
-    bool read(const FileNode& node);
-    CV_WRAP void detectMultiScale( const Mat& image,
-                                   CV_OUT vector<Rect>& objects,
-                                   double scaleFactor=1.1,
-                                   int minNeighbors=3, int flags=0,
-                                   Size minSize=Size(),
-                                   Size maxSize=Size());
-
-    bool setImage( Ptr<FeatureEvaluator>&, const Mat& );
-    int runAt( Ptr<FeatureEvaluator>&, Point );
+        bool read(const FileNode &node);
 
     bool isStumpBased;
@@ -349,12 +380,13 @@ public:
         vector<DTreeNode> nodes;
         vector<float> leaves;
         vector<int> subsets;
+    };
 
-    Ptr<FeatureEvaluator> feval;
+    Data data;
+    Ptr<FeatureEvaluator> featureEvaluator;
     Ptr<CvHaarClassifierCascade> oldCascade;
 };
 
 
 //////////////// HOG (Histogram-of-Oriented-Gradients) Descriptor and Object Detector //////////////
 
 struct CV_EXPORTS_W HOGDescriptor
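
After this change the public surface is small: the constructors, empty()/load()/read(), detectMultiScale(), and a few getters; everything the predict* templates touch moves behind private:. A minimal usage sketch against that public API (the cascade file and image names are illustrative, not part of the commit):

    #include <opencv2/objdetect/objdetect.hpp>
    #include <opencv2/highgui/highgui.hpp>
    #include <vector>

    int main()
    {
        cv::CascadeClassifier cascade;
        if( !cascade.load("haarcascade_frontalface_alt.xml") ) // CV_WRAP bool load()
            return -1;

        cv::Mat gray = cv::imread("people.jpg", 0);            // 8-bit grayscale input
        std::vector<cv::Rect> faces;
        cascade.detectMultiScale( gray, faces, 1.1, 3, 0, cv::Size(30, 30) );
        return (int)faces.size();                              // one Rect per detection
    }
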
@@ -258,6 +258,7 @@ public:
     { return featuresPtr[featureIdx].calc(offset) * varianceNormFactor; }
     virtual double calcOrd(int featureIdx) const
     { return (*this)(featureIdx); }
+
 private:
     Size origWinSize;
     Ptr<vector<Feature> > features;
@@ -440,6 +441,7 @@ bool HaarEvaluator::setWindow( Point pt )
         nf = 1.;
     varianceNormFactor = 1./nf;
     offset = (int)pOffset;
+
     return true;
 }
 
@@ -614,7 +616,7 @@ CascadeClassifier::~CascadeClassifier()
 
 bool CascadeClassifier::empty() const
 {
-    return oldCascade.empty() && stages.empty();
+    return oldCascade.empty() && data.stages.empty();
 }
 
 bool CascadeClassifier::load(const string& filename)
@@ -635,31 +637,31 @@ bool CascadeClassifier::load(const string& filename)
 }
 
 template<class FEval>
-inline int predictOrdered( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_feval )
+inline int predictOrdered( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator )
 {
-    int si, nstages = (int)cascade.stages.size();
+    int nstages = (int)cascade.data.stages.size();
     int nodeOfs = 0, leafOfs = 0;
-    FEval& feval = (FEval&)*_feval;
-    float* cascadeLeaves = &cascade.leaves[0];
-    CascadeClassifier::DTreeNode* cascadeNodes = &cascade.nodes[0];
-    CascadeClassifier::DTree* cascadeWeaks = &cascade.classifiers[0];
-    CascadeClassifier::Stage* cascadeStages = &cascade.stages[0];
+    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
+    float* cascadeLeaves = &cascade.data.leaves[0];
+    CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
+    CascadeClassifier::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
+    CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
 
-    for( si = 0; si < nstages; si++ )
+    for( int si = 0; si < nstages; si++ )
     {
-        CascadeClassifier::Stage& stage = cascadeStages[si];
+        CascadeClassifier::Data::Stage& stage = cascadeStages[si];
         int wi, ntrees = stage.ntrees;
         double sum = 0;
 
         for( wi = 0; wi < ntrees; wi++ )
         {
-            CascadeClassifier::DTree& weak = cascadeWeaks[stage.first + wi];
+            CascadeClassifier::Data::DTree& weak = cascadeWeaks[stage.first + wi];
             int idx = 0, root = nodeOfs;
 
             do
             {
-                CascadeClassifier::DTreeNode& node = cascadeNodes[root + idx];
-                double val = feval(node.featureIdx);
+                CascadeClassifier::Data::DTreeNode& node = cascadeNodes[root + idx];
+                double val = featureEvaluator(node.featureIdx);
                 idx = val < node.threshold ? node.left : node.right;
             }
             while( idx > 0 );
@@ -674,32 +676,32 @@ inline int predictOrdered( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_feval )
 }
 
 template<class FEval>
-inline int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_feval )
+inline int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator )
 {
-    int si, nstages = (int)cascade.stages.size();
+    int nstages = (int)cascade.data.stages.size();
     int nodeOfs = 0, leafOfs = 0;
-    FEval& feval = (FEval&)*_feval;
-    size_t subsetSize = (cascade.ncategories + 31)/32;
-    int* cascadeSubsets = &cascade.subsets[0];
-    float* cascadeLeaves = &cascade.leaves[0];
-    CascadeClassifier::DTreeNode* cascadeNodes = &cascade.nodes[0];
-    CascadeClassifier::DTree* cascadeWeaks = &cascade.classifiers[0];
-    CascadeClassifier::Stage* cascadeStages = &cascade.stages[0];
+    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
+    size_t subsetSize = (cascade.data.ncategories + 31)/32;
+    int* cascadeSubsets = &cascade.data.subsets[0];
+    float* cascadeLeaves = &cascade.data.leaves[0];
+    CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
+    CascadeClassifier::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
+    CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
 
-    for( si = 0; si < nstages; si++ )
+    for( int si = 0; si < nstages; si++ )
     {
-        CascadeClassifier::Stage& stage = cascadeStages[si];
+        CascadeClassifier::Data::Stage& stage = cascadeStages[si];
         int wi, ntrees = stage.ntrees;
         double sum = 0;
 
         for( wi = 0; wi < ntrees; wi++ )
         {
-            CascadeClassifier::DTree& weak = cascadeWeaks[stage.first + wi];
+            CascadeClassifier::Data::DTree& weak = cascadeWeaks[stage.first + wi];
             int idx = 0, root = nodeOfs;
             do
             {
-                CascadeClassifier::DTreeNode& node = cascadeNodes[root + idx];
-                int c = feval(node.featureIdx);
+                CascadeClassifier::Data::DTreeNode& node = cascadeNodes[root + idx];
+                int c = featureEvaluator(node.featureIdx);
                 const int* subset = &cascadeSubsets[(root + idx)*subsetSize];
                 idx = (subset[c>>5] & (1 << (c & 31))) ? node.left : node.right;
             }
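
The categorical traversal above stores each node's category subset as packed 32-bit words: category c sends the walk left iff bit c of the mask is set, so subset[c>>5] selects word c/32 and (1 << (c & 31)) tests bit c%32. The test in isolation (a self-contained sketch; the names are illustrative):

    #include <assert.h>

    static int subsetContains( const int* subset, int c )
    {
        return (subset[c >> 5] & (1 << (c & 31))) != 0; // word c/32, bit c%32
    }

    int main()
    {
        int subset[2] = { 0, 0 };           // room for 64 categories
        subset[40 >> 5] |= 1 << (40 & 31);  // mark category 40
        assert( subsetContains(subset, 40) );
        assert( !subsetContains(subset, 7) );
        return 0;
    }
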
@@ -715,25 +717,25 @@ inline int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_feval )
 }
 
 template<class FEval>
-inline int predictOrderedStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_feval )
+inline int predictOrderedStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator )
 {
     int nodeOfs = 0, leafOfs = 0;
-    FEval& feval = (FEval&)*_feval;
-    float* cascadeLeaves = &cascade.leaves[0];
-    CascadeClassifier::DTreeNode* cascadeNodes = &cascade.nodes[0];
-    CascadeClassifier::Stage* cascadeStages = &cascade.stages[0];
+    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
+    float* cascadeLeaves = &cascade.data.leaves[0];
+    CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
+    CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
 
-    int nstages = (int)cascade.stages.size();
+    int nstages = (int)cascade.data.stages.size();
     for( int stageIdx = 0; stageIdx < nstages; stageIdx++ )
     {
-        CascadeClassifier::Stage& stage = cascadeStages[stageIdx];
+        CascadeClassifier::Data::Stage& stage = cascadeStages[stageIdx];
         double sum = 0.0;
 
         int ntrees = stage.ntrees;
         for( int i = 0; i < ntrees; i++, nodeOfs++, leafOfs+= 2 )
         {
-            CascadeClassifier::DTreeNode& node = cascadeNodes[nodeOfs];
-            double value = feval(node.featureIdx);
+            CascadeClassifier::Data::DTreeNode& node = cascadeNodes[nodeOfs];
+            double value = featureEvaluator(node.featureIdx);
             sum += cascadeLeaves[ value < node.threshold ? leafOfs : leafOfs + 1 ];
         }
 
@@ -745,27 +747,27 @@ inline int predictOrderedStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_feval )
 }
 
 template<class FEval>
-inline int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_feval )
+inline int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator )
 {
-    int si, nstages = (int)cascade.stages.size();
+    int nstages = (int)cascade.data.stages.size();
     int nodeOfs = 0, leafOfs = 0;
-    FEval& feval = (FEval&)*_feval;
-    size_t subsetSize = (cascade.ncategories + 31)/32;
-    int* cascadeSubsets = &cascade.subsets[0];
-    float* cascadeLeaves = &cascade.leaves[0];
-    CascadeClassifier::DTreeNode* cascadeNodes = &cascade.nodes[0];
-    CascadeClassifier::Stage* cascadeStages = &cascade.stages[0];
+    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
+    size_t subsetSize = (cascade.data.ncategories + 31)/32;
+    int* cascadeSubsets = &cascade.data.subsets[0];
+    float* cascadeLeaves = &cascade.data.leaves[0];
+    CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
+    CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
 
-    for( si = 0; si < nstages; si++ )
+    for( int si = 0; si < nstages; si++ )
    {
-        CascadeClassifier::Stage& stage = cascadeStages[si];
+        CascadeClassifier::Data::Stage& stage = cascadeStages[si];
         int wi, ntrees = stage.ntrees;
         double sum = 0;
 
         for( wi = 0; wi < ntrees; wi++ )
         {
-            CascadeClassifier::DTreeNode& node = cascadeNodes[nodeOfs];
-            int c = feval(node.featureIdx);
+            CascadeClassifier::Data::DTreeNode& node = cascadeNodes[nodeOfs];
+            int c = featureEvaluator(node.featureIdx);
             const int* subset = &cascadeSubsets[nodeOfs*subsetSize];
             sum += cascadeLeaves[ subset[c>>5] & (1 << (c & 31)) ? leafOfs : leafOfs+1];
             nodeOfs++;
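
In the two stump variants each weak classifier is a single node with exactly two leaves, so the do/while walk of the general case collapses to one test per tree: nodeOfs advances by one node and leafOfs by two leaves per iteration. The ordered case in miniature (the flat arrays mimic the nodes/leaves layout of CascadeClassifier::Data; all names here are illustrative):

    struct StumpNode { int featureIdx; float threshold; };

    static double sumStumpStage( const StumpNode* nodes, const float* leaves,
                                 int ntrees, const double* featureValues )
    {
        double sum = 0;
        for( int i = 0, leafOfs = 0; i < ntrees; i++, leafOfs += 2 )
            sum += leaves[ featureValues[nodes[i].featureIdx] < nodes[i].threshold
                           ? leafOfs : leafOfs + 1 ]; // left or right leaf value
        return sum;
    }
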
@@ -780,43 +782,30 @@ inline int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_feval )
 int CascadeClassifier::runAt( Ptr<FeatureEvaluator>& featureEvaluator, Point pt )
 {
     CV_Assert( oldCascade.empty() );
-    /*if( !oldCascade.empty() )
-        return cvRunHaarClassifierCascade(oldCascade, pt, 0);*/
-
-    assert(featureType == FeatureEvaluator::HAAR ||
-           featureType == FeatureEvaluator::LBP);
+    assert(data.featureType == FeatureEvaluator::HAAR ||
+           data.featureType == FeatureEvaluator::LBP);
 
     return !featureEvaluator->setWindow(pt) ? -1 :
-           isStumpBased ? ( featureType == FeatureEvaluator::HAAR ?
+           data.isStumpBased ? ( data.featureType == FeatureEvaluator::HAAR ?
                predictOrderedStump<HaarEvaluator>( *this, featureEvaluator ) :
                predictCategoricalStump<LBPEvaluator>( *this, featureEvaluator ) ) :
-           ( featureType == FeatureEvaluator::HAAR ?
+           ( data.featureType == FeatureEvaluator::HAAR ?
                predictOrdered<HaarEvaluator>( *this, featureEvaluator ) :
                predictCategorical<LBPEvaluator>( *this, featureEvaluator ) );
 }
 
 bool CascadeClassifier::setImage( Ptr<FeatureEvaluator>& featureEvaluator, const Mat& image )
 {
-    /*if( !oldCascade.empty() )
-    {
-        Mat sum(image.rows+1, image.cols+1, CV_32S);
-        Mat tilted(image.rows+1, image.cols+1, CV_32S);
-        Mat sqsum(image.rows+1, image.cols+1, CV_64F);
-        integral(image, sum, sqsum, tilted);
-        CvMat _sum = sum, _sqsum = sqsum, _tilted = tilted;
-        cvSetImagesForHaarClassifierCascade( oldCascade, &_sum, &_sqsum, &_tilted, 1. );
-        return true;
-    }*/
-    return empty() ? false : featureEvaluator->setImage(image, origWinSize);
+    return empty() ? false : featureEvaluator->setImage(image, data.origWinSize);
 }
 
 
 struct CascadeClassifierInvoker
 {
     CascadeClassifierInvoker( CascadeClassifier& _cc, Size _sz1, int _stripSize, int _yStep, double _factor, ConcurrentRectVector& _vec )
     {
         classifier = &_cc;
-        processingAreaSize = _sz1;
+        processingRectSize = _sz1;
         stripSize = _stripSize;
         yStep = _yStep;
         scalingFactor = _factor;
@@ -825,14 +814,14 @@ struct CascadeClassifierInvoker
 
     void operator()(const BlockedRange& range) const
     {
-        Ptr<FeatureEvaluator> evaluator = classifier->feval->clone();
-        Size winSize(cvRound(classifier->origWinSize.width * scalingFactor), cvRound(classifier->origWinSize.height * scalingFactor));
+        Ptr<FeatureEvaluator> evaluator = classifier->featureEvaluator->clone();
+        Size winSize(cvRound(classifier->data.origWinSize.width * scalingFactor), cvRound(classifier->data.origWinSize.height * scalingFactor));
 
         int y1 = range.begin() * stripSize;
-        int y2 = min(range.end() * stripSize, processingAreaSize.height);
+        int y2 = min(range.end() * stripSize, processingRectSize.height);
         for( int y = y1; y < y2; y += yStep )
         {
-            for( int x = 0; x < processingAreaSize.width; x += yStep )
+            for( int x = 0; x < processingRectSize.width; x += yStep )
             {
                 int result = classifier->runAt(evaluator, Point(x, y));
                 if( result > 0 )
@@ -846,14 +835,46 @@ struct CascadeClassifierInvoker
 
     CascadeClassifier* classifier;
     ConcurrentRectVector* rectangles;
-    Size processingAreaSize;
+    Size processingRectSize;
     int stripSize, yStep;
     double scalingFactor;
 };
 
 
 struct getRect { Rect operator ()(const CvAvgComp& e) const { return e.rect; } };
 
+bool CascadeClassifier::detectSingleScale( const Mat& image, int stripCount, Size processingRectSize,
+                                           int stripSize, int yStep, double factor, vector<Rect>& candidates )
+{
+    if( !featureEvaluator->setImage( image, data.origWinSize ) )
+        return false;
+
+    ConcurrentRectVector concurrentCandidates;
+    parallel_for(BlockedRange(0, stripCount), CascadeClassifierInvoker( *this, processingRectSize, stripSize, yStep, factor, concurrentCandidates));
+    candidates.insert( candidates.end(), concurrentCandidates.begin(), concurrentCandidates.end() );
+
+    return true;
+}
+
+bool CascadeClassifier::isOldFormatCascade() const
+{
+    return !oldCascade.empty();
+}
+
+int CascadeClassifier::getFeatureType() const
+{
+    return featureEvaluator->getFeatureType();
+}
+
+Size CascadeClassifier::getOriginalWindowSize() const
+{
+    return data.origWinSize;
+}
+
+bool CascadeClassifier::setImage(const Mat& image)
+{
+    return featureEvaluator->setImage(image, data.origWinSize);
+}
+
 void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects,
                                           double scaleFactor, int minNeighbors,
                                           int flags, Size minObjectSize, Size maxObjectSize )
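
detectSingleScale() hands the per-strip scan to CascadeClassifierInvoker through OpenCV's parallel wrapper of the time: a functor exposing operator()(const BlockedRange&) is applied over the strip indices [0, stripCount), and each invocation scans its own band of rows. The pattern in isolation (StripWorker is a made-up name; BlockedRange/parallel_for are assumed from OpenCV's internal headers of that era):

    struct StripWorker
    {
        StripWorker( int _stripSize ) : stripSize(_stripSize) {}
        void operator()( const cv::BlockedRange& range ) const
        {
            // each thread receives a contiguous sub-range of strip indices
            for( int strip = range.begin(); strip < range.end(); strip++ )
            {
                int y1 = strip * stripSize; // this strip covers rows [y1, y1 + stripSize)
                (void)y1;                   // ... slide the detection window over those rows ...
            }
        }
        int stripSize;
    };
    // usage: cv::parallel_for( cv::BlockedRange(0, stripCount), StripWorker(stripSize) );
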
@@ -865,7 +886,7 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects,
     if( empty() )
         return;
 
-    if( !oldCascade.empty() )
+    if( isOldFormatCascade() )
     {
         MemStorage storage(cvCreateMemStorage(0));
         CvMat _image = image;
@@ -892,41 +913,41 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects,
     }
 
     Mat imageBuffer(image.rows + 1, image.cols + 1, CV_8U);
-    ConcurrentRectVector candidates;
+    vector<Rect> candidates;
 
     for( double factor = 1; ; factor *= scaleFactor )
     {
-        int stripCount, stripSize;
+        Size originalWindowSize = getOriginalWindowSize();
 
-        Size windowSize( cvRound(origWinSize.width*factor), cvRound(origWinSize.height*factor) );
+        Size windowSize( cvRound(originalWindowSize.width*factor), cvRound(originalWindowSize.height*factor) );
         Size scaledImageSize( cvRound( grayImage.cols/factor ), cvRound( grayImage.rows/factor ) );
-        Size processingAreaSize( scaledImageSize.width - origWinSize.width, scaledImageSize.height - origWinSize.height );
+        Size processingRectSize( scaledImageSize.width - originalWindowSize.width, scaledImageSize.height - originalWindowSize.height );
 
-        if( processingAreaSize.width <= 0 || processingAreaSize.height <= 0 )
+        if( processingRectSize.width <= 0 || processingRectSize.height <= 0 )
             break;
         if( windowSize.width > maxObjectSize.width || windowSize.height > maxObjectSize.height )
             break;
         if( windowSize.width < minObjectSize.width || windowSize.height < minObjectSize.height )
            continue;
 
+        Mat scaledImage( scaledImageSize, CV_8U, imageBuffer.data );
+        resize( grayImage, scaledImage, scaledImageSize, 0, 0, CV_INTER_LINEAR );
+
         int yStep = factor > 2. ? 1 : 2;
+        int stripCount, stripSize;
 
 #ifdef HAVE_TBB
         const int PTS_PER_THREAD = 1000;
-        stripCount = ((processingAreaSize.width/yStep)*(processingAreaSize.height + yStep-1)/yStep + PTS_PER_THREAD/2)/PTS_PER_THREAD;
+        stripCount = ((processingRectSize.width/yStep)*(processingRectSize.height + yStep-1)/yStep + PTS_PER_THREAD/2)/PTS_PER_THREAD;
         stripCount = std::min(std::max(stripCount, 1), 100);
-        stripSize = (((processingAreaSize.height + stripCount - 1)/stripCount + yStep-1)/yStep)*yStep;
+        stripSize = (((processingRectSize.height + stripCount - 1)/stripCount + yStep-1)/yStep)*yStep;
 #else
         stripCount = 1;
-        stripSize = processingAreaSize.height;
+        stripSize = processingRectSize.height;
 #endif
 
-        Mat scaledImage( scaledImageSize, CV_8U, imageBuffer.data );
-        resize( grayImage, scaledImage, scaledImageSize, 0, 0, CV_INTER_LINEAR );
-        if( !feval->setImage( scaledImage, origWinSize ) )
+        if( !detectSingleScale( scaledImage, stripCount, processingRectSize, stripSize, yStep, factor, candidates ) )
             break;
-
-        parallel_for(BlockedRange(0, stripCount), CascadeClassifierInvoker(*this, processingAreaSize, stripSize, yStep, factor, candidates));
     }
 
     objects.resize(candidates.size());
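
The HAVE_TBB branch sizes the strips so that each parallel task examines roughly PTS_PER_THREAD candidate window positions. Worked through with illustrative numbers, a 640x480 processing rect at yStep = 2 yields 320*240 = 76800 scan points, hence 77 strips of 8 rows each:

    #include <assert.h>
    #include <algorithm>

    int main()
    {
        const int PTS_PER_THREAD = 1000;
        int w = 640, h = 480, yStep = 2; // illustrative sizes, not from the commit
        int stripCount = ((w/yStep)*(h + yStep-1)/yStep + PTS_PER_THREAD/2)/PTS_PER_THREAD;
        stripCount = std::min(std::max(stripCount, 1), 100);
        int stripSize = (((h + stripCount - 1)/stripCount + yStep-1)/yStep)*yStep;
        assert( stripCount == 77 && stripSize == 8 ); // ~1000 points per strip
        assert( stripCount * stripSize >= h );        // the strips cover every row
        return 0;
    }
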
@@ -935,8 +956,7 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects,
         groupRectangles( objects, minNeighbors, GROUP_EPS );
 }
 
-bool CascadeClassifier::read(const FileNode& root)
+bool CascadeClassifier::Data::read(const FileNode &root)
 {
     // load stage params
     string stageTypeStr = (string)root[CC_STAGE_TYPE];
@@ -1000,6 +1020,7 @@ bool CascadeClassifier::read(const FileNode& root)
             FileNode leafValues = fnw[CC_LEAF_VALUES];
             if( internalNodes.empty() || leafValues.empty() )
                 return false;
+
             DTree tree;
             tree.nodeCount = (int)internalNodes.size()/nodeStep;
             classifiers.push_back(tree);
@@ -1009,47 +1030,52 @@ bool CascadeClassifier::read(const FileNode& root)
             if( subsetSize > 0 )
                 subsets.reserve(subsets.size() + tree.nodeCount*subsetSize);
 
-            FileNodeIterator it2 = internalNodes.begin(), it2_end = internalNodes.end();
+            FileNodeIterator internalNodesIter = internalNodes.begin(), internalNodesEnd = internalNodes.end();
 
-            for( ; it2 != it2_end; ) // nodes
+            for( ; internalNodesIter != internalNodesEnd; ) // nodes
             {
                 DTreeNode node;
-                node.left = (int)*it2; ++it2;
-                node.right = (int)*it2; ++it2;
-                node.featureIdx = (int)*it2; ++it2;
+                node.left = (int)*internalNodesIter; ++internalNodesIter;
+                node.right = (int)*internalNodesIter; ++internalNodesIter;
+                node.featureIdx = (int)*internalNodesIter; ++internalNodesIter;
                 if( subsetSize > 0 )
                 {
-                    for( int j = 0; j < subsetSize; j++, ++it2 )
-                        subsets.push_back((int)*it2);
+                    for( int j = 0; j < subsetSize; j++, ++internalNodesIter )
+                        subsets.push_back((int)*internalNodesIter);
                     node.threshold = 0.f;
                 }
                 else
                 {
-                    node.threshold = (float)*it2; ++it2;
+                    node.threshold = (float)*internalNodesIter; ++internalNodesIter;
                 }
                 nodes.push_back(node);
             }
 
-            it2 = leafValues.begin(), it2_end = leafValues.end();
+            internalNodesIter = leafValues.begin(), internalNodesEnd = leafValues.end();
 
-            for( ; it2 != it2_end; ++it2 ) // leaves
-                leaves.push_back((float)*it2);
+            for( ; internalNodesIter != internalNodesEnd; ++internalNodesIter ) // leaves
+                leaves.push_back((float)*internalNodesIter);
         }
     }
 
+    return true;
+}
+
+bool CascadeClassifier::read(const FileNode& root)
+{
+    if( !data.read(root) )
+        return false;
+
     // load features
-    feval = FeatureEvaluator::create(featureType);
-    fn = root[CC_FEATURES];
+    featureEvaluator = FeatureEvaluator::create(data.featureType);
+    FileNode fn = root[CC_FEATURES];
     if( fn.empty() )
         return false;
 
-    return feval->read(fn);
+    return featureEvaluator->read(fn);
 }
 
 template<> void Ptr<CvHaarClassifierCascade>::delete_obj()
 { cvReleaseHaarClassifierCascade(&obj); }
 
 } // namespace cv
 
-/* End of file. */
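
CascadeClassifier::read() is now a thin two-step wrapper: Data::read() parses the stage and tree tables, then FeatureEvaluator::read() parses the feature definitions, and load() merely opens the file and forwards the top-level node. Driving the same path by hand looks like this (a sketch; the path string is illustrative):

    #include <opencv2/objdetect/objdetect.hpp>

    bool loadNewFormatCascade( cv::CascadeClassifier& cc, const char* path )
    {
        cv::FileStorage fs( path, cv::FileStorage::READ );
        if( !fs.isOpened() )
            return false;
        // read() runs Data::read() for the stage tables, then
        // FeatureEvaluator::read() for the features.
        return cc.read( fs.getFirstTopLevelNode() );
    }
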
@@ -474,9 +474,9 @@ float CvCascadeBoostTrainData::getVarValue( int vi, int si )
 
 struct FeatureIdxOnlyPrecalc
 {
-    FeatureIdxOnlyPrecalc( const CvFeatureEvaluator* _feval, CvMat* _buf, int _sample_count, bool _is_buf_16u )
+    FeatureIdxOnlyPrecalc( const CvFeatureEvaluator* _featureEvaluator, CvMat* _buf, int _sample_count, bool _is_buf_16u )
     {
-        feval = _feval;
+        featureEvaluator = _featureEvaluator;
         sample_count = _sample_count;
         udst = (unsigned short*)_buf->data.s;
         idst = _buf->data.i;
@@ -490,7 +490,7 @@ struct FeatureIdxOnlyPrecalc
         {
             for( int si = 0; si < sample_count; si++ )
             {
-                valCachePtr[si] = (*feval)( fi, si );
+                valCachePtr[si] = (*featureEvaluator)( fi, si );
                 if ( is_buf_16u )
                     *(udst + fi*sample_count + si) = (unsigned short)si;
                 else
@@ -502,7 +502,7 @@ struct FeatureIdxOnlyPrecalc
             icvSortIntAux( idst + fi*sample_count, sample_count, valCachePtr );
         }
     }
-    const CvFeatureEvaluator* feval;
+    const CvFeatureEvaluator* featureEvaluator;
     int sample_count;
     int* idst;
     unsigned short* udst;
@@ -511,9 +511,9 @@ struct FeatureIdxOnlyPrecalc
 
 struct FeatureValAndIdxPrecalc
 {
-    FeatureValAndIdxPrecalc( const CvFeatureEvaluator* _feval, CvMat* _buf, Mat* _valCache, int _sample_count, bool _is_buf_16u )
+    FeatureValAndIdxPrecalc( const CvFeatureEvaluator* _featureEvaluator, CvMat* _buf, Mat* _valCache, int _sample_count, bool _is_buf_16u )
     {
-        feval = _feval;
+        featureEvaluator = _featureEvaluator;
         valCache = _valCache;
         sample_count = _sample_count;
         udst = (unsigned short*)_buf->data.s;
@@ -526,7 +526,7 @@ struct FeatureValAndIdxPrecalc
         {
             for( int si = 0; si < sample_count; si++ )
             {
-                valCache->at<float>(fi,si) = (*feval)( fi, si );
+                valCache->at<float>(fi,si) = (*featureEvaluator)( fi, si );
                 if ( is_buf_16u )
                     *(udst + fi*sample_count + si) = (unsigned short)si;
                 else
@@ -538,7 +538,7 @@ struct FeatureValAndIdxPrecalc
             icvSortIntAux( idst + fi*sample_count, sample_count, valCache->ptr<float>(fi) );
         }
     }
-    const CvFeatureEvaluator* feval;
+    const CvFeatureEvaluator* featureEvaluator;
     Mat* valCache;
     int sample_count;
     int* idst;
@@ -548,9 +548,9 @@ struct FeatureValOnlyPrecalc
 
 struct FeatureValOnlyPrecalc
 {
-    FeatureValOnlyPrecalc( const CvFeatureEvaluator* _feval, Mat* _valCache, int _sample_count )
+    FeatureValOnlyPrecalc( const CvFeatureEvaluator* _featureEvaluator, Mat* _valCache, int _sample_count )
     {
-        feval = _feval;
+        featureEvaluator = _featureEvaluator;
         valCache = _valCache;
         sample_count = _sample_count;
     }
@@ -558,9 +558,9 @@ struct FeatureValOnlyPrecalc
     {
         for ( int fi = range.begin(); fi < range.end(); fi++)
             for( int si = 0; si < sample_count; si++ )
-                valCache->at<float>(fi,si) = (*feval)( fi, si );
+                valCache->at<float>(fi,si) = (*featureEvaluator)( fi, si );
     }
-    const CvFeatureEvaluator* feval;
+    const CvFeatureEvaluator* featureEvaluator;
     Mat* valCache;
     int sample_count;
 };
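
All three precalc functors share one contract: operator()(const BlockedRange&) fills the cache for a sub-range of feature indices, so training-time precalculation parallelizes over features rather than samples. A sketch of the call site (the variables are assumed from the surrounding training code, which this diff does not show):

    // numPrecalcedFeatures, featureEvaluator, valCache and sample_count are
    // assumed to exist in the enclosing training context.
    cv::parallel_for( cv::BlockedRange(0, numPrecalcedFeatures),
                      FeatureValOnlyPrecalc(featureEvaluator, &valCache, sample_count) );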