Warning fixes continued

Andrey Kamaev
2012-06-09 15:00:04 +00:00
parent f6b451c607
commit f2d3b9b4a1
127 changed files with 6298 additions and 6277 deletions

View File

@@ -636,12 +636,14 @@ struct CV_EXPORTS Feature
int label; ///< Quantization
Feature() : x(0), y(0), label(0) {}
- Feature(int x, int y, int label) : x(x), y(y), label(label) {}
+ Feature(int x, int y, int label);
void read(const FileNode& fn);
void write(FileStorage& fs) const;
};
+ inline Feature::Feature(int _x, int _y, int _label) : x(_x), y(_y), label(_label) {}
struct CV_EXPORTS Template
{
int width;
@@ -688,10 +690,7 @@ protected:
/// Candidate feature with a score
struct Candidate
{
- Candidate(int x, int y, int label, float score)
-     : f(x, y, label), score(score)
- {
- }
+ Candidate(int x, int y, int label, float score);
/// Sort candidates with high score to the front
bool operator<(const Candidate& rhs) const
@@ -716,6 +715,8 @@ protected:
size_t num_features, float distance);
};
+ inline QuantizedPyramid::Candidate::Candidate(int x, int y, int label, float _score) : f(x, y, label), score(_score) {}
/**
* \brief Interface for modalities that plug into the LINE template matching representation.
*
@@ -853,10 +854,7 @@ struct CV_EXPORTS Match
{
}
- Match(int x, int y, float similarity, const std::string& class_id, int template_id)
-     : x(x), y(y), similarity(similarity), class_id(class_id), template_id(template_id)
- {
- }
+ Match(int x, int y, float similarity, const std::string& class_id, int template_id);
/// Sort matches with high similarity to the front
bool operator<(const Match& rhs) const
@@ -880,6 +878,11 @@ struct CV_EXPORTS Match
int template_id;
};
+ inline Match::Match(int _x, int _y, float _similarity, const std::string& _class_id, int _template_id)
+     : x(_x), y(_y), similarity(_similarity), class_id(_class_id), template_id(_template_id)
+ {
+ }
/**
* \brief Object detector using the LINE template matching algorithm with any set of
* modalities.
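Nearly every hunk in this header applies the same warning fix: a constructor that used to be defined inside the class, with parameters shadowing the members they initialize, becomes an in-class declaration plus an inline definition whose parameters carry a leading underscore. A minimal sketch of the idea (illustrative only, not the commit's code), assuming GCC's -Wshadow is the warning being silenced:

struct Pixel
{
    int x, y;
    Pixel(int x, int y);   // declaration only: no initializer list here, so nothing shadows
};

// The inline definition keeps the header header-only; the '_'-prefixed
// parameters no longer shadow the members x and y.
inline Pixel::Pixel(int _x, int _y) : x(_x), y(_y) {}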

View File

@@ -46,12 +46,12 @@
namespace cv
{
// class for grouping object candidates, detected by Cascade Classifier, HOG etc.
// instance of the class is to be passed to cv::partition (see cxoperations.hpp)
class CV_EXPORTS SimilarRects
{
public:
SimilarRects(double _eps) : eps(_eps) {}
inline bool operator()(const Rect& r1, const Rect& r2) const
{
@@ -62,8 +62,8 @@ public:
std::abs(r1.y + r1.height - r2.y - r2.height) <= delta;
}
double eps;
};
void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps, vector<int>* weights, vector<double>* levelWeights)
{
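As the comment above says, SimilarRects exists to be handed to cv::partition, which unions any two rectangles the predicate calls equal. A hedged, self-contained sketch of that use; the predicate is restated locally (same formula as above) so the example compiles on its own:

#include <opencv2/core/core.hpp>
#include <algorithm>
#include <cstdlib>
#include <vector>

struct RectsAlike
{
    RectsAlike(double _eps) : eps(_eps) {}
    bool operator()(const cv::Rect& r1, const cv::Rect& r2) const
    {
        // rectangles match when all four edges agree within eps * mean linear size
        double delta = eps*(std::min(r1.width, r2.width) + std::min(r1.height, r2.height))*0.5;
        return std::abs(r1.x - r2.x) <= delta &&
               std::abs(r1.y - r2.y) <= delta &&
               std::abs(r1.x + r1.width - r2.x - r2.width) <= delta &&
               std::abs(r1.y + r1.height - r2.y - r2.height) <= delta;
    }
    double eps;
};

int main()
{
    std::vector<cv::Rect> rects;
    rects.push_back(cv::Rect(10, 10, 100, 100));
    rects.push_back(cv::Rect(12, 11, 98, 102));   // near-duplicate of the first
    rects.push_back(cv::Rect(300, 300, 50, 50));  // unrelated detection

    std::vector<int> labels;
    int nclasses = cv::partition(rects, labels, RectsAlike(0.2));
    return nclasses == 2 ? 0 : 1;                 // first two rects share a label
}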
@@ -78,13 +78,13 @@ void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps, vec
}
return;
}
vector<int> labels;
int nclasses = partition(rectList, labels, SimilarRects(eps));
vector<Rect> rrects(nclasses);
vector<int> rweights(nclasses, 0);
vector<int> rejectLevels(nclasses, 0);
vector<double> rejectWeights(nclasses, DBL_MIN);
int i, j, nlabels = (int)labels.size();
for( i = 0; i < nlabels; i++ )
@@ -97,10 +97,10 @@ void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps, vec
rweights[cls]++;
}
if ( levelWeights && weights && !weights->empty() && !levelWeights->empty() )
{
for( i = 0; i < nlabels; i++ )
{
int cls = labels[i];
if( (*weights)[i] > rejectLevels[cls] )
{
rejectLevels[cls] = (*weights)[i];
@@ -108,9 +108,9 @@ void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps, vec
}
else if( ( (*weights)[i] == rejectLevels[cls] ) && ( (*levelWeights)[i] > rejectWeights[cls] ) )
rejectWeights[cls] = (*levelWeights)[i];
}
}
for( i = 0; i < nclasses; i++ )
{
Rect r = rrects[i];
@@ -120,32 +120,32 @@ void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps, vec
saturate_cast<int>(r.width*s),
saturate_cast<int>(r.height*s));
}
rectList.clear();
if( weights )
weights->clear();
if( levelWeights )
levelWeights->clear();
for( i = 0; i < nclasses; i++ )
{
Rect r1 = rrects[i];
int n1 = levelWeights ? rejectLevels[i] : rweights[i];
double w1 = rejectWeights[i];
if( n1 <= groupThreshold )
continue;
// filter out small face rectangles inside large rectangles
for( j = 0; j < nclasses; j++ )
{
int n2 = rweights[j];
if( j == i || n2 <= groupThreshold )
continue;
Rect r2 = rrects[j];
int dx = saturate_cast<int>( r2.width * eps );
int dy = saturate_cast<int>( r2.height * eps );
if( i != j &&
r1.x >= r2.x - dx &&
r1.y >= r2.y - dy &&
@@ -154,14 +154,14 @@ void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps, vec
(n2 > std::max(3, n1) || n1 < 3) )
break;
}
if( j == nclasses )
{
rectList.push_back(r1);
if( weights )
weights->push_back(n1);
if( levelWeights )
levelWeights->push_back(w1);
}
}
}
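The function above both averages each cluster and prunes clusters with too few members (n1 <= groupThreshold). A hedged usage sketch of the public overload, with thresholds chosen for illustration:

#include <opencv2/objdetect/objdetect.hpp>
#include <vector>

int main()
{
    std::vector<cv::Rect> hits;
    hits.push_back(cv::Rect(10, 10, 100, 100));
    hits.push_back(cv::Rect(12, 12, 100, 100));
    hits.push_back(cv::Rect(11,  9, 102,  98));
    hits.push_back(cv::Rect(400, 400, 40, 40)); // lone hit: fewer than groupThreshold+1 members

    cv::groupRectangles(hits, 1 /*groupThreshold*/, 0.2 /*eps*/);
    return (int)hits.size();                    // expected 1: only the averaged cluster survives
}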
@@ -169,158 +169,158 @@ void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps, vec
class MeanshiftGrouping
{
public:
MeanshiftGrouping(const Point3d& densKer, const vector<Point3d>& posV,
                  const vector<double>& wV, double, int maxIter = 20)
{
    densityKernel = densKer;
    weightsV = wV;
    positionsV = posV;
    positionsCount = (int)posV.size();
    meanshiftV.resize(positionsCount);
    distanceV.resize(positionsCount);
    iterMax = maxIter;

    for (unsigned i = 0; i<positionsV.size(); i++)
    {
        meanshiftV[i] = getNewValue(positionsV[i]);
        distanceV[i] = moveToMode(meanshiftV[i]);
        meanshiftV[i] -= positionsV[i];
    }
}

void getModes(vector<Point3d>& modesV, vector<double>& resWeightsV, const double eps)
{
    for (size_t i=0; i <distanceV.size(); i++)
    {
        bool is_found = false;
        for(size_t j=0; j<modesV.size(); j++)
        {
            if ( getDistance(distanceV[i], modesV[j]) < eps)
            {
                is_found=true;
                break;
            }
        }
        if (!is_found)
        {
            modesV.push_back(distanceV[i]);
        }
    }

    resWeightsV.resize(modesV.size());

    for (size_t i=0; i<modesV.size(); i++)
    {
        resWeightsV[i] = getResultWeight(modesV[i]);
    }
}
protected:
vector<Point3d> positionsV;
vector<double> weightsV;

Point3d densityKernel;
int positionsCount;

vector<Point3d> meanshiftV;
vector<Point3d> distanceV;
int iterMax;
double modeEps;
Point3d getNewValue(const Point3d& inPt) const
{
    Point3d resPoint(.0);
    Point3d ratPoint(.0);
    for (size_t i=0; i<positionsV.size(); i++)
    {
        Point3d aPt= positionsV[i];
        Point3d bPt = inPt;
        Point3d sPt = densityKernel;

        sPt.x *= exp(aPt.z);
        sPt.y *= exp(aPt.z);

        aPt.x /= sPt.x;
        aPt.y /= sPt.y;
        aPt.z /= sPt.z;

        bPt.x /= sPt.x;
        bPt.y /= sPt.y;
        bPt.z /= sPt.z;

        double w = (weightsV[i])*std::exp(-((aPt-bPt).dot(aPt-bPt))/2)/std::sqrt(sPt.dot(Point3d(1,1,1)));

        resPoint += w*aPt;

        ratPoint.x += w/sPt.x;
        ratPoint.y += w/sPt.y;
        ratPoint.z += w/sPt.z;
    }
    resPoint.x /= ratPoint.x;
    resPoint.y /= ratPoint.y;
    resPoint.z /= ratPoint.z;
    return resPoint;
}
double getResultWeight(const Point3d& inPt) const
{
    double sumW=0;
    for (size_t i=0; i<positionsV.size(); i++)
    {
        Point3d aPt = positionsV[i];
        Point3d sPt = densityKernel;

        sPt.x *= exp(aPt.z);
        sPt.y *= exp(aPt.z);

        aPt -= inPt;

        aPt.x /= sPt.x;
        aPt.y /= sPt.y;
        aPt.z /= sPt.z;

        sumW+=(weightsV[i])*std::exp(-(aPt.dot(aPt))/2)/std::sqrt(sPt.dot(Point3d(1,1,1)));
    }
    return sumW;
}
Point3d moveToMode(Point3d aPt) const
{
    Point3d bPt;
    for (int i = 0; i<iterMax; i++)
    {
        bPt = aPt;
        aPt = getNewValue(bPt);
        if ( getDistance(aPt, bPt) <= modeEps )
        {
            break;
        }
    }
    return aPt;
}
double getDistance(Point3d p1, Point3d p2) const
{
Point3d ns = densityKernel;
ns.x *= exp(p2.z);
ns.y *= exp(p2.z);
p2 -= p1;
p2.x /= ns.x;
p2.y /= ns.y;
p2.z /= ns.z;
return p2.dot(p2);
}
};
// new grouping function using meanshift
static void groupRectangles_meanshift(vector<Rect>& rectList, double detectThreshold, vector<double>* foundWeights,
vector<double>& scales, Size winDetSize)
{
int detectionCount = (int)rectList.size();
vector<Point3d> hits(detectionCount), resultHits;
vector<double> hitWeights(detectionCount), resultWeights;
Point2d hitCenter;
for (int i=0; i < detectionCount; i++)
{
hitWeights[i] = (*foundWeights)[i];
hitCenter = (rectList[i].tl() + rectList[i].br())*(0.5); //center of rectangles
@@ -338,17 +338,17 @@ static void groupRectangles_meanshift(vector<Rect>& rectList, double detectThres
msGrouping.getModes(resultHits, resultWeights, 1);
for (unsigned i=0; i < resultHits.size(); ++i)
{
double scale = exp(resultHits[i].z);
hitCenter.x = resultHits[i].x;
hitCenter.y = resultHits[i].y;
Size s( int(winDetSize.width * scale), int(winDetSize.height * scale) );
Rect resultRect( int(hitCenter.x-s.width/2), int(hitCenter.y-s.height/2),
int(s.width), int(s.height) );
if (resultWeights[i] > detectThreshold)
{
rectList.push_back(resultRect);
foundWeights->push_back(resultWeights[i]);
@@ -371,13 +371,13 @@ void groupRectangles(vector<Rect>& rectList, vector<int>& rejectLevels, vector<d
groupRectangles(rectList, groupThreshold, eps, &rejectLevels, &levelWeights);
}
//can be used for HOG detection algorithm only
void groupRectangles_meanshift(vector<Rect>& rectList, vector<double>& foundWeights,
vector<double>& foundScales, double detectThreshold, Size winDetSize)
{
groupRectangles_meanshift(rectList, detectThreshold, &foundWeights, foundScales, winDetSize);
}
FeatureEvaluator::~FeatureEvaluator() {}
bool FeatureEvaluator::read(const FileNode&) {return true;}
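For context, the mean-shift path above treats each detection as a point (x, y, log scale) and keeps only the modes of the weighted density; the public wrapper just shown is the entry HOG uses. A hedged sketch of calling it directly (weights and scales are made up):

#include <opencv2/objdetect/objdetect.hpp>
#include <vector>

int main()
{
    std::vector<cv::Rect> rects;
    std::vector<double> weights, scales;

    rects.push_back(cv::Rect(10, 10, 64, 128)); weights.push_back(1.5); scales.push_back(1.0);
    rects.push_back(cv::Rect(12, 12, 64, 128)); weights.push_back(1.2); scales.push_back(1.0);

    // Detections collapse onto modes of the (x, y, log-scale) density;
    // modes whose weight falls below detectThreshold are dropped.
    cv::groupRectangles_meanshift(rects, weights, scales, 0.5, cv::Size(64, 128));
    return (int)rects.size();
}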
@@ -394,21 +394,21 @@ bool HaarEvaluator::Feature :: read( const FileNode& node )
{
FileNode rnode = node[CC_RECTS];
FileNodeIterator it = rnode.begin(), it_end = rnode.end();
int ri;
for( ri = 0; ri < RECT_NUM; ri++ )
{
rect[ri].r = Rect();
rect[ri].weight = 0.f;
}
for(ri = 0; it != it_end; ++it, ri++)
{
FileNodeIterator it2 = (*it).begin();
it2 >> rect[ri].r.x >> rect[ri].r.y >>
rect[ri].r.width >> rect[ri].r.height >> rect[ri].weight;
}
tilted = (int)node[CC_TILTED] != 0;
return true;
}
@@ -427,7 +427,7 @@ bool HaarEvaluator::read(const FileNode& node)
featuresPtr = &(*features)[0];
FileNodeIterator it = node.begin(), it_end = node.end();
hasTiltedFeatures = false;
for(int i = 0; it != it_end; ++it, i++)
{
if(!featuresPtr[i].read(*it))
@@ -437,7 +437,7 @@ bool HaarEvaluator::read(const FileNode& node)
}
return true;
}
Ptr<FeatureEvaluator> HaarEvaluator::clone() const
{
HaarEvaluator* ret = new HaarEvaluator;
@@ -451,7 +451,7 @@ Ptr<FeatureEvaluator> HaarEvaluator::clone() const
memcpy( ret->p, p, 4*sizeof(p[0]) );
memcpy( ret->pq, pq, 4*sizeof(pq[0]) );
ret->offset = offset;
ret->varianceNormFactor = varianceNormFactor;
return ret;
}
@@ -460,10 +460,10 @@ bool HaarEvaluator::setImage( const Mat &image, Size _origWinSize )
int rn = image.rows+1, cn = image.cols+1;
origWinSize = _origWinSize;
normrect = Rect(1, 1, origWinSize.width-2, origWinSize.height-2);
if (image.cols < origWinSize.width || image.rows < origWinSize.height)
return false;
if( sum0.rows < rn || sum0.cols < cn )
{
sum0.create(rn, cn, CV_32S);
@@ -485,10 +485,10 @@ bool HaarEvaluator::setImage( const Mat &image, Size _origWinSize )
const double* sqdata = (const double*)sqsum.data;
size_t sumStep = sum.step/sizeof(sdata[0]);
size_t sqsumStep = sqsum.step/sizeof(sqdata[0]);
CV_SUM_PTRS( p[0], p[1], p[2], p[3], sdata, normrect, sumStep );
CV_SUM_PTRS( pq[0], pq[1], pq[2], pq[3], sqdata, normrect, sqsumStep );
size_t fi, nfeatures = features->size();
for( fi = 0; fi < nfeatures; fi++ )
@@ -568,19 +568,19 @@ bool LBPEvaluator::setImage( const Mat& image, Size _origWinSize )
if( image.cols < origWinSize.width || image.rows < origWinSize.height )
return false;
if( sum0.rows < rn || sum0.cols < cn )
sum0.create(rn, cn, CV_32S);
sum = Mat(rn, cn, CV_32S, sum0.data);
integral(image, sum);
size_t fi, nfeatures = features->size();
for( fi = 0; fi < nfeatures; fi++ )
featuresPtr[fi].updatePtrs( sum );
return true;
}
bool LBPEvaluator::setWindow( Point pt )
{
if( pt.x < 0 || pt.y < 0 ||
@@ -589,7 +589,7 @@ bool LBPEvaluator::setWindow( Point pt )
return false;
offset = pt.y * ((int)sum.step/sizeof(int)) + pt.x;
return true;
}
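Both evaluators above lean on cv::integral for O(1) box sums; a tiny self-contained check of its layout (illustrative aside, not commit code):

#include <opencv2/imgproc/imgproc.hpp>

int main()
{
    cv::Mat img = cv::Mat::ones(4, 4, CV_8U), sum;
    cv::integral(img, sum, CV_32S); // sum is (rows+1) x (cols+1); entry (i,j) totals img above-left of (i,j)
    return sum.at<int>(4, 4);       // 16: the sum of all sixteen ones
}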
//---------------------------------------------- HOGEvaluator ---------------------------------------
bool HOGEvaluator::Feature :: read( const FileNode& node )
@@ -638,7 +638,7 @@ Ptr<FeatureEvaluator> HOGEvaluator::clone() const
ret->featuresPtr = &(*ret->features)[0];
ret->offset = offset;
ret->hist = hist;
ret->normSum = normSum;
return ret;
}
@@ -756,7 +756,7 @@ void HOGEvaluator::integralHistogram(const Mat &img, vector<Mat> &histogram, Mat
memset( histBuf, 0, histSize.width * sizeof(histBuf[0]) );
histBuf += histStep + 1;
for( y = 0; y < qangle.rows; y++ )
{
histBuf[-1] = 0.f;
float strSum = 0.f;
for( x = 0; x < qangle.cols; x++ )
@@ -775,7 +775,7 @@ void HOGEvaluator::integralHistogram(const Mat &img, vector<Mat> &histogram, Mat
Ptr<FeatureEvaluator> FeatureEvaluator::create( int featureType )
{
return featureType == HAAR ? Ptr<FeatureEvaluator>(new HaarEvaluator) :
featureType == LBP ? Ptr<FeatureEvaluator>(new LBPEvaluator) :
featureType == HOG ? Ptr<FeatureEvaluator>(new HOGEvaluator) :
Ptr<FeatureEvaluator>();
}
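A hedged note on the factory just above: callers select the evaluator family by enum and must handle the null Ptr returned for unknown types, e.g.:

#include <opencv2/objdetect/objdetect.hpp>

int main()
{
    cv::Ptr<cv::FeatureEvaluator> eval = cv::FeatureEvaluator::create(cv::FeatureEvaluator::LBP);
    return eval.empty() ? 1 : 0; // an empty Ptr signals an unrecognized featureType
}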
@@ -787,13 +787,13 @@ CascadeClassifier::CascadeClassifier()
}
CascadeClassifier::CascadeClassifier(const string& filename)
{
load(filename);
}
CascadeClassifier::~CascadeClassifier()
{
}
bool CascadeClassifier::empty() const
{
@@ -805,57 +805,57 @@ bool CascadeClassifier::load(const string& filename)
oldCascade.release();
data = Data();
featureEvaluator.release();
FileStorage fs(filename, FileStorage::READ);
if( !fs.isOpened() )
return false;
if( read(fs.getFirstTopLevelNode()) )
return true;
fs.release();
oldCascade = Ptr<CvHaarClassifierCascade>((CvHaarClassifierCascade*)cvLoad(filename.c_str(), 0, 0, 0));
return !oldCascade.empty();
}
- int CascadeClassifier::runAt( Ptr<FeatureEvaluator>& featureEvaluator, Point pt, double& weight )
+ int CascadeClassifier::runAt( Ptr<FeatureEvaluator>& evaluator, Point pt, double& weight )
{
CV_Assert( oldCascade.empty() );
assert( data.featureType == FeatureEvaluator::HAAR ||
data.featureType == FeatureEvaluator::LBP ||
data.featureType == FeatureEvaluator::HOG );
- if( !featureEvaluator->setWindow(pt) )
+ if( !evaluator->setWindow(pt) )
return -1;
if( data.isStumpBased )
{
if( data.featureType == FeatureEvaluator::HAAR )
- return predictOrderedStump<HaarEvaluator>( *this, featureEvaluator, weight );
+ return predictOrderedStump<HaarEvaluator>( *this, evaluator, weight );
else if( data.featureType == FeatureEvaluator::LBP )
- return predictCategoricalStump<LBPEvaluator>( *this, featureEvaluator, weight );
+ return predictCategoricalStump<LBPEvaluator>( *this, evaluator, weight );
else if( data.featureType == FeatureEvaluator::HOG )
- return predictOrderedStump<HOGEvaluator>( *this, featureEvaluator, weight );
+ return predictOrderedStump<HOGEvaluator>( *this, evaluator, weight );
else
return -2;
}
else
{
if( data.featureType == FeatureEvaluator::HAAR )
- return predictOrdered<HaarEvaluator>( *this, featureEvaluator, weight );
+ return predictOrdered<HaarEvaluator>( *this, evaluator, weight );
else if( data.featureType == FeatureEvaluator::LBP )
- return predictCategorical<LBPEvaluator>( *this, featureEvaluator, weight );
+ return predictCategorical<LBPEvaluator>( *this, evaluator, weight );
else if( data.featureType == FeatureEvaluator::HOG )
- return predictOrdered<HOGEvaluator>( *this, featureEvaluator, weight );
+ return predictOrdered<HOGEvaluator>( *this, evaluator, weight );
else
return -2;
}
}
- bool CascadeClassifier::setImage( Ptr<FeatureEvaluator>& featureEvaluator, const Mat& image )
+ bool CascadeClassifier::setImage( Ptr<FeatureEvaluator>& evaluator, const Mat& image )
{
- return empty() ? false : featureEvaluator->setImage(image, data.origWinSize);
+ return empty() ? false : evaluator->setImage(image, data.origWinSize);
}
void CascadeClassifier::setMaskGenerator(Ptr<MaskGenerator> _maskGenerator)
@@ -878,7 +878,7 @@ void CascadeClassifier::setFaceDetectionMaskGenerator()
struct CascadeClassifierInvoker
{
CascadeClassifierInvoker( CascadeClassifier& _cc, Size _sz1, int _stripSize, int _yStep, double _factor,
ConcurrentRectVector& _vec, vector<int>& _levels, vector<double>& _weights, bool outputLevels, const Mat& _mask)
{
classifier = &_cc;
@@ -891,7 +891,7 @@ struct CascadeClassifierInvoker
levelWeights = outputLevels ? &_weights : 0;
mask=_mask;
}
void operator()(const BlockedRange& range) const
{
Ptr<FeatureEvaluator> evaluator = classifier->featureEvaluator->clone();
@@ -916,11 +916,11 @@ struct CascadeClassifierInvoker
result = -(int)classifier->data.stages.size();
if( classifier->data.stages.size() + result < 4 )
{
rectangles->push_back(Rect(cvRound(x*scalingFactor), cvRound(y*scalingFactor), winSize.width, winSize.height));
rejectLevels->push_back(-result);
levelWeights->push_back(gypWeight);
}
}
else if( result > 0 )
rectangles->push_back(Rect(cvRound(x*scalingFactor), cvRound(y*scalingFactor),
winSize.width, winSize.height));
@@ -929,7 +929,7 @@ struct CascadeClassifierInvoker
}
}
}
CascadeClassifier* classifier;
ConcurrentRectVector* rectangles;
Size processingRectSize;
@@ -939,7 +939,7 @@ struct CascadeClassifierInvoker
vector<double> *levelWeights;
Mat mask;
};
struct getRect { Rect operator ()(const CvAvgComp& e) const { return e.rect; } };
bool CascadeClassifier::detectSingleScale( const Mat& image, int stripCount, Size processingRectSize,
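The little getRect functor a few lines up is the usual bridge from the C-API CvAvgComp records back to cv::Rect; a hedged standalone sketch of that idiom (the functor is restated so the example compiles alone):

#include <opencv2/objdetect/objdetect.hpp>
#include <algorithm>
#include <vector>

struct getRect { cv::Rect operator ()(const CvAvgComp& e) const { return e.rect; } };

int main()
{
    std::vector<CvAvgComp> raw(2);       // as would be filled from cvHaarDetectObjects
    raw[0].rect = cvRect(0, 0, 10, 10);
    raw[1].rect = cvRect(5, 5, 20, 20);

    std::vector<cv::Rect> objects(raw.size());
    std::transform(raw.begin(), raw.end(), objects.begin(), getRect());
    return (int)objects.size();
}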
@@ -995,17 +995,17 @@ bool CascadeClassifier::setImage(const Mat& image)
return featureEvaluator->setImage(image, data.origWinSize);
}
void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects,
vector<int>& rejectLevels,
vector<double>& levelWeights,
double scaleFactor, int minNeighbors,
int flags, Size minObjectSize, Size maxObjectSize,
bool outputRejectLevels )
{
const double GROUP_EPS = 0.2;
CV_Assert( scaleFactor > 1 && image.depth() == CV_8U );
if( empty() )
return;
@@ -1031,7 +1031,7 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& object
if( maxObjectSize.height == 0 || maxObjectSize.width == 0 )
maxObjectSize = image.size();
Mat grayImage = image;
if( grayImage.channels() > 1 )
{
@@ -1039,7 +1039,7 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& object
cvtColor(grayImage, temp, CV_BGR2GRAY);
grayImage = temp;
}
Mat imageBuffer(image.rows + 1, image.cols + 1, CV_8U);
vector<Rect> candidates;
@@ -1050,14 +1050,14 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& object
Size windowSize( cvRound(originalWindowSize.width*factor), cvRound(originalWindowSize.height*factor) );
Size scaledImageSize( cvRound( grayImage.cols/factor ), cvRound( grayImage.rows/factor ) );
Size processingRectSize( scaledImageSize.width - originalWindowSize.width + 1, scaledImageSize.height - originalWindowSize.height + 1 );
if( processingRectSize.width <= 0 || processingRectSize.height <= 0 )
break;
if( windowSize.width > maxObjectSize.width || windowSize.height > maxObjectSize.height )
break;
if( windowSize.width < minObjectSize.width || windowSize.height < minObjectSize.height )
continue;
Mat scaledImage( scaledImageSize, CV_8U, imageBuffer.data );
resize( grayImage, scaledImage, scaledImageSize, 0, 0, CV_INTER_LINEAR );
@@ -1083,12 +1083,12 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& object
stripSize = processingRectSize.height;
#endif
if( !detectSingleScale( scaledImage, stripCount, processingRectSize, stripSize, yStep, factor, candidates,
rejectLevels, levelWeights, outputRejectLevels ) )
break;
}
objects.resize(candidates.size());
std::copy(candidates.begin(), candidates.end(), objects.begin());
@@ -1108,14 +1108,14 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& object
{
vector<int> fakeLevels;
vector<double> fakeWeights;
detectMultiScale( image, objects, fakeLevels, fakeWeights, scaleFactor,
minNeighbors, flags, minObjectSize, maxObjectSize, false );
}
}
bool CascadeClassifier::Data::read(const FileNode &root)
{
static const float THRESHOLD_EPS = 1e-5f;
// load stage params
string stageTypeStr = (string)root[CC_STAGE_TYPE];
if( stageTypeStr == CC_BOOST )
@@ -1232,11 +1232,11 @@ bool CascadeClassifier::read(const FileNode& root)
FileNode fn = root[CC_FEATURES];
if( fn.empty() )
return false;
return featureEvaluator->read(fn);
}
template<> void Ptr<CvHaarClassifierCascade>::delete_obj()
{ cvReleaseHaarClassifierCascade(&obj); }
} // namespace cv
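End to end, the class this file implements is driven as below; a hedged sketch, with the cascade file and the image as placeholders:

#include <opencv2/objdetect/objdetect.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <vector>
#include <cstdio>

int main()
{
    cv::CascadeClassifier cascade("haarcascade_frontalface_alt.xml"); // placeholder model
    cv::Mat img = cv::imread("people.jpg");                           // placeholder input
    if( cascade.empty() || img.empty() )
        return 1;

    cv::Mat gray;
    cv::cvtColor(img, gray, CV_BGR2GRAY);
    cv::equalizeHist(gray, gray);

    std::vector<cv::Rect> faces;
    cascade.detectMultiScale(gray, faces, 1.1 /*scaleFactor*/, 3 /*minNeighbors*/);
    std::printf("%d object(s)\n", (int)faces.size());
    return 0;
}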

View File

@@ -256,14 +256,14 @@ static int decode(Sampler &sa, code &cc)
{
uchar binary[8] = {0,0,0,0,0,0,0,0};
uchar b = 0;
- int i, sum;
+ int sum;
sum = 0;
- for (i = 0; i < 64; i++)
+ for (int i = 0; i < 64; i++)
sum += sa.getpixel(1 + (i & 7), 1 + (i >> 3));
uchar mean = (uchar)(sum / 64);
- for (i = 0; i < 64; i++) {
+ for (int i = 0; i < 64; i++) {
b = (b << 1) + (sa.getpixel(pickup[i].x, pickup[i].y) <= mean);
if ((i & 7) == 7) {
binary[i >> 3] = b;
@@ -275,12 +275,11 @@ static int decode(Sampler &sa, code &cc)
uchar c[5] = {0,0,0,0,0};
{
- int i, j;
uchar a[5] = {228, 48, 15, 111, 62};
int k = 5;
- for (i = 0; i < 3; i++) {
+ for (int i = 0; i < 3; i++) {
uchar t = binary[i] ^ c[4];
- for (j = k - 1; j != -1; j--) {
+ for (int j = k - 1; j != -1; j--) {
if (t == 0)
c[j] = 0;
else
@@ -390,12 +389,12 @@ deque <CvDataMatrixCode> cvFindDataMatrix(CvMat *im)
deque <CvPoint> candidates;
{
int x, y;
- int r = cxy->rows;
- int c = cxy->cols;
- for (y = 0; y < r; y++) {
+ int rows = cxy->rows;
+ int cols = cxy->cols;
+ for (y = 0; y < rows; y++) {
const short *cd = (const short*)cvPtr2D(cxy, y, 0);
const short *ccd = (const short*)cvPtr2D(ccxy, y, 0);
- for (x = 0; x < c; x += 4, cd += 8, ccd += 8) {
+ for (x = 0; x < cols; x += 4, cd += 8, ccd += 8) {
__m128i v = _mm_loadu_si128((const __m128i*)cd);
__m128 cyxyxA = _mm_cvtepi32_ps(_mm_srai_epi32(_mm_unpacklo_epi16(v, v), 16));
__m128 cyxyxB = _mm_cvtepi32_ps(_mm_srai_epi32(_mm_unpackhi_epi16(v, v), 16));
@@ -496,7 +495,7 @@ endo: ; // end search for this o
namespace cv
{
void findDataMatrix(InputArray _image,
vector<string>& codes,
OutputArray _corners,
@@ -507,23 +506,23 @@ void findDataMatrix(InputArray _image,
deque <CvDataMatrixCode> rc = cvFindDataMatrix(&m);
int i, n = (int)rc.size();
Mat corners;
if( _corners.needed() )
{
_corners.create(n, 4, CV_32SC2);
corners = _corners.getMat();
}
if( _dmtx.needed() )
_dmtx.create(n, 1, CV_8U);
codes.resize(n);
for( i = 0; i < n; i++ )
{
CvDataMatrixCode& rc_i = rc[i];
codes[i] = string(rc_i.msg);
if( corners.data )
{
const Point* srcpt = (Point*)rc_i.corners->data.ptr;
@@ -532,7 +531,7 @@ void findDataMatrix(InputArray _image,
dstpt[k] = srcpt[k];
}
cvReleaseMat(&rc_i.corners);
if( _dmtx.needed() )
{
_dmtx.create(rc_i.original->rows, rc_i.original->cols, rc_i.original->type, i);
@@ -550,20 +549,20 @@ void drawDataMatrixCodes(InputOutputArray _image,
Mat image = _image.getMat();
Mat corners = _corners.getMat();
int i, n = corners.rows;
if( n > 0 )
{
CV_Assert( corners.depth() == CV_32S &&
corners.cols*corners.channels() == 8 &&
n == (int)codes.size() );
}
for( i = 0; i < n; i++ )
{
Scalar c(0, 255, 0);
Scalar c2(255, 0,0);
const Point* pt = (const Point*)corners.ptr(i);
for( int k = 0; k < 4; k++ )
line(image, pt[k], pt[(k+1)%4], c);
//int baseline = 0;
@@ -571,5 +570,5 @@ void drawDataMatrixCodes(InputOutputArray _image,
putText(image, codes[i], pt[0], CV_FONT_HERSHEY_SIMPLEX, 0.8, c2, 1, CV_AA, false);
}
}
}
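The two public entry points of this file compose naturally; a hedged sketch, with "matrix.png" a placeholder image assumed to contain a DataMatrix code:

#include <opencv2/objdetect/objdetect.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <vector>
#include <string>

int main()
{
    cv::Mat gray = cv::imread("matrix.png", 0); // the decoder samples 8-bit gray pixels
    if( gray.empty() )
        return 1;

    std::vector<std::string> codes;
    cv::Mat corners;
    cv::findDataMatrix(gray, codes, corners);

    cv::Mat vis;
    cv::cvtColor(gray, vis, CV_GRAY2BGR);
    cv::drawDataMatrixCodes(vis, codes, corners);
    cv::imwrite("matrix_found.png", vis);
    return (int)codes.size();
}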

View File

@@ -456,7 +456,6 @@ void HOGCache::init(const HOGDescriptor* _descriptor,
Size blockSize = descriptor->blockSize;
Size blockStride = descriptor->blockStride;
Size cellSize = descriptor->cellSize;
- Size winSize = descriptor->winSize;
int i, j, nbins = descriptor->nbins;
int rawBlockSize = blockSize.width*blockSize.height;
@@ -471,10 +470,10 @@ void HOGCache::init(const HOGDescriptor* _descriptor,
(winSize.height/cacheStride.height)+1);
blockCache.create(cacheSize.height, cacheSize.width*blockHistogramSize);
blockCacheFlags.create(cacheSize);
- size_t i, cacheRows = blockCache.rows;
+ size_t cacheRows = blockCache.rows;
ymaxCached.resize(cacheRows);
- for( i = 0; i < cacheRows; i++ )
-     ymaxCached[i] = -1;
+ for(size_t ii = 0; ii < cacheRows; ii++ )
+     ymaxCached[ii] = -1;
}
Mat_<float> weights(blockSize);
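The hog.cpp hunks above drop a redundant local (winSize) and un-shadow a loop counter: the old size_t i redeclared the function's int i. A minimal reproduction of the counter fix (illustrative, not OpenCV code):

int countAll(int rows)
{
    int i, total = 0;
    for( i = 0; i < rows; i++ )
        total += i;

    // was: size_t i, cacheRows = ...;  -- the new 'i' shadowed the int above
    size_t cacheRows = (size_t)rows;
    for( size_t ii = 0; ii < cacheRows; ii++ ) // fresh name, scoped to the loop
        total += (int)ii;
    return total;
}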

View File

@@ -451,15 +451,15 @@ protected:
float strong_threshold;
};
- ColorGradientPyramid::ColorGradientPyramid(const Mat& src, const Mat& mask,
-     float weak_threshold, size_t num_features,
-     float strong_threshold)
-   : src(src),
-     mask(mask),
+ ColorGradientPyramid::ColorGradientPyramid(const Mat& _src, const Mat& _mask,
+     float _weak_threshold, size_t _num_features,
+     float _strong_threshold)
+   : src(_src),
+     mask(_mask),
      pyramid_level(0),
-     weak_threshold(weak_threshold),
-     num_features(num_features),
-     strong_threshold(strong_threshold)
+     weak_threshold(_weak_threshold),
+     num_features(_num_features),
+     strong_threshold(_strong_threshold)
{
update();
}
@@ -557,10 +557,10 @@ ColorGradient::ColorGradient()
{
}
- ColorGradient::ColorGradient(float weak_threshold, size_t num_features, float strong_threshold)
-   : weak_threshold(weak_threshold),
-     num_features(num_features),
-     strong_threshold(strong_threshold)
+ ColorGradient::ColorGradient(float _weak_threshold, size_t _num_features, float _strong_threshold)
+   : weak_threshold(_weak_threshold),
+     num_features(_num_features),
+     strong_threshold(_strong_threshold)
{
}
@@ -751,13 +751,13 @@ protected:
int extract_threshold;
};
- DepthNormalPyramid::DepthNormalPyramid(const Mat& src, const Mat& mask,
-     int distance_threshold, int difference_threshold, size_t num_features,
-     int extract_threshold)
-   : mask(mask),
+ DepthNormalPyramid::DepthNormalPyramid(const Mat& src, const Mat& _mask,
+     int distance_threshold, int difference_threshold, size_t _num_features,
+     int _extract_threshold)
+   : mask(_mask),
      pyramid_level(0),
-     num_features(num_features),
-     extract_threshold(extract_threshold)
+     num_features(_num_features),
+     extract_threshold(_extract_threshold)
{
quantizedNormals(src, normal, distance_threshold, difference_threshold);
}
@@ -876,12 +876,12 @@ DepthNormal::DepthNormal()
{
}
- DepthNormal::DepthNormal(int distance_threshold, int difference_threshold, size_t num_features,
-     int extract_threshold)
-   : distance_threshold(distance_threshold),
-     difference_threshold(difference_threshold),
-     num_features(num_features),
-     extract_threshold(extract_threshold)
+ DepthNormal::DepthNormal(int _distance_threshold, int _difference_threshold, size_t _num_features,
+     int _extract_threshold)
+   : distance_threshold(_distance_threshold),
+     difference_threshold(_difference_threshold),
+     num_features(_num_features),
+     extract_threshold(_extract_threshold)
{
}
@@ -1388,9 +1388,9 @@ Detector::Detector()
{
}
- Detector::Detector(const std::vector< Ptr<Modality> >& modalities,
+ Detector::Detector(const std::vector< Ptr<Modality> >& _modalities,
const std::vector<int>& T_pyramid)
- : modalities(modalities),
+ : modalities(_modalities),
pyramid_levels(static_cast<int>(T_pyramid.size())),
T_at_level(T_pyramid)
{
@@ -1480,7 +1480,7 @@ void Detector::match(const std::vector<Mat>& sources, float threshold, std::vect
// Used to filter out weak matches
struct MatchPredicate
{
- MatchPredicate(float threshold) : threshold(threshold) {}
+ MatchPredicate(float _threshold) : threshold(_threshold) {}
bool operator() (const Match& m) { return m.similarity < threshold; }
float threshold;
};
@@ -1554,13 +1554,13 @@ void Detector::matchClass(const LinearMemoryPyramid& lm_pyramid,
int max_x = size.width - tp[start].width - border;
int max_y = size.height - tp[start].height - border;
- std::vector<Mat> similarities(modalities.size());
- Mat total_similarity;
+ std::vector<Mat> similarities2(modalities.size());
+ Mat total_similarity2;
for (int m = 0; m < (int)candidates.size(); ++m)
{
- Match& match = candidates[m];
- int x = match.x * 2 + 1; /// @todo Support other pyramid distance
- int y = match.y * 2 + 1;
+ Match& match2 = candidates[m];
+ int x = match2.x * 2 + 1; /// @todo Support other pyramid distance
+ int y = match2.y * 2 + 1;
// Require 8 (reduced) row/cols to the up/left
x = std::max(x, border);
@@ -1571,22 +1571,22 @@ void Detector::matchClass(const LinearMemoryPyramid& lm_pyramid,
y = std::min(y, max_y);
// Compute local similarity maps for each modality
- int num_features = 0;
+ int numFeatures = 0;
for (int i = 0; i < (int)modalities.size(); ++i)
{
const Template& templ = tp[start + i];
- num_features += static_cast<int>(templ.features.size());
- similarityLocal(lms[i], templ, similarities[i], size, T, Point(x, y));
+ numFeatures += static_cast<int>(templ.features.size());
+ similarityLocal(lms[i], templ, similarities2[i], size, T, Point(x, y));
}
- addSimilarities(similarities, total_similarity);
+ addSimilarities(similarities2, total_similarity2);
// Find best local adjustment
int best_score = 0;
int best_r = -1, best_c = -1;
- for (int r = 0; r < total_similarity.rows; ++r)
+ for (int r = 0; r < total_similarity2.rows; ++r)
{
- ushort* row = total_similarity.ptr<ushort>(r);
- for (int c = 0; c < total_similarity.cols; ++c)
+ ushort* row = total_similarity2.ptr<ushort>(r);
+ for (int c = 0; c < total_similarity2.cols; ++c)
{
int score = row[c];
if (score > best_score)
@@ -1598,9 +1598,9 @@ void Detector::matchClass(const LinearMemoryPyramid& lm_pyramid,
}
}
// Update current match
- match.x = (x / T - 8 + best_c) * T + offset;
- match.y = (y / T - 8 + best_r) * T + offset;
- match.similarity = (best_score * 100.f) / (4 * num_features);
+ match2.x = (x / T - 8 + best_c) * T + offset;
+ match2.y = (y / T - 8 + best_r) * T + offset;
+ match2.similarity = (best_score * 100.f) / (4 * numFeatures);
}
// Filter out any matches that drop below the similarity threshold
@@ -1763,10 +1763,10 @@ void Detector::write(FileStorage& fs) const
tps[template_id].resize(templates_fn.size());
FileNodeIterator templ_it = templates_fn.begin(), templ_it_end = templates_fn.end();
- int i = 0;
+ int idx = 0;
for ( ; templ_it != templ_it_end; ++templ_it)
{
- tps[template_id][i++].read(*templ_it);
+ tps[template_id][idx++].read(*templ_it);
}
}
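The linemod changes above are mechanical renames that stop constructor parameters and locals shadowing members; the public API is untouched. For orientation, a hedged sketch of how this Detector is typically driven (placeholder images; color-gradient modality only):

#include <opencv2/objdetect/objdetect.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <vector>
#include <string>

int main()
{
    cv::Ptr<cv::linemod::Detector> detector = cv::linemod::getDefaultLINE();

    cv::Mat templ = cv::imread("template.png"); // training view (placeholder)
    cv::Mat query = cv::imread("scene.png");    // test image (placeholder)
    if( templ.empty() || query.empty() )
        return 1;

    std::vector<cv::Mat> sources(1, templ);
    int template_id = detector->addTemplate(sources, "object", cv::Mat());

    std::vector<cv::Mat> test_sources(1, query);
    std::vector<cv::linemod::Match> matches;
    detector->match(test_sources, 80.0f /*min similarity, percent*/, matches);
    return template_id >= 0 ? (int)matches.size() : 1;
}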

View File

@@ -53,12 +53,12 @@ using namespace std;
//#define TOTAL_NO_PAIR_E "totalNoPairE"
#define DETECTOR_NAMES "detector_names"
#define DETECTORS "detectors"
#define IMAGE_FILENAMES "image_filenames"
#define VALIDATION "validation"
#define FILENAME "fn"
#define C_SCALE_CASCADE "scale_cascade"
class CV_DetectorTest : public cvtest::BaseTest
{
@@ -68,9 +68,9 @@ protected:
virtual int prepareData( FileStorage& fs );
virtual void run( int startFrom );
virtual string& getValidationFilename();
virtual void readDetector( const FileNode& fn ) = 0;
virtual void writeDetector( FileStorage& fs, int di ) = 0;
int runTestCase( int detectorIdx, vector<vector<Rect> >& objects );
virtual int detectMultiScale( int di, const Mat& img, vector<Rect>& objects ) = 0;
int validate( int detectorIdx, vector<vector<Rect> >& objects );
@@ -118,10 +118,10 @@ int CV_DetectorTest::prepareData( FileStorage& _fs )
FileNodeIterator it = fn[DETECTOR_NAMES].begin();
for( ; it != fn[DETECTOR_NAMES].end(); )
{
- string name;
- it >> name;
- detectorNames.push_back(name);
- readDetector(fn[DETECTORS][name]);
+ string _name;
+ it >> _name;
+ detectorNames.push_back(_name);
+ readDetector(fn[DETECTORS][_name]);
}
}
test_case_count = (int)detectorNames.size();
@@ -175,18 +175,18 @@ void CV_DetectorTest::run( int )
}
validationFS << "]"; // DETECTOR_NAMES
// write detectors
validationFS << DETECTORS << "{";
assert( detectorNames.size() == detectorFilenames.size() );
nit = detectorNames.begin();
for( int di = 0; di < detectorNames.size(), nit != detectorNames.end(); ++nit, di++ )
{
validationFS << *nit << "{";
writeDetector( validationFS, di );
validationFS << "}";
}
validationFS << "}";
// write image filenames
validationFS << IMAGE_FILENAMES << "[";
vector<string>::const_iterator it = imageFilenames.begin();
@@ -252,8 +252,8 @@ int CV_DetectorTest::runTestCase( int detectorIdx, vector<vector<Rect> >& object
return cvtest::TS::FAIL_INVALID_TEST_DATA;
}
int code = detectMultiScale( detectorIdx, image, imgObjects );
if( code != cvtest::TS::OK )
return code;
objects.push_back( imgObjects );
@@ -300,17 +300,17 @@ int CV_DetectorTest::validate( int detectorIdx, vector<vector<Rect> >& objects )
vector<Rect> valRects;
if( node.node->data.seq != 0 )
{
- for( FileNodeIterator it = node.begin(); it != node.end(); )
+ for( FileNodeIterator it2 = node.begin(); it2 != node.end(); )
{
Rect r;
- it >> r.x >> r.y >> r.width >> r.height;
+ it2 >> r.x >> r.y >> r.width >> r.height;
valRects.push_back(r);
}
}
totalValRectCount += (int)valRects.size();
// compare rectangles
vector<uchar> map(valRects.size(), 0);
for( vector<Rect>::const_iterator cr = it->begin();
cr != it->end(); ++cr )
{
@@ -337,10 +337,10 @@ int CV_DetectorTest::validate( int detectorIdx, vector<vector<Rect> >& objects )
{
Rect vr = valRects[minIdx];
if( map[minIdx] != 0 || (minDist > dist) || (abs(cr->width - vr.width) > wDiff) ||
(abs(cr->height - vr.height) > hDiff) )
noPair++;
else
map[minIdx] = 1;
}
}
noPair += (int)count_if( map.begin(), map.end(), isZero );
@@ -371,10 +371,10 @@ class CV_CascadeDetectorTest : public CV_DetectorTest
public:
CV_CascadeDetectorTest();
protected:
virtual void readDetector( const FileNode& fn );
virtual void writeDetector( FileStorage& fs, int di );
virtual int detectMultiScale( int di, const Mat& img, vector<Rect>& objects );
vector<int> flags;
};
CV_CascadeDetectorTest::CV_CascadeDetectorTest()
@@ -384,40 +384,40 @@ CV_CascadeDetectorTest::CV_CascadeDetectorTest()
void CV_CascadeDetectorTest::readDetector( const FileNode& fn )
{
string filename;
int flag;
fn[FILENAME] >> filename;
detectorFilenames.push_back(filename);
fn[C_SCALE_CASCADE] >> flag;
if( flag )
flags.push_back( 0 );
else
flags.push_back( CV_HAAR_SCALE_IMAGE );
}
void CV_CascadeDetectorTest::writeDetector( FileStorage& fs, int di )
{
int sc = flags[di] & CV_HAAR_SCALE_IMAGE ? 0 : 1;
fs << FILENAME << detectorFilenames[di];
fs << C_SCALE_CASCADE << sc;
}
int CV_CascadeDetectorTest::detectMultiScale( int di, const Mat& img,
vector<Rect>& objects)
{
string dataPath = ts->get_data_path(), filename;
filename = dataPath + detectorFilenames[di];
CascadeClassifier cascade( filename );
if( cascade.empty() )
{
ts->printf( cvtest::TS::LOG, "cascade %s can not be opened");
return cvtest::TS::FAIL_INVALID_TEST_DATA;
}
Mat grayImg;
cvtColor( img, grayImg, CV_BGR2GRAY );
equalizeHist( grayImg, grayImg );
cascade.detectMultiScale( grayImg, objects, 1.1, 3, flags[di] );
return cvtest::TS::OK;
}
//----------------------------------------------- HOGDetectorTest -----------------------------------
@@ -426,8 +426,8 @@ class CV_HOGDetectorTest : public CV_DetectorTest
public:
CV_HOGDetectorTest();
protected:
virtual void readDetector( const FileNode& fn );
virtual void writeDetector( FileStorage& fs, int di );
virtual int detectMultiScale( int di, const Mat& img, vector<Rect>& objects );
};
@@ -438,15 +438,15 @@ CV_HOGDetectorTest::CV_HOGDetectorTest()
void CV_HOGDetectorTest::readDetector( const FileNode& fn )
{
string filename;
if( fn[FILENAME].node->data.seq != 0 )
fn[FILENAME] >> filename;
detectorFilenames.push_back( filename);
}
void CV_HOGDetectorTest::writeDetector( FileStorage& fs, int di )
{
fs << FILENAME << detectorFilenames[di];
}
int CV_HOGDetectorTest::detectMultiScale( int di, const Mat& img,
@@ -458,7 +458,7 @@ int CV_HOGDetectorTest::detectMultiScale( int di, const Mat& img,
else
assert(0);
hog.detectMultiScale(img, objects);
return cvtest::TS::OK;
}
TEST(Objdetect_CascadeDetector, regression) { CV_CascadeDetectorTest test; test.safe_run(); }