Remove all using directives for STL namespace and members

Made all STL usages explicit so that all usages of a particular
class or function can be found automatically.
Andrey Kamaev
2013-02-24 20:14:01 +04:00
parent f783f34e0b
commit 2a6fb2867e
310 changed files with 5744 additions and 5964 deletions
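The pattern applied in every file of the diff below is the same: drop the namespace-level using directives and qualify each STL name at its point of use. A minimal before/after sketch (hypothetical file and function names, not taken from this commit) illustrates the change and why it makes usages searchable:

// before.cpp -- hypothetical illustration, not a file from this commit
#include <string>
#include <vector>
using namespace std;   // names below are unqualified, so a text search
                       // for "std::vector" or "std::string" misses them

vector<string> collectNames()
{
    return vector<string>(1, string("cascade"));
}

// after.cpp -- the same code with the directive removed and every STL
// usage written out explicitly, matching the changes in this commit
#include <string>
#include <vector>

std::vector<std::string> collectNames()
{
    return std::vector<std::string>(1, std::string("cascade"));
}

With the second form, a plain text search (or IDE symbol search) for "std::vector" reliably lists every place the class is used, which is what the commit message refers to.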

View File

@@ -132,7 +132,7 @@ public:
};
void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps, vector<int>* weights, vector<double>* levelWeights)
void groupRectangles(std::vector<Rect>& rectList, int groupThreshold, double eps, std::vector<int>* weights, std::vector<double>* levelWeights)
{
if( groupThreshold <= 0 || rectList.empty() )
{
@@ -146,13 +146,13 @@ void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps, vec
return;
}
vector<int> labels;
std::vector<int> labels;
int nclasses = partition(rectList, labels, SimilarRects(eps));
vector<Rect> rrects(nclasses);
vector<int> rweights(nclasses, 0);
vector<int> rejectLevels(nclasses, 0);
vector<double> rejectWeights(nclasses, DBL_MIN);
std::vector<Rect> rrects(nclasses);
std::vector<int> rweights(nclasses, 0);
std::vector<int> rejectLevels(nclasses, 0);
std::vector<double> rejectWeights(nclasses, DBL_MIN);
int i, j, nlabels = (int)labels.size();
for( i = 0; i < nlabels; i++ )
{
@@ -236,8 +236,8 @@ void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps, vec
class MeanshiftGrouping
{
public:
MeanshiftGrouping(const Point3d& densKer, const vector<Point3d>& posV,
const vector<double>& wV, double eps, int maxIter = 20)
MeanshiftGrouping(const Point3d& densKer, const std::vector<Point3d>& posV,
const std::vector<double>& wV, double eps, int maxIter = 20)
{
densityKernel = densKer;
weightsV = wV;
@@ -256,7 +256,7 @@ public:
}
}
void getModes(vector<Point3d>& modesV, vector<double>& resWeightsV, const double eps)
void getModes(std::vector<Point3d>& modesV, std::vector<double>& resWeightsV, const double eps)
{
for (size_t i=0; i <distanceV.size(); i++)
{
@@ -284,14 +284,14 @@ public:
}
protected:
vector<Point3d> positionsV;
vector<double> weightsV;
std::vector<Point3d> positionsV;
std::vector<double> weightsV;
Point3d densityKernel;
int positionsCount;
vector<Point3d> meanshiftV;
vector<Point3d> distanceV;
std::vector<Point3d> meanshiftV;
std::vector<Point3d> distanceV;
int iterMax;
double modeEps;
@@ -305,8 +305,8 @@ protected:
Point3d bPt = inPt;
Point3d sPt = densityKernel;
sPt.x *= exp(aPt.z);
sPt.y *= exp(aPt.z);
sPt.x *= std::exp(aPt.z);
sPt.y *= std::exp(aPt.z);
aPt.x /= sPt.x;
aPt.y /= sPt.y;
@@ -338,8 +338,8 @@ protected:
Point3d aPt = positionsV[i];
Point3d sPt = densityKernel;
sPt.x *= exp(aPt.z);
sPt.y *= exp(aPt.z);
sPt.x *= std::exp(aPt.z);
sPt.y *= std::exp(aPt.z);
aPt -= inPt;
@@ -370,8 +370,8 @@ protected:
double getDistance(Point3d p1, Point3d p2) const
{
Point3d ns = densityKernel;
ns.x *= exp(p2.z);
ns.y *= exp(p2.z);
ns.x *= std::exp(p2.z);
ns.y *= std::exp(p2.z);
p2 -= p1;
p2.x /= ns.x;
p2.y /= ns.y;
@@ -380,12 +380,12 @@ protected:
}
};
// new grouping function using mean shift
static void groupRectangles_meanshift(vector<Rect>& rectList, double detectThreshold, vector<double>* foundWeights,
vector<double>& scales, Size winDetSize)
static void groupRectangles_meanshift(std::vector<Rect>& rectList, double detectThreshold, std::vector<double>* foundWeights,
std::vector<double>& scales, Size winDetSize)
{
int detectionCount = (int)rectList.size();
vector<Point3d> hits(detectionCount), resultHits;
vector<double> hitWeights(detectionCount), resultWeights;
std::vector<Point3d> hits(detectionCount), resultHits;
std::vector<double> hitWeights(detectionCount), resultWeights;
Point2d hitCenter;
for (int i=0; i < detectionCount; i++)
@@ -409,7 +409,7 @@ static void groupRectangles_meanshift(vector<Rect>& rectList, double detectThres
for (unsigned i=0; i < resultHits.size(); ++i)
{
double scale = exp(resultHits[i].z);
double scale = std::exp(resultHits[i].z);
hitCenter.x = resultHits[i].x;
hitCenter.y = resultHits[i].y;
Size s( int(winDetSize.width * scale), int(winDetSize.height * scale) );
@@ -424,23 +424,23 @@ static void groupRectangles_meanshift(vector<Rect>& rectList, double detectThres
}
}
void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps)
void groupRectangles(std::vector<Rect>& rectList, int groupThreshold, double eps)
{
groupRectangles(rectList, groupThreshold, eps, 0, 0);
}
void groupRectangles(vector<Rect>& rectList, vector<int>& weights, int groupThreshold, double eps)
void groupRectangles(std::vector<Rect>& rectList, std::vector<int>& weights, int groupThreshold, double eps)
{
groupRectangles(rectList, groupThreshold, eps, &weights, 0);
}
// used by the cascade detection algorithm for ROC-curve calculation
void groupRectangles(vector<Rect>& rectList, vector<int>& rejectLevels, vector<double>& levelWeights, int groupThreshold, double eps)
void groupRectangles(std::vector<Rect>& rectList, std::vector<int>& rejectLevels, std::vector<double>& levelWeights, int groupThreshold, double eps)
{
groupRectangles(rectList, groupThreshold, eps, &rejectLevels, &levelWeights);
}
// can be used for the HOG detection algorithm only
void groupRectangles_meanshift(vector<Rect>& rectList, vector<double>& foundWeights,
vector<double>& foundScales, double detectThreshold, Size winDetSize)
void groupRectangles_meanshift(std::vector<Rect>& rectList, std::vector<double>& foundWeights,
std::vector<double>& foundScales, double detectThreshold, Size winDetSize)
{
groupRectangles_meanshift(rectList, detectThreshold, &foundWeights, foundScales, winDetSize);
}
@@ -483,7 +483,7 @@ bool HaarEvaluator::Feature :: read( const FileNode& node )
HaarEvaluator::HaarEvaluator()
{
features = new vector<Feature>();
features = new std::vector<Feature>();
}
HaarEvaluator::~HaarEvaluator()
{
@@ -578,7 +578,7 @@ bool HaarEvaluator::setWindow( Point pt )
double nf = (double)normrect.area() * valsqsum - (double)valsum * valsum;
if( nf > 0. )
nf = sqrt(nf);
nf = std::sqrt(nf);
else
nf = 1.;
varianceNormFactor = 1./nf;
@@ -598,7 +598,7 @@ bool LBPEvaluator::Feature :: read(const FileNode& node )
LBPEvaluator::LBPEvaluator()
{
features = new vector<Feature>();
features = new std::vector<Feature>();
}
LBPEvaluator::~LBPEvaluator()
{
@@ -678,7 +678,7 @@ bool HOGEvaluator::Feature :: read( const FileNode& node )
HOGEvaluator::HOGEvaluator()
{
features = new vector<Feature>();
features = new std::vector<Feature>();
}
HOGEvaluator::~HOGEvaluator()
@@ -745,7 +745,7 @@ bool HOGEvaluator::setWindow(Point pt)
return true;
}
void HOGEvaluator::integralHistogram(const Mat &img, vector<Mat> &histogram, Mat &norm, int nbins) const
void HOGEvaluator::integralHistogram(const Mat &img, std::vector<Mat> &histogram, Mat &norm, int nbins) const
{
CV_Assert( img.type() == CV_8U || img.type() == CV_8UC3 );
int x, y, binIdx;
@@ -854,7 +854,7 @@ CascadeClassifier::CascadeClassifier()
{
}
CascadeClassifier::CascadeClassifier(const string& filename)
CascadeClassifier::CascadeClassifier(const std::string& filename)
{
load(filename);
}
@@ -868,7 +868,7 @@ bool CascadeClassifier::empty() const
return oldCascade.empty() && data.stages.empty();
}
bool CascadeClassifier::load(const string& filename)
bool CascadeClassifier::load(const std::string& filename)
{
oldCascade.release();
data = Data();
@@ -948,7 +948,7 @@ class CascadeClassifierInvoker : public ParallelLoopBody
{
public:
CascadeClassifierInvoker( CascadeClassifier& _cc, Size _sz1, int _stripSize, int _yStep, double _factor,
vector<Rect>& _vec, vector<int>& _levels, vector<double>& _weights, bool outputLevels, const Mat& _mask, Mutex* _mtx)
std::vector<Rect>& _vec, std::vector<int>& _levels, std::vector<double>& _weights, bool outputLevels, const Mat& _mask, Mutex* _mtx)
{
classifier = &_cc;
processingRectSize = _sz1;
@@ -969,7 +969,7 @@ public:
Size winSize(cvRound(classifier->data.origWinSize.width * scalingFactor), cvRound(classifier->data.origWinSize.height * scalingFactor));
int y1 = range.start * stripSize;
int y2 = min(range.end * stripSize, processingRectSize.height);
int y2 = std::min(range.end * stripSize, processingRectSize.height);
for( int y = y1; y < y2; y += yStep )
{
for( int x = 0; x < processingRectSize.width; x += yStep )
@@ -1012,12 +1012,12 @@ public:
}
CascadeClassifier* classifier;
vector<Rect>* rectangles;
std::vector<Rect>* rectangles;
Size processingRectSize;
int stripSize, yStep;
double scalingFactor;
vector<int> *rejectLevels;
vector<double> *levelWeights;
std::vector<int> *rejectLevels;
std::vector<double> *levelWeights;
Mat mask;
Mutex* mtx;
};
@@ -1026,8 +1026,8 @@ struct getRect { Rect operator ()(const CvAvgComp& e) const { return e.rect; } }
bool CascadeClassifier::detectSingleScale( const Mat& image, int stripCount, Size processingRectSize,
int stripSize, int yStep, double factor, vector<Rect>& candidates,
vector<int>& levels, vector<double>& weights, bool outputRejectLevels )
int stripSize, int yStep, double factor, std::vector<Rect>& candidates,
std::vector<int>& levels, std::vector<double>& weights, bool outputRejectLevels )
{
if( !featureEvaluator->setImage( image, data.origWinSize ) )
return false;
@@ -1041,9 +1041,9 @@ bool CascadeClassifier::detectSingleScale( const Mat& image, int stripCount, Siz
currentMask=maskGenerator->generateMask(image);
}
vector<Rect> candidatesVector;
vector<int> rejectLevels;
vector<double> levelWeights;
std::vector<Rect> candidatesVector;
std::vector<int> rejectLevels;
std::vector<double> levelWeights;
Mutex mtx;
if( outputRejectLevels )
{
@@ -1087,9 +1087,9 @@ bool CascadeClassifier::setImage(const Mat& image)
return featureEvaluator->setImage(image, data.origWinSize);
}
void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects,
vector<int>& rejectLevels,
vector<double>& levelWeights,
void CascadeClassifier::detectMultiScale( const Mat& image, std::vector<Rect>& objects,
std::vector<int>& rejectLevels,
std::vector<double>& levelWeights,
double scaleFactor, int minNeighbors,
int flags, Size minObjectSize, Size maxObjectSize,
bool outputRejectLevels )
@@ -1107,7 +1107,7 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& object
CvMat _image = image;
CvSeq* _objects = cvHaarDetectObjectsForROC( &_image, oldCascade, storage, rejectLevels, levelWeights, scaleFactor,
minNeighbors, flags, minObjectSize, maxObjectSize, outputRejectLevels );
vector<CvAvgComp> vecAvgComp;
std::vector<CvAvgComp> vecAvgComp;
Seq<CvAvgComp>(_objects).copyTo(vecAvgComp);
objects.resize(vecAvgComp.size());
std::transform(vecAvgComp.begin(), vecAvgComp.end(), objects.begin(), getRect());
@@ -1133,7 +1133,7 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& object
}
Mat imageBuffer(image.rows + 1, image.cols + 1, CV_8U);
vector<Rect> candidates;
std::vector<Rect> candidates;
for( double factor = 1; ; factor *= scaleFactor )
{
@@ -1194,12 +1194,12 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& object
}
}
void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects,
void CascadeClassifier::detectMultiScale( const Mat& image, std::vector<Rect>& objects,
double scaleFactor, int minNeighbors,
int flags, Size minObjectSize, Size maxObjectSize)
{
vector<int> fakeLevels;
vector<double> fakeWeights;
std::vector<int> fakeLevels;
std::vector<double> fakeWeights;
detectMultiScale( image, objects, fakeLevels, fakeWeights, scaleFactor,
minNeighbors, flags, minObjectSize, maxObjectSize, false );
}
@@ -1209,13 +1209,13 @@ bool CascadeClassifier::Data::read(const FileNode &root)
static const float THRESHOLD_EPS = 1e-5f;
// load stage params
string stageTypeStr = (string)root[CC_STAGE_TYPE];
std::string stageTypeStr = (std::string)root[CC_STAGE_TYPE];
if( stageTypeStr == CC_BOOST )
stageType = BOOST;
else
return false;
string featureTypeStr = (string)root[CC_FEATURE_TYPE];
std::string featureTypeStr = (std::string)root[CC_FEATURE_TYPE];
if( featureTypeStr == CC_HAAR )
featureType = FeatureEvaluator::HAAR;
else if( featureTypeStr == CC_LBP )

View File

@@ -103,7 +103,7 @@ public:
protected:
Size origWinSize;
Ptr<vector<Feature> > features;
Ptr<std::vector<Feature> > features;
Feature* featuresPtr; // optimization
bool hasTiltedFeatures;
@@ -194,7 +194,7 @@ public:
{ return (*this)(featureIdx); }
protected:
Size origWinSize;
Ptr<vector<Feature> > features;
Ptr<std::vector<Feature> > features;
Feature* featuresPtr; // optimization
Mat sum0, sum;
Rect normrect;
@@ -247,7 +247,7 @@ public:
{
Feature();
float calc( int offset ) const;
void updatePtrs( const vector<Mat>& _hist, const Mat &_normSum );
void updatePtrs( const std::vector<Mat>& _hist, const Mat &_normSum );
bool read( const FileNode& node );
enum { CELL_NUM = 4, BIN_NUM = 9 };
@@ -274,12 +274,12 @@ public:
}
private:
virtual void integralHistogram( const Mat& srcImage, vector<Mat> &histogram, Mat &norm, int nbins ) const;
virtual void integralHistogram( const Mat& srcImage, std::vector<Mat> &histogram, Mat &norm, int nbins ) const;
Size origWinSize;
Ptr<vector<Feature> > features;
Ptr<std::vector<Feature> > features;
Feature* featuresPtr;
vector<Mat> hist;
std::vector<Mat> hist;
Mat normSum;
int offset;
};
@@ -300,7 +300,7 @@ inline float HOGEvaluator::Feature :: calc( int _offset ) const
return res;
}
inline void HOGEvaluator::Feature :: updatePtrs( const vector<Mat> &_hist, const Mat &_normSum )
inline void HOGEvaluator::Feature :: updatePtrs( const std::vector<Mat> &_hist, const Mat &_normSum )
{
int binIdx = featComponent % BIN_NUM;
int cellIdx = featComponent / BIN_NUM;

View File

@@ -1,17 +1,8 @@
#include "precomp.hpp"
#if CV_SSE2
#include <xmmintrin.h>
#endif
#include <deque>
#include <algorithm>
using namespace std;
#undef NDEBUG
#include <assert.h>
class Sampler {
public:
CvMat *im;
@@ -310,7 +301,7 @@ static int decode(Sampler &sa, code &cc)
}
}
static deque<CvPoint> trailto(CvMat *v, int x, int y, CvMat *terminal)
static std::deque<CvPoint> trailto(CvMat *v, int x, int y, CvMat *terminal)
{
CvPoint np;
/* Return the last 10th of the trail of points following v from (x,y)
@@ -319,7 +310,7 @@ static deque<CvPoint> trailto(CvMat *v, int x, int y, CvMat *terminal)
int ex = x + ((short*)cvPtr2D(terminal, y, x))[0];
int ey = y + ((short*)cvPtr2D(terminal, y, x))[1];
deque<CvPoint> r;
std::deque<CvPoint> r;
while ((x != ex) || (y != ey)) {
np.x = x;
np.y = y;
@@ -338,7 +329,7 @@ static deque<CvPoint> trailto(CvMat *v, int x, int y, CvMat *terminal)
}
#endif
deque <CvDataMatrixCode> cvFindDataMatrix(CvMat *im)
std::deque <CvDataMatrixCode> cvFindDataMatrix(CvMat *im)
{
#if CV_SSE2
int r = im->rows;
@@ -386,7 +377,7 @@ deque <CvDataMatrixCode> cvFindDataMatrix(CvMat *im)
cfollow(vc, cxy);
cfollow(vcc, ccxy);
deque <CvPoint> candidates;
std::deque <CvPoint> candidates;
{
int x, y;
int rows = cxy->rows;
@@ -437,13 +428,13 @@ deque <CvDataMatrixCode> cvFindDataMatrix(CvMat *im)
}
}
deque <code> codes;
std::deque <code> codes;
size_t i, j, k;
while (!candidates.empty()) {
CvPoint o = candidates.front();
candidates.pop_front();
deque<CvPoint> ptc = trailto(vc, o.x, o.y, cxy);
deque<CvPoint> ptcc = trailto(vcc, o.x, o.y, ccxy);
std::deque<CvPoint> ptc = trailto(vc, o.x, o.y, cxy);
std::deque<CvPoint> ptcc = trailto(vcc, o.x, o.y, ccxy);
for (j = 0; j < ptc.size(); j++) {
for (k = 0; k < ptcc.size(); k++) {
code cc;
@@ -476,7 +467,7 @@ endo: ; // end search for this o
cvReleaseMat(&cxy);
cvReleaseMat(&ccxy);
deque <CvDataMatrixCode> rc;
std::deque <CvDataMatrixCode> rc;
for (i = 0; i < codes.size(); i++) {
CvDataMatrixCode cc;
strcpy(cc.msg, codes[i].msg);
@@ -487,7 +478,7 @@ endo: ; // end search for this o
return rc;
#else
(void)im;
deque <CvDataMatrixCode> rc;
std::deque <CvDataMatrixCode> rc;
return rc;
#endif
}
@@ -498,13 +489,13 @@ namespace cv
{
void findDataMatrix(InputArray _image,
vector<string>& codes,
std::vector<std::string>& codes,
OutputArray _corners,
OutputArrayOfArrays _dmtx)
{
Mat image = _image.getMat();
CvMat m(image);
deque <CvDataMatrixCode> rc = cvFindDataMatrix(&m);
std::deque <CvDataMatrixCode> rc = cvFindDataMatrix(&m);
int i, n = (int)rc.size();
Mat corners;
@@ -522,7 +513,7 @@ void findDataMatrix(InputArray _image,
for( i = 0; i < n; i++ )
{
CvDataMatrixCode& rc_i = rc[i];
codes[i] = string(rc_i.msg);
codes[i] = std::string(rc_i.msg);
if( corners.data )
{
@@ -544,7 +535,7 @@ void findDataMatrix(InputArray _image,
}
void drawDataMatrixCodes(InputOutputArray _image,
const vector<string>& codes,
const std::vector<std::string>& codes,
InputArray _corners)
{
Mat image = _image.getMat();

View File

@@ -854,7 +854,7 @@ cvRunHaarClassifierCascadeSum( const CvHaarClassifierCascade* _cascade,
cascade->pq2[pq_offset] + cascade->pq3[pq_offset];
variance_norm_factor = variance_norm_factor*cascade->inv_window_area - mean*mean;
if( variance_norm_factor >= 0. )
variance_norm_factor = sqrt(variance_norm_factor);
variance_norm_factor = std::sqrt(variance_norm_factor);
else
variance_norm_factor = 1.;
@@ -1305,7 +1305,7 @@ public:
{
Size winSize0 = cascade->orig_window_size;
Size winSize(cvRound(winSize0.width*factor), cvRound(winSize0.height*factor));
int y1 = range.start*stripSize, y2 = min(range.end*stripSize, sum1.rows - 1 - winSize0.height);
int y1 = range.start*stripSize, y2 = std::min(range.end*stripSize, sum1.rows - 1 - winSize0.height);
if (y2 <= y1 || sum1.cols <= 1 + winSize0.width)
return;

View File

@@ -120,7 +120,7 @@ bool HOGDescriptor::read(FileNode& obj)
return true;
}
void HOGDescriptor::write(FileStorage& fs, const String& objName) const
void HOGDescriptor::write(FileStorage& fs, const std::string& objName) const
{
if( !objName.empty() )
fs << objName;
@@ -142,14 +142,14 @@ void HOGDescriptor::write(FileStorage& fs, const String& objName) const
fs << "}";
}
bool HOGDescriptor::load(const String& filename, const String& objname)
bool HOGDescriptor::load(const std::string& filename, const std::string& objname)
{
FileStorage fs(filename, FileStorage::READ);
FileNode obj = !objname.empty() ? fs[objname] : fs.getFirstTopLevelNode();
return read(obj);
}
void HOGDescriptor::save(const String& filename, const String& objName) const
void HOGDescriptor::save(const std::string& filename, const std::string& objName) const
{
FileStorage fs(filename, FileStorage::WRITE);
write(fs, !objName.empty() ? objName : FileStorage::getDefaultObjectName(filename));
@@ -409,11 +409,11 @@ struct HOGCache
const float* getBlock(Point pt, float* buf);
virtual void normalizeBlockHistogram(float* histogram) const;
vector<PixData> pixData;
vector<BlockData> blockData;
std::vector<PixData> pixData;
std::vector<BlockData> blockData;
bool useCache;
vector<int> ymaxCached;
std::vector<int> ymaxCached;
Size winSize, cacheStride;
Size nblocks, ncells;
int blockHistogramSize;
@@ -791,9 +791,9 @@ Rect HOGCache::getWindow(Size imageSize, Size winStride, int idx) const
}
void HOGDescriptor::compute(const Mat& img, vector<float>& descriptors,
void HOGDescriptor::compute(const Mat& img, std::vector<float>& descriptors,
Size winStride, Size padding,
const vector<Point>& locations) const
const std::vector<Point>& locations) const
{
if( winStride == Size() )
winStride = cellSize;
@@ -854,8 +854,8 @@ void HOGDescriptor::compute(const Mat& img, vector<float>& descriptors,
void HOGDescriptor::detect(const Mat& img,
vector<Point>& hits, vector<double>& weights, double hitThreshold,
Size winStride, Size padding, const vector<Point>& locations) const
std::vector<Point>& hits, std::vector<double>& weights, double hitThreshold,
Size winStride, Size padding, const std::vector<Point>& locations) const
{
hits.clear();
if( svmDetector.empty() )
@@ -882,7 +882,7 @@ void HOGDescriptor::detect(const Mat& img,
size_t dsize = getDescriptorSize();
double rho = svmDetector.size() > dsize ? svmDetector[dsize] : 0;
vector<float> blockHist(blockHistogramSize);
std::vector<float> blockHist(blockHistogramSize);
for( size_t i = 0; i < nwindows; i++ )
{
@@ -932,10 +932,10 @@ void HOGDescriptor::detect(const Mat& img,
}
}
void HOGDescriptor::detect(const Mat& img, vector<Point>& hits, double hitThreshold,
Size winStride, Size padding, const vector<Point>& locations) const
void HOGDescriptor::detect(const Mat& img, std::vector<Point>& hits, double hitThreshold,
Size winStride, Size padding, const std::vector<Point>& locations) const
{
vector<double> weightsV;
std::vector<double> weightsV;
detect(img, hits, weightsV, hitThreshold, winStride, padding, locations);
}
@@ -965,8 +965,8 @@ public:
double minScale = i1 > 0 ? levelScale[i1] : i2 > 1 ? levelScale[i1+1] : std::max(img.cols, img.rows);
Size maxSz(cvCeil(img.cols/minScale), cvCeil(img.rows/minScale));
Mat smallerImgBuf(maxSz, img.type());
vector<Point> locations;
vector<double> hitsWeights;
std::vector<Point> locations;
std::vector<double> hitsWeights;
for( i = i1; i < i2; i++ )
{
@@ -1019,14 +1019,14 @@ public:
void HOGDescriptor::detectMultiScale(
const Mat& img, vector<Rect>& foundLocations, vector<double>& foundWeights,
const Mat& img, std::vector<Rect>& foundLocations, std::vector<double>& foundWeights,
double hitThreshold, Size winStride, Size padding,
double scale0, double finalThreshold, bool useMeanshiftGrouping) const
{
double scale = 1.;
int levels = 0;
vector<double> levelScale;
std::vector<double> levelScale;
for( levels = 0; levels < nlevels; levels++ )
{
levelScale.push_back(scale);
@@ -1064,11 +1064,11 @@ void HOGDescriptor::detectMultiScale(
}
}
void HOGDescriptor::detectMultiScale(const Mat& img, vector<Rect>& foundLocations,
void HOGDescriptor::detectMultiScale(const Mat& img, std::vector<Rect>& foundLocations,
double hitThreshold, Size winStride, Size padding,
double scale0, double finalThreshold, bool useMeanshiftGrouping) const
{
vector<double> foundWeights;
std::vector<double> foundWeights;
detectMultiScale(img, foundLocations, foundWeights, hitThreshold, winStride,
padding, scale0, finalThreshold, useMeanshiftGrouping);
}
@@ -1078,7 +1078,7 @@ typedef RTTIImpl<HOGDescriptor> HOGRTTI;
CvType hog_type( CV_TYPE_NAME_HOG_DESCRIPTOR, HOGRTTI::isInstance,
HOGRTTI::release, HOGRTTI::read, HOGRTTI::write, HOGRTTI::clone);
vector<float> HOGDescriptor::getDefaultPeopleDetector()
std::vector<float> HOGDescriptor::getDefaultPeopleDetector()
{
static const float detector[] = {
0.05359386f, -0.14721455f, -0.05532170f, 0.05077307f,
@@ -1886,11 +1886,11 @@ vector<float> HOGDescriptor::getDefaultPeopleDetector()
-0.01612278f, -1.46097376e-003f, 0.14013411f, -8.96181818e-003f,
-0.03250246f, 3.38630192e-003f, 2.64779478e-003f, 0.03359732f,
-0.02411991f, -0.04229729f, 0.10666174f, -6.66579151f };
return vector<float>(detector, detector + sizeof(detector)/sizeof(detector[0]));
return std::vector<float>(detector, detector + sizeof(detector)/sizeof(detector[0]));
}
//This function returns 1981 SVM coeffs obtained from Daimler's base.
//To use these coeffs the detection window size should be (48,96)
vector<float> HOGDescriptor::getDaimlerPeopleDetector()
std::vector<float> HOGDescriptor::getDaimlerPeopleDetector()
{
static const float detector[] = {
0.294350f, -0.098796f, -0.129522f, 0.078753f,
@@ -2389,7 +2389,7 @@ vector<float> HOGDescriptor::getDaimlerPeopleDetector()
-0.025054f, -0.093026f, -0.035372f, -0.233209f,
-0.049869f, -0.039151f, -0.022279f, -0.065380f,
-9.063785f};
return vector<float>(detector, detector + sizeof(detector)/sizeof(detector[0]));
return std::vector<float>(detector, detector + sizeof(detector)/sizeof(detector[0]));
}
class HOGConfInvoker : public ParallelLoopBody
@@ -2415,7 +2415,7 @@ public:
Size maxSz(cvCeil(img.cols/(*locations)[0].scale), cvCeil(img.rows/(*locations)[0].scale));
Mat smallerImgBuf(maxSz, img.type());
vector<Point> dets;
std::vector<Point> dets;
for( i = i1; i < i2; i++ )
{
@@ -2451,7 +2451,7 @@ public:
Mutex* mtx;
};
void HOGDescriptor::detectROI(const cv::Mat& img, const vector<cv::Point> &locations,
void HOGDescriptor::detectROI(const cv::Mat& img, const std::vector<cv::Point> &locations,
CV_OUT std::vector<cv::Point>& foundLocations, CV_OUT std::vector<double>& confidences,
double hitThreshold, cv::Size winStride,
cv::Size padding) const
@@ -2489,7 +2489,7 @@ void HOGDescriptor::detectROI(const cv::Mat& img, const vector<cv::Point> &locat
size_t dsize = getDescriptorSize();
double rho = svmDetector.size() > dsize ? svmDetector[dsize] : 0;
vector<float> blockHist(blockHistogramSize);
std::vector<float> blockHist(blockHistogramSize);
for( size_t i = 0; i < nwindows; i++ )
{

View File

@@ -158,7 +158,7 @@ LatentSvmDetector::ObjectDetection::ObjectDetection( const Rect& _rect, float _s
LatentSvmDetector::LatentSvmDetector()
{}
LatentSvmDetector::LatentSvmDetector( const vector<string>& filenames, const vector<string>& _classNames )
LatentSvmDetector::LatentSvmDetector( const std::vector<std::string>& filenames, const std::vector<std::string>& _classNames )
{
load( filenames, _classNames );
}
@@ -182,7 +182,7 @@ bool LatentSvmDetector::empty() const
return detectors.empty();
}
const vector<string>& LatentSvmDetector::getClassNames() const
const std::vector<std::string>& LatentSvmDetector::getClassNames() const
{
return classNames;
}
@@ -192,13 +192,13 @@ size_t LatentSvmDetector::getClassCount() const
return classNames.size();
}
static string extractModelName( const string& filename )
static std::string extractModelName( const std::string& filename )
{
size_t startPos = filename.rfind('/');
if( startPos == string::npos )
if( startPos == std::string::npos )
startPos = filename.rfind('\\');
if( startPos == string::npos )
if( startPos == std::string::npos )
startPos = 0;
else
startPos++;
@@ -210,7 +210,7 @@ static string extractModelName( const string& filename )
return filename.substr(startPos, substrLength);
}
bool LatentSvmDetector::load( const vector<string>& filenames, const vector<string>& _classNames )
bool LatentSvmDetector::load( const std::vector<std::string>& filenames, const std::vector<std::string>& _classNames )
{
clear();
@@ -218,7 +218,7 @@ bool LatentSvmDetector::load( const vector<string>& filenames, const vector<stri
for( size_t i = 0; i < filenames.size(); i++ )
{
const string filename = filenames[i];
const std::string filename = filenames[i];
if( filename.length() < 5 || filename.substr(filename.length()-4, 4) != ".xml" )
continue;
@@ -239,7 +239,7 @@ bool LatentSvmDetector::load( const vector<string>& filenames, const vector<stri
}
void LatentSvmDetector::detect( const Mat& image,
vector<ObjectDetection>& objectDetections,
std::vector<ObjectDetection>& objectDetections,
float overlapThreshold,
int numThreads )
{