Boring changes - objdetect.
parent de6a934f66
commit b12894d95a
@@ -141,7 +141,7 @@ public:
     static Ptr<FeatureEvaluator> create(int type);
 };
 
-template<> CV_EXPORTS void Ptr<CvHaarClassifierCascade>::delete_obj();
+template<> CV_EXPORTS void DefaultDeleter<CvHaarClassifierCascade>::operator ()(CvHaarClassifierCascade* obj) const;
 
 enum { CASCADE_DO_CANNY_PRUNING = 1,
        CASCADE_SCALE_IMAGE = 2,
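
Aside (an editorial illustration, not part of the commit): the old cv::Ptr carried its own cleanup hook, so legacy C structs were handled by specializing Ptr<T>::delete_obj(). The new design moves that hook into a standalone DefaultDeleter<T> functor, which Ptr picks up automatically when it takes ownership of a raw pointer. A minimal sketch of the new pattern, using a hypothetical legacy type CvFoo and release function cvReleaseFoo as stand-ins for CvHaarClassifierCascade and cvReleaseHaarClassifierCascade:

    #include <opencv2/core.hpp>

    // Hypothetical legacy C struct with its own release function.
    struct CvFoo { int dummy; };
    inline void cvReleaseFoo(CvFoo** obj) { delete *obj; *obj = 0; }

    namespace cv {
    // Specializing DefaultDeleter teaches Ptr<CvFoo> how to destroy the
    // object when the last reference goes away.
    template<> struct DefaultDeleter<CvFoo>
    {
        void operator()(CvFoo* obj) const { cvReleaseFoo(&obj); }
    };
    }

    int main()
    {
        cv::Ptr<CvFoo> p(new CvFoo()); // cvReleaseFoo runs when p is destroyed
        return 0;
    }
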
@@ -171,7 +171,7 @@ public:
     \param nonMaxSuppression Whenever non-maximum suppression is done over the branch probabilities
     \param minProbability The minimum probability difference between local maxima and local minima ERs
 */
-CV_EXPORTS Ptr<ERFilter> createERFilterNM1(const Ptr<ERFilter::Callback>& cb = NULL,
+CV_EXPORTS Ptr<ERFilter> createERFilterNM1(const Ptr<ERFilter::Callback>& cb = Ptr<ERFilter::Callback>(),
                                            int thresholdDelta = 1, float minArea = 0.000025,
                                            float maxArea = 0.13, float minProbability = 0.2,
                                            bool nonMaxSuppression = true,
@@ -190,7 +190,7 @@ CV_EXPORTS Ptr<ERFilter> createERFilterNM1(const Ptr<ERFilter::Callback>& cb = NULL,
               if omitted tries to load a default classifier from file trained_classifierNM2.xml
     \param minProbability The minimum probability P(er|character) allowed for retreived ER's
 */
-CV_EXPORTS Ptr<ERFilter> createERFilterNM2(const Ptr<ERFilter::Callback>& cb = NULL,
+CV_EXPORTS Ptr<ERFilter> createERFilterNM2(const Ptr<ERFilter::Callback>& cb = Ptr<ERFilter::Callback>(),
                                            float minProbability = 0.85);
 
 }
@@ -467,7 +467,7 @@ bool HaarEvaluator::Feature :: read( const FileNode& node )
 
 HaarEvaluator::HaarEvaluator()
 {
-    features = new std::vector<Feature>();
+    features = makePtr<std::vector<Feature> >();
 }
 HaarEvaluator::~HaarEvaluator()
 {
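
The features = makePtr<...>() lines above follow the commit's general recipe: replace assignment from a raw new with makePtr<T>(), which allocates the object together with its reference count and never exposes a raw owning pointer. A small self-contained sketch (illustrative only, not from the commit):

    #include <opencv2/core.hpp>
    #include <vector>

    int main()
    {
        // Before: Ptr<std::vector<int> > v = new std::vector<int>();
        // After: a single allocation, and no raw new to leak on error paths.
        cv::Ptr<std::vector<int> > v = cv::makePtr<std::vector<int> >();
        v->push_back(42);
        return (*v)[0] == 42 ? 0 : 1;
    }
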
@@ -492,7 +492,7 @@ bool HaarEvaluator::read(const FileNode& node)
 
 Ptr<FeatureEvaluator> HaarEvaluator::clone() const
 {
-    HaarEvaluator* ret = new HaarEvaluator;
+    Ptr<HaarEvaluator> ret = makePtr<HaarEvaluator>();
     ret->origWinSize = origWinSize;
     ret->features = features;
     ret->featuresPtr = &(*ret->features)[0];
@@ -582,7 +582,7 @@ bool LBPEvaluator::Feature :: read(const FileNode& node )
 
 LBPEvaluator::LBPEvaluator()
 {
-    features = new std::vector<Feature>();
+    features = makePtr<std::vector<Feature> >();
 }
 LBPEvaluator::~LBPEvaluator()
 {
@@ -603,7 +603,7 @@ bool LBPEvaluator::read( const FileNode& node )
 
 Ptr<FeatureEvaluator> LBPEvaluator::clone() const
 {
-    LBPEvaluator* ret = new LBPEvaluator;
+    Ptr<LBPEvaluator> ret = makePtr<LBPEvaluator>();
     ret->origWinSize = origWinSize;
     ret->features = features;
     ret->featuresPtr = &(*ret->features)[0];
@@ -662,7 +662,7 @@ bool HOGEvaluator::Feature :: read( const FileNode& node )
 
 HOGEvaluator::HOGEvaluator()
 {
-    features = new std::vector<Feature>();
+    features = makePtr<std::vector<Feature> >();
 }
 
 HOGEvaluator::~HOGEvaluator()
@@ -684,7 +684,7 @@ bool HOGEvaluator::read( const FileNode& node )
 
 Ptr<FeatureEvaluator> HOGEvaluator::clone() const
 {
-    HOGEvaluator* ret = new HOGEvaluator;
+    Ptr<HOGEvaluator> ret = makePtr<HOGEvaluator>();
     ret->origWinSize = origWinSize;
     ret->features = features;
     ret->featuresPtr = &(*ret->features)[0];
@@ -849,7 +849,7 @@ CascadeClassifier::~CascadeClassifier()
 
 bool CascadeClassifier::empty() const
 {
-    return oldCascade.empty() && data.stages.empty();
+    return !oldCascade && data.stages.empty();
 }
 
 bool CascadeClassifier::load(const String& filename)
@@ -867,13 +867,13 @@ bool CascadeClassifier::load(const String& filename)
 
     fs.release();
 
-    oldCascade = Ptr<CvHaarClassifierCascade>((CvHaarClassifierCascade*)cvLoad(filename.c_str(), 0, 0, 0));
+    oldCascade.reset((CvHaarClassifierCascade*)cvLoad(filename.c_str(), 0, 0, 0));
     return !oldCascade.empty();
 }
 
 int CascadeClassifier::runAt( Ptr<FeatureEvaluator>& evaluator, Point pt, double& weight )
 {
-    CV_Assert( oldCascade.empty() );
+    CV_Assert( !oldCascade );
 
     assert( data.featureType == FeatureEvaluator::HAAR ||
             data.featureType == FeatureEvaluator::LBP ||
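
Two more recurring substitutions show up in this hunk (editorial note, not from the commit): Ptr<T>::reset(p) releases whatever the pointer currently holds and adopts the raw pointer p, and emptiness is now tested through the pointer's boolean conversion (CV_Assert( !oldCascade )) rather than .empty(). A hedged sketch with a hypothetical Widget type:

    #include <opencv2/core.hpp>

    struct Widget { int id; explicit Widget(int i) : id(i) {} };

    int main()
    {
        cv::Ptr<Widget> w;       // default-constructed: holds nothing
        if (!w) { }              // empty, the check CV_Assert( !oldCascade ) uses above
        w.reset(new Widget(1));  // adopt a raw pointer, as oldCascade.reset(...) does
        return (w && w->id == 1) ? 0 : 1;
    }
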
@@ -1022,7 +1022,7 @@ bool CascadeClassifier::detectSingleScale( const Mat& image, int stripCount, Siz
 #endif
 
     Mat currentMask;
-    if (!maskGenerator.empty()) {
+    if (maskGenerator) {
         currentMask=maskGenerator->generateMask(image);
     }
 
@@ -1097,7 +1097,7 @@ void CascadeClassifier::detectMultiScaleNoGrouping( const Mat& image, std::vecto
 {
     candidates.clear();
 
-    if (!maskGenerator.empty())
+    if (maskGenerator)
         maskGenerator->initializeMask(image);
 
     if( maxObjectSize.height == 0 || maxObjectSize.width == 0 )
@@ -1350,7 +1350,7 @@ bool CascadeClassifier::read(const FileNode& root)
     return featureEvaluator->read(fn);
 }
 
-template<> void Ptr<CvHaarClassifierCascade>::delete_obj()
+template<> void DefaultDeleter<CvHaarClassifierCascade>::operator ()(CvHaarClassifierCascade* obj) const
 { cvReleaseHaarClassifierCascade(&obj); }
 
 } // namespace cv
@@ -179,7 +179,6 @@ ERFilterNM::ERFilterNM()
     minProbabilityDiff = 1.;
     num_accepted_regions = 0;
     num_rejected_regions = 0;
-    classifier = NULL;
 }
 
 // the key method. Takes image on input, vector of ERStat is output for the first stage,
@@ -1085,10 +1084,10 @@ Ptr<ERFilter> createERFilterNM1(const Ptr<ERFilter::Callback>& cb, int threshold
     CV_Assert( (thresholdDelta >= 0) && (thresholdDelta <= 128) );
     CV_Assert( (minProbabilityDiff >= 0.) && (minProbabilityDiff <= 1.) );
 
-    Ptr<ERFilterNM> filter = new ERFilterNM();
+    Ptr<ERFilterNM> filter = makePtr<ERFilterNM>();
 
     if (cb == NULL)
-        filter->setCallback(new ERClassifierNM1());
+        filter->setCallback(makePtr<ERClassifierNM1>());
     else
         filter->setCallback(cb);
 
@@ -1119,11 +1118,11 @@ Ptr<ERFilter> createERFilterNM2(const Ptr<ERFilter::Callback>& cb, float minProb
 
     CV_Assert( (minProbability >= 0.) && (minProbability <= 1.) );
 
-    Ptr<ERFilterNM> filter = new ERFilterNM();
+    Ptr<ERFilterNM> filter = makePtr<ERFilterNM>();
 
 
     if (cb == NULL)
-        filter->setCallback(new ERClassifierNM2());
+        filter->setCallback(makePtr<ERClassifierNM2>());
     else
         filter->setCallback(cb);
 
@@ -1536,15 +1536,15 @@ cvHaarDetectObjectsForROC( const CvArr* _img,
         maxSize.width = img->cols;
     }
 
-    temp = cvCreateMat( img->rows, img->cols, CV_8UC1 );
-    sum = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 );
-    sqsum = cvCreateMat( img->rows + 1, img->cols + 1, CV_64FC1 );
+    temp.reset(cvCreateMat( img->rows, img->cols, CV_8UC1 ));
+    sum.reset(cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 ));
+    sqsum.reset(cvCreateMat( img->rows + 1, img->cols + 1, CV_64FC1 ));
 
     if( !cascade->hid_cascade )
         icvCreateHidHaarClassifierCascade(cascade);
 
     if( cascade->hid_cascade->has_tilted_features )
-        tilted = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 );
+        tilted.reset(cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 ));
 
     result_seq = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvAvgComp), storage );
 
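
Note on the temp/sum/sqsum/tilted lines (an aside, not part of the diff): these variables are evidently Ptr<CvMat> now, so reset() both frees any previously held matrix and adopts the one returned by cvCreateMat; core registers a deleter for CvMat that calls cvReleaseMat. A minimal sketch of that lifetime pattern, assuming such a Ptr<CvMat> variable:

    #include <opencv2/core.hpp>
    #include <opencv2/core/core_c.h>

    int main()
    {
        cv::Ptr<CvMat> sum;
        sum.reset(cvCreateMat(10 + 1, 10 + 1, CV_32SC1)); // adopt a new matrix
        sum.reset(cvCreateMat(20 + 1, 20 + 1, CV_32SC1)); // previous one released here
        return 0;                                         // last one released at scope exit
    }
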
@@ -1566,7 +1566,7 @@ cvHaarDetectObjectsForROC( const CvArr* _img,
         if( use_ipp )
             normImg = cvCreateMat( img->rows, img->cols, CV_32FC1 );
 #endif
-        imgSmall = cvCreateMat( img->rows + 1, img->cols + 1, CV_8UC1 );
+        imgSmall.reset(cvCreateMat( img->rows + 1, img->cols + 1, CV_8UC1 ));
 
         for( factor = 1; ; factor *= scaleFactor )
         {
@@ -1635,7 +1635,7 @@ cvHaarDetectObjectsForROC( const CvArr* _img,
 
         if( doCannyPruning )
         {
-            sumcanny = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 );
+            sumcanny.reset(cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 ));
             cvCanny( img, temp, 0, 50, 3 );
             cvIntegral( temp, sumcanny );
         }
@@ -204,11 +204,11 @@ void QuantizedPyramid::selectScatteredFeatures(const std::vector<Candidate>& can
 Ptr<Modality> Modality::create(const String& modality_type)
 {
   if (modality_type == "ColorGradient")
-    return new ColorGradient();
+    return makePtr<ColorGradient>();
   else if (modality_type == "DepthNormal")
-    return new DepthNormal();
+    return makePtr<DepthNormal>();
   else
-    return NULL;
+    return Ptr<Modality>();
 }
 
 Ptr<Modality> Modality::create(const FileNode& fn)
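
The return Ptr<Modality>() change illustrates the last recurring pattern: a factory signals failure with an empty Ptr rather than NULL, and callers test truthiness, as the maskGenerator hunks above do. A self-contained sketch (Base/Derived are hypothetical stand-ins for Modality/ColorGradient, not names from the commit):

    #include <opencv2/core.hpp>
    #include <iostream>

    struct Base { virtual ~Base() {} };
    struct Derived : Base {};

    // Factory in the new style: an empty Ptr means "unknown type".
    static cv::Ptr<Base> create(const cv::String& name)
    {
        if (name == "Derived")
            return cv::makePtr<Derived>();
        return cv::Ptr<Base>(); // was: return NULL;
    }

    int main()
    {
        cv::Ptr<Base> m = create("Unknown");
        if (!m)
            std::cout << "unsupported type\n"; // empty-Ptr check replaces m == NULL
        return 0;
    }
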
@@ -574,7 +574,7 @@ String ColorGradient::name() const
 Ptr<QuantizedPyramid> ColorGradient::processImpl(const Mat& src,
                                                  const Mat& mask) const
 {
-  return new ColorGradientPyramid(src, mask, weak_threshold, num_features, strong_threshold);
+  return makePtr<ColorGradientPyramid>(src, mask, weak_threshold, num_features, strong_threshold);
 }
 
 void ColorGradient::read(const FileNode& fn)
@@ -889,8 +889,8 @@ String DepthNormal::name() const
 Ptr<QuantizedPyramid> DepthNormal::processImpl(const Mat& src,
                                                const Mat& mask) const
 {
-  return new DepthNormalPyramid(src, mask, distance_threshold, difference_threshold,
-                                num_features, extract_threshold);
+  return makePtr<DepthNormalPyramid>(src, mask, distance_threshold, difference_threshold,
+                                     num_features, extract_threshold);
 }
 
 void DepthNormal::read(const FileNode& fn)
@@ -1828,16 +1828,16 @@ static const int T_DEFAULTS[] = {5, 8};
 Ptr<Detector> getDefaultLINE()
 {
   std::vector< Ptr<Modality> > modalities;
-  modalities.push_back(new ColorGradient);
-  return new Detector(modalities, std::vector<int>(T_DEFAULTS, T_DEFAULTS + 2));
+  modalities.push_back(makePtr<ColorGradient>());
+  return makePtr<Detector>(modalities, std::vector<int>(T_DEFAULTS, T_DEFAULTS + 2));
 }
 
 Ptr<Detector> getDefaultLINEMOD()
 {
   std::vector< Ptr<Modality> > modalities;
-  modalities.push_back(new ColorGradient);
-  modalities.push_back(new DepthNormal);
-  return new Detector(modalities, std::vector<int>(T_DEFAULTS, T_DEFAULTS + 2));
+  modalities.push_back(makePtr<ColorGradient>());
+  modalities.push_back(makePtr<DepthNormal>());
+  return makePtr<Detector>(modalities, std::vector<int>(T_DEFAULTS, T_DEFAULTS + 2));
 }
 
 } // namespace linemod
@@ -426,10 +426,10 @@ int CV_CascadeDetectorTest::detectMultiScale_C( const string& filename,
                                                 int di, const Mat& img,
                                                 vector<Rect>& objects )
 {
-    Ptr<CvHaarClassifierCascade> c_cascade = cvLoadHaarClassifierCascade(filename.c_str(), cvSize(0,0));
-    Ptr<CvMemStorage> storage = cvCreateMemStorage();
+    Ptr<CvHaarClassifierCascade> c_cascade(cvLoadHaarClassifierCascade(filename.c_str(), cvSize(0,0)));
+    Ptr<CvMemStorage> storage(cvCreateMemStorage());
 
-    if( c_cascade.empty() )
+    if( !c_cascade )
     {
         ts->printf( cvtest::TS::LOG, "cascade %s can not be opened");
         return cvtest::TS::FAIL_INVALID_TEST_DATA;