diff --git a/modules/objdetect/include/opencv2/objdetect.hpp b/modules/objdetect/include/opencv2/objdetect.hpp
index febd3aed5..f1b371610 100644
--- a/modules/objdetect/include/opencv2/objdetect.hpp
+++ b/modules/objdetect/include/opencv2/objdetect.hpp
@@ -141,7 +141,7 @@ public:
     static Ptr<FeatureEvaluator> create(int type);
 };
 
-template<> CV_EXPORTS void Ptr<CvHaarClassifierCascade>::delete_obj();
+template<> CV_EXPORTS void DefaultDeleter<CvHaarClassifierCascade>::operator ()(CvHaarClassifierCascade* obj) const;
 
 enum { CASCADE_DO_CANNY_PRUNING = 1,
        CASCADE_SCALE_IMAGE = 2,
diff --git a/modules/objdetect/include/opencv2/objdetect/erfilter.hpp b/modules/objdetect/include/opencv2/objdetect/erfilter.hpp
index d1bfeaed0..8a1b580c3 100644
--- a/modules/objdetect/include/opencv2/objdetect/erfilter.hpp
+++ b/modules/objdetect/include/opencv2/objdetect/erfilter.hpp
@@ -171,7 +171,7 @@ public:
     \param nonMaxSuppression Whenever non-maximum suppression is done over the branch probabilities
     \param minProbability The minimum probability difference between local maxima and local minima ERs
 */
-CV_EXPORTS Ptr<ERFilter> createERFilterNM1(const Ptr<ERFilter::Callback>& cb = NULL,
+CV_EXPORTS Ptr<ERFilter> createERFilterNM1(const Ptr<ERFilter::Callback>& cb = Ptr<ERFilter::Callback>(),
                                            int thresholdDelta = 1, float minArea = 0.000025,
                                            float maxArea = 0.13, float minProbability = 0.2,
                                            bool nonMaxSuppression = true,
@@ -190,7 +190,7 @@ CV_EXPORTS Ptr<ERFilter> createERFilterNM1(const Ptr<ERFilter::Callback>& cb = N
                   if omitted tries to load a default classifier from file trained_classifierNM2.xml
     \param minProbability The minimum probability P(er|character) allowed for retreived ER's
 */
-CV_EXPORTS Ptr<ERFilter> createERFilterNM2(const Ptr<ERFilter::Callback>& cb = NULL,
+CV_EXPORTS Ptr<ERFilter> createERFilterNM2(const Ptr<ERFilter::Callback>& cb = Ptr<ERFilter::Callback>(),
                                            float minProbability = 0.85);
 
 }
diff --git a/modules/objdetect/src/cascadedetect.cpp b/modules/objdetect/src/cascadedetect.cpp
index 04ec41d24..88f463faa 100644
--- a/modules/objdetect/src/cascadedetect.cpp
+++ b/modules/objdetect/src/cascadedetect.cpp
@@ -467,7 +467,7 @@ bool HaarEvaluator::Feature :: read( const FileNode& node )
 
 HaarEvaluator::HaarEvaluator()
 {
-    features = new std::vector<Feature>();
+    features = makePtr<std::vector<Feature> >();
 }
 HaarEvaluator::~HaarEvaluator()
 {
@@ -492,7 +492,7 @@ bool HaarEvaluator::read(const FileNode& node)
 
 Ptr<FeatureEvaluator> HaarEvaluator::clone() const
 {
-    HaarEvaluator* ret = new HaarEvaluator;
+    Ptr<HaarEvaluator> ret = makePtr<HaarEvaluator>();
     ret->origWinSize = origWinSize;
     ret->features = features;
     ret->featuresPtr = &(*ret->features)[0];
@@ -582,7 +582,7 @@ bool LBPEvaluator::Feature :: read(const FileNode& node )
 
 LBPEvaluator::LBPEvaluator()
 {
-    features = new std::vector<Feature>();
+    features = makePtr<std::vector<Feature> >();
 }
 LBPEvaluator::~LBPEvaluator()
 {
@@ -603,7 +603,7 @@ bool LBPEvaluator::read( const FileNode& node )
 
 Ptr<FeatureEvaluator> LBPEvaluator::clone() const
 {
-    LBPEvaluator* ret = new LBPEvaluator;
+    Ptr<LBPEvaluator> ret = makePtr<LBPEvaluator>();
     ret->origWinSize = origWinSize;
     ret->features = features;
     ret->featuresPtr = &(*ret->features)[0];
@@ -662,7 +662,7 @@ bool HOGEvaluator::Feature :: read( const FileNode& node )
 
 HOGEvaluator::HOGEvaluator()
 {
-    features = new std::vector<Feature>();
+    features = makePtr<std::vector<Feature> >();
 }
 
 HOGEvaluator::~HOGEvaluator()
@@ -684,7 +684,7 @@ bool HOGEvaluator::read( const FileNode& node )
 
 Ptr<FeatureEvaluator> HOGEvaluator::clone() const
 {
-    HOGEvaluator* ret = new HOGEvaluator;
+    Ptr<HOGEvaluator> ret = makePtr<HOGEvaluator>();
     ret->origWinSize = origWinSize;
     ret->features = features;
     ret->featuresPtr = &(*ret->features)[0];
@@ -849,7 +849,7 @@ CascadeClassifier::~CascadeClassifier()
 
 bool CascadeClassifier::empty() const
 {
-    return oldCascade.empty() && data.stages.empty();
+    return !oldCascade && data.stages.empty();
 }
 
 bool CascadeClassifier::load(const String& filename)
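The header changes above establish the pattern the rest of the patch applies: objects that were created with a raw new and handed to a Ptr are now created with makePtr, default arguments that were NULL become a default-constructed (empty) Ptr, and emptiness is checked with a plain boolean test. Below is a minimal compile-and-run sketch of that pattern, assuming OpenCV 3.x headers; the Widget type and the two helper functions are invented for illustration and are not part of the patch.

    // Sketch only: Widget, make_widget() and use_widget() are made-up names.
    #include <opencv2/core.hpp>
    #include <vector>

    struct Widget
    {
        std::vector<int> payload;
    };

    // New style: cv::makePtr allocates the object and its reference counter
    // together, replacing "Ptr<Widget> w = new Widget;".
    static cv::Ptr<Widget> make_widget()
    {
        cv::Ptr<Widget> w = cv::makePtr<Widget>();
        w->payload.assign(4, 0);
        return w;
    }

    // New style default argument: an explicit empty Ptr takes the place of NULL
    // as the "no object supplied" default.
    static int use_widget(const cv::Ptr<Widget>& w = cv::Ptr<Widget>())
    {
        if (!w)                      // boolean test instead of w.empty()
            return 0;
        return (int)w->payload.size();
    }

    int main()
    {
        int a = use_widget();                // called with the empty default
        int b = use_widget(make_widget());   // called with a real object
        return (a == 0 && b == 4) ? 0 : 1;
    }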
@@ -867,13 +867,13 @@ bool CascadeClassifier::load(const String& filename)
 
     fs.release();
 
-    oldCascade = Ptr<CvHaarClassifierCascade>((CvHaarClassifierCascade*)cvLoad(filename.c_str(), 0, 0, 0));
+    oldCascade.reset((CvHaarClassifierCascade*)cvLoad(filename.c_str(), 0, 0, 0));
     return !oldCascade.empty();
 }
 
 int CascadeClassifier::runAt( Ptr<FeatureEvaluator>& evaluator, Point pt, double& weight )
 {
-    CV_Assert( oldCascade.empty() );
+    CV_Assert( !oldCascade );
 
     assert( data.featureType == FeatureEvaluator::HAAR ||
             data.featureType == FeatureEvaluator::LBP ||
@@ -1022,7 +1022,7 @@ bool CascadeClassifier::detectSingleScale( const Mat& image, int stripCount, Siz
 #endif
 
     Mat currentMask;
-    if (!maskGenerator.empty()) {
+    if (maskGenerator) {
         currentMask=maskGenerator->generateMask(image);
     }
 
@@ -1097,7 +1097,7 @@ void CascadeClassifier::detectMultiScaleNoGrouping( const Mat& image, std::vecto
 {
     candidates.clear();
 
-    if (!maskGenerator.empty())
+    if (maskGenerator)
         maskGenerator->initializeMask(image);
 
     if( maxObjectSize.height == 0 || maxObjectSize.width == 0 )
@@ -1350,7 +1350,7 @@ bool CascadeClassifier::read(const FileNode& root)
     return featureEvaluator->read(fn);
 }
 
-template<> void Ptr<CvHaarClassifierCascade>::delete_obj()
+template<> void DefaultDeleter<CvHaarClassifierCascade>::operator ()(CvHaarClassifierCascade* obj) const
 { cvReleaseHaarClassifierCascade(&obj); }
 
 } // namespace cv
diff --git a/modules/objdetect/src/erfilter.cpp b/modules/objdetect/src/erfilter.cpp
index ac8fc70e8..b8e964f5d 100644
--- a/modules/objdetect/src/erfilter.cpp
+++ b/modules/objdetect/src/erfilter.cpp
@@ -179,7 +179,6 @@ ERFilterNM::ERFilterNM()
     minProbabilityDiff = 1.;
     num_accepted_regions = 0;
     num_rejected_regions = 0;
-    classifier = NULL;
 }
 
 // the key method. Takes image on input, vector of ERStat is output for the first stage,
@@ -1085,10 +1084,10 @@ Ptr<ERFilter> createERFilterNM1(const Ptr<ERFilter::Callback>& cb, int threshold
     CV_Assert( (thresholdDelta >= 0) && (thresholdDelta <= 128) );
     CV_Assert( (minProbabilityDiff >= 0.) && (minProbabilityDiff <= 1.) );
 
-    Ptr<ERFilterNM> filter = new ERFilterNM();
+    Ptr<ERFilterNM> filter = makePtr<ERFilterNM>();
 
     if (cb == NULL)
-        filter->setCallback(new ERClassifierNM1());
+        filter->setCallback(makePtr<ERClassifierNM1>());
     else
         filter->setCallback(cb);
 
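The cascadedetect.cpp hunks above also contain the replacement for the old Ptr<T>::delete_obj() hook: a specialization of DefaultDeleter<T>::operator() that tells Ptr how to destroy a C-style object it owns, together with reset() for adopting a raw pointer and a boolean test instead of empty(). The following is a self-contained sketch of that mechanism under the same OpenCV 3.x assumption; LegacyHandle and its create/release functions are invented stand-ins for the CvHaarClassifierCascade C API.

    // Sketch only: LegacyHandle and its create/release functions are made up.
    #include <opencv2/core.hpp>
    #include <cstdlib>

    struct LegacyHandle { int id; };

    static LegacyHandle* createLegacyHandle(int id)
    {
        LegacyHandle* h = (LegacyHandle*)std::malloc(sizeof(LegacyHandle));
        h->id = id;
        return h;
    }

    static void releaseLegacyHandle(LegacyHandle** h)
    {
        if (h && *h) { std::free(*h); *h = 0; }
    }

    namespace cv {
    // The hook that used to be written as
    // "template<> void Ptr<LegacyHandle>::delete_obj()".
    template<> void DefaultDeleter<LegacyHandle>::operator ()(LegacyHandle* obj) const
    {
        releaseLegacyHandle(&obj);
    }
    }

    int main()
    {
        // Released via the DefaultDeleter specialization when it goes out of scope.
        cv::Ptr<LegacyHandle> handle;
        handle.reset(createLegacyHandle(42));  // take ownership of the raw allocation
        if (!handle)                           // boolean test instead of handle.empty()
            return 1;
        return handle->id == 42 ? 0 : 1;
    }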
@@ -1119,11 +1118,11 @@ Ptr<ERFilter> createERFilterNM2(const Ptr<ERFilter::Callback>& cb, float minProb
 
     CV_Assert( (minProbability >= 0.) && (minProbability <= 1.) );
 
-    Ptr<ERFilterNM> filter = new ERFilterNM();
+    Ptr<ERFilterNM> filter = makePtr<ERFilterNM>();
 
     if (cb == NULL)
-        filter->setCallback(new ERClassifierNM2());
+        filter->setCallback(makePtr<ERClassifierNM2>());
     else
         filter->setCallback(cb);
 
diff --git a/modules/objdetect/src/haar.cpp b/modules/objdetect/src/haar.cpp
index 2212b6c67..cbb60b091 100644
--- a/modules/objdetect/src/haar.cpp
+++ b/modules/objdetect/src/haar.cpp
@@ -1536,15 +1536,15 @@ cvHaarDetectObjectsForROC( const CvArr* _img,
         maxSize.width = img->cols;
     }
 
-    temp = cvCreateMat( img->rows, img->cols, CV_8UC1 );
-    sum = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 );
-    sqsum = cvCreateMat( img->rows + 1, img->cols + 1, CV_64FC1 );
+    temp.reset(cvCreateMat( img->rows, img->cols, CV_8UC1 ));
+    sum.reset(cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 ));
+    sqsum.reset(cvCreateMat( img->rows + 1, img->cols + 1, CV_64FC1 ));
 
     if( !cascade->hid_cascade )
         icvCreateHidHaarClassifierCascade(cascade);
 
     if( cascade->hid_cascade->has_tilted_features )
-        tilted = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 );
+        tilted.reset(cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 ));
 
     result_seq = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvAvgComp), storage );
@@ -1566,7 +1566,7 @@ cvHaarDetectObjectsForROC( const CvArr* _img,
         if( use_ipp )
            normImg = cvCreateMat( img->rows, img->cols, CV_32FC1 );
 #endif
-        imgSmall = cvCreateMat( img->rows + 1, img->cols + 1, CV_8UC1 );
+        imgSmall.reset(cvCreateMat( img->rows + 1, img->cols + 1, CV_8UC1 ));
 
         for( factor = 1; ; factor *= scaleFactor )
         {
@@ -1635,7 +1635,7 @@ cvHaarDetectObjectsForROC( const CvArr* _img,
 
         if( doCannyPruning )
         {
-            sumcanny = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 );
+            sumcanny.reset(cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 ));
             cvCanny( img, temp, 0, 50, 3 );
             cvIntegral( temp, sumcanny );
         }
diff --git a/modules/objdetect/src/linemod.cpp b/modules/objdetect/src/linemod.cpp
index 0fd3433a5..e8fc8e4aa 100644
--- a/modules/objdetect/src/linemod.cpp
+++ b/modules/objdetect/src/linemod.cpp
@@ -204,11 +204,11 @@ void QuantizedPyramid::selectScatteredFeatures(const std::vector<Candidate>& can
 Ptr<Modality> Modality::create(const String& modality_type)
 {
   if (modality_type == "ColorGradient")
-    return new ColorGradient();
+    return makePtr<ColorGradient>();
   else if (modality_type == "DepthNormal")
-    return new DepthNormal();
+    return makePtr<DepthNormal>();
   else
-    return NULL;
+    return Ptr<Modality>();
 }
 
 Ptr<Modality> Modality::create(const FileNode& fn)
@@ -574,7 +574,7 @@ String ColorGradient::name() const
 
 Ptr<QuantizedPyramid> ColorGradient::processImpl(const Mat& src, const Mat& mask) const
 {
-  return new ColorGradientPyramid(src, mask, weak_threshold, num_features, strong_threshold);
+  return makePtr<ColorGradientPyramid>(src, mask, weak_threshold, num_features, strong_threshold);
 }
 
 void ColorGradient::read(const FileNode& fn)
@@ -889,8 +889,8 @@ String DepthNormal::name() const
 
 Ptr<QuantizedPyramid> DepthNormal::processImpl(const Mat& src, const Mat& mask) const
 {
-  return new DepthNormalPyramid(src, mask, distance_threshold, difference_threshold,
-                                num_features, extract_threshold);
+  return makePtr<DepthNormalPyramid>(src, mask, distance_threshold, difference_threshold,
+                                     num_features, extract_threshold);
 }
 
 void DepthNormal::read(const FileNode& fn)
@@ -1828,16 +1828,16 @@ static const int T_DEFAULTS[] = {5, 8};
 Ptr<Detector> getDefaultLINE()
 {
   std::vector< Ptr<Modality> > modalities;
-  modalities.push_back(new ColorGradient);
-  return new Detector(modalities, std::vector<int>(T_DEFAULTS, T_DEFAULTS + 2));
+  modalities.push_back(makePtr<ColorGradient>());
+  return makePtr<Detector>(modalities, std::vector<int>(T_DEFAULTS, T_DEFAULTS + 2));
 }
 
 Ptr<Detector> getDefaultLINEMOD()
 {
   std::vector< Ptr<Modality> > modalities;
-  modalities.push_back(new ColorGradient);
-  modalities.push_back(new DepthNormal);
-  return new Detector(modalities, std::vector<int>(T_DEFAULTS, T_DEFAULTS + 2));
+  modalities.push_back(makePtr<ColorGradient>());
+  modalities.push_back(makePtr<DepthNormal>());
+  return makePtr<Detector>(modalities, std::vector<int>(T_DEFAULTS, T_DEFAULTS + 2));
 }
 
 } // namespace linemod
diff --git a/modules/objdetect/test/test_cascadeandhog.cpp b/modules/objdetect/test/test_cascadeandhog.cpp
index b4fd541a1..a30109905 100644
--- a/modules/objdetect/test/test_cascadeandhog.cpp
+++ b/modules/objdetect/test/test_cascadeandhog.cpp
@@ -426,10 +426,10 @@ int CV_CascadeDetectorTest::detectMultiScale_C( const string& filename,
                                                 int di, const Mat& img,
                                                 vector<Rect>& objects )
 {
-    Ptr<CvHaarClassifierCascade> c_cascade = cvLoadHaarClassifierCascade(filename.c_str(), cvSize(0,0));
-    Ptr<CvMemStorage> storage = cvCreateMemStorage();
+    Ptr<CvHaarClassifierCascade> c_cascade(cvLoadHaarClassifierCascade(filename.c_str(), cvSize(0,0)));
+    Ptr<CvMemStorage> storage(cvCreateMemStorage());
 
-    if( c_cascade.empty() )
+    if( !c_cascade )
     {
         ts->printf( cvtest::TS::LOG, "cascade %s can not be opened");
         return cvtest::TS::FAIL_INVALID_TEST_DATA;
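Taken together, haar.cpp and the test now use one idiom for legacy C allocations: the raw pointer returned by the C API is handed to a Ptr, either at construction or via reset(), and is released by the matching DefaultDeleter specialization when the Ptr goes out of scope. Below is a short usage sketch of that idiom, assuming OpenCV 3.x with the C API headers, which declare the DefaultDeleter specializations for CvMat and CvMemStorage that these Ptr instances rely on.

    #include <opencv2/core.hpp>
    #include <opencv2/core/core_c.h>

    int main()
    {
        // Construction directly from the raw pointer, as in the updated test.
        cv::Ptr<CvMemStorage> storage(cvCreateMemStorage());

        // reset() on an already declared Ptr, as in cvHaarDetectObjectsForROC.
        cv::Ptr<CvMat> sum;
        sum.reset(cvCreateMat(11, 11, CV_32SC1));

        if (!storage || !sum)   // boolean tests instead of .empty()
            return 1;

        // No explicit cvReleaseMemStorage / cvReleaseMat calls: both objects are
        // freed by their DefaultDeleter specializations when the Ptrs go out of scope.
        return 0;
    }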