revisions 8681 and 8688 restored. Warning fixed.
Warning: changes break binary compatibility
parent 0a58d8f139
commit a3be73b5cc
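In short, the binary-incompatible change is that the tracker no longer loads a cascade file itself: the old DetectionBasedTracker(cascadeFilename, params) constructor is replaced by one taking two cv::Ptr<IDetector> objects (a main detector for full frames and a tracking detector for regions), and callers wrap a CascadeClassifier in the new IDetector interface themselves. A minimal usage sketch, assembled from the new header and the dbt_face_detection.cpp sample added in this commit (the adapter class mirrors the sample's CascadeDetectorAdapter):

    #include <opencv2/objdetect/objdetect.hpp>
    #include <opencv2/contrib/detection_based_tracker.hpp>

    // Drives a CascadeClassifier through the new IDetector interface
    // (same shape as the CascadeDetectorAdapter defined in the sample below).
    class CascadeDetectorAdapter : public cv::DetectionBasedTracker::IDetector
    {
    public:
        CascadeDetectorAdapter(cv::Ptr<cv::CascadeClassifier> detector)
            : IDetector(), Detector(detector)
        {
            CV_Assert(!detector.empty());
        }

        void detect(const cv::Mat& image, std::vector<cv::Rect>& objects)
        {
            // minObjSize, maxObjSize, scaleFactor, minNeighbours are inherited from IDetector
            Detector->detectMultiScale(image, objects, scaleFactor, minNeighbours, 0, minObjSize, maxObjSize);
        }

    private:
        cv::Ptr<cv::CascadeClassifier> Detector;
    };

    // Old API (removed):
    //   cv::DetectionBasedTracker tracker(cascadeFilename, params);
    // New API:
    //   cv::Ptr<cv::DetectionBasedTracker::IDetector> mainDetector =
    //       new CascadeDetectorAdapter(new cv::CascadeClassifier(cascadeFilename));
    //   cv::Ptr<cv::DetectionBasedTracker::IDetector> trackingDetector =
    //       new CascadeDetectorAdapter(new cv::CascadeClassifier(cascadeFilename));
    //   cv::DetectionBasedTracker tracker(mainDetector, trackingDetector, params);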
@@ -7,22 +7,73 @@

 #include <vector>

+namespace cv
+{
 class DetectionBasedTracker
 {
     public:
         struct Parameters
         {
-            int minObjectSize;
-            int maxObjectSize;
-            double scaleFactor;
             int maxTrackLifetime;
-            int minNeighbors;
             int minDetectionPeriod; //the minimal time between run of the big object detector (on the whole frame) in ms (1000 mean 1 sec), default=0

             Parameters();
         };

-        DetectionBasedTracker(const std::string& cascadeFilename, const Parameters& params);
+        class IDetector
+        {
+            public:
+                IDetector():
+                    minObjSize(96, 96),
+                    maxObjSize(INT_MAX, INT_MAX),
+                    minNeighbours(2),
+                    scaleFactor(1.1f)
+                {}
+
+                virtual void detect(const cv::Mat& Image, std::vector<cv::Rect>& objects) = 0;
+
+                void setMinObjectSize(const cv::Size& min)
+                {
+                    minObjSize = min;
+                }
+                void setMaxObjectSize(const cv::Size& max)
+                {
+                    maxObjSize = max;
+                }
+                cv::Size getMinObjectSize() const
+                {
+                    return minObjSize;
+                }
+                cv::Size getMaxObjectSize() const
+                {
+                    return maxObjSize;
+                }
+                float getScaleFactor()
+                {
+                    return scaleFactor;
+                }
+                void setScaleFactor(float value)
+                {
+                    scaleFactor = value;
+                }
+                int getMinNeighbours()
+                {
+                    return minNeighbours;
+                }
+                void setMinNeighbours(int value)
+                {
+                    minNeighbours = value;
+                }
+                virtual ~IDetector() {}
+
+            protected:
+                cv::Size minObjSize;
+                cv::Size maxObjSize;
+                int minNeighbours;
+                float scaleFactor;
+        };
+
+        DetectionBasedTracker(cv::Ptr<IDetector> MainDetector, cv::Ptr<IDetector> TrackingDetector, const Parameters& params);
         virtual ~DetectionBasedTracker();

         virtual bool run();
@@ -44,7 +95,6 @@ class DetectionBasedTracker
         cv::Ptr<SeparateDetectionWork> separateDetectionWork;
         friend void* workcycleObjectDetectorFunction(void* p);

-
         struct InnerParameters
         {
             int numLastPositionsToTrack;
@@ -90,13 +140,11 @@ class DetectionBasedTracker
         std::vector<float> weightsPositionsSmoothing;
         std::vector<float> weightsSizesSmoothing;

-        cv::CascadeClassifier cascadeForTracking;
+        cv::Ptr<IDetector> cascadeForTracking;

-
         void updateTrackedObjects(const std::vector<cv::Rect>& detectedObjects);
         cv::Rect calcTrackedObjectPositionToShow(int i) const;
         void detectInRegion(const cv::Mat& img, const cv::Rect& r, std::vector<cv::Rect>& detectedObjectsInRegions);
 };
+} //end of cv namespace
 #endif

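The point of the new IDetector interface above is that DetectionBasedTracker no longer cares where rectangles come from; anything that can fill a std::vector<cv::Rect> for a grayscale frame can be plugged in. A purely illustrative sketch (not part of this commit) of a non-cascade detector, assuming some hypothetical external detectWithMyEngine() routine:

    // Hypothetical adapter delegating to an external engine; only the detect()
    // override is required, the size/scale accessors are inherited from IDetector.
    class MyEngineDetectorAdapter : public cv::DetectionBasedTracker::IDetector
    {
    public:
        virtual void detect(const cv::Mat& image, std::vector<cv::Rect>& objects)
        {
            objects.clear();
            detectWithMyEngine(image, objects); // hypothetical external call
        }
        virtual ~MyEngineDetectorAdapter() {}
    };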
@@ -40,6 +40,7 @@ static inline cv::Point2f centerRect(const cv::Rect& r)
 {
     return cv::Point2f(r.x+((float)r.width)/2, r.y+((float)r.height)/2);
 };

 static inline cv::Rect scale_rect(const cv::Rect& r, float scale)
 {
     cv::Point2f m=centerRect(r);
@@ -51,11 +52,15 @@ static inline cv::Rect scale_rect(const cv::Rect& r, float scale)
     return cv::Rect(x, y, cvRound(width), cvRound(height));
 };

-void* workcycleObjectDetectorFunction(void* p);
-class DetectionBasedTracker::SeparateDetectionWork
+namespace cv
+{
+    void* workcycleObjectDetectorFunction(void* p);
+}
+
+class cv::DetectionBasedTracker::SeparateDetectionWork
 {
     public:
-        SeparateDetectionWork(DetectionBasedTracker& _detectionBasedTracker, const std::string& cascadeFilename);
+        SeparateDetectionWork(cv::DetectionBasedTracker& _detectionBasedTracker, cv::Ptr<DetectionBasedTracker::IDetector> _detector);
         virtual ~SeparateDetectionWork();
         bool communicateWithDetectingThread(const Mat& imageGray, vector<Rect>& rectsWhereRegions);
         bool run();
@@ -77,7 +82,7 @@ class DetectionBasedTracker::SeparateDetectionWork
     protected:

         DetectionBasedTracker& detectionBasedTracker;
-        cv::CascadeClassifier cascadeInThread;
+        cv::Ptr<DetectionBasedTracker::IDetector> cascadeInThread;

         pthread_t second_workthread;
         pthread_mutex_t mutex;
@@ -105,7 +110,7 @@ class DetectionBasedTracker::SeparateDetectionWork
         long long timeWhenDetectingThreadStartedWork;
 };

-DetectionBasedTracker::SeparateDetectionWork::SeparateDetectionWork(DetectionBasedTracker& _detectionBasedTracker, const std::string& cascadeFilename)
+cv::DetectionBasedTracker::SeparateDetectionWork::SeparateDetectionWork(DetectionBasedTracker& _detectionBasedTracker, cv::Ptr<DetectionBasedTracker::IDetector> _detector)
     :detectionBasedTracker(_detectionBasedTracker),
     cascadeInThread(),
     isObjectDetectingReady(false),
@@ -113,9 +118,10 @@ DetectionBasedTracker::SeparateDetectionWork::SeparateDetectionWork(DetectionBas
     stateThread(STATE_THREAD_STOPPED),
     timeWhenDetectingThreadStartedWork(-1)
 {
-    if(!cascadeInThread.load(cascadeFilename)) {
-        CV_Error(CV_StsBadArg, "DetectionBasedTracker::SeparateDetectionWork::SeparateDetectionWork: Cannot load a cascade from the file '"+cascadeFilename+"'");
-    }
+    CV_Assert(!_detector.empty());
+    cascadeInThread = _detector;

     int res=0;
     res=pthread_mutex_init(&mutex, NULL);//TODO: should be attributes?
     if (res) {
@@ -137,7 +143,7 @@ DetectionBasedTracker::SeparateDetectionWork::SeparateDetectionWork(DetectionBas
     }
 }

-DetectionBasedTracker::SeparateDetectionWork::~SeparateDetectionWork()
+cv::DetectionBasedTracker::SeparateDetectionWork::~SeparateDetectionWork()
 {
     if(stateThread!=STATE_THREAD_STOPPED) {
         LOGE("\n\n\nATTENTION!!! dangerous algorithm error: destructor DetectionBasedTracker::DetectionBasedTracker::~SeparateDetectionWork is called before stopping the workthread");
@@ -147,7 +153,7 @@ DetectionBasedTracker::SeparateDetectionWork::~SeparateDetectionWork()
     pthread_cond_destroy(&objectDetectorRun);
     pthread_mutex_destroy(&mutex);
 }
-bool DetectionBasedTracker::SeparateDetectionWork::run()
+bool cv::DetectionBasedTracker::SeparateDetectionWork::run()
 {
     LOGD("DetectionBasedTracker::SeparateDetectionWork::run() --- start");
     pthread_mutex_lock(&mutex);
@@ -196,18 +202,18 @@ do {
 } while(0)
 #endif

-void* workcycleObjectDetectorFunction(void* p)
+void* cv::workcycleObjectDetectorFunction(void* p)
 {
-    CATCH_ALL_AND_LOG({ ((DetectionBasedTracker::SeparateDetectionWork*)p)->workcycleObjectDetector(); });
+    CATCH_ALL_AND_LOG({ ((cv::DetectionBasedTracker::SeparateDetectionWork*)p)->workcycleObjectDetector(); });
     try{
-        ((DetectionBasedTracker::SeparateDetectionWork*)p)->stateThread=DetectionBasedTracker::SeparateDetectionWork::STATE_THREAD_STOPPED;
+        ((cv::DetectionBasedTracker::SeparateDetectionWork*)p)->stateThread = cv::DetectionBasedTracker::SeparateDetectionWork::STATE_THREAD_STOPPED;
     } catch(...) {
         LOGE0("DetectionBasedTracker: workcycleObjectDetectorFunction: ERROR concerning pointer, received as the function parameter");
     }
     return NULL;
 }

-void DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector()
+void cv::DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector()
 {
     static double freq = getTickFrequency();
     LOGD0("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- start");
@@ -274,20 +280,17 @@ void DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector()

     int64 t1_detect=getTickCount();

-    int minObjectSize=detectionBasedTracker.parameters.minObjectSize;
-    Size min_objectSize=Size(minObjectSize, minObjectSize);
-
-    int maxObjectSize=detectionBasedTracker.parameters.maxObjectSize;
-    Size max_objectSize(maxObjectSize, maxObjectSize);
-
-    cascadeInThread.detectMultiScale( imageSeparateDetecting, objects,
+    cascadeInThread->detect(imageSeparateDetecting, objects);
+
+    /*cascadeInThread.detectMultiScale( imageSeparateDetecting, objects,
         detectionBasedTracker.parameters.scaleFactor, detectionBasedTracker.parameters.minNeighbors, 0
         |CV_HAAR_SCALE_IMAGE
         ,
         min_objectSize,
         max_objectSize
         );
+    */

     LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- end handling imageSeparateDetecting");

     if (!isWorking()) {
@@ -333,7 +336,7 @@ void DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector()
     LOGI("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector: Returning");
 }

-void DetectionBasedTracker::SeparateDetectionWork::stop()
+void cv::DetectionBasedTracker::SeparateDetectionWork::stop()
 {
     //FIXME: TODO: should add quickStop functionality
     pthread_mutex_lock(&mutex);
@@ -350,7 +353,7 @@ void DetectionBasedTracker::SeparateDetectionWork::stop()
     pthread_mutex_unlock(&mutex);
 }

-void DetectionBasedTracker::SeparateDetectionWork::resetTracking()
+void cv::DetectionBasedTracker::SeparateDetectionWork::resetTracking()
 {
     LOGD("DetectionBasedTracker::SeparateDetectionWork::resetTracking");
     pthread_mutex_lock(&mutex);
@@ -371,7 +374,7 @@ void DetectionBasedTracker::SeparateDetectionWork::resetTracking()

 }

-bool DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread(const Mat& imageGray, vector<Rect>& rectsWhereRegions)
+bool cv::DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread(const Mat& imageGray, vector<Rect>& rectsWhereRegions)
 {
     static double freq = getTickFrequency();

@@ -420,19 +423,13 @@ bool DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThrea
     return shouldHandleResult;
 }

-DetectionBasedTracker::Parameters::Parameters()
+cv::DetectionBasedTracker::Parameters::Parameters()
 {
-    minObjectSize=96;
-    maxObjectSize=INT_MAX;
-    scaleFactor=1.1;
     maxTrackLifetime=5;
-    minNeighbors=2;
     minDetectionPeriod=0;
 }

-DetectionBasedTracker::InnerParameters::InnerParameters()
+cv::DetectionBasedTracker::InnerParameters::InnerParameters()
 {
     numLastPositionsToTrack=4;
     numStepsToWaitBeforeFirstShow=6;
@@ -444,39 +441,32 @@ DetectionBasedTracker::InnerParameters::InnerParameters()
     coeffObjectSpeedUsingInPrediction=0.8;

 }

-DetectionBasedTracker::DetectionBasedTracker(const std::string& cascadeFilename, const Parameters& params)
+cv::DetectionBasedTracker::DetectionBasedTracker(cv::Ptr<IDetector> MainDetector, cv::Ptr<IDetector> TrackingDetector, const Parameters& params)
     :separateDetectionWork(),
+    parameters(params),
     innerParameters(),
-    numTrackedSteps(0)
+    numTrackedSteps(0),
+    cascadeForTracking(TrackingDetector)
 {
-    CV_Assert( (params.minObjectSize > 0)
-        && (params.maxObjectSize >= 0)
-        && (params.scaleFactor > 1.0)
-        && (params.maxTrackLifetime >= 0) );
-
-    if (!cascadeForTracking.load(cascadeFilename)) {
-        CV_Error(CV_StsBadArg, "DetectionBasedTracker::DetectionBasedTracker: Cannot load a cascade from the file '"+cascadeFilename+"'");
-    }
-
-    parameters=params;
-
-    separateDetectionWork=new SeparateDetectionWork(*this, cascadeFilename);
+    CV_Assert( (params.maxTrackLifetime >= 0)
+        && (!MainDetector.empty())
+        && (!TrackingDetector.empty()) );
+
+    separateDetectionWork = new SeparateDetectionWork(*this, MainDetector);

     weightsPositionsSmoothing.push_back(1);
     weightsSizesSmoothing.push_back(0.5);
     weightsSizesSmoothing.push_back(0.3);
     weightsSizesSmoothing.push_back(0.2);

 }
-DetectionBasedTracker::~DetectionBasedTracker()
+
+cv::DetectionBasedTracker::~DetectionBasedTracker()
 {
 }


 void DetectionBasedTracker::process(const Mat& imageGray)
 {
     CV_Assert(imageGray.type()==CV_8UC1);

     if (!separateDetectionWork->isWorking()) {
@@ -494,15 +484,9 @@ void DetectionBasedTracker::process(const Mat& imageGray)

     Mat imageDetect=imageGray;

-    int D=parameters.minObjectSize;
-    if (D < 1)
-        D=1;
-
     vector<Rect> rectsWhereRegions;
     bool shouldHandleResult=separateDetectionWork->communicateWithDetectingThread(imageGray, rectsWhereRegions);

     if (shouldHandleResult) {
         LOGD("DetectionBasedTracker::process: get _rectsWhereRegions were got from resultDetect");
     } else {
@@ -517,7 +501,6 @@ void DetectionBasedTracker::process(const Mat& imageGray)
                 continue;
             }

-
             //correction by speed of rectangle
             if (n > 1) {
                 Point2f center=centerRect(r);
@@ -547,7 +530,7 @@ void DetectionBasedTracker::process(const Mat& imageGray)
     updateTrackedObjects(detectedObjectsInRegions);
 }

-void DetectionBasedTracker::getObjects(std::vector<cv::Rect>& result) const
+void cv::DetectionBasedTracker::getObjects(std::vector<cv::Rect>& result) const
 {
     result.clear();

@@ -560,7 +543,8 @@ void DetectionBasedTracker::getObjects(std::vector<cv::Rect>& result) const
         LOGD("DetectionBasedTracker::process: found a object with SIZE %d x %d, rect={%d, %d, %d x %d}", r.width, r.height, r.x, r.y, r.width, r.height);
     }
 }
-void DetectionBasedTracker::getObjects(std::vector<Object>& result) const
+
+void cv::DetectionBasedTracker::getObjects(std::vector<Object>& result) const
 {
     result.clear();

@@ -574,25 +558,23 @@ void DetectionBasedTracker::getObjects(std::vector<Object>& result) const
     }
 }

-bool DetectionBasedTracker::run()
+bool cv::DetectionBasedTracker::run()
 {
     return separateDetectionWork->run();
 }

-void DetectionBasedTracker::stop()
+void cv::DetectionBasedTracker::stop()
 {
     separateDetectionWork->stop();
 }

-void DetectionBasedTracker::resetTracking()
+void cv::DetectionBasedTracker::resetTracking()
 {
     separateDetectionWork->resetTracking();
     trackedObjects.clear();
 }

-void DetectionBasedTracker::updateTrackedObjects(const vector<Rect>& detectedObjects)
+void cv::DetectionBasedTracker::updateTrackedObjects(const vector<Rect>& detectedObjects)
 {
     enum {
         NEW_RECTANGLE=-1,
@@ -711,7 +693,8 @@ void DetectionBasedTracker::updateTrackedObjects(const vector<Rect>& detectedObj
             }
         }
     }
-Rect DetectionBasedTracker::calcTrackedObjectPositionToShow(int i) const
+
+Rect cv::DetectionBasedTracker::calcTrackedObjectPositionToShow(int i) const
 {
     if ( (i < 0) || (i >= (int)trackedObjects.size()) ) {
         LOGE("DetectionBasedTracker::calcTrackedObjectPositionToShow: ERROR: wrong i=%d", i);
@@ -743,8 +726,8 @@ Rect DetectionBasedTracker::calcTrackedObjectPositionToShow(int i) const
     double sum=0;
     for(int j=0; j < Nsize; j++) {
         int k=N-j-1;
-        w+= lastPositions[k].width * weightsSizesSmoothing[j];
-        h+= lastPositions[k].height * weightsSizesSmoothing[j];
+        w += lastPositions[k].width * weightsSizesSmoothing[j];
+        h += lastPositions[k].height * weightsSizesSmoothing[j];
         sum+=weightsSizesSmoothing[j];
     }
     w /= sum;
@@ -792,7 +775,7 @@ Rect DetectionBasedTracker::calcTrackedObjectPositionToShow(int i) const
     return res;
 }

-void DetectionBasedTracker::detectInRegion(const Mat& img, const Rect& r, vector<Rect>& detectedObjectsInRegions)
+void cv::DetectionBasedTracker::detectInRegion(const Mat& img, const Rect& r, vector<Rect>& detectedObjectsInRegions)
 {
     Rect r0(Point(), img.size());
     Rect r1=scale_rect(r, innerParameters.coeffTrackingWindowSize);
@@ -802,8 +785,7 @@ void DetectionBasedTracker::detectInRegion(const Mat& img, const Rect& r, vector
         return;
     }

-    int d=std::min(r.width, r.height);
-    d=cvRound(d * innerParameters.coeffObjectSizeToTrack);
+    int d = cvRound(std::min(r.width, r.height) * innerParameters.coeffObjectSizeToTrack);

     vector<Rect> tmpobjects;

@@ -811,17 +793,17 @@ void DetectionBasedTracker::detectInRegion(const Mat& img, const Rect& r, vector
     LOGD("DetectionBasedTracker::detectInRegion: img1.size()=%d x %d, d=%d",
         img1.size().width, img1.size().height, d);

-    int maxObjectSize=parameters.maxObjectSize;
-    Size max_objectSize(maxObjectSize, maxObjectSize);
-
-    cascadeForTracking.detectMultiScale( img1, tmpobjects,
+    cascadeForTracking->setMinObjectSize(Size(d, d));
+    cascadeForTracking->detect(img1, tmpobjects);
+    /*
+    detectMultiScale( img1, tmpobjects,
         parameters.scaleFactor, parameters.minNeighbors, 0
         |CV_HAAR_FIND_BIGGEST_OBJECT
         |CV_HAAR_SCALE_IMAGE
         ,
         Size(d,d),
         max_objectSize
-        );
+        );*/

     for(size_t i=0; i < tmpobjects.size(); i++) {
         Rect curres(tmpobjects[i].tl() + r1.tl(), tmpobjects[i].size());
@@ -829,12 +811,9 @@ void DetectionBasedTracker::detectInRegion(const Mat& img, const Rect& r, vector
     }
 }

-bool DetectionBasedTracker::setParameters(const Parameters& params)
+bool cv::DetectionBasedTracker::setParameters(const Parameters& params)
 {
-    if ( (params.minObjectSize <= 0)
-        || (params.maxObjectSize < 0)
-        || (params.scaleFactor <= 1.0)
-        || (params.maxTrackLifetime < 0) )
+    if ( params.maxTrackLifetime < 0 )
     {
         LOGE("DetectionBasedTracker::setParameters: ERROR: wrong parameters value");
         return false;
@@ -846,7 +825,7 @@ bool DetectionBasedTracker::setParameters(const Parameters& params)
     return true;
 }

-const DetectionBasedTracker::Parameters& DetectionBasedTracker::getParameters()
+const cv::DetectionBasedTracker::Parameters& DetectionBasedTracker::getParameters()
 {
     return parameters;
 }
@@ -18,6 +18,29 @@ inline void vector_Rect_to_Mat(vector<Rect>& v_rect, Mat& mat)
     mat = Mat(v_rect, true);
 }

+class CascadeDetectorAdapter: public DetectionBasedTracker::IDetector
+{
+    public:
+        CascadeDetectorAdapter(cv::Ptr<cv::CascadeClassifier> detector):
+            IDetector(),
+            Detector(detector)
+        {
+            CV_Assert(!detector.empty());
+        }
+
+        void detect(const cv::Mat &Image, std::vector<cv::Rect> &objects)
+        {
+            Detector->detectMultiScale(Image, objects, scaleFactor, minNeighbours, 0, minObjSize, maxObjSize);
+        }
+
+        virtual ~CascadeDetectorAdapter()
+        {}
+
+    private:
+        CascadeDetectorAdapter();
+        cv::Ptr<cv::CascadeClassifier> Detector;
+};
+
 JNIEXPORT jlong JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeCreateObject
 (JNIEnv * jenv, jclass, jstring jFileName, jint faceSize)
 {
@@ -27,10 +50,11 @@ JNIEXPORT jlong JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeC

     try
     {
-        DetectionBasedTracker::Parameters DetectorParams;
-        if (faceSize > 0)
-            DetectorParams.minObjectSize = faceSize;
-        result = (jlong)new DetectionBasedTracker(stdFileName, DetectorParams);
+        // TODO: Reimplement using adapter
+        // DetectionBasedTracker::Parameters DetectorParams;
+        // if (faceSize > 0)
+        //     DetectorParams.minObjectSize = faceSize;
+        // result = (jlong)new DetectionBasedTracker(stdFileName, DetectorParams);
     }
     catch(cv::Exception e)
     {
@@ -128,12 +152,11 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSe
 {
     if (faceSize > 0)
     {
-        DetectionBasedTracker::Parameters DetectorParams = \
-            ((DetectionBasedTracker*)thiz)->getParameters();
-        DetectorParams.minObjectSize = faceSize;
-        ((DetectionBasedTracker*)thiz)->setParameters(DetectorParams);
+        // TODO: Reimplement using adapter
+        // DetectionBasedTracker::Parameters DetectorParams = ((DetectionBasedTracker*)thiz)->getParameters();
+        // DetectorParams.minObjectSize = faceSize;
+        // ((DetectionBasedTracker*)thiz)->setParameters(DetectorParams);
     }

 }
 catch(cv::Exception e)
 {
@@ -160,7 +183,7 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDe
     vector<Rect> RectFaces;
     ((DetectionBasedTracker*)thiz)->process(*((Mat*)imageGray));
     ((DetectionBasedTracker*)thiz)->getObjects(RectFaces);
-    vector_Rect_to_Mat(RectFaces, *((Mat*)faces));
+    *((Mat*)faces) = Mat(RectFaces, true);
 }
 catch(cv::Exception e)
 {
@@ -7,7 +7,9 @@ public class DetectionBasedTracker
 {
     public DetectionBasedTracker(String cascadeName, int minFaceSize)
     {
-        mNativeObj = nativeCreateObject(cascadeName, minFaceSize);
+        mMainDetector = nativeCreateDetector(cascadeName, minFaceSize);
+        mTrackingDetector = nativeCreateDetector(cascadeName, minFaceSize);
+        mNativeObj = nativeCreateTracker(mMainDetector, mTrackingDetector);
     }

     public void start()
@@ -22,7 +24,8 @@ public class DetectionBasedTracker

     public void setMinFaceSize(int size)
     {
-        nativeSetFaceSize(mNativeObj, size);
+        nativeSetFaceSize(mMainDetector, size);
+        nativeSetFaceSize(mTrackingDetector, size);
     }

     public void detect(Mat imageGray, MatOfRect faces)
@@ -32,17 +35,25 @@ public class DetectionBasedTracker

     public void release()
     {
-        nativeDestroyObject(mNativeObj);
+        nativeDestroyTracker(mNativeObj);
+        nativeDestroyDetector(mMainDetector);
+        nativeDestroyDetector(mTrackingDetector);
         mNativeObj = 0;
+        mMainDetector = 0;
+        mTrackingDetector = 0;
     }

     private long mNativeObj = 0;
+    private long mMainDetector = 0;
+    private long mTrackingDetector = 0;

-    private static native long nativeCreateObject(String cascadeName, int minFaceSize);
-    private static native void nativeDestroyObject(long thiz);
+    private static native long nativeCreateDetector(String cascadeName, int minFaceSize);
+    private static native long nativeCreateTracker(long mainDetector, long trackingDetector);
+    private static native void nativeDestroyTracker(long tracker);
+    private static native void nativeDestroyDetector(long detector);
     private static native void nativeStart(long thiz);
     private static native void nativeStop(long thiz);
-    private static native void nativeSetFaceSize(long thiz, int size);
+    private static native void nativeSetFaceSize(long detector, int size);
     private static native void nativeDetect(long thiz, long inputImage, long faces);

     static
@@ -3,7 +3,7 @@
 #
 # ----------------------------------------------------------------------------

-SET(OPENCV_CPP_SAMPLES_REQUIRED_DEPS opencv_core opencv_flann opencv_imgproc
+SET(OPENCV_CPP_SAMPLES_REQUIRED_DEPS opencv_core_vision_api opencv_core opencv_flann opencv_imgproc
     opencv_highgui opencv_ml opencv_video opencv_objdetect opencv_photo opencv_nonfree
     opencv_features2d opencv_calib3d opencv_legacy opencv_contrib opencv_stitching opencv_videostab)

samples/cpp/core_vision_tracking_image.cpp (new file, 55 lines)
@@ -0,0 +1,55 @@
+#include <opencv2/core/core.hpp> // Basic OpenCV structures (cv::Mat, Scalar)
+#include <opencv2/highgui/highgui.hpp> // OpenCV window I/O
+#include <opencv2/core_vision_api/tracker.hpp>
+
+#include <stdio.h>
+#include <string>
+#include <vector>
+
+using namespace std;
+using namespace cv;
+
+const string WindowName = "Face Detection example";
+const Scalar RectColor = CV_RGB(0,255,0);
+
+int main()
+{
+    namedWindow(WindowName);
+    cv::moveWindow(WindowName, 100, 100);
+
+    Mat Viewport;
+    Mat ReferenceFrame = imread("board.jpg");
+    if (ReferenceFrame.empty())
+    {
+        printf("Error: Cannot load input image\n");
+        return 1;
+    }
+
+    cv::Ptr<nv::Tracker> tracker = nv::Algorithm::create<nv::Tracker>("nv::Tracker::OpticalFlow");
+
+    tracker->initialize();
+
+    // First frame for initialization
+    tracker->feed(ReferenceFrame);
+
+    nv::Tracker::TrackedObjectHandler obj = tracker->addObject(cv::Rect(100,100, 200, 200));
+
+    while(true)
+    {
+        tracker->feed(ReferenceFrame);
+
+        if (obj->getStatus() == nv::Tracker::LOST_STATUS)
+            break;
+
+        cv::Rect currentLocation = obj->getLocation();
+
+        ReferenceFrame.copyTo(Viewport);
+        rectangle(Viewport, currentLocation, RectColor);
+
+        imshow(WindowName, Viewport);
+
+        if (cvWaitKey(30) >= 0) break;
+    }
+
+    return 0;
+}
|
104
samples/cpp/dbt_face_detection.cpp
Normal file
104
samples/cpp/dbt_face_detection.cpp
Normal file
@ -0,0 +1,104 @@
|
|||||||
|
#if 0 //defined(__linux__) || defined(LINUX) || defined(__APPLE__) || defined(ANDROID)
|
||||||
|
|
||||||
|
#include <opencv2/imgproc/imgproc.hpp> // Gaussian Blur
|
||||||
|
#include <opencv2/core/core.hpp> // Basic OpenCV structures (cv::Mat, Scalar)
|
||||||
|
#include <opencv2/highgui/highgui.hpp> // OpenCV window I/O
|
||||||
|
#include <opencv2/features2d/features2d.hpp>
|
||||||
|
#include <opencv2/contrib/detection_based_tracker.hpp>
|
||||||
|
|
||||||
|
#include <stdio.h>
|
||||||
|
#include <string>
|
||||||
|
#include <vector>
|
||||||
|
|
||||||
|
using namespace std;
|
||||||
|
using namespace cv;
|
||||||
|
|
||||||
|
const string WindowName = "Face Detection example";
|
||||||
|
|
||||||
|
class CascadeDetectorAdapter: public DetectionBasedTracker::IDetector
|
||||||
|
{
|
||||||
|
public:
|
||||||
|
CascadeDetectorAdapter(cv::Ptr<cv::CascadeClassifier> detector):
|
||||||
|
IDetector(),
|
||||||
|
Detector(detector)
|
||||||
|
{
|
||||||
|
CV_Assert(!detector.empty());
|
||||||
|
}
|
||||||
|
|
||||||
|
void detect(const cv::Mat &Image, std::vector<cv::Rect> &objects)
|
||||||
|
{
|
||||||
|
Detector->detectMultiScale(Image, objects, scaleFactor, minNeighbours, 0, minObjSize, maxObjSize);
|
||||||
|
}
|
||||||
|
|
||||||
|
virtual ~CascadeDetectorAdapter()
|
||||||
|
{}
|
||||||
|
|
||||||
|
private:
|
||||||
|
CascadeDetectorAdapter();
|
||||||
|
cv::Ptr<cv::CascadeClassifier> Detector;
|
||||||
|
};
|
||||||
|
|
||||||
|
int main(int argc, char* argv[])
|
||||||
|
{
|
||||||
|
namedWindow(WindowName);
|
||||||
|
|
||||||
|
VideoCapture VideoStream(0);
|
||||||
|
|
||||||
|
if (!VideoStream.isOpened())
|
||||||
|
{
|
||||||
|
printf("Error: Cannot open video stream from camera\n");
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::string cascadeFrontalfilename = "../../data/lbpcascades/lbpcascade_frontalface.xml";
|
||||||
|
cv::Ptr<cv::CascadeClassifier> cascade = new cv::CascadeClassifier(cascadeFrontalfilename);
|
||||||
|
cv::Ptr<DetectionBasedTracker::IDetector> MainDetector = new CascadeDetectorAdapter(cascade);
|
||||||
|
|
||||||
|
cascade = new cv::CascadeClassifier(cascadeFrontalfilename);
|
||||||
|
cv::Ptr<DetectionBasedTracker::IDetector> TrackingDetector = new CascadeDetectorAdapter(cascade);
|
||||||
|
|
||||||
|
DetectionBasedTracker::Parameters params;
|
||||||
|
DetectionBasedTracker Detector(MainDetector, TrackingDetector, params);
|
||||||
|
|
||||||
|
if (!Detector.run())
|
||||||
|
{
|
||||||
|
printf("Error: Detector initialization failed\n");
|
||||||
|
return 2;
|
||||||
|
}
|
||||||
|
|
||||||
|
Mat ReferenceFrame;
|
||||||
|
Mat GrayFrame;
|
||||||
|
vector<Rect> Faces;
|
||||||
|
|
||||||
|
while(true)
|
||||||
|
{
|
||||||
|
VideoStream >> ReferenceFrame;
|
||||||
|
cvtColor(ReferenceFrame, GrayFrame, COLOR_RGB2GRAY);
|
||||||
|
Detector.process(GrayFrame);
|
||||||
|
Detector.getObjects(Faces);
|
||||||
|
|
||||||
|
for (size_t i = 0; i < Faces.size(); i++)
|
||||||
|
{
|
||||||
|
rectangle(ReferenceFrame, Faces[i], CV_RGB(0,255,0));
|
||||||
|
}
|
||||||
|
|
||||||
|
imshow(WindowName, ReferenceFrame);
|
||||||
|
|
||||||
|
if (cvWaitKey(30) >= 0) break;
|
||||||
|
}
|
||||||
|
|
||||||
|
Detector.stop();
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
#else
|
||||||
|
|
||||||
|
#include <stdio.h>
|
||||||
|
int main()
|
||||||
|
{
|
||||||
|
printf("This sample works for UNIX or ANDROID only\n");
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
#endif
|
@@ -43,8 +43,6 @@
 #define LOGE(...) do{} while(0)
 #endif

-
-
 using namespace cv;
 using namespace std;

@@ -63,9 +61,31 @@ static void usage()
     LOGE0("\t (e.g.\"opencv/data/lbpcascades/lbpcascade_frontalface.xml\" ");
 }

+class CascadeDetectorAdapter: public DetectionBasedTracker::IDetector
+{
+    public:
+        CascadeDetectorAdapter(cv::Ptr<cv::CascadeClassifier> detector):
+            Detector(detector)
+        {
+            CV_Assert(!detector.empty());
+        }
+
+        void detect(const cv::Mat &Image, std::vector<cv::Rect> &objects)
+        {
+            Detector->detectMultiScale(Image, objects, 1.1, 3, 0, minObjSize, maxObjSize);
+        }
+        virtual ~CascadeDetectorAdapter()
+        {}
+
+    private:
+        CascadeDetectorAdapter();
+        cv::Ptr<cv::CascadeClassifier> Detector;
+};
+
 static int test_FaceDetector(int argc, char *argv[])
 {
-    if (argc < 4) {
+    if (argc < 4)
+    {
         usage();
         return -1;
     }
@@ -80,12 +100,14 @@ static int test_FaceDetector(int argc, char *argv[])
     vector<Mat> images;
     {
         char filename[256];
-        for(int n=1; ; n++) {
+        for(int n=1; ; n++)
+        {
             snprintf(filename, sizeof(filename), filepattern, n);
             LOGD("filename='%s'", filename);
             Mat m0;
             m0=imread(filename);
-            if (m0.empty()) {
+            if (m0.empty())
+            {
                 LOGI0("Cannot read the file --- break");
                 break;
             }
@@ -94,10 +116,15 @@ static int test_FaceDetector(int argc, char *argv[])
         LOGD("read %d images", (int)images.size());
     }

-    DetectionBasedTracker::Parameters params;
     std::string cascadeFrontalfilename=cascadefile;
+    cv::Ptr<cv::CascadeClassifier> cascade = new cv::CascadeClassifier(cascadeFrontalfilename);
+    cv::Ptr<DetectionBasedTracker::IDetector> MainDetector = new CascadeDetectorAdapter(cascade);
+
+    cascade = new cv::CascadeClassifier(cascadeFrontalfilename);
+    cv::Ptr<DetectionBasedTracker::IDetector> TrackingDetector = new CascadeDetectorAdapter(cascade);

-    DetectionBasedTracker fd(cascadeFrontalfilename, params);
+    DetectionBasedTracker::Parameters params;
+    DetectionBasedTracker fd(MainDetector, TrackingDetector, params);

     fd.run();

@@ -108,12 +135,13 @@ static int test_FaceDetector(int argc, char *argv[])
     double freq=getTickFrequency();

     int num_images=images.size();
-    for(int n=1; n <= num_images; n++) {
+    for(int n=1; n <= num_images; n++)
+    {
         int64 tcur=getTickCount();
         int64 dt=tcur-tprev;
         tprev=tcur;
         double t_ms=((double)dt)/freq * 1000.0;
-        LOGD("\n\nSTEP n=%d from prev step %f ms\n\n", n, t_ms);
+        LOGD("\n\nSTEP n=%d from prev step %f ms\n", n, t_ms);
         m=images[n-1];
         CV_Assert(! m.empty());
         cvtColor(m, gray, CV_BGR2GRAY);
@@ -123,11 +151,8 @@ static int test_FaceDetector(int argc, char *argv[])
         vector<Rect> result;
         fd.getObjects(result);

-        for(size_t i=0; i < result.size(); i++) {
+        for(size_t i=0; i < result.size(); i++)
+        {
             Rect r=result[i];
             CV_Assert(r.area() > 0);
             Point tl=r.tl();
@@ -136,23 +161,21 @@ static int test_FaceDetector(int argc, char *argv[])
             rectangle(m, tl, br, color, 3);
         }
     }
-    {
         char outfilename[256];
-        for(int n=1; n <= num_images; n++) {
+        for(int n=1; n <= num_images; n++)
+        {
             snprintf(outfilename, sizeof(outfilename), outfilepattern, n);
             LOGD("outfilename='%s'", outfilename);
             m=images[n-1];
             imwrite(outfilename, m);
         }
-    }

     fd.stop();

     return 0;
 }

 int main(int argc, char *argv[])
 {
     return test_FaceDetector(argc, argv);