Merge branch 'android_camera_2.4' into 2.4

Andrey Kamaev 2012-08-24 16:55:44 +04:00
commit 0948f4f863
34 changed files with 1349 additions and 1439 deletions


@ -39,7 +39,8 @@ using namespace std;
static inline cv::Point2f centerRect(const cv::Rect& r) static inline cv::Point2f centerRect(const cv::Rect& r)
{ {
return cv::Point2f(r.x+((float)r.width)/2, r.y+((float)r.height)/2); return cv::Point2f(r.x+((float)r.width)/2, r.y+((float)r.height)/2);
}; }
static inline cv::Rect scale_rect(const cv::Rect& r, float scale) static inline cv::Rect scale_rect(const cv::Rect& r, float scale)
{ {
cv::Point2f m=centerRect(r); cv::Point2f m=centerRect(r);
@ -49,9 +50,10 @@ static inline cv::Rect scale_rect(const cv::Rect& r, float scale)
int y=cvRound(m.y - height/2); int y=cvRound(m.y - height/2);
return cv::Rect(x, y, cvRound(width), cvRound(height)); return cv::Rect(x, y, cvRound(width), cvRound(height));
}; }
void* workcycleObjectDetectorFunction(void* p); void* workcycleObjectDetectorFunction(void* p);
class DetectionBasedTracker::SeparateDetectionWork class DetectionBasedTracker::SeparateDetectionWork
{ {
public: public:
@ -61,6 +63,7 @@ class DetectionBasedTracker::SeparateDetectionWork
bool run(); bool run();
void stop(); void stop();
void resetTracking(); void resetTracking();
inline bool isWorking() inline bool isWorking()
{ {
return (stateThread==STATE_THREAD_WORKING_SLEEPING) || (stateThread==STATE_THREAD_WORKING_WITH_IMAGE); return (stateThread==STATE_THREAD_WORKING_SLEEPING) || (stateThread==STATE_THREAD_WORKING_WITH_IMAGE);
@ -165,33 +168,33 @@ bool DetectionBasedTracker::SeparateDetectionWork::run()
} }
#ifdef __GNUC__ #ifdef __GNUC__
#define CATCH_ALL_AND_LOG(_block) \ #define CATCH_ALL_AND_LOG(_block) \
do { \ do { \
try { \ try { \
_block; \ _block; \
break; \ break; \
} \ } \
catch(cv::Exception& e) { \ catch(cv::Exception& e) { \
LOGE0("\n %s: ERROR: OpenCV Exception caught: \n'%s'\n\n", __func__, e.what()); \ LOGE0("\n %s: ERROR: OpenCV Exception caught: \n'%s'\n\n", __func__, e.what()); \
} catch(std::exception& e) { \ } catch(std::exception& e) { \
LOGE0("\n %s: ERROR: Exception caught: \n'%s'\n\n", __func__, e.what()); \ LOGE0("\n %s: ERROR: Exception caught: \n'%s'\n\n", __func__, e.what()); \
} catch(...) { \ } catch(...) { \
LOGE0("\n %s: ERROR: UNKNOWN Exception caught\n\n", __func__); \ LOGE0("\n %s: ERROR: UNKNOWN Exception caught\n\n", __func__); \
} \ } \
} while(0) } while(0)
#else #else
#define CATCH_ALL_AND_LOG(_block) \ #define CATCH_ALL_AND_LOG(_block) \
do { \ do { \
try { \ try { \
_block; \ _block; \
break; \ break; \
} \ } \
catch(cv::Exception& e) { \ catch(cv::Exception& e) { \
LOGE0("\n ERROR: OpenCV Exception caught: \n'%s'\n\n", e.what()); \ LOGE0("\n ERROR: OpenCV Exception caught: \n'%s'\n\n", e.what()); \
} catch(std::exception& e) { \ } catch(std::exception& e) { \
LOGE0("\n ERROR: Exception caught: \n'%s'\n\n", e.what()); \ LOGE0("\n ERROR: Exception caught: \n'%s'\n\n", e.what()); \
} catch(...) { \ } catch(...) { \
LOGE0("\n ERROR: UNKNOWN Exception caught\n\n"); \ LOGE0("\n ERROR: UNKNOWN Exception caught\n\n"); \
} \ } \
} while(0) } while(0)
#endif #endif
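
Note: the CATCH_ALL_AND_LOG macro above exists so that a worker thread's body can throw without letting an exception unwind across the pthread boundary; everything is logged and the enclosing do/while(0) is left via break. A minimal self-contained sketch of the same pattern (LOGE0 is replaced by printf here, and worker() is a hypothetical stand-in for the detection loop):

#include <cstdio>
#include <exception>
#include <opencv2/core/core.hpp>

#define LOGE0(...) std::printf(__VA_ARGS__)   // stand-in for the Android logging macro

#define CATCH_ALL_AND_LOG(_block)                                                      \
    do {                                                                               \
        try { _block; break; }                                                         \
        catch(cv::Exception& e)  { LOGE0("OpenCV Exception caught: %s\n", e.what()); } \
        catch(std::exception& e) { LOGE0("Exception caught: %s\n", e.what()); }        \
        catch(...)               { LOGE0("UNKNOWN Exception caught\n"); }              \
    } while(0)

static void worker() { /* detection work that may throw */ }

// Typical use: protect the whole thread body so nothing propagates out of it.
void* workcycleFunctionSketch(void*)
{
    CATCH_ALL_AND_LOG( worker() );
    return NULL;
}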
@ -275,7 +278,7 @@ void DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector()
int64 t1_detect=getTickCount(); int64 t1_detect=getTickCount();
int minObjectSize=detectionBasedTracker.parameters.minObjectSize; int minObjectSize=detectionBasedTracker.parameters.minObjectSize;
Size min_objectSize=Size(minObjectSize, minObjectSize); Size min_objectSize = Size(minObjectSize, minObjectSize);
int maxObjectSize=detectionBasedTracker.parameters.maxObjectSize; int maxObjectSize=detectionBasedTracker.parameters.maxObjectSize;
Size max_objectSize(maxObjectSize, maxObjectSize); Size max_objectSize(maxObjectSize, maxObjectSize);
@ -295,8 +298,8 @@ void DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector()
break; break;
} }
int64 t2_detect=getTickCount(); int64 t2_detect = getTickCount();
int64 dt_detect=t2_detect-t1_detect; int64 dt_detect = t2_detect-t1_detect;
double dt_detect_ms=((double)dt_detect)/freq * 1000.0; double dt_detect_ms=((double)dt_detect)/freq * 1000.0;
LOGI("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- objects num==%d, t_ms=%.4f", (int)objects.size(), dt_detect_ms); LOGI("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- objects num==%d, t_ms=%.4f", (int)objects.size(), dt_detect_ms);
@ -375,26 +378,26 @@ bool DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThrea
{ {
static double freq = getTickFrequency(); static double freq = getTickFrequency();
bool shouldCommunicateWithDetectingThread=(stateThread==STATE_THREAD_WORKING_SLEEPING); bool shouldCommunicateWithDetectingThread = (stateThread==STATE_THREAD_WORKING_SLEEPING);
LOGD("DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread: shouldCommunicateWithDetectingThread=%d", (shouldCommunicateWithDetectingThread?1:0)); LOGD("DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread: shouldCommunicateWithDetectingThread=%d", (shouldCommunicateWithDetectingThread?1:0));
if (!shouldCommunicateWithDetectingThread) { if (!shouldCommunicateWithDetectingThread) {
return false; return false;
} }
bool shouldHandleResult=false; bool shouldHandleResult = false;
pthread_mutex_lock(&mutex); pthread_mutex_lock(&mutex);
if (isObjectDetectingReady) { if (isObjectDetectingReady) {
shouldHandleResult=true; shouldHandleResult=true;
rectsWhereRegions=resultDetect; rectsWhereRegions = resultDetect;
isObjectDetectingReady=false; isObjectDetectingReady=false;
double lastBigDetectionDuration=1000.0 * (((double)(getTickCount() - timeWhenDetectingThreadStartedWork )) / freq); double lastBigDetectionDuration = 1000.0 * (((double)(getTickCount() - timeWhenDetectingThreadStartedWork )) / freq);
LOGD("DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread: lastBigDetectionDuration=%f ms", (double)lastBigDetectionDuration); LOGD("DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread: lastBigDetectionDuration=%f ms", (double)lastBigDetectionDuration);
} }
bool shouldSendNewDataToWorkThread=true; bool shouldSendNewDataToWorkThread = true;
if (timeWhenDetectingThreadStartedWork > 0) { if (timeWhenDetectingThreadStartedWork > 0) {
double time_from_previous_launch_in_ms=1000.0 * (((double)(getTickCount() - timeWhenDetectingThreadStartedWork )) / freq); //the same formula as for lastBigDetectionDuration double time_from_previous_launch_in_ms=1000.0 * (((double)(getTickCount() - timeWhenDetectingThreadStartedWork )) / freq); //the same formula as for lastBigDetectionDuration
shouldSendNewDataToWorkThread = (time_from_previous_launch_in_ms >= detectionBasedTracker.parameters.minDetectionPeriod); shouldSendNewDataToWorkThread = (time_from_previous_launch_in_ms >= detectionBasedTracker.parameters.minDetectionPeriod);
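
Note: the assignments above only change spacing, but the throttle they implement is easy to miss: a new frame is handed to the detecting thread only if at least parameters.minDetectionPeriod milliseconds have passed since the previous detection was launched. A standalone sketch of that check using the same OpenCV tick counters (the function name is illustrative):

#include <opencv2/core/core.hpp>

// Return true when enough time has passed to start another full detection pass.
static bool shouldSendNewDataToWorkThread(int64 timeWhenDetectingThreadStartedWork,
                                          double minDetectionPeriodMs)
{
    if (timeWhenDetectingThreadStartedWork <= 0)
        return true;                               // no previous launch yet

    static double freq = cv::getTickFrequency();
    double msSincePreviousLaunch =
        1000.0 * ((double)(cv::getTickCount() - timeWhenDetectingThreadStartedWork)) / freq;

    return msSincePreviousLaunch >= minDetectionPeriodMs;
}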
@ -430,8 +433,6 @@ DetectionBasedTracker::Parameters::Parameters()
minDetectionPeriod=0; minDetectionPeriod=0;
} }
DetectionBasedTracker::InnerParameters::InnerParameters() DetectionBasedTracker::InnerParameters::InnerParameters()
{ {
numLastPositionsToTrack=4; numLastPositionsToTrack=4;
@ -444,6 +445,7 @@ DetectionBasedTracker::InnerParameters::InnerParameters()
coeffObjectSpeedUsingInPrediction=0.8; coeffObjectSpeedUsingInPrediction=0.8;
} }
DetectionBasedTracker::DetectionBasedTracker(const std::string& cascadeFilename, const Parameters& params) DetectionBasedTracker::DetectionBasedTracker(const std::string& cascadeFilename, const Parameters& params)
:separateDetectionWork(), :separateDetectionWork(),
innerParameters(), innerParameters(),
@ -468,15 +470,13 @@ DetectionBasedTracker::DetectionBasedTracker(const std::string& cascadeFilename,
weightsSizesSmoothing.push_back(0.2); weightsSizesSmoothing.push_back(0.2);
} }
DetectionBasedTracker::~DetectionBasedTracker() DetectionBasedTracker::~DetectionBasedTracker()
{ {
} }
void DetectionBasedTracker::process(const Mat& imageGray) void DetectionBasedTracker::process(const Mat& imageGray)
{ {
CV_Assert(imageGray.type()==CV_8UC1); CV_Assert(imageGray.type()==CV_8UC1);
if (!separateDetectionWork->isWorking()) { if (!separateDetectionWork->isWorking()) {
@ -494,38 +494,36 @@ void DetectionBasedTracker::process(const Mat& imageGray)
Mat imageDetect=imageGray; Mat imageDetect=imageGray;
int D=parameters.minObjectSize; int D = parameters.minObjectSize;
if (D < 1) if (D < 1)
D=1; D=1;
vector<Rect> rectsWhereRegions; vector<Rect> rectsWhereRegions;
bool shouldHandleResult=separateDetectionWork->communicateWithDetectingThread(imageGray, rectsWhereRegions); bool shouldHandleResult=separateDetectionWork->communicateWithDetectingThread(imageGray, rectsWhereRegions);
if (shouldHandleResult) { if (shouldHandleResult) {
LOGD("DetectionBasedTracker::process: get _rectsWhereRegions were got from resultDetect"); LOGD("DetectionBasedTracker::process: get _rectsWhereRegions were got from resultDetect");
} else { } else {
LOGD("DetectionBasedTracker::process: get _rectsWhereRegions from previous positions"); LOGD("DetectionBasedTracker::process: get _rectsWhereRegions from previous positions");
for(size_t i=0; i < trackedObjects.size(); i++) { for(size_t i = 0; i < trackedObjects.size(); i++) {
int n=trackedObjects[i].lastPositions.size(); int n = trackedObjects[i].lastPositions.size();
CV_Assert(n > 0); CV_Assert(n > 0);
Rect r=trackedObjects[i].lastPositions[n-1]; Rect r = trackedObjects[i].lastPositions[n-1];
if(r.area()==0) { if(r.area() == 0) {
LOGE("DetectionBasedTracker::process: ERROR: ATTENTION: strange algorithm's behavior: trackedObjects[i].rect() is empty"); LOGE("DetectionBasedTracker::process: ERROR: ATTENTION: strange algorithm's behavior: trackedObjects[i].rect() is empty");
continue; continue;
} }
//correction by speed of rectangle // correction by speed of rectangle
if (n > 1) { if (n > 1) {
Point2f center=centerRect(r); Point2f center = centerRect(r);
Point2f center_prev=centerRect(trackedObjects[i].lastPositions[n-2]); Point2f center_prev = centerRect(trackedObjects[i].lastPositions[n-2]);
Point2f shift=(center - center_prev) * innerParameters.coeffObjectSpeedUsingInPrediction; Point2f shift = (center - center_prev) * innerParameters.coeffObjectSpeedUsingInPrediction;
r.x+=cvRound(shift.x); r.x += cvRound(shift.x);
r.y+=cvRound(shift.y); r.y += cvRound(shift.y);
} }
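
Note: when no fresh detection result is available, the loop above seeds the next search regions from the tracked objects' last rectangles, shifted along their recent motion scaled by innerParameters.coeffObjectSpeedUsingInPrediction. A minimal sketch of that prediction step (the function name is illustrative):

#include <opencv2/core/core.hpp>

// Predict where a tracked rectangle will be, given its last two positions.
static cv::Rect predictNextPosition(const cv::Rect& last, const cv::Rect& previous,
                                    float speedCoeff)
{
    cv::Point2f center(last.x + last.width / 2.0f, last.y + last.height / 2.0f);
    cv::Point2f centerPrev(previous.x + previous.width / 2.0f,
                           previous.y + previous.height / 2.0f);

    // Shift the last known rectangle along the recent motion vector.
    cv::Point2f shift = (center - centerPrev) * speedCoeff;

    cv::Rect predicted = last;
    predicted.x += cvRound(shift.x);
    predicted.y += cvRound(shift.y);
    return predicted;
}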
@ -538,7 +536,7 @@ void DetectionBasedTracker::process(const Mat& imageGray)
LOGD("DetectionBasedTracker::process: rectsWhereRegions.size()=%d", (int)rectsWhereRegions.size()); LOGD("DetectionBasedTracker::process: rectsWhereRegions.size()=%d", (int)rectsWhereRegions.size());
for(size_t i=0; i < rectsWhereRegions.size(); i++) { for(size_t i=0; i < rectsWhereRegions.size(); i++) {
Rect r=rectsWhereRegions[i]; Rect r = rectsWhereRegions[i];
detectInRegion(imageDetect, r, detectedObjectsInRegions); detectInRegion(imageDetect, r, detectedObjectsInRegions);
} }
@ -560,6 +558,7 @@ void DetectionBasedTracker::getObjects(std::vector<cv::Rect>& result) const
LOGD("DetectionBasedTracker::process: found a object with SIZE %d x %d, rect={%d, %d, %d x %d}", r.width, r.height, r.x, r.y, r.width, r.height); LOGD("DetectionBasedTracker::process: found a object with SIZE %d x %d, rect={%d, %d, %d x %d}", r.width, r.height, r.x, r.y, r.width, r.height);
} }
} }
void DetectionBasedTracker::getObjects(std::vector<Object>& result) const void DetectionBasedTracker::getObjects(std::vector<Object>& result) const
{ {
result.clear(); result.clear();
@ -574,8 +573,6 @@ void DetectionBasedTracker::getObjects(std::vector<Object>& result) const
} }
} }
bool DetectionBasedTracker::run() bool DetectionBasedTracker::run()
{ {
return separateDetectionWork->run(); return separateDetectionWork->run();
@ -711,6 +708,7 @@ void DetectionBasedTracker::updateTrackedObjects(const vector<Rect>& detectedObj
} }
} }
} }
Rect DetectionBasedTracker::calcTrackedObjectPositionToShow(int i) const Rect DetectionBasedTracker::calcTrackedObjectPositionToShow(int i) const
{ {
if ( (i < 0) || (i >= (int)trackedObjects.size()) ) { if ( (i < 0) || (i >= (int)trackedObjects.size()) ) {
@ -795,15 +793,16 @@ Rect DetectionBasedTracker::calcTrackedObjectPositionToShow(int i) const
void DetectionBasedTracker::detectInRegion(const Mat& img, const Rect& r, vector<Rect>& detectedObjectsInRegions) void DetectionBasedTracker::detectInRegion(const Mat& img, const Rect& r, vector<Rect>& detectedObjectsInRegions)
{ {
Rect r0(Point(), img.size()); Rect r0(Point(), img.size());
Rect r1=scale_rect(r, innerParameters.coeffTrackingWindowSize); Rect r1 = scale_rect(r, innerParameters.coeffTrackingWindowSize);
r1=r1 & r0; r1 = r1 & r0;
if ( (r1.width <=0) || (r1.height <= 0) ) { if ( (r1.width <=0) || (r1.height <= 0) ) {
LOGD("DetectionBasedTracker::detectInRegion: Empty intersection"); LOGD("DetectionBasedTracker::detectInRegion: Empty intersection");
return; return;
} }
int d=std::min(r.width, r.height); int d = std::min(r.width, r.height);
d=cvRound(d * innerParameters.coeffObjectSizeToTrack); d = cvRound(d * innerParameters.coeffObjectSizeToTrack);
vector<Rect> tmpobjects; vector<Rect> tmpobjects;
@ -811,7 +810,7 @@ void DetectionBasedTracker::detectInRegion(const Mat& img, const Rect& r, vector
LOGD("DetectionBasedTracker::detectInRegion: img1.size()=%d x %d, d=%d", LOGD("DetectionBasedTracker::detectInRegion: img1.size()=%d x %d, d=%d",
img1.size().width, img1.size().height, d); img1.size().width, img1.size().height, d);
int maxObjectSize=parameters.maxObjectSize; int maxObjectSize = parameters.maxObjectSize;
Size max_objectSize(maxObjectSize, maxObjectSize); Size max_objectSize(maxObjectSize, maxObjectSize);
cascadeForTracking.detectMultiScale( img1, tmpobjects, cascadeForTracking.detectMultiScale( img1, tmpobjects,
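
Note: detectInRegion(), shown partially above, restricts the cascade to a window around the previously tracked rectangle: scale the rectangle, clamp it to the image, then run detectMultiScale on that ROI with a minimum size derived from the tracked object. A self-contained sketch of the idea (the scale factor, minNeighbors and the helper below are illustrative, not the file's exact values):

#include <algorithm>
#include <vector>
#include <opencv2/core/core.hpp>
#include <opencv2/objdetect/objdetect.hpp>

// Enlarge a rectangle around its center, as scale_rect() does above.
static cv::Rect enlargeAroundCenter(const cv::Rect& r, float scale)
{
    cv::Point2f c(r.x + r.width / 2.0f, r.y + r.height / 2.0f);
    float w = r.width * scale, h = r.height * scale;
    return cv::Rect(cvRound(c.x - w / 2), cvRound(c.y - h / 2), cvRound(w), cvRound(h));
}

static void detectInRegionSketch(cv::CascadeClassifier& cascade, const cv::Mat& img,
                                 const cv::Rect& r, float windowScale,
                                 float objectSizeCoeff, std::vector<cv::Rect>& found)
{
    cv::Rect r1 = enlargeAroundCenter(r, windowScale) & cv::Rect(cv::Point(), img.size());
    if (r1.width <= 0 || r1.height <= 0)
        return;                                       // empty intersection, nothing to do

    int d = cvRound(std::min(r.width, r.height) * objectSizeCoeff);

    std::vector<cv::Rect> tmp;
    cascade.detectMultiScale(img(r1), tmp, 1.1, 2, 0, cv::Size(d, d));

    // Detections come back in ROI coordinates; shift them to image coordinates.
    for (size_t i = 0; i < tmp.size(); i++)
    {
        tmp[i].x += r1.x;
        tmp[i].y += r1.y;
        found.push_back(tmp[i]);
    }
}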


@ -48,12 +48,15 @@
#include <android/log.h> #include <android/log.h>
#include <camera_activity.hpp> #include <camera_activity.hpp>
#if !defined(LOGD) && !defined(LOGI) && !defined(LOGE) //#if !defined(LOGD) && !defined(LOGI) && !defined(LOGE)
#undef LOGD
#undef LOGE
#undef LOGI
#define LOG_TAG "CV_CAP" #define LOG_TAG "CV_CAP"
#define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)) #define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))
#define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)) #define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__))
#define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)) #define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__))
#endif //#endif
class HighguiAndroidCameraActivity; class HighguiAndroidCameraActivity;
@ -86,8 +89,8 @@ protected:
//raw from camera //raw from camera
int m_width; int m_width;
int m_height; int m_height;
unsigned char *m_frameYUV420; cv::Mat m_frameYUV420;
unsigned char *m_frameYUV420next; cv::Mat m_frameYUV420next;
enum YUVformat enum YUVformat
{ {
@ -115,9 +118,9 @@ private:
bool m_hasColor; bool m_hasColor;
enum CvCapture_Android_DataState { enum CvCapture_Android_DataState {
CVCAPTURE_ANDROID_STATE_NO_FRAME=0, CVCAPTURE_ANDROID_STATE_NO_FRAME=0,
CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED, CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED,
CVCAPTURE_ANDROID_STATE_HAS_FRAME_GRABBED CVCAPTURE_ANDROID_STATE_HAS_FRAME_GRABBED
}; };
volatile CvCapture_Android_DataState m_dataState; volatile CvCapture_Android_DataState m_dataState;
@ -189,8 +192,8 @@ CvCapture_Android::CvCapture_Android(int cameraId)
m_height = 0; m_height = 0;
m_activity = 0; m_activity = 0;
m_isOpened = false; m_isOpened = false;
m_frameYUV420 = 0; // m_frameYUV420 = 0;
m_frameYUV420next = 0; // m_frameYUV420next = 0;
m_hasGray = false; m_hasGray = false;
m_hasColor = false; m_hasColor = false;
m_dataState = CVCAPTURE_ANDROID_STATE_NO_FRAME; m_dataState = CVCAPTURE_ANDROID_STATE_NO_FRAME;
@ -231,20 +234,19 @@ CvCapture_Android::~CvCapture_Android()
{ {
((HighguiAndroidCameraActivity*)m_activity)->LogFramesRate(); ((HighguiAndroidCameraActivity*)m_activity)->LogFramesRate();
pthread_mutex_lock(&m_nextFrameMutex);
pthread_mutex_lock(&m_nextFrameMutex); // unsigned char *tmp1=m_frameYUV420;
// unsigned char *tmp2=m_frameYUV420next;
// m_frameYUV420 = 0;
// m_frameYUV420next = 0;
// delete tmp1;
// delete tmp2;
unsigned char *tmp1=m_frameYUV420; m_dataState=CVCAPTURE_ANDROID_STATE_NO_FRAME;
unsigned char *tmp2=m_frameYUV420next; pthread_cond_broadcast(&m_nextFrameCond);
m_frameYUV420 = 0;
m_frameYUV420next = 0;
delete tmp1;
delete tmp2;
m_dataState=CVCAPTURE_ANDROID_STATE_NO_FRAME; pthread_mutex_unlock(&m_nextFrameMutex);
pthread_cond_broadcast(&m_nextFrameCond);
pthread_mutex_unlock(&m_nextFrameMutex);
//m_activity->disconnect() will be automatically called inside destructor; //m_activity->disconnect() will be automatically called inside destructor;
delete m_activity; delete m_activity;
@ -257,7 +259,7 @@ CvCapture_Android::~CvCapture_Android()
double CvCapture_Android::getProperty( int propIdx ) double CvCapture_Android::getProperty( int propIdx )
{ {
switch ( propIdx ) switch ( propIdx )
{ {
case CV_CAP_PROP_FRAME_WIDTH: case CV_CAP_PROP_FRAME_WIDTH:
return (double)m_activity->getFrameWidth(); return (double)m_activity->getFrameWidth();
@ -308,7 +310,7 @@ bool CvCapture_Android::setProperty( int propIdx, double propValue )
m_activity->setProperty(ANDROID_CAMERA_PROPERTY_FRAMEHEIGHT, propValue); m_activity->setProperty(ANDROID_CAMERA_PROPERTY_FRAMEHEIGHT, propValue);
break; break;
case CV_CAP_PROP_AUTOGRAB: case CV_CAP_PROP_AUTOGRAB:
m_shouldAutoGrab=(propValue != 0); m_shouldAutoGrab=(propValue != 0);
break; break;
case CV_CAP_PROP_EXPOSURE: case CV_CAP_PROP_EXPOSURE:
m_activity->setProperty(ANDROID_CAMERA_PROPERTY_EXPOSURE, propValue); m_activity->setProperty(ANDROID_CAMERA_PROPERTY_EXPOSURE, propValue);
@ -327,13 +329,13 @@ bool CvCapture_Android::setProperty( int propIdx, double propValue )
break; break;
default: default:
CV_Error( CV_StsOutOfRange, "Failed attempt to SET unsupported camera property." ); CV_Error( CV_StsOutOfRange, "Failed attempt to SET unsupported camera property." );
return false; return false;
} }
if (propIdx != CV_CAP_PROP_AUTOGRAB) {// property for highgui class CvCapture_Android only if (propIdx != CV_CAP_PROP_AUTOGRAB) {// property for highgui class CvCapture_Android only
m_CameraParamsChanged = true; m_CameraParamsChanged = true;
} }
res = true; res = true;
} }
return res; return res;
@ -342,8 +344,8 @@ bool CvCapture_Android::setProperty( int propIdx, double propValue )
bool CvCapture_Android::grabFrame() bool CvCapture_Android::grabFrame()
{ {
if( !isOpened() ) { if( !isOpened() ) {
LOGE("CvCapture_Android::grabFrame(): camera is not opened"); LOGE("CvCapture_Android::grabFrame(): camera is not opened");
return false; return false;
} }
bool res=false; bool res=false;
@ -352,38 +354,38 @@ bool CvCapture_Android::grabFrame()
{ {
m_activity->applyProperties(); m_activity->applyProperties();
m_CameraParamsChanged = false; m_CameraParamsChanged = false;
m_dataState= CVCAPTURE_ANDROID_STATE_NO_FRAME;//we will wait new frame m_dataState= CVCAPTURE_ANDROID_STATE_NO_FRAME;//we will wait new frame
} }
if (m_dataState!=CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED) { if (m_dataState!=CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED)
m_waitingNextFrame = true; {
pthread_cond_wait(&m_nextFrameCond, &m_nextFrameMutex); m_waitingNextFrame = true;
pthread_cond_wait(&m_nextFrameCond, &m_nextFrameMutex);
} }
if (m_dataState == CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED) { if (m_dataState == CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED)
//LOGD("CvCapture_Android::grabFrame: get new frame"); {
//swap current and new frames //LOGD("CvCapture_Android::grabFrame: get new frame");
unsigned char* tmp = m_frameYUV420; //swap current and new frames
m_frameYUV420 = m_frameYUV420next; cv::swap(m_frameYUV420, m_frameYUV420next);
m_frameYUV420next = tmp;
//discard cached frames //discard cached frames
m_hasGray = false; m_hasGray = false;
m_hasColor = false; m_hasColor = false;
m_dataState=CVCAPTURE_ANDROID_STATE_HAS_FRAME_GRABBED; m_dataState=CVCAPTURE_ANDROID_STATE_HAS_FRAME_GRABBED;
m_framesGrabbed++; m_framesGrabbed++;
res=true; res=true;
} else { } else {
LOGE("CvCapture_Android::grabFrame: NO new frame"); LOGE("CvCapture_Android::grabFrame: NO new frame");
} }
int res_unlock=pthread_mutex_unlock(&m_nextFrameMutex); int res_unlock=pthread_mutex_unlock(&m_nextFrameMutex);
if (res_unlock) { if (res_unlock) {
LOGE("Error in CvCapture_Android::grabFrame: pthread_mutex_unlock returned %d --- probably, this object has been destroyed", res_unlock); LOGE("Error in CvCapture_Android::grabFrame: pthread_mutex_unlock returned %d --- probably, this object has been destroyed", res_unlock);
return false; return false;
} }
return res; return res;
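
Note: grabFrame() above blocks on m_nextFrameCond until the camera callback (setFrame) reports a new frame, then swaps the buffers under m_nextFrameMutex. A stripped-down sketch of that producer/consumer handshake (the names below are illustrative, not the class's members):

#include <pthread.h>

static pthread_mutex_t g_frameMutex  = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t  g_frameCond   = PTHREAD_COND_INITIALIZER;
static bool            g_hasNewFrame = false;

// Consumer side (grabFrame): sleep until a fresh frame has been published.
static void waitForNewFrame()
{
    pthread_mutex_lock(&g_frameMutex);
    while (!g_hasNewFrame)                   // loop guards against spurious wakeups
        pthread_cond_wait(&g_frameCond, &g_frameMutex);
    g_hasNewFrame = false;                   // mark the frame as grabbed
    pthread_mutex_unlock(&g_frameMutex);
}

// Producer side (setFrame): publish the frame and wake the waiting consumer.
static void publishNewFrame()
{
    pthread_mutex_lock(&g_frameMutex);
    g_hasNewFrame = true;
    pthread_cond_broadcast(&g_frameCond);
    pthread_mutex_unlock(&g_frameMutex);
}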
@ -393,7 +395,8 @@ IplImage* CvCapture_Android::retrieveFrame( int outputType )
{ {
IplImage* image = NULL; IplImage* image = NULL;
unsigned char *current_frameYUV420=m_frameYUV420; cv::Mat m_frameYUV420_ref = m_frameYUV420;
unsigned char *current_frameYUV420=m_frameYUV420_ref.ptr();
//Attention! all the operations in this function below should occupy less time than the period between two frames from camera //Attention! all the operations in this function below should occupy less time than the period between two frames from camera
if (NULL != current_frameYUV420) if (NULL != current_frameYUV420)
{ {
@ -456,19 +459,10 @@ void CvCapture_Android::setFrame(const void* buffer, int bufferSize)
prepareCacheForYUV(width, height); prepareCacheForYUV(width, height);
//copy data //copy data
memcpy(m_frameYUV420next, buffer, bufferSize); cv::Mat m_frameYUV420next_ref = m_frameYUV420next;
//LOGD("CvCapture_Android::setFrame -- memcpy is done"); memcpy(m_frameYUV420next_ref.ptr(), buffer, bufferSize);
// LOGD("CvCapture_Android::setFrame -- memcpy is done");
#if 0 //moved this part of code into grabFrame // ((HighguiAndroidCameraActivity*)m_activity)->LogFramesRate();
//swap current and new frames
unsigned char* tmp = m_frameYUV420;
m_frameYUV420 = m_frameYUV420next;
m_frameYUV420next = tmp;
//discard cached frames
m_hasGray = false;
m_hasColor = false;
#endif
m_dataState = CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED; m_dataState = CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED;
m_waitingNextFrame = false;//set flag that no more frames required at this moment m_waitingNextFrame = false;//set flag that no more frames required at this moment
@ -482,17 +476,22 @@ void CvCapture_Android::prepareCacheForYUV(int width, int height)
LOGD("CvCapture_Android::prepareCacheForYUV: Changing size of buffers: from width=%d height=%d to width=%d height=%d", m_width, m_height, width, height); LOGD("CvCapture_Android::prepareCacheForYUV: Changing size of buffers: from width=%d height=%d to width=%d height=%d", m_width, m_height, width, height);
m_width = width; m_width = width;
m_height = height; m_height = height;
/*
unsigned char *tmp = m_frameYUV420next; unsigned char *tmp = m_frameYUV420next;
m_frameYUV420next = new unsigned char [width * height * 3 / 2]; m_frameYUV420next = new unsigned char [width * height * 3 / 2];
if (tmp != NULL) { if (tmp != NULL)
delete[] tmp; {
} delete[] tmp;
}
tmp = m_frameYUV420; tmp = m_frameYUV420;
m_frameYUV420 = new unsigned char [width * height * 3 / 2]; m_frameYUV420 = new unsigned char [width * height * 3 / 2];
if (tmp != NULL) { if (tmp != NULL)
delete[] tmp; {
} delete[] tmp;
}*/
m_frameYUV420.create(height * 3 / 2, width, CV_8UC1);
m_frameYUV420next.create(height * 3 / 2, width, CV_8UC1);
} }
} }
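
Note: the thrust of the capture changes above is replacing manually new[]/delete[]-managed YUV buffers with cv::Mat, so resizing becomes Mat::create and the old pointer swap in grabFrame becomes cv::swap. A minimal sketch of that double-buffering scheme outside the capture class (struct and member names are illustrative):

#include <cstring>
#include <opencv2/core/core.hpp>

struct YuvDoubleBuffer
{
    cv::Mat current;   // frame handed out to readers (m_frameYUV420 above)
    cv::Mat next;      // frame being filled by the camera callback (m_frameYUV420next)

    // One YUV420sp frame of width x height pixels is height*3/2 rows of width bytes.
    void prepare(int width, int height)
    {
        current.create(height * 3 / 2, width, CV_8UC1);   // reallocates only on size change
        next.create(height * 3 / 2, width, CV_8UC1);
    }

    // Camera callback: copy the raw bytes into the "next" slot.
    void setFrame(const void* buffer, size_t bufferSize)
    {
        std::memcpy(next.ptr(), buffer, bufferSize);
    }

    // grabFrame: O(1) header swap instead of copying or deleting pixel data.
    void grab()
    {
        cv::swap(current, next);
    }
};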


@ -14,7 +14,7 @@ import android.view.SurfaceHolder;
import android.view.SurfaceView; import android.view.SurfaceView;
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable { public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample::SurfaceView"; private static final String TAG = "Sample-15puzzle::SurfaceView";
private SurfaceHolder mHolder; private SurfaceHolder mHolder;
private VideoCapture mCamera; private VideoCapture mCamera;
@ -29,24 +29,23 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
public boolean openCamera() { public boolean openCamera() {
Log.i(TAG, "openCamera"); Log.i(TAG, "openCamera");
synchronized (this) { synchronized (this) {
releaseCamera(); releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID); mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) { if (!mCamera.isOpened()) {
mCamera.release(); releaseCamera();
mCamera = null; Log.e(TAG, "Failed to open native camera");
Log.e(TAG, "Failed to open native camera"); return false;
return false; }
} }
}
return true; return true;
} }
public void releaseCamera() { public void releaseCamera() {
Log.i(TAG, "releaseCamera"); Log.i(TAG, "releaseCamera");
synchronized (this) { synchronized (this) {
if (mCamera != null) { if (mCamera != null) {
mCamera.release(); mCamera.release();
mCamera = null; mCamera = null;
} }
} }
} }


@ -12,109 +12,98 @@ import android.util.Log;
import android.view.Menu; import android.view.Menu;
import android.view.MenuItem; import android.view.MenuItem;
import android.view.Window; import android.view.Window;
import android.view.WindowManager;
/** Activity class implements LoaderCallbackInterface to handle OpenCV initialization status **/ /** Activity class implements LoaderCallbackInterface to handle OpenCV initialization status **/
public class puzzle15Activity extends Activity public class puzzle15Activity extends Activity
{ {
private static final String TAG = "Sample::Activity"; private static final String TAG = "Sample::Activity";
private MenuItem mItemNewGame; private MenuItem mItemNewGame;
private MenuItem mItemToggleNumbers; private MenuItem mItemToggleNumbers;
private puzzle15View mView = null; private puzzle15View mView = null;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Create and set View
mView = new puzzle15View(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
mAppContext.finish();
}
});
MarketErrorMessage.show();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public puzzle15Activity() private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
{ @Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Create and set View
mView = new puzzle15View(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
mAppContext.finish();
}
});
MarketErrorMessage.show();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public puzzle15Activity() {
Log.i(TAG, "Instantiated new " + this.getClass()); Log.i(TAG, "Instantiated new " + this.getClass());
} }
@Override @Override
protected void onPause() { protected void onPause() {
Log.i(TAG, "onPause"); Log.i(TAG, "onPause");
super.onPause(); if (null != mView)
if (null != mView) mView.releaseCamera();
mView.releaseCamera(); super.onPause();
} }
@Override
protected void onResume() {
Log.i(TAG, "onResume");
super.onResume();
if( mView!=null && !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
}
/** Called when the activity is first created. */
@Override @Override
public void onCreate(Bundle savedInstanceState) protected void onResume() {
{ Log.i(TAG, "onResume");
Log.i(TAG, "onCreate"); super.onResume();
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
Log.i(TAG, "Trying to load OpenCV library"); Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{ {
Log.e(TAG, "Cannot connect to OpenCV Manager"); Log.e(TAG, "Cannot connect to OpenCV Manager");
} }
} }
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
@Override @Override
public boolean onCreateOptionsMenu(Menu menu) { public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "onCreateOptionsMenu"); Log.i(TAG, "onCreateOptionsMenu");


@ -18,7 +18,7 @@ import android.view.View;
import android.view.View.OnTouchListener; import android.view.View.OnTouchListener;
public class puzzle15View extends SampleCvViewBase implements OnTouchListener { public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
private Mat mRgba; private Mat mRgba;
private Mat mRgba15; private Mat mRgba15;
private Mat[] mCells; private Mat[] mCells;
private Mat[] mCells15; private Mat[] mCells15;
@ -45,13 +45,13 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
} }
@Override @Override
public void surfaceCreated(SurfaceHolder holder) { public void surfaceCreated(SurfaceHolder holder) {
synchronized (this) { synchronized (this) {
// initialize Mat before usage // initialize Mat before usage
mRgba = new Mat(); mRgba = new Mat();
} }
super.surfaceCreated(holder); super.surfaceCreated(holder);
} }
public static void shuffle(int[] array) { public static void shuffle(int[] array) {
for (int i = array.length; i > 1; i--) { for (int i = array.length; i > 1; i--) {
@ -83,11 +83,11 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
return sum % 2 == 0; return sum % 2 == 0;
} }
private void createPuzzle(int cols, int rows) { private void createPuzzle(int cols, int rows, int type) {
mCells = new Mat[gridArea]; mCells = new Mat[gridArea];
mCells15 = new Mat[gridArea]; mCells15 = new Mat[gridArea];
mRgba15 = new Mat(rows, cols, mRgba.type()); mRgba15 = new Mat(rows, cols, type);
mIndexses = new int[gridArea]; mIndexses = new int[gridArea];
for (int i = 0; i < gridSize; i++) { for (int i = 0; i < gridSize; i++) {
@ -122,7 +122,11 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
cols = cols - cols%4; cols = cols - cols%4;
if (mCells == null) if (mCells == null)
createPuzzle(cols, rows); createPuzzle(cols, rows, mRgba.type());
else if(mRgba15.cols() != cols || mRgba15.rows() != rows) {
releaseMats();
createPuzzle(cols, rows, mRgba.type());
}
// copy shuffled tiles // copy shuffled tiles
for (int i = 0; i < gridArea; i++) { for (int i = 0; i < gridArea; i++) {
@ -141,10 +145,10 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
drawGrid(cols, rows); drawGrid(cols, rows);
Bitmap bmp = Bitmap.createBitmap(cols, rows, Bitmap.Config.ARGB_8888); Bitmap bmp = Bitmap.createBitmap(cols, rows, Bitmap.Config.ARGB_8888);
try { try {
Utils.matToBitmap(mRgba15, bmp); Utils.matToBitmap(mRgba15, bmp);
return bmp; return bmp;
} catch(Exception e) { } catch(Exception e) {
Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage()); Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle(); bmp.recycle();
return null; return null;
} }
@ -162,32 +166,38 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
super.run(); super.run();
synchronized (this) { synchronized (this) {
// Explicitly deallocate Mats releaseMats();
if (mCells != null) {
for (Mat m : mCells)
m.release();
}
if (mCells15 != null) {
for (Mat m : mCells15)
m.release();
}
if (mRgba != null) if (mRgba != null)
mRgba.release(); mRgba.release();
if (mRgba15 != null)
mRgba15.release();
mRgba = null; mRgba = null;
mRgba15 = null;
mCells = null;
mCells15 = null;
mIndexses = null;
} }
} }
private void releaseMats() {
// Explicitly deallocate Mats
if (mCells != null) {
for (Mat m : mCells)
m.release();
}
if (mCells15 != null) {
for (Mat m : mCells15)
m.release();
}
if (mRgba15 != null)
mRgba15.release();
mRgba15 = null;
mCells = null;
mCells15 = null;
mIndexses = null;
}
public boolean onTouch(View v, MotionEvent event) { public boolean onTouch(View v, MotionEvent event) {
if(mRgba==null) return false; if(mRgba==null) return false;
int cols = mRgba.cols(); int cols = mRgba.cols();
int rows = mRgba.rows(); int rows = mRgba.rows();
float xoffset = (getWidth() - cols) / 2; float xoffset = (getWidth() - cols) / 2;
float yoffset = (getHeight() - rows) / 2; float yoffset = (getHeight() - rows) / 2;


@ -10,101 +10,90 @@ import android.content.DialogInterface;
import android.os.Bundle; import android.os.Bundle;
import android.util.Log; import android.util.Log;
import android.view.Window; import android.view.Window;
import android.view.WindowManager;
public class ColorBlobDetectionActivity extends Activity { public class ColorBlobDetectionActivity extends Activity {
private static final String TAG = "Example/ColorBlobDetection"; private static final String TAG = "Sample-ColorBlobDetection::Activity";
private ColorBlobDetectionView mView; private ColorBlobDetectionView mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) { private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override @Override
public void onManagerConnected(int status) { public void onManagerConnected(int status) {
switch (status) { switch (status) {
case LoaderCallbackInterface.SUCCESS: case LoaderCallbackInterface.SUCCESS:
{ {
Log.i(TAG, "OpenCV loaded successfully"); Log.i(TAG, "OpenCV loaded successfully");
// Create and set View // Create and set View
mView = new ColorBlobDetectionView(mAppContext); mView = new ColorBlobDetectionView(mAppContext);
setContentView(mView); setContentView(mView);
// Check native OpenCV camera // Check native OpenCV camera
if( !mView.openCamera() ) { if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create(); AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!"); ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) { public void onClick(DialogInterface dialog, int which) {
dialog.dismiss(); dialog.dismiss();
finish(); finish();
} }
}); });
ad.show(); ad.show();
} }
} break; } break;
/** OpenCV loader cannot start Google Play **/ /** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR: case LoaderCallbackInterface.MARKET_ERROR:
{ {
Log.d(TAG, "Google Play service is not accessible!"); Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create(); AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager"); MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command."); MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) { public void onClick(DialogInterface dialog, int which) {
mAppContext.finish(); mAppContext.finish();
} }
}); });
MarketErrorMessage.show(); MarketErrorMessage.show();
} break; } break;
default: default:
{ {
super.onManagerConnected(status); super.onManagerConnected(status);
} break; } break;
} }
} }
}; };
public ColorBlobDetectionActivity() public ColorBlobDetectionActivity() {
{ Log.i(TAG, "Instantiated new " + this.getClass());
Log.i(TAG, "Instantiated new " + this.getClass()); }
}
@Override @Override
protected void onPause() { protected void onPause() {
Log.i(TAG, "onPause"); Log.i(TAG, "onPause");
super.onPause(); if (null != mView)
if (null != mView) mView.releaseCamera();
mView.releaseCamera(); super.onPause();
} }
@Override @Override
protected void onResume() { protected void onResume() {
Log.i(TAG, "onResume"); Log.i(TAG, "onResume");
super.onResume(); super.onResume();
if( (null != mView) && !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create(); Log.i(TAG, "Trying to load OpenCV library");
ad.setCancelable(false); // This blocks the 'BACK' button if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
ad.setMessage("Fatal error: can't open camera!"); {
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { Log.e(TAG, "Cannot connect to OpenCV Manager");
public void onClick(DialogInterface dialog, int which) { }
dialog.dismiss(); }
finish();
}
});
ad.show();
}
}
/** Called when the activity is first created. */ /** Called when the activity is first created. */
@Override @Override
public void onCreate(Bundle savedInstanceState) { public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate"); Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState); super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE); requestWindowFeature(Window.FEATURE_NO_TITLE);
Log.i(TAG, "Trying to load OpenCV library"); getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
} }
} }


@ -23,26 +23,25 @@ import android.view.View.OnTouchListener;
public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchListener { public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchListener {
private Mat mRgba; private Mat mRgba;
private boolean mIsColorSelected = false; private boolean mIsColorSelected = false;
private Scalar mBlobColorRgba = new Scalar(255); private Scalar mBlobColorRgba = new Scalar(255);
private Scalar mBlobColorHsv = new Scalar(255); private Scalar mBlobColorHsv = new Scalar(255);
private ColorBlobDetector mDetector = new ColorBlobDetector(); private ColorBlobDetector mDetector = new ColorBlobDetector();
private Mat mSpectrum = new Mat(); private Mat mSpectrum = new Mat();
private static Size SPECTRUM_SIZE = new Size(200, 32); private static Size SPECTRUM_SIZE = new Size(200, 32);
// Logcat tag // Logcat tag
private static final String TAG = "Example/ColorBlobDetection"; private static final String TAG = "Sample-ColorBlobDetection::View";
private static final Scalar CONTOUR_COLOR = new Scalar(255,0,0,255); private static final Scalar CONTOUR_COLOR = new Scalar(255,0,0,255);
public ColorBlobDetectionView(Context context) public ColorBlobDetectionView(Context context) {
{
super(context); super(context);
setOnTouchListener(this); setOnTouchListener(this);
} }
@Override @Override
public void surfaceCreated(SurfaceHolder holder) { public void surfaceCreated(SurfaceHolder holder) {
@ -54,8 +53,7 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
super.surfaceCreated(holder); super.surfaceCreated(holder);
} }
public boolean onTouch(View v, MotionEvent event) public boolean onTouch(View v, MotionEvent event) {
{
int cols = mRgba.cols(); int cols = mRgba.cols();
int rows = mRgba.rows(); int rows = mRgba.rows();
@ -86,36 +84,33 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
mBlobColorHsv = Core.sumElems(touchedRegionHsv); mBlobColorHsv = Core.sumElems(touchedRegionHsv);
int pointCount = touchedRect.width*touchedRect.height; int pointCount = touchedRect.width*touchedRect.height;
for (int i = 0; i < mBlobColorHsv.val.length; i++) for (int i = 0; i < mBlobColorHsv.val.length; i++)
{ mBlobColorHsv.val[i] /= pointCount;
mBlobColorHsv.val[i] /= pointCount;
}
mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv); mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv);
Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] + Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")"); ", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");
mDetector.setHsvColor(mBlobColorHsv); mDetector.setHsvColor(mBlobColorHsv);
Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE); Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE);
mIsColorSelected = true; mIsColorSelected = true;
return false; // don't need subsequent touch events return false; // don't need subsequent touch events
} }
@Override @Override
protected Bitmap processFrame(VideoCapture capture) { protected Bitmap processFrame(VideoCapture capture) {
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA); capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888); Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
if (mIsColorSelected) if (mIsColorSelected) {
{ mDetector.process(mRgba);
mDetector.process(mRgba); List<MatOfPoint> contours = mDetector.getContours();
List<MatOfPoint> contours = mDetector.getContours();
Log.e(TAG, "Contours count: " + contours.size()); Log.e(TAG, "Contours count: " + contours.size());
Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR); Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
Mat colorLabel = mRgba.submat(2, 34, 2, 34); Mat colorLabel = mRgba.submat(2, 34, 2, 34);
colorLabel.setTo(mBlobColorRgba); colorLabel.setTo(mBlobColorRgba);
@ -125,24 +120,23 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
} }
try { try {
Utils.matToBitmap(mRgba, bmp); Utils.matToBitmap(mRgba, bmp);
} catch(Exception e) { } catch(Exception e) {
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage()); Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle(); bmp.recycle();
bmp = null; bmp = null;
} }
return bmp; return bmp;
} }
private Scalar converScalarHsv2Rgba(Scalar hsvColor) private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
{
Mat pointMatRgba = new Mat(); Mat pointMatRgba = new Mat();
Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor); Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4); Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
return new Scalar(pointMatRgba.get(0, 0)); return new Scalar(pointMatRgba.get(0, 0));
} }
@Override @Override
public void run() { public void run() {


@ -11,66 +11,58 @@ import org.opencv.core.MatOfPoint;
import org.opencv.core.Scalar; import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc; import org.opencv.imgproc.Imgproc;
public class ColorBlobDetector public class ColorBlobDetector {
{ public void setColorRadius(Scalar radius) {
public void setColorRadius(Scalar radius) mColorRadius = radius;
{ }
mColorRadius = radius;
}
public void setHsvColor(Scalar hsvColor) public void setHsvColor(Scalar hsvColor) {
{ double minH = (hsvColor.val[0] >= mColorRadius.val[0]) ? hsvColor.val[0]-mColorRadius.val[0] : 0;
double minH = (hsvColor.val[0] >= mColorRadius.val[0]) ? hsvColor.val[0]-mColorRadius.val[0] : 0; double maxH = (hsvColor.val[0]+mColorRadius.val[0] <= 255) ? hsvColor.val[0]+mColorRadius.val[0] : 255;
double maxH = (hsvColor.val[0]+mColorRadius.val[0] <= 255) ? hsvColor.val[0]+mColorRadius.val[0] : 255;
mLowerBound.val[0] = minH; mLowerBound.val[0] = minH;
mUpperBound.val[0] = maxH; mUpperBound.val[0] = maxH;
mLowerBound.val[1] = hsvColor.val[1] - mColorRadius.val[1]; mLowerBound.val[1] = hsvColor.val[1] - mColorRadius.val[1];
mUpperBound.val[1] = hsvColor.val[1] + mColorRadius.val[1]; mUpperBound.val[1] = hsvColor.val[1] + mColorRadius.val[1];
mLowerBound.val[2] = hsvColor.val[2] - mColorRadius.val[2]; mLowerBound.val[2] = hsvColor.val[2] - mColorRadius.val[2];
mUpperBound.val[2] = hsvColor.val[2] + mColorRadius.val[2]; mUpperBound.val[2] = hsvColor.val[2] + mColorRadius.val[2];
mLowerBound.val[3] = 0; mLowerBound.val[3] = 0;
mUpperBound.val[3] = 255; mUpperBound.val[3] = 255;
Mat spectrumHsv = new Mat(1, (int)(maxH-minH), CvType.CV_8UC3); Mat spectrumHsv = new Mat(1, (int)(maxH-minH), CvType.CV_8UC3);
for (int j = 0; j < maxH-minH; j++) for (int j = 0; j < maxH-minH; j++) {
{ byte[] tmp = {(byte)(minH+j), (byte)255, (byte)255};
byte[] tmp = {(byte)(minH+j), (byte)255, (byte)255}; spectrumHsv.put(0, j, tmp);
spectrumHsv.put(0, j, tmp); }
}
Imgproc.cvtColor(spectrumHsv, mSpectrum, Imgproc.COLOR_HSV2RGB_FULL, 4); Imgproc.cvtColor(spectrumHsv, mSpectrum, Imgproc.COLOR_HSV2RGB_FULL, 4);
}
} public Mat getSpectrum() {
return mSpectrum;
}
public Mat getSpectrum() public void setMinContourArea(double area) {
{ mMinContourArea = area;
return mSpectrum; }
}
public void setMinContourArea(double area) public void process(Mat rgbaImage) {
{ Mat pyrDownMat = new Mat();
mMinContourArea = area;
}
public void process(Mat rgbaImage) Imgproc.pyrDown(rgbaImage, pyrDownMat);
{ Imgproc.pyrDown(pyrDownMat, pyrDownMat);
Mat pyrDownMat = new Mat();
Imgproc.pyrDown(rgbaImage, pyrDownMat); Mat hsvMat = new Mat();
Imgproc.pyrDown(pyrDownMat, pyrDownMat); Imgproc.cvtColor(pyrDownMat, hsvMat, Imgproc.COLOR_RGB2HSV_FULL);
Mat hsvMat = new Mat(); Mat Mask = new Mat();
Imgproc.cvtColor(pyrDownMat, hsvMat, Imgproc.COLOR_RGB2HSV_FULL); Core.inRange(hsvMat, mLowerBound, mUpperBound, Mask);
Mat dilatedMask = new Mat();
Mat Mask = new Mat(); Imgproc.dilate(Mask, dilatedMask, new Mat());
Core.inRange(hsvMat, mLowerBound, mUpperBound, Mask);
Mat dilatedMask = new Mat();
Imgproc.dilate(Mask, dilatedMask, new Mat());
List<MatOfPoint> contours = new ArrayList<MatOfPoint>(); List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
Mat hierarchy = new Mat(); Mat hierarchy = new Mat();
@ -80,40 +72,36 @@ public class ColorBlobDetector
// Find max contour area // Find max contour area
double maxArea = 0; double maxArea = 0;
Iterator<MatOfPoint> each = contours.iterator(); Iterator<MatOfPoint> each = contours.iterator();
while (each.hasNext()) while (each.hasNext()) {
{ MatOfPoint wrapper = each.next();
MatOfPoint wrapper = each.next(); double area = Imgproc.contourArea(wrapper);
double area = Imgproc.contourArea(wrapper); if (area > maxArea)
if (area > maxArea) maxArea = area;
maxArea = area;
} }
// Filter contours by area and resize to fit the original image size // Filter contours by area and resize to fit the original image size
mContours.clear(); mContours.clear();
each = contours.iterator(); each = contours.iterator();
while (each.hasNext()) while (each.hasNext()) {
{ MatOfPoint contour = each.next();
MatOfPoint contour = each.next(); if (Imgproc.contourArea(contour) > mMinContourArea*maxArea) {
if (Imgproc.contourArea(contour) > mMinContourArea*maxArea) Core.multiply(contour, new Scalar(4,4), contour);
{ mContours.add(contour);
Core.multiply(contour, new Scalar(4,4), contour); }
mContours.add(contour);
}
} }
} }
public List<MatOfPoint> getContours() public List<MatOfPoint> getContours() {
{ return mContours;
return mContours; }
}
// Lower and Upper bounds for range checking in HSV color space // Lower and Upper bounds for range checking in HSV color space
private Scalar mLowerBound = new Scalar(0); private Scalar mLowerBound = new Scalar(0);
private Scalar mUpperBound = new Scalar(0); private Scalar mUpperBound = new Scalar(0);
// Minimum contour area in percent for contours filtering // Minimum contour area in percent for contours filtering
private static double mMinContourArea = 0.1; private static double mMinContourArea = 0.1;
// Color radius for range checking in HSV color space // Color radius for range checking in HSV color space
private Scalar mColorRadius = new Scalar(25,50,50,0); private Scalar mColorRadius = new Scalar(25,50,50,0);
private Mat mSpectrum = new Mat(); private Mat mSpectrum = new Mat();
private List<MatOfPoint> mContours = new ArrayList<MatOfPoint>(); private List<MatOfPoint> mContours = new ArrayList<MatOfPoint>();
} }


@ -29,30 +29,29 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
public boolean openCamera() { public boolean openCamera() {
Log.i(TAG, "openCamera"); Log.i(TAG, "openCamera");
synchronized (this) { synchronized (this) {
releaseCamera(); releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID); mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) { if (!mCamera.isOpened()) {
mCamera.release(); releaseCamera();
mCamera = null; Log.e(TAG, "Failed to open native camera");
Log.e(TAG, "Failed to open native camera"); return false;
return false; }
} }
}
return true; return true;
} }
public void releaseCamera() { public void releaseCamera() {
Log.i(TAG, "releaseCamera"); Log.i(TAG, "releaseCamera");
synchronized (this) { synchronized (this) {
if (mCamera != null) { if (mCamera != null) {
mCamera.release(); mCamera.release();
mCamera = null; mCamera = null;
} }
} }
} }
public void setupCamera(int width, int height) { public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera("+width+", "+height+")"); Log.i(TAG, "setupCamera("+width+", "+height+")");
synchronized (this) { synchronized (this) {
if (mCamera != null && mCamera.isOpened()) { if (mCamera != null && mCamera.isOpened()) {
List<Size> sizes = mCamera.getSupportedPreviewSizes(); List<Size> sizes = mCamera.getSupportedPreviewSizes();
@ -115,8 +114,8 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
if (bmp != null) { if (bmp != null) {
Canvas canvas = mHolder.lockCanvas(); Canvas canvas = mHolder.lockCanvas();
if (canvas != null) { if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR); canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2, (canvas.getHeight() - bmp.getHeight()) / 2, null); canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2, (canvas.getHeight() - bmp.getHeight()) / 2, null);
mHolder.unlockCanvasAndPost(canvas); mHolder.unlockCanvasAndPost(canvas);
} }
bmp.recycle(); bmp.recycle();


@ -9,7 +9,7 @@ include ../../sdk/native/jni/OpenCV.mk
LOCAL_SRC_FILES := DetectionBasedTracker_jni.cpp LOCAL_SRC_FILES := DetectionBasedTracker_jni.cpp
LOCAL_C_INCLUDES += $(LOCAL_PATH) LOCAL_C_INCLUDES += $(LOCAL_PATH)
LOCAL_LDLIBS += -llog -ldl LOCAL_LDLIBS += -llog -ldl
LOCAL_MODULE := detection_based_tracker LOCAL_MODULE := detection_based_tracker


@ -21,159 +21,173 @@ inline void vector_Rect_to_Mat(vector<Rect>& v_rect, Mat& mat)
JNIEXPORT jlong JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeCreateObject JNIEXPORT jlong JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeCreateObject
(JNIEnv * jenv, jclass, jstring jFileName, jint faceSize) (JNIEnv * jenv, jclass, jstring jFileName, jint faceSize)
{ {
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeCreateObject enter");
const char* jnamestr = jenv->GetStringUTFChars(jFileName, NULL); const char* jnamestr = jenv->GetStringUTFChars(jFileName, NULL);
string stdFileName(jnamestr); string stdFileName(jnamestr);
jlong result = 0; jlong result = 0;
try try
{ {
DetectionBasedTracker::Parameters DetectorParams; DetectionBasedTracker::Parameters DetectorParams;
if (faceSize > 0) if (faceSize > 0)
DetectorParams.minObjectSize = faceSize; DetectorParams.minObjectSize = faceSize;
result = (jlong)new DetectionBasedTracker(stdFileName, DetectorParams); result = (jlong)new DetectionBasedTracker(stdFileName, DetectorParams);
} }
catch(cv::Exception e) catch(cv::Exception e)
{ {
LOGD("nativeCreateObject catched cv::Exception: %s", e.what()); LOGD("nativeCreateObject catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException"); jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je) if(!je)
je = jenv->FindClass("java/lang/Exception"); je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what()); jenv->ThrowNew(je, e.what());
} }
catch (...) catch (...)
{ {
LOGD("nativeCreateObject catched unknown exception"); LOGD("nativeCreateObject catched unknown exception");
jclass je = jenv->FindClass("java/lang/Exception"); jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}"); jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
return 0; return 0;
} }
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeCreateObject exit");
return result; return result;
} }
JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDestroyObject JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDestroyObject
(JNIEnv * jenv, jclass, jlong thiz) (JNIEnv * jenv, jclass, jlong thiz)
{ {
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDestroyObject enter");
try try
{ {
((DetectionBasedTracker*)thiz)->stop(); if(thiz != 0)
delete (DetectionBasedTracker*)thiz; {
((DetectionBasedTracker*)thiz)->stop();
delete (DetectionBasedTracker*)thiz;
}
} }
catch(cv::Exception e) catch(cv::Exception e)
{ {
LOGD("nativeestroyObject catched cv::Exception: %s", e.what()); LOGD("nativeestroyObject catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException"); jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je) if(!je)
je = jenv->FindClass("java/lang/Exception"); je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what()); jenv->ThrowNew(je, e.what());
} }
catch (...) catch (...)
{ {
LOGD("nativeDestroyObject catched unknown exception"); LOGD("nativeDestroyObject catched unknown exception");
jclass je = jenv->FindClass("java/lang/Exception"); jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}"); jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
} }
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDestroyObject exit");
} }
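The new `if(thiz != 0)` guard makes destruction tolerant of a wrapper whose handle has already been cleared, so a double release from Java is now a no-op rather than a crash. A small illustrative sketch (shutdownTracker is a hypothetical helper):

    void shutdownTracker(DetectionBasedTracker tracker) {
        tracker.release();   // frees the native object; the wrapper then zeroes mNativeObj
        tracker.release();   // second call reaches JNI with thiz == 0 and is skipped by the new guard
    }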
JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStart JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStart
(JNIEnv * jenv, jclass, jlong thiz) (JNIEnv * jenv, jclass, jlong thiz)
{ {
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStart enter");
try try
{ {
((DetectionBasedTracker*)thiz)->run(); ((DetectionBasedTracker*)thiz)->run();
} }
catch(cv::Exception e) catch(cv::Exception e)
{ {
LOGD("nativeStart catched cv::Exception: %s", e.what()); LOGD("nativeStart catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException"); jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je) if(!je)
je = jenv->FindClass("java/lang/Exception"); je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what()); jenv->ThrowNew(je, e.what());
} }
catch (...) catch (...)
{ {
LOGD("nativeStart catched unknown exception"); LOGD("nativeStart catched unknown exception");
jclass je = jenv->FindClass("java/lang/Exception"); jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}"); jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
} }
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStart exit");
} }
JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStop JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStop
(JNIEnv * jenv, jclass, jlong thiz) (JNIEnv * jenv, jclass, jlong thiz)
{ {
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStop enter");
try try
{ {
((DetectionBasedTracker*)thiz)->stop(); ((DetectionBasedTracker*)thiz)->stop();
} }
catch(cv::Exception e) catch(cv::Exception e)
{ {
LOGD("nativeStop catched cv::Exception: %s", e.what()); LOGD("nativeStop catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException"); jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je) if(!je)
je = jenv->FindClass("java/lang/Exception"); je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what()); jenv->ThrowNew(je, e.what());
} }
catch (...) catch (...)
{ {
LOGD("nativeStop catched unknown exception"); LOGD("nativeStop catched unknown exception");
jclass je = jenv->FindClass("java/lang/Exception"); jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}"); jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
} }
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStop exit");
} }
JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSetFaceSize JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSetFaceSize
(JNIEnv * jenv, jclass, jlong thiz, jint faceSize) (JNIEnv * jenv, jclass, jlong thiz, jint faceSize)
{ {
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSetFaceSize enter");
try try
{ {
if (faceSize > 0) if (faceSize > 0)
{ {
DetectionBasedTracker::Parameters DetectorParams = \ DetectionBasedTracker::Parameters DetectorParams = \
((DetectionBasedTracker*)thiz)->getParameters(); ((DetectionBasedTracker*)thiz)->getParameters();
DetectorParams.minObjectSize = faceSize; DetectorParams.minObjectSize = faceSize;
((DetectionBasedTracker*)thiz)->setParameters(DetectorParams); ((DetectionBasedTracker*)thiz)->setParameters(DetectorParams);
} }
} }
catch(cv::Exception e) catch(cv::Exception e)
{ {
LOGD("nativeStop catched cv::Exception: %s", e.what()); LOGD("nativeStop catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException"); jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je) if(!je)
je = jenv->FindClass("java/lang/Exception"); je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what()); jenv->ThrowNew(je, e.what());
} }
catch (...) catch (...)
{ {
LOGD("nativeSetFaceSize catched unknown exception"); LOGD("nativeSetFaceSize catched unknown exception");
jclass je = jenv->FindClass("java/lang/Exception"); jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}"); jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
} }
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSetFaceSize exit");
} }
JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDetect JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDetect
(JNIEnv * jenv, jclass, jlong thiz, jlong imageGray, jlong faces) (JNIEnv * jenv, jclass, jlong thiz, jlong imageGray, jlong faces)
{ {
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDetect enter");
try try
{ {
vector<Rect> RectFaces; vector<Rect> RectFaces;
((DetectionBasedTracker*)thiz)->process(*((Mat*)imageGray)); ((DetectionBasedTracker*)thiz)->process(*((Mat*)imageGray));
((DetectionBasedTracker*)thiz)->getObjects(RectFaces); ((DetectionBasedTracker*)thiz)->getObjects(RectFaces);
vector_Rect_to_Mat(RectFaces, *((Mat*)faces)); vector_Rect_to_Mat(RectFaces, *((Mat*)faces));
} }
catch(cv::Exception e) catch(cv::Exception e)
{ {
LOGD("nativeCreateObject catched cv::Exception: %s", e.what()); LOGD("nativeCreateObject catched cv::Exception: %s", e.what());
jclass je = jenv->FindClass("org/opencv/core/CvException"); jclass je = jenv->FindClass("org/opencv/core/CvException");
if(!je) if(!je)
je = jenv->FindClass("java/lang/Exception"); je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, e.what()); jenv->ThrowNew(je, e.what());
} }
catch (...) catch (...)
{ {
LOGD("nativeDetect catched unknown exception"); LOGD("nativeDetect catched unknown exception");
jclass je = jenv->FindClass("java/lang/Exception"); jclass je = jenv->FindClass("java/lang/Exception");
jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}"); jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
} }
LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDetect exit");
} }
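The detect path hands raw Mat addresses across JNI: the Java wrapper passes getNativeObjAddr() for both the gray frame and the output MatOfRect, and the native side fills the latter through vector_Rect_to_Mat. A hedged Java-side sketch of one detection call (org.opencv.core imports assumed; detectFaces is an illustrative helper):

    private Rect[] detectFaces(DetectionBasedTracker tracker, Mat gray) {
        MatOfRect faces = new MatOfRect();
        tracker.detect(gray, faces);   // native side: process(gray) + getObjects(), written into 'faces'
        return faces.toArray();        // each Rect is one tracked face in frame coordinates
    }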


@ -5,48 +5,41 @@ import org.opencv.core.MatOfRect;
public class DetectionBasedTracker public class DetectionBasedTracker
{ {
public DetectionBasedTracker(String cascadeName, int minFaceSize) public DetectionBasedTracker(String cascadeName, int minFaceSize) {
{ mNativeObj = nativeCreateObject(cascadeName, minFaceSize);
mNativeObj = nativeCreateObject(cascadeName, minFaceSize); }
}
public void start() public void start() {
{ nativeStart(mNativeObj);
nativeStart(mNativeObj); }
}
public void stop() public void stop() {
{ nativeStop(mNativeObj);
nativeStop(mNativeObj); }
}
public void setMinFaceSize(int size) public void setMinFaceSize(int size) {
{ nativeSetFaceSize(mNativeObj, size);
nativeSetFaceSize(mNativeObj, size); }
}
public void detect(Mat imageGray, MatOfRect faces) public void detect(Mat imageGray, MatOfRect faces) {
{ nativeDetect(mNativeObj, imageGray.getNativeObjAddr(), faces.getNativeObjAddr());
nativeDetect(mNativeObj, imageGray.getNativeObjAddr(), faces.getNativeObjAddr()); }
}
public void release() public void release() {
{ nativeDestroyObject(mNativeObj);
nativeDestroyObject(mNativeObj); mNativeObj = 0;
mNativeObj = 0; }
}
private long mNativeObj = 0; private long mNativeObj = 0;
private static native long nativeCreateObject(String cascadeName, int minFaceSize); private static native long nativeCreateObject(String cascadeName, int minFaceSize);
private static native void nativeDestroyObject(long thiz); private static native void nativeDestroyObject(long thiz);
private static native void nativeStart(long thiz); private static native void nativeStart(long thiz);
private static native void nativeStop(long thiz); private static native void nativeStop(long thiz);
private static native void nativeSetFaceSize(long thiz, int size); private static native void nativeSetFaceSize(long thiz, int size);
private static native void nativeDetect(long thiz, long inputImage, long faces); private static native void nativeDetect(long thiz, long inputImage, long faces);
static static {
{ System.loadLibrary("detection_based_tracker");
System.loadLibrary("detection_based_tracker"); }
}
} }
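Taken together, the wrapper's expected lifecycle is: load the native library, construct with a cascade path, start() the background detection thread, feed frames through detect(), then stop() and release(). A condensed, hedged sketch of that lifecycle (the class and method below are illustrative; only the library name and wrapper API come from this diff):

    final class TrackerLifecycleSketch {
        static { System.loadLibrary("detection_based_tracker"); } // FdActivity does this after OpenCV init

        void runOnce(String cascadePath, org.opencv.core.Mat grayFrame) {
            DetectionBasedTracker tracker = new DetectionBasedTracker(cascadePath, 0);
            tracker.start();                                  // spin up the native tracking thread
            org.opencv.core.MatOfRect faces = new org.opencv.core.MatOfRect();
            tracker.detect(grayFrame, faces);                 // normally called once per camera frame
            tracker.stop();                                   // stop the thread before freeing
            tracker.release();                                // deletes the native object, handle -> 0
        }
    }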


@ -12,74 +12,72 @@ import android.util.Log;
import android.view.Menu; import android.view.Menu;
import android.view.MenuItem; import android.view.MenuItem;
import android.view.Window; import android.view.Window;
import android.view.WindowManager;
public class FdActivity extends Activity { public class FdActivity extends Activity {
private static final String TAG = "Sample::Activity"; private static final String TAG = "Sample-FD::Activity";
private MenuItem mItemFace50; private MenuItem mItemFace50;
private MenuItem mItemFace40; private MenuItem mItemFace40;
private MenuItem mItemFace30; private MenuItem mItemFace30;
private MenuItem mItemFace20; private MenuItem mItemFace20;
private MenuItem mItemType; private MenuItem mItemType;
private FdView mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Load native libs after OpenCV initialization
System.loadLibrary("detection_based_tracker");
// Create and set View
mView = new FdView(mAppContext);
mView.setDetectorType(mDetectorType);
mView.setMinFaceSize(0.2f);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
mAppContext.finish();
}
});
MarketErrorMessage.show();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
private int mDetectorType = 0; private int mDetectorType = 0;
private String[] mDetectorName; private String[] mDetectorName;
private FdView mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Load native libs after OpenCV initialization
System.loadLibrary("detection_based_tracker");
// Create and set View
mView = new FdView(mAppContext);
mView.setDetectorType(mDetectorType);
mView.setMinFaceSize(0.2f);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
mAppContext.finish();
}
});
MarketErrorMessage.show();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public FdActivity() { public FdActivity() {
Log.i(TAG, "Instantiated new " + this.getClass()); Log.i(TAG, "Instantiated new " + this.getClass());
@ -88,31 +86,25 @@ public class FdActivity extends Activity {
mDetectorName[FdView.NATIVE_DETECTOR] = "Native (tracking)"; mDetectorName[FdView.NATIVE_DETECTOR] = "Native (tracking)";
} }
@Override @Override
protected void onPause() { protected void onPause() {
Log.i(TAG, "onPause"); Log.i(TAG, "onPause");
super.onPause(); if (mView != null)
if (mView != null) mView.releaseCamera();
mView.releaseCamera(); super.onPause();
} }
@Override @Override
protected void onResume() { protected void onResume() {
Log.i(TAG, "onResume"); Log.i(TAG, "onResume");
super.onResume(); super.onResume();
if( mView != null && !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create(); Log.i(TAG, "Trying to load OpenCV library");
ad.setCancelable(false); // This blocks the 'BACK' button if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
ad.setMessage("Fatal error: can't open camera!"); {
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { Log.e(TAG, "Cannot connect to OpenCV Manager");
public void onClick(DialogInterface dialog, int which) { }
dialog.dismiss(); }
finish();
}
});
ad.show();
}
}
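The behavioural change in this hunk: onResume() no longer opens the camera itself; it only kicks off OpenCVLoader.initAsync(), and the view/camera setup moves into the BaseLoaderCallback above once the manager reports SUCCESS, which is also why onPause() must tolerate a null mView. A condensed skeleton of that flow (sketch only, error dialogs omitted):

    private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
        @Override public void onManagerConnected(int status) {
            if (status == LoaderCallbackInterface.SUCCESS) {
                System.loadLibrary("detection_based_tracker"); // native libs only after OpenCV is ready
                mView = new FdView(mAppContext);               // the view exists only from this point on
                setContentView(mView);
                mView.openCamera();                            // failure handling omitted in this sketch
            } else {
                super.onManagerConnected(status);
            }
        }
    };

    @Override protected void onResume() {
        super.onResume();
        if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
            Log.e(TAG, "Cannot connect to OpenCV Manager");
    }

    @Override protected void onPause() {
        if (mView != null)            // still null if the manager never connected
            mView.releaseCamera();
        super.onPause();
    }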
/** Called when the activity is first created. */ /** Called when the activity is first created. */
@Override @Override
@ -120,12 +112,7 @@ public class FdActivity extends Activity {
Log.i(TAG, "onCreate"); Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState); super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE); requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
} }
@Override @Override
@ -136,7 +123,6 @@ public class FdActivity extends Activity {
mItemFace30 = menu.add("Face size 30%"); mItemFace30 = menu.add("Face size 30%");
mItemFace20 = menu.add("Face size 20%"); mItemFace20 = menu.add("Face size 20%");
mItemType = menu.add(mDetectorName[mDetectorType]); mItemType = menu.add(mDetectorName[mDetectorType]);
return true; return true;
} }
@ -146,16 +132,16 @@ public class FdActivity extends Activity {
if (item == mItemFace50) if (item == mItemFace50)
mView.setMinFaceSize(0.5f); mView.setMinFaceSize(0.5f);
else if (item == mItemFace40) else if (item == mItemFace40)
mView.setMinFaceSize(0.4f); mView.setMinFaceSize(0.4f);
else if (item == mItemFace30) else if (item == mItemFace30)
mView.setMinFaceSize(0.3f); mView.setMinFaceSize(0.3f);
else if (item == mItemFace20) else if (item == mItemFace20)
mView.setMinFaceSize(0.2f); mView.setMinFaceSize(0.2f);
else if (item == mItemType) else if (item == mItemType)
{ {
mDetectorType = (mDetectorType + 1) % mDetectorName.length; mDetectorType = (mDetectorType + 1) % mDetectorName.length;
item.setTitle(mDetectorName[mDetectorType]); item.setTitle(mDetectorName[mDetectorType]);
mView.setDetectorType(mDetectorType); mView.setDetectorType(mDetectorType);
} }
return true; return true;
} }


@ -22,7 +22,7 @@ import android.util.Log;
import android.view.SurfaceHolder; import android.view.SurfaceHolder;
class FdView extends SampleCvViewBase { class FdView extends SampleCvViewBase {
private static final String TAG = "Sample::FdView"; private static final String TAG = "Sample-FD::View";
private Mat mRgba; private Mat mRgba;
private Mat mGray; private Mat mGray;
private File mCascadeFile; private File mCascadeFile;
@ -37,31 +37,25 @@ class FdView extends SampleCvViewBase {
private int mDetectorType = JAVA_DETECTOR; private int mDetectorType = JAVA_DETECTOR;
private float mRelativeFaceSize = 0; private float mRelativeFaceSize = 0;
private int mAbsoluteFaceSize = 0; private int mAbsoluteFaceSize = 0;
public void setMinFaceSize(float faceSize) public void setMinFaceSize(float faceSize) {
{ mRelativeFaceSize = faceSize;
mRelativeFaceSize = faceSize; mAbsoluteFaceSize = 0;
mAbsoluteFaceSize = 0;
} }
public void setDetectorType(int type) public void setDetectorType(int type) {
{ if (mDetectorType != type) {
if (mDetectorType != type) mDetectorType = type;
{
mDetectorType = type;
if (type == NATIVE_DETECTOR) if (type == NATIVE_DETECTOR) {
{ Log.i(TAG, "Detection Based Tracker enabled");
Log.i(TAG, "Detection Based Tracker enabled"); mNativeDetector.start();
mNativeDetector.start(); } else {
} Log.i(TAG, "Cascade detector enabled");
else mNativeDetector.stop();
{ }
Log.i(TAG, "Cascade detector enabled"); }
mNativeDetector.stop();
}
}
} }
public FdView(Context context) { public FdView(Context context) {
@ -99,7 +93,7 @@ class FdView extends SampleCvViewBase {
} }
@Override @Override
public void surfaceCreated(SurfaceHolder holder) { public void surfaceCreated(SurfaceHolder holder) {
synchronized (this) { synchronized (this) {
// initialize Mats before usage // initialize Mats before usage
mGray = new Mat(); mGray = new Mat();
@ -107,39 +101,34 @@ class FdView extends SampleCvViewBase {
} }
super.surfaceCreated(holder); super.surfaceCreated(holder);
} }
@Override @Override
protected Bitmap processFrame(VideoCapture capture) { protected Bitmap processFrame(VideoCapture capture) {
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA); capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME); capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
if (mAbsoluteFaceSize == 0) if (mAbsoluteFaceSize == 0) {
{ int height = mGray.rows();
int height = mGray.rows(); if (Math.round(height * mRelativeFaceSize) > 0) {
if (Math.round(height * mRelativeFaceSize) > 0); mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
{ }
mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize); mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
}
mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
} }
MatOfRect faces = new MatOfRect(); MatOfRect faces = new MatOfRect();
if (mDetectorType == JAVA_DETECTOR) if (mDetectorType == JAVA_DETECTOR) {
{ if (mJavaDetector != null)
if (mJavaDetector != null) mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2 // TODO: objdetect.CV_HAAR_SCALE_IMAGE new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
, new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
} }
else if (mDetectorType == NATIVE_DETECTOR) else if (mDetectorType == NATIVE_DETECTOR) {
{ if (mNativeDetector != null)
if (mNativeDetector != null) mNativeDetector.detect(mGray, faces);
mNativeDetector.detect(mGray, faces);
} }
else else {
{ Log.e(TAG, "Detection method is not selected!");
Log.e(TAG, "Detection method is not selected!");
} }
Rect[] facesArray = faces.toArray(); Rect[] facesArray = faces.toArray();
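Two things happen in the hunk above: the fractional value from setMinFaceSize() is converted into pixels once the first frame's height is known (and pushed into the native detector), and the gray frame is then dispatched to either the Java CascadeClassifier or the native DetectionBasedTracker. The conversion on its own, as a hedged sketch; with a 480-row frame and the default 0.2f, mAbsoluteFaceSize works out to 96 px:

    // mRelativeFaceSize is a fraction of the frame height (0.2f..0.5f from the menu);
    // mAbsoluteFaceSize is the pixel value both detectors actually consume.
    if (mAbsoluteFaceSize == 0) {
        int height = mGray.rows();                                       // e.g. 480
        if (Math.round(height * mRelativeFaceSize) > 0)
            mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);  // 480 * 0.2f -> 96
        mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);               // keep the native tracker in sync
    }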
@ -149,9 +138,9 @@ class FdView extends SampleCvViewBase {
Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888); Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
try { try {
Utils.matToBitmap(mRgba, bmp); Utils.matToBitmap(mRgba, bmp);
} catch(Exception e) { } catch(Exception e) {
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage()); Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle(); bmp.recycle();
bmp = null; bmp = null;
} }
@ -170,9 +159,9 @@ class FdView extends SampleCvViewBase {
if (mGray != null) if (mGray != null)
mGray.release(); mGray.release();
if (mCascadeFile != null) if (mCascadeFile != null)
mCascadeFile.delete(); mCascadeFile.delete();
if (mNativeDetector != null) if (mNativeDetector != null)
mNativeDetector.release(); mNativeDetector.release();
mRgba = null; mRgba = null;
mGray = null; mGray = null;


@ -31,24 +31,23 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
public boolean openCamera() { public boolean openCamera() {
Log.i(TAG, "openCamera"); Log.i(TAG, "openCamera");
synchronized (this) { synchronized (this) {
releaseCamera(); releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID); mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) { if (!mCamera.isOpened()) {
mCamera.release(); Log.e(TAG, "Failed to open native camera");
mCamera = null; releaseCamera();
Log.e(TAG, "Failed to open native camera"); return false;
return false; }
} }
}
return true; return true;
} }
public void releaseCamera() { public void releaseCamera() {
Log.i(TAG, "releaseCamera"); Log.i(TAG, "releaseCamera");
synchronized (this) { synchronized (this) {
if (mCamera != null) { if (mCamera != null) {
mCamera.release(); mCamera.release();
mCamera = null; mCamera = null;
} }
} }
} }
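With openCamera() now delegating cleanup to releaseCamera(), there is a single teardown path whether the open fails or the surface is destroyed, and callers only have to react to the boolean result. A hedged caller sketch (showFatalCameraDialog is a hypothetical helper standing in for the AlertDialog code used in the activities):

    if (!mView.openCamera()) {
        // openCamera() has already released the half-opened VideoCapture;
        // the activity only needs to inform the user and finish.
        showFatalCameraDialog();
    }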
@ -93,6 +92,7 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
public void surfaceDestroyed(SurfaceHolder holder) { public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed"); Log.i(TAG, "surfaceDestroyed");
releaseCamera(); releaseCamera();
Log.i(TAG, "surfaceDestroyed2");
} }
protected abstract Bitmap processFrame(VideoCapture capture); protected abstract Bitmap processFrame(VideoCapture capture);


@ -12,10 +12,11 @@ import android.util.Log;
import android.view.Menu; import android.view.Menu;
import android.view.MenuItem; import android.view.MenuItem;
import android.view.Window; import android.view.Window;
import android.view.WindowManager;
public class ImageManipulationsActivity extends Activity { public class ImageManipulationsActivity extends Activity {
private static final String TAG = "Sample-ImageManipulations::Activity"; private static final String TAG = "Sample-ImageManipulations::Activity";
public static final int VIEW_MODE_RGBA = 0; public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_HIST = 1; public static final int VIEW_MODE_HIST = 1;
@ -40,81 +41,75 @@ public class ImageManipulationsActivity extends Activity {
private ImageManipulationsView mView; private ImageManipulationsView mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) { private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override @Override
public void onManagerConnected(int status) { public void onManagerConnected(int status) {
switch (status) { switch (status) {
case LoaderCallbackInterface.SUCCESS: case LoaderCallbackInterface.SUCCESS:
{ {
Log.i(TAG, "OpenCV loaded successfully"); Log.i(TAG, "OpenCV loaded successfully");
// Create and set View // Create and set View
mView = new ImageManipulationsView(mAppContext); mView = new ImageManipulationsView(mAppContext);
setContentView(mView); setContentView(mView);
// Check native OpenCV camera // Check native OpenCV camera
if( !mView.openCamera() ) { if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create(); AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!"); ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) { public void onClick(DialogInterface dialog, int which) {
dialog.dismiss(); dialog.dismiss();
finish(); finish();
} }
}); });
ad.show(); ad.show();
} }
} break; } break;
/** OpenCV loader cannot start Google Play **/ /** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR: case LoaderCallbackInterface.MARKET_ERROR:
{ {
Log.d(TAG, "Google Play service is not accessible!"); Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create(); AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager"); MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command."); MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) { public void onClick(DialogInterface dialog, int which) {
mAppContext.finish(); mAppContext.finish();
} }
}); });
MarketErrorMessage.show(); MarketErrorMessage.show();
} break; } break;
default: default:
{ {
super.onManagerConnected(status); super.onManagerConnected(status);
} break; } break;
} }
} }
}; };
public ImageManipulationsActivity() { public ImageManipulationsActivity() {
Log.i(TAG, "Instantiated new " + this.getClass()); Log.i(TAG, "Instantiated new " + this.getClass());
} }
@Override @Override
protected void onPause() { protected void onPause() {
Log.i(TAG, "onPause"); Log.i(TAG, "onPause");
super.onPause(); if (null != mView)
if (null != mView) mView.releaseCamera();
mView.releaseCamera(); super.onPause();
} }
@Override @Override
protected void onResume() { protected void onResume() {
Log.i(TAG, "onResume"); Log.i(TAG, "onResume");
super.onResume(); super.onResume();
if( (null != mView) && !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create(); Log.i(TAG, "Trying to load OpenCV library");
ad.setCancelable(false); // This blocks the 'BACK' button if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
ad.setMessage("Fatal error: can't open camera!"); {
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { Log.e(TAG, "Cannot connect to OpenCV Manager");
public void onClick(DialogInterface dialog, int which) { }
dialog.dismiss(); }
finish();
}
});
ad.show();
}
}
/** Called when the activity is first created. */ /** Called when the activity is first created. */
@Override @Override
@ -122,12 +117,7 @@ public class ImageManipulationsActivity extends Activity {
Log.i(TAG, "onCreate"); Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState); super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE); requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
} }
@Override @Override


@ -55,7 +55,7 @@ class ImageManipulationsView extends SampleCvViewBase {
} }
@Override @Override
public void surfaceCreated(SurfaceHolder holder) { public void surfaceCreated(SurfaceHolder holder) {
synchronized (this) { synchronized (this) {
// initialize Mats before usage // initialize Mats before usage
mGray = new Mat(); mGray = new Mat();
@ -71,11 +71,11 @@ class ImageManipulationsView extends SampleCvViewBase {
mMat0 = new Mat(); mMat0 = new Mat();
mColorsRGB = new Scalar[] { new Scalar(200, 0, 0, 255), new Scalar(0, 200, 0, 255), new Scalar(0, 0, 200, 255) }; mColorsRGB = new Scalar[] { new Scalar(200, 0, 0, 255), new Scalar(0, 200, 0, 255), new Scalar(0, 0, 200, 255) };
mColorsHue = new Scalar[] { mColorsHue = new Scalar[] {
new Scalar(255, 0, 0, 255), new Scalar(255, 60, 0, 255), new Scalar(255, 120, 0, 255), new Scalar(255, 180, 0, 255), new Scalar(255, 240, 0, 255), new Scalar(255, 0, 0, 255), new Scalar(255, 60, 0, 255), new Scalar(255, 120, 0, 255), new Scalar(255, 180, 0, 255), new Scalar(255, 240, 0, 255),
new Scalar(215, 213, 0, 255), new Scalar(150, 255, 0, 255), new Scalar(85, 255, 0, 255), new Scalar(20, 255, 0, 255), new Scalar(0, 255, 30, 255), new Scalar(215, 213, 0, 255), new Scalar(150, 255, 0, 255), new Scalar(85, 255, 0, 255), new Scalar(20, 255, 0, 255), new Scalar(0, 255, 30, 255),
new Scalar(0, 255, 85, 255), new Scalar(0, 255, 150, 255), new Scalar(0, 255, 215, 255), new Scalar(0, 234, 255, 255), new Scalar(0, 170, 255, 255), new Scalar(0, 255, 85, 255), new Scalar(0, 255, 150, 255), new Scalar(0, 255, 215, 255), new Scalar(0, 234, 255, 255), new Scalar(0, 170, 255, 255),
new Scalar(0, 120, 255, 255), new Scalar(0, 60, 255, 255), new Scalar(0, 0, 255, 255), new Scalar(64, 0, 255, 255), new Scalar(120, 0, 255, 255), new Scalar(0, 120, 255, 255), new Scalar(0, 60, 255, 255), new Scalar(0, 0, 255, 255), new Scalar(64, 0, 255, 255), new Scalar(120, 0, 255, 255),
new Scalar(180, 0, 255, 255), new Scalar(255, 0, 255, 255), new Scalar(255, 0, 215, 255), new Scalar(255, 0, 85, 255), new Scalar(255, 0, 0, 255) new Scalar(180, 0, 255, 255), new Scalar(255, 0, 255, 255), new Scalar(255, 0, 215, 255), new Scalar(255, 0, 85, 255), new Scalar(255, 0, 0, 255)
}; };
mWhilte = Scalar.all(255); mWhilte = Scalar.all(255);
mP1 = new Point(); mP1 = new Point();
@ -83,9 +83,9 @@ class ImageManipulationsView extends SampleCvViewBase {
} }
super.surfaceCreated(holder); super.surfaceCreated(holder);
} }
private void CreateAuxiliaryMats() { private void CreateAuxiliaryMats() {
if (mRgba.empty()) if (mRgba.empty())
return; return;
@ -134,38 +134,38 @@ class ImageManipulationsView extends SampleCvViewBase {
int offset = (int) ((mSizeRgba.width - (5*mHistSizeNum + 4*10)*thikness)/2); int offset = (int) ((mSizeRgba.width - (5*mHistSizeNum + 4*10)*thikness)/2);
// RGB // RGB
for(int c=0; c<3; c++) { for(int c=0; c<3; c++) {
Imgproc.calcHist(Arrays.asList(mRgba), mChannels[c], mMat0, mHist, mHistSize, mRanges); Imgproc.calcHist(Arrays.asList(mRgba), mChannels[c], mMat0, mHist, mHistSize, mRanges);
Core.normalize(mHist, mHist, mSizeRgba.height/2, 0, Core.NORM_INF); Core.normalize(mHist, mHist, mSizeRgba.height/2, 0, Core.NORM_INF);
mHist.get(0, 0, mBuff); mHist.get(0, 0, mBuff);
for(int h=0; h<mHistSizeNum; h++) { for(int h=0; h<mHistSizeNum; h++) {
mP1.x = mP2.x = offset + (c * (mHistSizeNum + 10) + h) * thikness; mP1.x = mP2.x = offset + (c * (mHistSizeNum + 10) + h) * thikness;
mP1.y = mSizeRgba.height-1; mP1.y = mSizeRgba.height-1;
mP2.y = mP1.y - 2 - (int)mBuff[h]; mP2.y = mP1.y - 2 - (int)mBuff[h];
Core.line(mRgba, mP1, mP2, mColorsRGB[c], thikness); Core.line(mRgba, mP1, mP2, mColorsRGB[c], thikness);
} }
} }
// Value and Hue // Value and Hue
Imgproc.cvtColor(mRgba, mIntermediateMat, Imgproc.COLOR_RGB2HSV_FULL); Imgproc.cvtColor(mRgba, mIntermediateMat, Imgproc.COLOR_RGB2HSV_FULL);
// Value // Value
Imgproc.calcHist(Arrays.asList(mIntermediateMat), mChannels[2], mMat0, mHist, mHistSize, mRanges); Imgproc.calcHist(Arrays.asList(mIntermediateMat), mChannels[2], mMat0, mHist, mHistSize, mRanges);
Core.normalize(mHist, mHist, mSizeRgba.height/2, 0, Core.NORM_INF); Core.normalize(mHist, mHist, mSizeRgba.height/2, 0, Core.NORM_INF);
mHist.get(0, 0, mBuff); mHist.get(0, 0, mBuff);
for(int h=0; h<mHistSizeNum; h++) { for(int h=0; h<mHistSizeNum; h++) {
mP1.x = mP2.x = offset + (3 * (mHistSizeNum + 10) + h) * thikness; mP1.x = mP2.x = offset + (3 * (mHistSizeNum + 10) + h) * thikness;
mP1.y = mSizeRgba.height-1; mP1.y = mSizeRgba.height-1;
mP2.y = mP1.y - 2 - (int)mBuff[h]; mP2.y = mP1.y - 2 - (int)mBuff[h];
Core.line(mRgba, mP1, mP2, mWhilte, thikness); Core.line(mRgba, mP1, mP2, mWhilte, thikness);
} }
// Hue // Hue
Imgproc.calcHist(Arrays.asList(mIntermediateMat), mChannels[0], mMat0, mHist, mHistSize, mRanges); Imgproc.calcHist(Arrays.asList(mIntermediateMat), mChannels[0], mMat0, mHist, mHistSize, mRanges);
Core.normalize(mHist, mHist, mSizeRgba.height/2, 0, Core.NORM_INF); Core.normalize(mHist, mHist, mSizeRgba.height/2, 0, Core.NORM_INF);
mHist.get(0, 0, mBuff); mHist.get(0, 0, mBuff);
for(int h=0; h<mHistSizeNum; h++) { for(int h=0; h<mHistSizeNum; h++) {
mP1.x = mP2.x = offset + (4 * (mHistSizeNum + 10) + h) * thikness; mP1.x = mP2.x = offset + (4 * (mHistSizeNum + 10) + h) * thikness;
mP1.y = mSizeRgba.height-1; mP1.y = mSizeRgba.height-1;
mP2.y = mP1.y - 2 - (int)mBuff[h]; mP2.y = mP1.y - 2 - (int)mBuff[h];
Core.line(mRgba, mP1, mP2, mColorsHue[h], thikness); Core.line(mRgba, mP1, mP2, mColorsHue[h], thikness);
} }
break; break;
case ImageManipulationsActivity.VIEW_MODE_CANNY: case ImageManipulationsActivity.VIEW_MODE_CANNY:
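Every histogram bar in the hunk above comes from the same three-step recipe: calcHist over one channel, normalize the bin counts to at most half the frame height, then draw one vertical line per bin. The recipe for a single RGB channel c, restated as a commented sketch (field names follow the surrounding code, including the sample's own 'thikness' spelling):

    // One channel's histogram, drawn as mHistSizeNum vertical bars of width 'thikness'.
    Imgproc.calcHist(Arrays.asList(mRgba), mChannels[c], mMat0, mHist, mHistSize, mRanges);
    Core.normalize(mHist, mHist, mSizeRgba.height / 2, 0, Core.NORM_INF); // tallest bar = half the frame height
    mHist.get(0, 0, mBuff);                                               // copy bin counts into a float[]
    for (int h = 0; h < mHistSizeNum; h++) {
        mP1.x = mP2.x = offset + (c * (mHistSizeNum + 10) + h) * thikness; // x position of bar h
        mP1.y = mSizeRgba.height - 1;                                      // baseline at the bottom edge
        mP2.y = mP1.y - 2 - (int) mBuff[h];                                // bar top, proportional to the count
        Core.line(mRgba, mP1, mP2, mColorsRGB[c], thikness);               // draw straight into the preview frame
    }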
@ -231,10 +231,10 @@ class ImageManipulationsView extends SampleCvViewBase {
Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888); Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
try { try {
Utils.matToBitmap(mRgba, bmp); Utils.matToBitmap(mRgba, bmp);
return bmp; return bmp;
} catch(Exception e) { } catch(Exception e) {
Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage()); Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle(); bmp.recycle();
return null; return null;
} }


@ -31,24 +31,23 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
public boolean openCamera() { public boolean openCamera() {
Log.i(TAG, "openCamera"); Log.i(TAG, "openCamera");
synchronized (this) { synchronized (this) {
releaseCamera(); releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID); mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) { if (!mCamera.isOpened()) {
mCamera.release(); releaseCamera();
mCamera = null; Log.e(TAG, "Failed to open native camera");
Log.e(TAG, "Failed to open native camera"); return false;
return false; }
} }
}
return true; return true;
} }
public void releaseCamera() { public void releaseCamera() {
Log.i(TAG, "releaseCamera"); Log.i(TAG, "releaseCamera");
synchronized (this) { synchronized (this) {
if (mCamera != null) { if (mCamera != null) {
mCamera.release(); mCamera.release();
mCamera = null; mCamera = null;
} }
} }
} }


@ -11,7 +11,7 @@ import android.view.Window;
public class Sample0Base extends Activity { public class Sample0Base extends Activity {
private static final String TAG = "Sample::Activity"; private static final String TAG = "Sample::Activity";
private MenuItem mItemPreviewRGBA; private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewGray; private MenuItem mItemPreviewGray;
@ -22,31 +22,31 @@ public class Sample0Base extends Activity {
} }
@Override @Override
protected void onPause() { protected void onPause() {
Log.i(TAG, "onPause"); Log.i(TAG, "onPause");
super.onPause(); super.onPause();
mView.releaseCamera(); mView.releaseCamera();
} }
@Override @Override
protected void onResume() { protected void onResume() {
Log.i(TAG, "onResume"); Log.i(TAG, "onResume");
super.onResume(); super.onResume();
if( !mView.openCamera() ) { if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create(); AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the 'BACK' button ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!"); ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) { public void onClick(DialogInterface dialog, int which) {
dialog.dismiss(); dialog.dismiss();
finish(); finish();
} }
}); });
ad.show(); ad.show();
} }
} }
/** Called when the activity is first created. */ /** Called when the activity is first created. */
@Override @Override
public void onCreate(Bundle savedInstanceState) { public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate"); Log.i(TAG, "onCreate");
@ -68,9 +68,9 @@ public class Sample0Base extends Activity {
public boolean onOptionsItemSelected(MenuItem item) { public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "Menu Item selected " + item); Log.i(TAG, "Menu Item selected " + item);
if (item == mItemPreviewRGBA) if (item == mItemPreviewRGBA)
mView.setViewMode(Sample0View.VIEW_MODE_RGBA); mView.setViewMode(Sample0View.VIEW_MODE_RGBA);
else if (item == mItemPreviewGray) else if (item == mItemPreviewGray)
mView.setViewMode(Sample0View.VIEW_MODE_GRAY); mView.setViewMode(Sample0View.VIEW_MODE_GRAY);
return true; return true;
} }
} }


@ -6,10 +6,10 @@ import android.util.Log;
class Sample0View extends SampleViewBase { class Sample0View extends SampleViewBase {
private static final String TAG = "Sample0View"; private static final String TAG = "Sample0View";
int mSize; int mSize;
int[] mRGBA; int[] mRGBA;
private Bitmap mBitmap; private Bitmap mBitmap;
private int mViewMode; private int mViewMode;
public static final int VIEW_MODE_RGBA = 0; public static final int VIEW_MODE_RGBA = 0;
@ -35,10 +35,10 @@ class Sample0View extends SampleViewBase {
rgba[i] = 0xff000000 + (y << 16) + (y << 8) + y; rgba[i] = 0xff000000 + (y << 16) + (y << 8) + y;
} }
} else if (view_mode == VIEW_MODE_RGBA) { } else if (view_mode == VIEW_MODE_RGBA) {
for (int i = 0; i < getFrameHeight(); i++) for (int i = 0; i < getFrameHeight(); i++) {
for (int j = 0; j < getFrameWidth(); j++) { for (int j = 0; j < getFrameWidth(); j++) {
int index = i * getFrameWidth() + j; int index = i * getFrameWidth() + j;
int supply_index = frameSize + (i >> 1) * getFrameWidth() + (j & ~1); int supply_index = frameSize + (i >> 1) * getFrameWidth() + (j & ~1);
int y = (0xff & ((int) data[index])); int y = (0xff & ((int) data[index]));
int u = (0xff & ((int) data[supply_index + 0])); int u = (0xff & ((int) data[supply_index + 0]));
int v = (0xff & ((int) data[supply_index + 1])); int v = (0xff & ((int) data[supply_index + 1]));
@ -55,35 +55,36 @@ class Sample0View extends SampleViewBase {
rgba[i * getFrameWidth() + j] = 0xff000000 + (b << 16) + (g << 8) + r; rgba[i * getFrameWidth() + j] = 0xff000000 + (b << 16) + (g << 8) + r;
} }
}
} }
mBitmap.setPixels(rgba, 0/* offset */, getFrameWidth() /* stride */, 0, 0, getFrameWidth(), getFrameHeight()); mBitmap.setPixels(rgba, 0/* offset */, getFrameWidth() /* stride */, 0, 0, getFrameWidth(), getFrameHeight());
return mBitmap; return mBitmap;
} }
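The loops above walk an NV21 preview buffer by hand: the first width×height bytes are luma, followed by interleaved V/U pairs at quarter resolution, and each pixel is converted to RGB in Java. A hedged per-pixel sketch with standard conversion coefficients; it is not the sample's exact math, and note the sample itself packs blue into the high byte when filling rgba[]:

    // Illustrative NV21 -> ARGB for one pixel (assumes an even width and i/j inside the frame).
    static int nv21ToArgb(byte[] data, int width, int frameSize, int i, int j) {
        int yIndex  = i * width + j;                             // position in the Y plane
        int uvIndex = frameSize + (i >> 1) * width + (j & ~1);   // the V/U pair shared by a 2x2 block
        int y = 0xff & data[yIndex];
        int v = (0xff & data[uvIndex])     - 128;                // NV21 stores V before U
        int u = (0xff & data[uvIndex + 1]) - 128;
        int r = Math.max(0, Math.min(255, (int) (y + 1.402f * v)));
        int g = Math.max(0, Math.min(255, (int) (y - 0.344f * u - 0.714f * v)));
        int b = Math.max(0, Math.min(255, (int) (y + 1.772f * u)));
        return 0xff000000 | (r << 16) | (g << 8) | b;            // standard ARGB packing
    }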
@Override @Override
protected void onPreviewStarted(int previewWidth, int previewHeight) { protected void onPreviewStarted(int previewWidth, int previewHeight) {
Log.i(TAG, "onPreviewStarted("+previewWidth+", "+previewHeight+")"); Log.i(TAG, "onPreviewStarted("+previewWidth+", "+previewHeight+")");
/* Create a bitmap that will be used through to calculate the image to */ /* Create a bitmap that will be used through to calculate the image to */
mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888); mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
mRGBA = new int[previewWidth * previewHeight]; mRGBA = new int[previewWidth * previewHeight];
} }
@Override @Override
protected void onPreviewStopped() { protected void onPreviewStopped() {
Log.i(TAG, "onPreviewStopped"); Log.i(TAG, "onPreviewStopped");
if(mBitmap != null) { if(mBitmap != null) {
mBitmap.recycle(); mBitmap.recycle();
mBitmap = null; mBitmap = null;
} }
if(mRGBA != null) { if(mRGBA != null) {
mRGBA = null; mRGBA = null;
} }
} }
public void setViewMode(int viewMode) { public void setViewMode(int viewMode) {
Log.i(TAG, "setViewMode("+viewMode+")"); Log.i(TAG, "setViewMode("+viewMode+")");
mViewMode = viewMode; mViewMode = viewMode;
} }
} }


@ -25,6 +25,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
private byte[] mFrame; private byte[] mFrame;
private boolean mThreadRun; private boolean mThreadRun;
private byte[] mBuffer; private byte[] mBuffer;
private SurfaceTexture mSf;
public SampleViewBase(Context context) { public SampleViewBase(Context context) {
@ -43,20 +44,21 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
} }
public void setPreview() throws IOException { public void setPreview() throws IOException {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
mCamera.setPreviewTexture( new SurfaceTexture(10) ); mSf = new SurfaceTexture(10);
mCamera.setPreviewTexture( mSf );
}
else else
mCamera.setPreviewDisplay(null); mCamera.setPreviewDisplay(null);
} }
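The point of this hunk is to keep the dummy SurfaceTexture in a field (mSf) instead of creating it inline: on Honeycomb and newer the camera needs some preview target, and holding a reference stops the texture from being garbage-collected while the preview runs. Restated with comments, as a sketch under that assumption:

    private SurfaceTexture mSf;              // field, so the GC cannot reclaim the preview target

    public void setPreview() throws IOException {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            mSf = new SurfaceTexture(10);    // arbitrary texture name; never actually rendered
            mCamera.setPreviewTexture(mSf);
        } else {
            mCamera.setPreviewDisplay(null); // pre-Honeycomb accepts a null display
        }
    }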
public boolean openCamera() { public boolean openCamera() {
Log.i(TAG, "openCamera"); Log.i(TAG, "openCamera");
releaseCamera();
mCamera = Camera.open(); mCamera = Camera.open();
if(mCamera == null) { if(mCamera == null) {
Log.e(TAG, "Can't open camera!"); Log.e(TAG, "Can't open camera!");
return false; return false;
} }
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() { mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
@ -75,7 +77,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
Log.i(TAG, "releaseCamera"); Log.i(TAG, "releaseCamera");
mThreadRun = false; mThreadRun = false;
synchronized (this) { synchronized (this) {
if (mCamera != null) { if (mCamera != null) {
mCamera.stopPreview(); mCamera.stopPreview();
mCamera.setPreviewCallback(null); mCamera.setPreviewCallback(null);
mCamera.release(); mCamera.release();
@ -111,7 +113,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
List<String> FocusModes = params.getSupportedFocusModes(); List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{ {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
} }
mCamera.setParameters(params); mCamera.setParameters(params);
@ -125,15 +127,15 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
mFrame = new byte [size]; mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer); mCamera.addCallbackBuffer(mBuffer);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Notify that the preview is about to be started and deliver preview size */ /* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height); onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Now we can start a preview */ /* Now we can start a preview */
mCamera.startPreview(); mCamera.startPreview();
} }
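The other change in this file is ordering: setPreview() now runs after onPreviewStarted(), so the subclass has allocated its Bitmaps/Mats for the final preview size before the preview target is attached, and startPreview() stays last. The resulting start-up sequence, as a hedged outline rather than the full method:

    // Camera start-up order after the merge (outline).
    mCamera.setParameters(params);           // preview size, focus mode, etc.
    mCamera.addCallbackBuffer(mBuffer);      // buffer reused by the preview callback
    onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height); // allocate per-size state
    try {
        setPreview();                        // attach the (off-screen) preview target
    } catch (IOException e) {
        Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
    }
    mCamera.startPreview();                  // frames start arriving via the preview callback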


@ -12,6 +12,7 @@ import android.util.Log;
import android.view.Menu; import android.view.Menu;
import android.view.MenuItem; import android.view.MenuItem;
import android.view.Window; import android.view.Window;
import android.view.WindowManager;
public class Sample1Java extends Activity { public class Sample1Java extends Activity {
private static final String TAG = "Sample::Activity"; private static final String TAG = "Sample::Activity";
@ -22,81 +23,76 @@ public class Sample1Java extends Activity {
private Sample1View mView; private Sample1View mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) { private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override @Override
public void onManagerConnected(int status) { public void onManagerConnected(int status) {
switch (status) { switch (status) {
case LoaderCallbackInterface.SUCCESS: case LoaderCallbackInterface.SUCCESS:
{ {
Log.i(TAG, "OpenCV loaded successfully"); Log.i(TAG, "OpenCV loaded successfully");
// Create and set View // Create and set View
mView = new Sample1View(mAppContext); mView = new Sample1View(mAppContext);
setContentView(mView); setContentView(mView);
// Check native OpenCV camera // Check native OpenCV camera
if( !mView.openCamera() ) { if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create(); AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!"); ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) { public void onClick(DialogInterface dialog, int which) {
dialog.dismiss(); dialog.dismiss();
finish(); finish();
} }
}); });
ad.show(); ad.show();
} }
} break; } break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR: /** OpenCV loader cannot start Google Play **/
{ case LoaderCallbackInterface.MARKET_ERROR:
Log.d(TAG, "Google Play service is not accessible!"); {
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create(); Log.d(TAG, "Google Play service is not accessible!");
MarketErrorMessage.setTitle("OpenCV Manager"); AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command."); MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
public void onClick(DialogInterface dialog, int which) { MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
mAppContext.finish(); public void onClick(DialogInterface dialog, int which) {
} mAppContext.finish();
}); }
MarketErrorMessage.show(); });
} break; MarketErrorMessage.show();
default: } break;
{ default:
super.onManagerConnected(status); {
} break; super.onManagerConnected(status);
} } break;
} }
}; }
};
public Sample1Java() { public Sample1Java() {
Log.i(TAG, "Instantiated new " + this.getClass()); Log.i(TAG, "Instantiated new " + this.getClass());
} }
@Override @Override
protected void onPause() { protected void onPause() {
Log.i(TAG, "onPause"); Log.i(TAG, "onPause");
super.onPause(); if (null != mView)
if (null != mView) mView.releaseCamera();
mView.releaseCamera(); super.onPause();
} }
@Override @Override
protected void onResume() { protected void onResume() {
Log.i(TAG, "onResume"); Log.i(TAG, "onResume");
super.onResume(); super.onResume();
if( (null != mView) && !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create(); Log.i(TAG, "Trying to load OpenCV library");
ad.setCancelable(false); // This blocks the 'BACK' button if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
ad.setMessage("Fatal error: can't open camera!"); {
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { Log.e(TAG, "Cannot connect to OpenCV Manager");
public void onClick(DialogInterface dialog, int which) { }
dialog.dismiss(); }
finish();
}
});
ad.show();
}
}
/** Called when the activity is first created. */ /** Called when the activity is first created. */
@Override @Override
@ -104,12 +100,7 @@ public class Sample1Java extends Activity {
Log.i(TAG, "onCreate"); Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState); super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE); requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
} }
@Override @Override
@ -125,11 +116,11 @@ public class Sample1Java extends Activity {
public boolean onOptionsItemSelected(MenuItem item) { public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "Menu Item selected " + item); Log.i(TAG, "Menu Item selected " + item);
if (item == mItemPreviewRGBA) { if (item == mItemPreviewRGBA) {
mView.setViewMode(Sample1View.VIEW_MODE_RGBA); mView.setViewMode(Sample1View.VIEW_MODE_RGBA);
} else if (item == mItemPreviewGray) { } else if (item == mItemPreviewGray) {
mView.setViewMode(Sample1View.VIEW_MODE_GRAY); mView.setViewMode(Sample1View.VIEW_MODE_GRAY);
} else if (item == mItemPreviewCanny) { } else if (item == mItemPreviewCanny) {
mView.setViewMode(Sample1View.VIEW_MODE_CANNY); mView.setViewMode(Sample1View.VIEW_MODE_CANNY);
} }
return true; return true;
} }


@ -22,35 +22,35 @@ class Sample1View extends SampleViewBase {
private Mat mRgba; private Mat mRgba;
private Mat mGraySubmat; private Mat mGraySubmat;
private Mat mIntermediateMat; private Mat mIntermediateMat;
private Bitmap mBitmap; private Bitmap mBitmap;
private int mViewMode; private int mViewMode;
public Sample1View(Context context) { public Sample1View(Context context) {
super(context); super(context);
mViewMode = VIEW_MODE_RGBA; mViewMode = VIEW_MODE_RGBA;
} }
@Override @Override
protected void onPreviewStarted(int previewWidth, int previewHeight) { protected void onPreviewStarted(int previewWidth, int previewHeight) {
synchronized (this) { synchronized (this) {
// initialize Mats before usage // initialize Mats before usage
mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1); mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth()); mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
mRgba = new Mat(); mRgba = new Mat();
mIntermediateMat = new Mat(); mIntermediateMat = new Mat();
mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888); mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
} }
} }
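onPreviewStarted() pre-allocates the Mats once per preview size: mYuv wraps the whole NV21 buffer (the frame height plus a half-height chroma block), and mGraySubmat is a zero-copy view of its top rows, so grayscale processing needs no conversion at all. A small worked example of the sizes involved, assuming a 640×480 preview:

    // For a 640x480 preview, NV21 delivers 480*640 luma bytes plus 240*640 chroma bytes.
    Mat yuv  = new Mat(480 + 480 / 2, 640, CvType.CV_8UC1); // 720 x 640 single-channel buffer
    Mat gray = yuv.submat(0, 480, 0, 640);                   // header-only view of the Y plane
    // The RGBA path later converts the whole buffer, e.g.:
    // Imgproc.cvtColor(yuv, rgba, Imgproc.COLOR_YUV420sp2RGB, 4);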
@Override @Override
protected void onPreviewStopped() { protected void onPreviewStopped() {
if(mBitmap != null) { if(mBitmap != null) {
mBitmap.recycle(); mBitmap.recycle();
} }
synchronized (this) { synchronized (this) {
// Explicitly deallocate Mats // Explicitly deallocate Mats
if (mYuv != null) if (mYuv != null)
mYuv.release(); mYuv.release();
@ -101,7 +101,7 @@ class Sample1View extends SampleViewBase {
} }
public void setViewMode(int viewMode) { public void setViewMode(int viewMode) {
mViewMode = viewMode; mViewMode = viewMode;
} }
} }


@ -25,6 +25,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
private byte[] mFrame; private byte[] mFrame;
private boolean mThreadRun; private boolean mThreadRun;
private byte[] mBuffer; private byte[] mBuffer;
private SurfaceTexture mSf;
public SampleViewBase(Context context) { public SampleViewBase(Context context) {
@ -43,19 +44,21 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
} }
public void setPreview() throws IOException { public void setPreview() throws IOException {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
mCamera.setPreviewTexture( new SurfaceTexture(10) ); mSf = new SurfaceTexture(10);
mCamera.setPreviewTexture( mSf );
}
else else
mCamera.setPreviewDisplay(null); mCamera.setPreviewDisplay(null);
} }
public boolean openCamera() { public boolean openCamera() {
Log.i(TAG, "openCamera"); Log.i(TAG, "openCamera");
releaseCamera(); releaseCamera();
mCamera = Camera.open(); mCamera = Camera.open();
if(mCamera == null) { if(mCamera == null) {
Log.e(TAG, "Can't open camera!"); Log.e(TAG, "Can't open camera!");
return false; return false;
} }
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() { mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
@ -74,7 +77,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
Log.i(TAG, "releaseCamera"); Log.i(TAG, "releaseCamera");
mThreadRun = false; mThreadRun = false;
synchronized (this) { synchronized (this) {
if (mCamera != null) { if (mCamera != null) {
mCamera.stopPreview(); mCamera.stopPreview();
mCamera.setPreviewCallback(null); mCamera.setPreviewCallback(null);
mCamera.release(); mCamera.release();
@ -110,7 +113,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
List<String> FocusModes = params.getSupportedFocusModes(); List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{ {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
} }
mCamera.setParameters(params); mCamera.setParameters(params);
@ -124,15 +127,15 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
mFrame = new byte [size]; mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer); mCamera.addCallbackBuffer(mBuffer);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Notify that the preview is about to be started and deliver preview size */ /* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height); onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Now we can start a preview */ /* Now we can start a preview */
mCamera.startPreview(); mCamera.startPreview();
} }


@ -12,6 +12,7 @@ import android.util.Log;
import android.view.Menu; import android.view.Menu;
import android.view.MenuItem; import android.view.MenuItem;
import android.view.Window; import android.view.Window;
import android.view.WindowManager;
public class Sample2NativeCamera extends Activity { public class Sample2NativeCamera extends Activity {
private static final String TAG = "Sample::Activity"; private static final String TAG = "Sample::Activity";
@ -26,84 +27,78 @@ public class Sample2NativeCamera extends Activity {
public static int viewMode = VIEW_MODE_RGBA; public static int viewMode = VIEW_MODE_RGBA;
private Sample2View mView; private Sample2View mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) { private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override @Override
public void onManagerConnected(int status) { public void onManagerConnected(int status) {
switch (status) { switch (status) {
case LoaderCallbackInterface.SUCCESS: case LoaderCallbackInterface.SUCCESS:
{ {
Log.i(TAG, "OpenCV loaded successfully"); Log.i(TAG, "OpenCV loaded successfully");
// Create and set View // Create and set View
mView = new Sample2View(mAppContext); mView = new Sample2View(mAppContext);
setContentView(mView); setContentView(mView);
// Check native OpenCV camera // Check native OpenCV camera
if( !mView.openCamera() ) { if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create(); AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!"); ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) { public void onClick(DialogInterface dialog, int which) {
dialog.dismiss(); dialog.dismiss();
finish(); finish();
} }
}); });
ad.show(); ad.show();
} }
} break; } break;
/** OpenCV loader cannot start Google Play **/ /** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR: case LoaderCallbackInterface.MARKET_ERROR:
{ {
Log.d(TAG, "Google Play service is not accessible!"); Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create(); AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager"); MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command."); MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) { public void onClick(DialogInterface dialog, int which) {
mAppContext.finish(); mAppContext.finish();
} }
}); });
MarketErrorMessage.show(); MarketErrorMessage.show();
} break; } break;
default: default:
{ {
super.onManagerConnected(status); super.onManagerConnected(status);
} break; } break;
} }
} }
}; };
public Sample2NativeCamera() { public Sample2NativeCamera() {
Log.i(TAG, "Instantiated new " + this.getClass()); Log.i(TAG, "Instantiated new " + this.getClass());
} }
@Override @Override
protected void onPause() { protected void onPause() {
Log.i(TAG, "onPause"); Log.i(TAG, "onPause");
super.onPause(); if (null != mView)
if (null != mView) mView.releaseCamera();
mView.releaseCamera(); super.onPause();
} }
@Override @Override
protected void onResume() { protected void onResume() {
Log.i(TAG, "onResume"); Log.i(TAG, "onResume");
super.onResume(); super.onResume();
if((null != mView) && !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create(); Log.i(TAG, "Trying to load OpenCV library");
ad.setCancelable(false); // This blocks the 'BACK' button if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
ad.setMessage("Fatal error: can't open camera!"); {
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { Log.e(TAG, "Cannot connect to OpenCV Manager");
public void onClick(DialogInterface dialog, int which) { }
dialog.dismiss(); }
finish();
}
});
ad.show();
}
}
/** Called when the activity is first created. */ /** Called when the activity is first created. */
@Override @Override
@ -111,11 +106,7 @@ public class Sample2NativeCamera extends Activity {
Log.i(TAG, "onCreate"); Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState); super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE); requestWindowFeature(Window.FEATURE_NO_TITLE);
Log.i(TAG, "Trying to load OpenCV library"); getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
} }
@Override @Override
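The activity no longer opens the camera directly in onResume(). onResume() now only starts the asynchronous OpenCV Manager initialization; the view and camera are created in the SUCCESS branch of the loader callback, onCreate() is reduced to window setup (including the new FLAG_KEEP_SCREEN_ON flag), and onPause() releases the camera before delegating to super.onPause(). A condensed sketch of the resulting lifecycle, using the sample's names, with imports and the error dialogs omitted:

    public class Sample2NativeCamera extends Activity {
        private static final String TAG = "Sample::Activity";
        private Sample2View mView;

        private final BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
            @Override
            public void onManagerConnected(int status) {
                if (status == LoaderCallbackInterface.SUCCESS) {
                    mView = new Sample2View(mAppContext);   // create the view only once OpenCV is ready
                    setContentView(mView);
                    if (!mView.openCamera()) {
                        // fatal-error dialog and finish(), as in the sample
                    }
                } else {
                    super.onManagerConnected(status);       // MARKET_ERROR etc. handled by the base class
                }
            }
        };

        @Override
        public void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            requestWindowFeature(Window.FEATURE_NO_TITLE);
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);  // keep the preview visible
        }

        @Override
        protected void onResume() {
            super.onResume();
            // OpenCV is (re)requested on every resume; the camera is reopened in the callback above.
            if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
                Log.e(TAG, "Cannot connect to OpenCV Manager");
        }

        @Override
        protected void onPause() {
            if (mView != null)
                mView.releaseCamera();                      // stop the preview before the activity pauses
            super.onPause();
        }
    }

Sample3Native and Sample4Mixed below follow the same pattern, additionally calling System.loadLibrary() for their native code inside the SUCCESS branch.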

View File

@ -50,16 +50,16 @@ class Sample2View extends SampleCvViewBase {
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME); capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
Imgproc.Canny(mGray, mIntermediateMat, 80, 100); Imgproc.Canny(mGray, mIntermediateMat, 80, 100);
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4); Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
break; break;
} }
Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888); Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
try { try {
Utils.matToBitmap(mRgba, bmp); Utils.matToBitmap(mRgba, bmp);
return bmp; return bmp;
} catch(Exception e) { } catch(Exception e) {
Log.e("org.opencv.samples.tutorial2", "Utils.matToBitmap() throws an exception: " + e.getMessage()); Log.e("org.opencv.samples.tutorial2", "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle(); bmp.recycle();
return null; return null;
} }

View File

@ -29,24 +29,23 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
public boolean openCamera() { public boolean openCamera() {
Log.i(TAG, "openCamera"); Log.i(TAG, "openCamera");
synchronized (this) { synchronized (this) {
releaseCamera(); releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID); mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) { if (!mCamera.isOpened()) {
mCamera.release(); releaseCamera();
mCamera = null; Log.e(TAG, "Failed to open native camera");
Log.e(TAG, "Failed to open native camera"); return false;
return false; }
} }
}
return true; return true;
} }
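The failure path in openCamera() now funnels through releaseCamera() instead of duplicating the release/null-out logic, so the VideoCapture is disposed of in exactly one place. A minimal sketch of the resulting method (the enclosing class and fields omitted):

    public boolean openCamera() {
        synchronized (this) {
            releaseCamera();                               // drop any previously opened capture first
            mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
            if (!mCamera.isOpened()) {
                releaseCamera();                           // single shared cleanup path on failure
                Log.e(TAG, "Failed to open native camera");
                return false;
            }
        }
        return true;
    }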
public void releaseCamera() { public void releaseCamera() {
Log.i(TAG, "releaseCamera"); Log.i(TAG, "releaseCamera");
synchronized (this) { synchronized (this) {
if (mCamera != null) { if (mCamera != null) {
mCamera.release(); mCamera.release();
mCamera = null; mCamera = null;
} }
} }
} }
@ -115,8 +114,8 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
if (bmp != null) { if (bmp != null) {
Canvas canvas = mHolder.lockCanvas(); Canvas canvas = mHolder.lockCanvas();
if (canvas != null) { if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR); canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
canvas.drawBitmap(bmp, (canvas.getWidth()-bmp.getWidth()) / 2, (canvas.getHeight()-bmp.getHeight()) / 2, null); canvas.drawBitmap(bmp, (canvas.getWidth()-bmp.getWidth()) / 2, (canvas.getHeight()-bmp.getHeight()) / 2, null);
mHolder.unlockCanvasAndPost(canvas); mHolder.unlockCanvasAndPost(canvas);
} }
bmp.recycle(); bmp.recycle();

View File

@ -10,91 +10,86 @@ import android.content.DialogInterface;
import android.os.Bundle; import android.os.Bundle;
import android.util.Log; import android.util.Log;
import android.view.Window; import android.view.Window;
import android.view.WindowManager;
public class Sample3Native extends Activity { public class Sample3Native extends Activity {
private static final String TAG = "Sample::Activity"; private static final String TAG = "Sample::Activity";
private Sample3View mView; private Sample3View mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) { private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override @Override
public void onManagerConnected(int status) { public void onManagerConnected(int status) {
switch (status) { switch (status) {
case LoaderCallbackInterface.SUCCESS: case LoaderCallbackInterface.SUCCESS:
{ {
Log.i(TAG, "OpenCV loaded successfully"); Log.i(TAG, "OpenCV loaded successfully");
// Load native library after(!) OpenCV initialization // Load native library after(!) OpenCV initialization
System.loadLibrary("native_sample"); System.loadLibrary("native_sample");
// Create and set View // Create and set View
mView = new Sample3View(mAppContext); mView = new Sample3View(mAppContext);
setContentView(mView); setContentView(mView);
// Check native OpenCV camera // Check native OpenCV camera
if( !mView.openCamera() ) { if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create(); AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!"); ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) { public void onClick(DialogInterface dialog, int which) {
dialog.dismiss(); dialog.dismiss();
finish(); finish();
} }
}); });
ad.show(); ad.show();
} }
} break; } break;
/** OpenCV loader cannot start Google Play **/ /** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR: case LoaderCallbackInterface.MARKET_ERROR:
{ {
Log.d(TAG, "Google Play service is not accessible!"); Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create(); AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager"); MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command."); MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) { public void onClick(DialogInterface dialog, int which) {
mAppContext.finish(); mAppContext.finish();
} }
}); });
MarketErrorMessage.show(); MarketErrorMessage.show();
} break; } break;
default: default:
{ {
super.onManagerConnected(status); super.onManagerConnected(status);
} break; } break;
} }
} }
}; };
public Sample3Native() { public Sample3Native() {
Log.i(TAG, "Instantiated new " + this.getClass()); Log.i(TAG, "Instantiated new " + this.getClass());
} }
@Override @Override
protected void onPause() { protected void onPause() {
Log.i(TAG, "onPause"); Log.i(TAG, "onPause");
super.onPause(); if (null != mView)
if (null != mView) mView.releaseCamera();
mView.releaseCamera(); super.onPause();
} }
@Override @Override
protected void onResume() { protected void onResume() {
Log.i(TAG, "onResume"); Log.i(TAG, "onResume");
super.onResume(); super.onResume();
if((null != mView) && !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create(); Log.i(TAG, "Trying to load OpenCV library");
ad.setCancelable(false); // This blocks the 'BACK' button if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
ad.setMessage("Fatal error: can't open camera!"); {
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { Log.e(TAG, "Cannot connect to OpenCV Manager");
public void onClick(DialogInterface dialog, int which) { }
dialog.dismiss(); }
finish();
}
});
ad.show();
}
}
/** Called when the activity is first created. */ /** Called when the activity is first created. */
@Override @Override
@ -102,10 +97,6 @@ public class Sample3Native extends Activity {
Log.i(TAG, "onCreate"); Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState); super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE); requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
} }
} }

View File

@ -5,29 +5,29 @@ import android.graphics.Bitmap;
class Sample3View extends SampleViewBase { class Sample3View extends SampleViewBase {
private int mFrameSize; private int mFrameSize;
private Bitmap mBitmap; private Bitmap mBitmap;
private int[] mRGBA; private int[] mRGBA;
public Sample3View(Context context) { public Sample3View(Context context) {
super(context); super(context);
} }
@Override @Override
protected void onPreviewStarted(int previewWidtd, int previewHeight) { protected void onPreviewStarted(int previewWidtd, int previewHeight) {
mFrameSize = previewWidtd * previewHeight; mFrameSize = previewWidtd * previewHeight;
mRGBA = new int[mFrameSize]; mRGBA = new int[mFrameSize];
mBitmap = Bitmap.createBitmap(previewWidtd, previewHeight, Bitmap.Config.ARGB_8888); mBitmap = Bitmap.createBitmap(previewWidtd, previewHeight, Bitmap.Config.ARGB_8888);
} }
@Override @Override
protected void onPreviewStopped() { protected void onPreviewStopped() {
if(mBitmap != null) { if(mBitmap != null) {
mBitmap.recycle(); mBitmap.recycle();
mBitmap = null; mBitmap = null;
} }
mRGBA = null; mRGBA = null;
} }
@Override @Override
protected Bitmap processFrame(byte[] data) { protected Bitmap processFrame(byte[] data) {

View File

@ -24,6 +24,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
private byte[] mFrame; private byte[] mFrame;
private boolean mThreadRun; private boolean mThreadRun;
private byte[] mBuffer; private byte[] mBuffer;
private SurfaceTexture mSf;
public SampleViewBase(Context context) { public SampleViewBase(Context context) {
@ -42,19 +43,21 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
} }
public void setPreview() throws IOException { public void setPreview() throws IOException {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
mCamera.setPreviewTexture( new SurfaceTexture(10) ); mSf = new SurfaceTexture(10);
mCamera.setPreviewTexture( mSf );
}
else else
mCamera.setPreviewDisplay(null); mCamera.setPreviewDisplay(null);
} }
public boolean openCamera() { public boolean openCamera() {
Log.i(TAG, "openCamera"); Log.i(TAG, "openCamera");
releaseCamera(); releaseCamera();
mCamera = Camera.open(); mCamera = Camera.open();
if(mCamera == null) { if(mCamera == null) {
Log.e(TAG, "Can't open camera!"); Log.e(TAG, "Can't open camera!");
return false; return false;
} }
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() { mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
@ -73,7 +76,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
Log.i(TAG, "releaseCamera"); Log.i(TAG, "releaseCamera");
mThreadRun = false; mThreadRun = false;
synchronized (this) { synchronized (this) {
if (mCamera != null) { if (mCamera != null) {
mCamera.stopPreview(); mCamera.stopPreview();
mCamera.setPreviewCallback(null); mCamera.setPreviewCallback(null);
mCamera.release(); mCamera.release();
@ -109,7 +112,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
List<String> FocusModes = params.getSupportedFocusModes(); List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{ {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
} }
mCamera.setParameters(params); mCamera.setParameters(params);
@ -123,15 +126,15 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
mFrame = new byte [size]; mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer); mCamera.addCallbackBuffer(mBuffer);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Notify that the preview is about to be started and deliver preview size */ /* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height); onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Now we can start a preview */ /* Now we can start a preview */
mCamera.startPreview(); mCamera.startPreview();
} }

View File

@ -12,6 +12,7 @@ import android.util.Log;
import android.view.Menu; import android.view.Menu;
import android.view.MenuItem; import android.view.MenuItem;
import android.view.Window; import android.view.Window;
import android.view.WindowManager;
public class Sample4Mixed extends Activity { public class Sample4Mixed extends Activity {
private static final String TAG = "Sample::Activity"; private static final String TAG = "Sample::Activity";
@ -23,100 +24,88 @@ public class Sample4Mixed extends Activity {
private Sample4View mView; private Sample4View mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) { private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override @Override
public void onManagerConnected(int status) { public void onManagerConnected(int status) {
switch (status) { switch (status) {
case LoaderCallbackInterface.SUCCESS: case LoaderCallbackInterface.SUCCESS:
{ {
Log.i(TAG, "OpenCV loaded successfully"); Log.i(TAG, "OpenCV loaded successfully");
// Load native library after(!) OpenCV initialization // Load native library after(!) OpenCV initialization
System.loadLibrary("mixed_sample"); System.loadLibrary("mixed_sample");
// Create and set View // Create and set View
mView = new Sample4View(mAppContext); mView = new Sample4View(mAppContext);
setContentView(mView); setContentView(mView);
// Check native OpenCV camera // Check native OpenCV camera
if( !mView.openCamera() ) { if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create(); AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!"); ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) { public void onClick(DialogInterface dialog, int which) {
dialog.dismiss(); dialog.dismiss();
finish(); finish();
} }
}); });
ad.show(); ad.show();
} }
} break; } break;
/** OpenCV loader cannot start Google Play **/ /** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR: case LoaderCallbackInterface.MARKET_ERROR:
{ {
Log.d(TAG, "Google Play service is not accessible!"); Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create(); AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager"); MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command."); MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) { public void onClick(DialogInterface dialog, int which) {
mAppContext.finish(); mAppContext.finish();
} }
}); });
MarketErrorMessage.show(); MarketErrorMessage.show();
} break; } break;
default: default:
{ {
super.onManagerConnected(status); super.onManagerConnected(status);
} break; } break;
} }
} }
}; };
public Sample4Mixed() { public Sample4Mixed() {
Log.i(TAG, "Instantiated new " + this.getClass()); Log.i(TAG, "Instantiated new " + this.getClass());
} }
@Override @Override
protected void onPause() { protected void onPause() {
Log.i(TAG, "onPause"); Log.i(TAG, "onPause");
super.onPause(); if (null != mView)
if (null != mView) mView.releaseCamera();
mView.releaseCamera(); super.onPause();
} }
@Override @Override
protected void onResume() { protected void onResume() {
Log.i(TAG, "onResume"); Log.i(TAG, "onResume");
super.onResume(); super.onResume();
if((null != mView) && !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(this).create(); Log.i(TAG, "Trying to load OpenCV library");
ad.setCancelable(false); // This blocks the 'BACK' button if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
ad.setMessage("Fatal error: can't open camera!"); {
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() { Log.e(TAG, "Cannot connect to OpenCV Manager");
public void onClick(DialogInterface dialog, int which) { }
dialog.dismiss(); }
finish();
}
});
ad.show();
}
}
/** Called when the activity is first created. */ /** Called when the activity is first created. */
@Override @Override
public void onCreate(Bundle savedInstanceState) { public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState); super.onCreate(savedInstanceState);
Log.i(TAG, "onCreate"); Log.i(TAG, "onCreate");
requestWindowFeature(Window.FEATURE_NO_TITLE); requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
} }
public boolean onCreateOptionsMenu(Menu menu) { public boolean onCreateOptionsMenu(Menu menu) {
@ -131,13 +120,13 @@ public class Sample4Mixed extends Activity {
public boolean onOptionsItemSelected(MenuItem item) { public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "Menu Item selected " + item); Log.i(TAG, "Menu Item selected " + item);
if (item == mItemPreviewRGBA) { if (item == mItemPreviewRGBA) {
mView.setViewMode(Sample4View.VIEW_MODE_RGBA); mView.setViewMode(Sample4View.VIEW_MODE_RGBA);
} else if (item == mItemPreviewGray) { } else if (item == mItemPreviewGray) {
mView.setViewMode(Sample4View.VIEW_MODE_GRAY); mView.setViewMode(Sample4View.VIEW_MODE_GRAY);
} else if (item == mItemPreviewCanny) { } else if (item == mItemPreviewCanny) {
mView.setViewMode(Sample4View.VIEW_MODE_CANNY); mView.setViewMode(Sample4View.VIEW_MODE_CANNY);
} else if (item == mItemPreviewFeatures) { } else if (item == mItemPreviewFeatures) {
mView.setViewMode(Sample4View.VIEW_MODE_FEATURES); mView.setViewMode(Sample4View.VIEW_MODE_FEATURES);
} }
return true; return true;
} }

View File

@ -22,14 +22,14 @@ class Sample4View extends SampleViewBase {
private Mat mIntermediateMat; private Mat mIntermediateMat;
private int mViewMode; private int mViewMode;
private Bitmap mBitmap; private Bitmap mBitmap;
public Sample4View(Context context) { public Sample4View(Context context) {
super(context); super(context);
} }
@Override @Override
protected void onPreviewStarted(int previewWidtd, int previewHeight) { protected void onPreviewStarted(int previewWidtd, int previewHeight) {
// initialize Mats before usage // initialize Mats before usage
mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1); mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth()); mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
@ -38,15 +38,15 @@ class Sample4View extends SampleViewBase {
mIntermediateMat = new Mat(); mIntermediateMat = new Mat();
mBitmap = Bitmap.createBitmap(previewWidtd, previewHeight, Bitmap.Config.ARGB_8888); mBitmap = Bitmap.createBitmap(previewWidtd, previewHeight, Bitmap.Config.ARGB_8888);
} }
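onPreviewStarted() sizes mYuv as (height + height/2) rows of width bytes because an NV21 preview frame stores the full-resolution luma plane first, followed by the half-height interleaved chroma plane; mGraySubmat is a zero-copy view onto that luma plane. A small illustrative sketch of the same layout (variable names here are mine, not the sample's):

    // One NV21 preview frame: width*height Y bytes, then width*height/2 interleaved VU bytes.
    Mat yuv  = new Mat(height + height / 2, width, CvType.CV_8UC1);
    Mat gray = yuv.submat(0, height, 0, width);            // luma plane doubles as the grayscale image
    Mat rgba = new Mat();
    yuv.put(0, 0, data);                                    // 'data' is the byte[] from the preview callback
    Imgproc.cvtColor(yuv, rgba, Imgproc.COLOR_YUV420sp2RGB, 4);  // full color conversion when needed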
@Override @Override
protected void onPreviewStopped() { protected void onPreviewStopped() {
if (mBitmap != null) { if (mBitmap != null) {
mBitmap.recycle(); mBitmap.recycle();
mBitmap = null; mBitmap = null;
} }
// Explicitly deallocate Mats // Explicitly deallocate Mats
if (mYuv != null) if (mYuv != null)
@ -63,7 +63,7 @@ class Sample4View extends SampleViewBase {
mGraySubmat = null; mGraySubmat = null;
mIntermediateMat = null; mIntermediateMat = null;
} }
@Override @Override
@ -105,6 +105,6 @@ class Sample4View extends SampleViewBase {
public native void FindFeatures(long matAddrGr, long matAddrRgba); public native void FindFeatures(long matAddrGr, long matAddrRgba);
public void setViewMode(int viewMode) { public void setViewMode(int viewMode) {
mViewMode = viewMode; mViewMode = viewMode;
} }
} }
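Sample4View declares FindFeatures(long matAddrGr, long matAddrRgba) as a native method: the mixed sample hands the addresses of its C++ cv::Mat objects across JNI instead of copying pixel data. A hedged one-line illustration of how such a call is typically issued from processFrame() (Mat names follow the sample, but the line is illustrative rather than quoted from it):

    // Pass the native cv::Mat pointers to the JNI routine; no pixel data is copied on the Java side.
    FindFeatures(mGraySubmat.getNativeObjAddr(), mRgba.getNativeObjAddr());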

View File

@ -24,6 +24,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
private byte[] mFrame; private byte[] mFrame;
private boolean mThreadRun; private boolean mThreadRun;
private byte[] mBuffer; private byte[] mBuffer;
private SurfaceTexture mSf;
public SampleViewBase(Context context) { public SampleViewBase(Context context) {
@ -42,19 +43,21 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
} }
public void setPreview() throws IOException { public void setPreview() throws IOException {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
mCamera.setPreviewTexture( new SurfaceTexture(10) ); mSf = new SurfaceTexture(10);
mCamera.setPreviewTexture( mSf );
}
else else
mCamera.setPreviewDisplay(null); mCamera.setPreviewDisplay(null);
} }
public boolean openCamera() { public boolean openCamera() {
Log.i(TAG, "openCamera"); Log.i(TAG, "openCamera");
releaseCamera(); releaseCamera();
mCamera = Camera.open(); mCamera = Camera.open();
if(mCamera == null) { if(mCamera == null) {
Log.e(TAG, "Can't open camera!"); Log.e(TAG, "Can't open camera!");
return false; return false;
} }
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() { mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
@ -73,7 +76,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
Log.i(TAG, "releaseCamera"); Log.i(TAG, "releaseCamera");
mThreadRun = false; mThreadRun = false;
synchronized (this) { synchronized (this) {
if (mCamera != null) { if (mCamera != null) {
mCamera.stopPreview(); mCamera.stopPreview();
mCamera.setPreviewCallback(null); mCamera.setPreviewCallback(null);
mCamera.release(); mCamera.release();
@ -109,7 +112,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
List<String> FocusModes = params.getSupportedFocusModes(); List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{ {
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
} }
mCamera.setParameters(params); mCamera.setParameters(params);
@ -123,15 +126,15 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
mFrame = new byte [size]; mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer); mCamera.addCallbackBuffer(mBuffer);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Notify that the preview is about to be started and deliver preview size */ /* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height); onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Now we can start a preview */ /* Now we can start a preview */
mCamera.startPreview(); mCamera.startPreview();
} }