Merge branch 'android_camera_2.4' into 2.4

Andrey Kamaev 2012-08-24 16:55:44 +04:00
commit 0948f4f863
34 changed files with 1349 additions and 1439 deletions

==== changed file ====

@@ -39,7 +39,8 @@ using namespace std;
  static inline cv::Point2f centerRect(const cv::Rect& r)
  {
  return cv::Point2f(r.x+((float)r.width)/2, r.y+((float)r.height)/2);
- };
+ }
  static inline cv::Rect scale_rect(const cv::Rect& r, float scale)
  {
  cv::Point2f m=centerRect(r);
@@ -49,9 +50,10 @@ static inline cv::Rect scale_rect(const cv::Rect& r, float scale)
  int y=cvRound(m.y - height/2);
  return cv::Rect(x, y, cvRound(width), cvRound(height));
- };
+ }
  void* workcycleObjectDetectorFunction(void* p);
  class DetectionBasedTracker::SeparateDetectionWork
  {
  public:
@@ -61,6 +63,7 @@ class DetectionBasedTracker::SeparateDetectionWork
  bool run();
  void stop();
  void resetTracking();
  inline bool isWorking()
  {
  return (stateThread==STATE_THREAD_WORKING_SLEEPING) || (stateThread==STATE_THREAD_WORKING_WITH_IMAGE);
@@ -430,8 +433,6 @@ DetectionBasedTracker::Parameters::Parameters()
  minDetectionPeriod=0;
  }
  DetectionBasedTracker::InnerParameters::InnerParameters()
  {
  numLastPositionsToTrack=4;
@@ -444,6 +445,7 @@ DetectionBasedTracker::InnerParameters::InnerParameters()
  coeffObjectSpeedUsingInPrediction=0.8;
  }
  DetectionBasedTracker::DetectionBasedTracker(const std::string& cascadeFilename, const Parameters& params)
  :separateDetectionWork(),
  innerParameters(),
@@ -468,15 +470,13 @@ DetectionBasedTracker::DetectionBasedTracker(const std::string& cascadeFilename,
  weightsSizesSmoothing.push_back(0.2);
  }
  DetectionBasedTracker::~DetectionBasedTracker()
  {
  }
  void DetectionBasedTracker::process(const Mat& imageGray)
  {
  CV_Assert(imageGray.type()==CV_8UC1);
  if (!separateDetectionWork->isWorking()) {
@@ -501,8 +501,6 @@ void DetectionBasedTracker::process(const Mat& imageGray)
  vector<Rect> rectsWhereRegions;
  bool shouldHandleResult=separateDetectionWork->communicateWithDetectingThread(imageGray, rectsWhereRegions);
  if (shouldHandleResult) {
  LOGD("DetectionBasedTracker::process: get _rectsWhereRegions were got from resultDetect");
  } else {
@@ -560,6 +558,7 @@ void DetectionBasedTracker::getObjects(std::vector<cv::Rect>& result) const
  LOGD("DetectionBasedTracker::process: found a object with SIZE %d x %d, rect={%d, %d, %d x %d}", r.width, r.height, r.x, r.y, r.width, r.height);
  }
  }
  void DetectionBasedTracker::getObjects(std::vector<Object>& result) const
  {
  result.clear();
@@ -574,8 +573,6 @@ void DetectionBasedTracker::getObjects(std::vector<Object>& result) const
  }
  }
  bool DetectionBasedTracker::run()
  {
  return separateDetectionWork->run();
@@ -711,6 +708,7 @@ void DetectionBasedTracker::updateTrackedObjects(const vector<Rect>& detectedObj
  }
  }
  }
  Rect DetectionBasedTracker::calcTrackedObjectPositionToShow(int i) const
  {
  if ( (i < 0) || (i >= (int)trackedObjects.size()) ) {
@@ -797,6 +795,7 @@ void DetectionBasedTracker::detectInRegion(const Mat& img, const Rect& r, vector
  Rect r0(Point(), img.size());
  Rect r1 = scale_rect(r, innerParameters.coeffTrackingWindowSize);
  r1 = r1 & r0;
  if ( (r1.width <=0) || (r1.height <= 0) ) {
  LOGD("DetectionBasedTracker::detectInRegion: Empty intersection");
  return;

==== changed file ====

@@ -48,12 +48,15 @@
  #include <android/log.h>
  #include <camera_activity.hpp>
- #if !defined(LOGD) && !defined(LOGI) && !defined(LOGE)
+ //#if !defined(LOGD) && !defined(LOGI) && !defined(LOGE)
+ #undef LOGD
+ #undef LOGE
+ #undef LOGI
  #define LOG_TAG "CV_CAP"
  #define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))
  #define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__))
  #define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__))
- #endif
+ //#endif
  class HighguiAndroidCameraActivity;
@@ -86,8 +89,8 @@ protected:
  //raw from camera
  int m_width;
  int m_height;
- unsigned char *m_frameYUV420;
- unsigned char *m_frameYUV420next;
+ cv::Mat m_frameYUV420;
+ cv::Mat m_frameYUV420next;
  enum YUVformat
  {
@@ -189,8 +192,8 @@ CvCapture_Android::CvCapture_Android(int cameraId)
  m_height = 0;
  m_activity = 0;
  m_isOpened = false;
- m_frameYUV420 = 0;
- m_frameYUV420next = 0;
+ // m_frameYUV420 = 0;
+ // m_frameYUV420next = 0;
  m_hasGray = false;
  m_hasColor = false;
  m_dataState = CVCAPTURE_ANDROID_STATE_NO_FRAME;
@@ -231,15 +234,14 @@ CvCapture_Android::~CvCapture_Android()
  {
  ((HighguiAndroidCameraActivity*)m_activity)->LogFramesRate();
  pthread_mutex_lock(&m_nextFrameMutex);
- unsigned char *tmp1=m_frameYUV420;
- unsigned char *tmp2=m_frameYUV420next;
- m_frameYUV420 = 0;
- m_frameYUV420next = 0;
- delete tmp1;
- delete tmp2;
+ // unsigned char *tmp1=m_frameYUV420;
+ // unsigned char *tmp2=m_frameYUV420next;
+ // m_frameYUV420 = 0;
+ // m_frameYUV420next = 0;
+ // delete tmp1;
+ // delete tmp2;
  m_dataState=CVCAPTURE_ANDROID_STATE_NO_FRAME;
  pthread_cond_broadcast(&m_nextFrameCond);
@@ -355,17 +357,17 @@ bool CvCapture_Android::grabFrame()
  m_dataState= CVCAPTURE_ANDROID_STATE_NO_FRAME;//we will wait new frame
  }
- if (m_dataState!=CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED) {
+ if (m_dataState!=CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED)
+ {
  m_waitingNextFrame = true;
  pthread_cond_wait(&m_nextFrameCond, &m_nextFrameMutex);
  }
- if (m_dataState == CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED) {
+ if (m_dataState == CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED)
+ {
  //LOGD("CvCapture_Android::grabFrame: get new frame");
  //swap current and new frames
- unsigned char* tmp = m_frameYUV420;
- m_frameYUV420 = m_frameYUV420next;
- m_frameYUV420next = tmp;
+ cv::swap(m_frameYUV420, m_frameYUV420next);
  //discard cached frames
  m_hasGray = false;
@@ -393,7 +395,8 @@ IplImage* CvCapture_Android::retrieveFrame( int outputType )
  {
  IplImage* image = NULL;
- unsigned char *current_frameYUV420=m_frameYUV420;
+ cv::Mat m_frameYUV420_ref = m_frameYUV420;
+ unsigned char *current_frameYUV420=m_frameYUV420_ref.ptr();
  //Attention! all the operations in this function below should occupy less time than the period between two frames from camera
  if (NULL != current_frameYUV420)
  {
@@ -456,19 +459,10 @@ void CvCapture_Android::setFrame(const void* buffer, int bufferSize)
  prepareCacheForYUV(width, height);
  //copy data
- memcpy(m_frameYUV420next, buffer, bufferSize);
+ cv::Mat m_frameYUV420next_ref = m_frameYUV420next;
+ memcpy(m_frameYUV420next_ref.ptr(), buffer, bufferSize);
  // LOGD("CvCapture_Android::setFrame -- memcpy is done");
- // ((HighguiAndroidCameraActivity*)m_activity)->LogFramesRate();
- #if 0 //moved this part of code into grabFrame
- //swap current and new frames
- unsigned char* tmp = m_frameYUV420;
- m_frameYUV420 = m_frameYUV420next;
- m_frameYUV420next = tmp;
- //discard cached frames
- m_hasGray = false;
- m_hasColor = false;
- #endif
  m_dataState = CVCAPTURE_ANDROID_STATE_HAS_NEW_FRAME_UNGRABBED;
  m_waitingNextFrame = false;//set flag that no more frames required at this moment
@@ -482,17 +476,22 @@ void CvCapture_Android::prepareCacheForYUV(int width, int height)
  LOGD("CvCapture_Android::prepareCacheForYUV: Changing size of buffers: from width=%d height=%d to width=%d height=%d", m_width, m_height, width, height);
  m_width = width;
  m_height = height;
+ /*
  unsigned char *tmp = m_frameYUV420next;
  m_frameYUV420next = new unsigned char [width * height * 3 / 2];
- if (tmp != NULL) {
+ if (tmp != NULL)
+ {
  delete[] tmp;
  }
  tmp = m_frameYUV420;
  m_frameYUV420 = new unsigned char [width * height * 3 / 2];
- if (tmp != NULL) {
+ if (tmp != NULL)
+ {
  delete[] tmp;
- }
+ }*/
+ m_frameYUV420.create(height * 3 / 2, width, CV_8UC1);
+ m_frameYUV420next.create(height * 3 / 2, width, CV_8UC1);
  }
  }

==== changed file ====

@@ -14,7 +14,7 @@ import android.view.SurfaceHolder;
  import android.view.SurfaceView;
  public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
- private static final String TAG = "Sample::SurfaceView";
+ private static final String TAG = "Sample-15puzzle::SurfaceView";
  private SurfaceHolder mHolder;
  private VideoCapture mCamera;
@@ -32,8 +32,7 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
  releaseCamera();
  mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
  if (!mCamera.isOpened()) {
- mCamera.release();
- mCamera = null;
+ releaseCamera();
  Log.e(TAG, "Failed to open native camera");
  return false;
  }
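
This change (repeated in the other SampleCvViewBase copies below) routes the failure path of openCamera() through releaseCamera() instead of duplicating the release/null-out code inline. A minimal sketch of the resulting shape, assuming a releaseCamera() that guards against a null capture:

    import org.opencv.highgui.Highgui;
    import org.opencv.highgui.VideoCapture;

    import android.util.Log;

    class CameraOpenSketch {
        private static final String TAG = "Sample::CameraOpenSketch";
        private VideoCapture mCamera;

        public boolean openCamera() {
            releaseCamera();                                   // drop any previous instance first
            mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
            if (!mCamera.isOpened()) {
                releaseCamera();                               // single cleanup path on failure
                Log.e(TAG, "Failed to open native camera");
                return false;
            }
            return true;
        }

        public void releaseCamera() {
            if (mCamera != null) {
                mCamera.release();
                mCamera = null;
            }
        }
    }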

==== changed file ====

@@ -12,6 +12,7 @@ import android.util.Log;
  import android.view.Menu;
  import android.view.MenuItem;
  import android.view.Window;
+ import android.view.WindowManager;
  /** Activity class implements LoaderCallbackInterface to handle OpenCV initialization status **/
  public class puzzle15Activity extends Activity
@@ -21,6 +22,7 @@ public class puzzle15Activity extends Activity
  private MenuItem mItemNewGame;
  private MenuItem mItemToggleNumbers;
  private puzzle15View mView = null;
  private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
  @Override
  public void onManagerConnected(int status) {
@@ -68,45 +70,22 @@ public class puzzle15Activity extends Activity
  }
  };
- public puzzle15Activity()
- {
+ public puzzle15Activity() {
  Log.i(TAG, "Instantiated new " + this.getClass());
  }
  @Override
  protected void onPause() {
  Log.i(TAG, "onPause");
- super.onPause();
  if (null != mView)
  mView.releaseCamera();
+ super.onPause();
  }
  @Override
  protected void onResume() {
  Log.i(TAG, "onResume");
  super.onResume();
- if( mView!=null && !mView.openCamera() ) {
- AlertDialog ad = new AlertDialog.Builder(this).create();
- ad.setCancelable(false); // This blocks the 'BACK' button
- ad.setMessage("Fatal error: can't open camera!");
- ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
- public void onClick(DialogInterface dialog, int which) {
- dialog.dismiss();
- finish();
- }
- });
- ad.show();
- }
- }
- /** Called when the activity is first created. */
- @Override
- public void onCreate(Bundle savedInstanceState)
- {
- Log.i(TAG, "onCreate");
- super.onCreate(savedInstanceState);
- requestWindowFeature(Window.FEATURE_NO_TITLE);
  Log.i(TAG, "Trying to load OpenCV library");
  if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
@@ -115,6 +94,16 @@ public class puzzle15Activity extends Activity
  }
  }
+ /** Called when the activity is first created. */
+ @Override
+ public void onCreate(Bundle savedInstanceState) {
+ Log.i(TAG, "onCreate");
+ super.onCreate(savedInstanceState);
+ requestWindowFeature(Window.FEATURE_NO_TITLE);
+ getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
+ }
  @Override
  public boolean onCreateOptionsMenu(Menu menu) {
  Log.i(TAG, "onCreateOptionsMenu");

==== changed file ====

@@ -83,11 +83,11 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
  return sum % 2 == 0;
  }
- private void createPuzzle(int cols, int rows) {
+ private void createPuzzle(int cols, int rows, int type) {
  mCells = new Mat[gridArea];
  mCells15 = new Mat[gridArea];
- mRgba15 = new Mat(rows, cols, mRgba.type());
+ mRgba15 = new Mat(rows, cols, type);
  mIndexses = new int[gridArea];
  for (int i = 0; i < gridSize; i++) {
@@ -122,7 +122,11 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
  cols = cols - cols%4;
  if (mCells == null)
- createPuzzle(cols, rows);
+ createPuzzle(cols, rows, mRgba.type());
+ else if(mRgba15.cols() != cols || mRgba15.rows() != rows) {
+ releaseMats();
+ createPuzzle(cols, rows, mRgba.type());
+ }
  // copy shuffled tiles
  for (int i = 0; i < gridArea; i++) {
@@ -162,6 +166,15 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
  super.run();
  synchronized (this) {
+ releaseMats();
+ if (mRgba != null)
+ mRgba.release();
+ mRgba = null;
+ }
+ }
+ private void releaseMats() {
  // Explicitly deallocate Mats
  if (mCells != null) {
  for (Mat m : mCells)
@@ -171,18 +184,15 @@ public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
  for (Mat m : mCells15)
  m.release();
  }
- if (mRgba != null)
- mRgba.release();
  if (mRgba15 != null)
  mRgba15.release();
- mRgba = null;
  mRgba15 = null;
  mCells = null;
  mCells15 = null;
  mIndexses = null;
  }
- }
  public boolean onTouch(View v, MotionEvent event) {
  if(mRgba==null) return false;
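
The puzzle15View changes pass the frame type into createPuzzle(), rebuild the puzzle when the frame size changes, and factor the Mat cleanup into a releaseMats() helper shared with run(). A rough sketch of the resize handling, with the tile allocation and shuffling elided:

    import org.opencv.core.Mat;

    class PuzzleResizeSketch {
        private Mat mRgba;    // current camera frame, assumed to be filled elsewhere
        private Mat mRgba15;  // shuffled copy, recreated whenever the frame size changes
        private Mat[] mCells;

        void ensurePuzzle(int rows, int cols) {
            cols = cols - cols % 4;   // the sample trims the width to a multiple of the grid
            if (mCells == null) {
                createPuzzle(cols, rows, mRgba.type());
            } else if (mRgba15.cols() != cols || mRgba15.rows() != rows) {
                releaseMats();        // free the old tiles before re-allocating
                createPuzzle(cols, rows, mRgba.type());
            }
        }

        private void createPuzzle(int cols, int rows, int type) {
            mRgba15 = new Mat(rows, cols, type);   // allocate with the frame's own type
            mCells = new Mat[16];                  // tile allocation/shuffling elided
        }

        private void releaseMats() {
            if (mRgba15 != null)
                mRgba15.release();
            mRgba15 = null;
            mCells = null;   // the real sample also releases each tile Mat here
        }
    }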

==== changed file ====

@@ -10,10 +10,11 @@ import android.content.DialogInterface;
  import android.os.Bundle;
  import android.util.Log;
  import android.view.Window;
+ import android.view.WindowManager;
  public class ColorBlobDetectionActivity extends Activity {
- private static final String TAG = "Example/ColorBlobDetection";
+ private static final String TAG = "Sample-ColorBlobDetection::Activity";
  private ColorBlobDetectionView mView;
  private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@@ -63,34 +64,27 @@ public class ColorBlobDetectionActivity extends Activity {
  }
  };
- public ColorBlobDetectionActivity()
- {
+ public ColorBlobDetectionActivity() {
  Log.i(TAG, "Instantiated new " + this.getClass());
  }
  @Override
  protected void onPause() {
  Log.i(TAG, "onPause");
- super.onPause();
  if (null != mView)
  mView.releaseCamera();
+ super.onPause();
  }
  @Override
  protected void onResume() {
  Log.i(TAG, "onResume");
  super.onResume();
- if( (null != mView) && !mView.openCamera() ) {
- AlertDialog ad = new AlertDialog.Builder(this).create();
- ad.setCancelable(false); // This blocks the 'BACK' button
- ad.setMessage("Fatal error: can't open camera!");
- ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
- public void onClick(DialogInterface dialog, int which) {
- dialog.dismiss();
- finish();
- }
- });
- ad.show();
+ Log.i(TAG, "Trying to load OpenCV library");
+ if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
+ {
+ Log.e(TAG, "Cannot connect to OpenCV Manager");
  }
  }
@@ -99,12 +93,7 @@ public class ColorBlobDetectionActivity extends Activity {
  public void onCreate(Bundle savedInstanceState) {
  Log.i(TAG, "onCreate");
  super.onCreate(savedInstanceState);
  requestWindowFeature(Window.FEATURE_NO_TITLE);
- Log.i(TAG, "Trying to load OpenCV library");
- if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
- {
- Log.e(TAG, "Cannot connect to OpenCV Manager");
- }
+ getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
  }
  }

==== changed file ====

@@ -33,13 +33,12 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
  private static Size SPECTRUM_SIZE = new Size(200, 32);
  // Logcat tag
- private static final String TAG = "Example/ColorBlobDetection";
+ private static final String TAG = "Sample-ColorBlobDetection::View";
  private static final Scalar CONTOUR_COLOR = new Scalar(255,0,0,255);
- public ColorBlobDetectionView(Context context)
- {
+ public ColorBlobDetectionView(Context context) {
  super(context);
  setOnTouchListener(this);
  }
@@ -54,8 +53,7 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
  super.surfaceCreated(holder);
  }
- public boolean onTouch(View v, MotionEvent event)
- {
+ public boolean onTouch(View v, MotionEvent event) {
  int cols = mRgba.cols();
  int rows = mRgba.rows();
@@ -86,9 +84,7 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
  mBlobColorHsv = Core.sumElems(touchedRegionHsv);
  int pointCount = touchedRect.width*touchedRect.height;
  for (int i = 0; i < mBlobColorHsv.val.length; i++)
- {
  mBlobColorHsv.val[i] /= pointCount;
- }
  mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv);
@@ -110,8 +106,7 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
  Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
- if (mIsColorSelected)
- {
+ if (mIsColorSelected) {
  mDetector.process(mRgba);
  List<MatOfPoint> contours = mDetector.getContours();
  Log.e(TAG, "Contours count: " + contours.size());
@@ -135,8 +130,7 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
  return bmp;
  }
- private Scalar converScalarHsv2Rgba(Scalar hsvColor)
- {
+ private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
  Mat pointMatRgba = new Mat();
  Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
  Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);

==== changed file ====

@@ -11,15 +11,12 @@ import org.opencv.core.MatOfPoint;
  import org.opencv.core.Scalar;
  import org.opencv.imgproc.Imgproc;
- public class ColorBlobDetector
- {
- public void setColorRadius(Scalar radius)
- {
+ public class ColorBlobDetector {
+ public void setColorRadius(Scalar radius) {
  mColorRadius = radius;
  }
- public void setHsvColor(Scalar hsvColor)
- {
+ public void setHsvColor(Scalar hsvColor) {
  double minH = (hsvColor.val[0] >= mColorRadius.val[0]) ? hsvColor.val[0]-mColorRadius.val[0] : 0;
  double maxH = (hsvColor.val[0]+mColorRadius.val[0] <= 255) ? hsvColor.val[0]+mColorRadius.val[0] : 255;
@@ -37,28 +34,23 @@ public class ColorBlobDetector
  Mat spectrumHsv = new Mat(1, (int)(maxH-minH), CvType.CV_8UC3);
- for (int j = 0; j < maxH-minH; j++)
- {
+ for (int j = 0; j < maxH-minH; j++) {
  byte[] tmp = {(byte)(minH+j), (byte)255, (byte)255};
  spectrumHsv.put(0, j, tmp);
  }
  Imgproc.cvtColor(spectrumHsv, mSpectrum, Imgproc.COLOR_HSV2RGB_FULL, 4);
  }
- public Mat getSpectrum()
- {
+ public Mat getSpectrum() {
  return mSpectrum;
  }
- public void setMinContourArea(double area)
- {
+ public void setMinContourArea(double area) {
  mMinContourArea = area;
  }
- public void process(Mat rgbaImage)
- {
+ public void process(Mat rgbaImage) {
  Mat pyrDownMat = new Mat();
  Imgproc.pyrDown(rgbaImage, pyrDownMat);
@@ -80,8 +72,7 @@ public class ColorBlobDetector
  // Find max contour area
  double maxArea = 0;
  Iterator<MatOfPoint> each = contours.iterator();
- while (each.hasNext())
- {
+ while (each.hasNext()) {
  MatOfPoint wrapper = each.next();
  double area = Imgproc.contourArea(wrapper);
  if (area > maxArea)
@@ -91,19 +82,16 @@ public class ColorBlobDetector
  // Filter contours by area and resize to fit the original image size
  mContours.clear();
  each = contours.iterator();
- while (each.hasNext())
- {
+ while (each.hasNext()) {
  MatOfPoint contour = each.next();
- if (Imgproc.contourArea(contour) > mMinContourArea*maxArea)
- {
+ if (Imgproc.contourArea(contour) > mMinContourArea*maxArea) {
  Core.multiply(contour, new Scalar(4,4), contour);
  mContours.add(contour);
  }
  }
  }
- public List<MatOfPoint> getContours()
- {
+ public List<MatOfPoint> getContours() {
  return mContours;
  }

==== changed file ====

@@ -32,8 +32,7 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
  releaseCamera();
  mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
  if (!mCamera.isOpened()) {
- mCamera.release();
- mCamera = null;
+ releaseCamera();
  Log.e(TAG, "Failed to open native camera");
  return false;
  }

==== changed file ====

@@ -21,6 +21,7 @@ inline void vector_Rect_to_Mat(vector<Rect>& v_rect, Mat& mat)
  JNIEXPORT jlong JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeCreateObject
  (JNIEnv * jenv, jclass, jstring jFileName, jint faceSize)
  {
+ LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeCreateObject enter");
  const char* jnamestr = jenv->GetStringUTFChars(jFileName, NULL);
  string stdFileName(jnamestr);
  jlong result = 0;
@@ -48,17 +49,22 @@ JNIEXPORT jlong JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeC
  return 0;
  }
+ LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeCreateObject exit");
  return result;
  }
  JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDestroyObject
  (JNIEnv * jenv, jclass, jlong thiz)
  {
+ LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDestroyObject enter");
  try
+ {
+ if(thiz != 0)
  {
  ((DetectionBasedTracker*)thiz)->stop();
  delete (DetectionBasedTracker*)thiz;
  }
+ }
  catch(cv::Exception e)
  {
  LOGD("nativeestroyObject catched cv::Exception: %s", e.what());
@@ -73,11 +79,13 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDe
  jclass je = jenv->FindClass("java/lang/Exception");
  jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
  }
+ LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDestroyObject exit");
  }
  JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStart
  (JNIEnv * jenv, jclass, jlong thiz)
  {
+ LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStart enter");
  try
  {
  ((DetectionBasedTracker*)thiz)->run();
@@ -96,11 +104,13 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSt
  jclass je = jenv->FindClass("java/lang/Exception");
  jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
  }
+ LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStart exit");
  }
  JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStop
  (JNIEnv * jenv, jclass, jlong thiz)
  {
+ LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStop enter");
  try
  {
  ((DetectionBasedTracker*)thiz)->stop();
@@ -119,11 +129,13 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSt
  jclass je = jenv->FindClass("java/lang/Exception");
  jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
  }
+ LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeStop exit");
  }
  JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSetFaceSize
  (JNIEnv * jenv, jclass, jlong thiz, jint faceSize)
  {
+ LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSetFaceSize enter");
  try
  {
  if (faceSize > 0)
@@ -133,7 +145,6 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSe
  DetectorParams.minObjectSize = faceSize;
  ((DetectionBasedTracker*)thiz)->setParameters(DetectorParams);
  }
- }
  catch(cv::Exception e)
  {
@@ -149,12 +160,14 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSe
  jclass je = jenv->FindClass("java/lang/Exception");
  jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
  }
+ LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSetFaceSize exit");
  }
  JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDetect
  (JNIEnv * jenv, jclass, jlong thiz, jlong imageGray, jlong faces)
  {
+ LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDetect enter");
  try
  {
  vector<Rect> RectFaces;
@@ -176,4 +189,5 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDe
  jclass je = jenv->FindClass("java/lang/Exception");
  jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
  }
+ LOGD("Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDetect exit");
  }

==== changed file ====

@@ -5,33 +5,27 @@ import org.opencv.core.MatOfRect;
  public class DetectionBasedTracker
  {
- public DetectionBasedTracker(String cascadeName, int minFaceSize)
- {
+ public DetectionBasedTracker(String cascadeName, int minFaceSize) {
  mNativeObj = nativeCreateObject(cascadeName, minFaceSize);
  }
- public void start()
- {
+ public void start() {
  nativeStart(mNativeObj);
  }
- public void stop()
- {
+ public void stop() {
  nativeStop(mNativeObj);
  }
- public void setMinFaceSize(int size)
- {
+ public void setMinFaceSize(int size) {
  nativeSetFaceSize(mNativeObj, size);
  }
- public void detect(Mat imageGray, MatOfRect faces)
- {
+ public void detect(Mat imageGray, MatOfRect faces) {
  nativeDetect(mNativeObj, imageGray.getNativeObjAddr(), faces.getNativeObjAddr());
  }
- public void release()
- {
+ public void release() {
  nativeDestroyObject(mNativeObj);
  mNativeObj = 0;
  }
@@ -45,8 +39,7 @@ public class DetectionBasedTracker
  private static native void nativeSetFaceSize(long thiz, int size);
  private static native void nativeDetect(long thiz, long inputImage, long faces);
- static
- {
+ static {
  System.loadLibrary("detection_based_tracker");
  }
  }
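
For reference, the face-detection view ends up driving this wrapper roughly as sketched below; the cascade path is a placeholder (the real sample extracts its cascade from the app resources first), and detect() is normally called once per grabbed gray frame after start():

    import org.opencv.core.Mat;
    import org.opencv.core.MatOfRect;
    import org.opencv.core.Rect;

    class NativeDetectorUsageSketch {
        // "/path/to/cascade.xml" is a placeholder for the extracted cascade file.
        private final DetectionBasedTracker mTracker =
                new DetectionBasedTracker("/path/to/cascade.xml", 0);

        void start() {
            mTracker.start();                 // spins up the native detection thread
        }

        void onFrame(Mat gray) {
            MatOfRect faces = new MatOfRect();
            mTracker.detect(gray, faces);     // called once per gray camera frame
            for (Rect r : faces.toArray()) {
                // draw or report each detected face rectangle r
            }
        }

        void shutdown() {
            mTracker.stop();
            mTracker.release();               // frees the native object (mNativeObj becomes 0)
        }
    }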

==== changed file ====

@@ -12,16 +12,18 @@ import android.util.Log;
  import android.view.Menu;
  import android.view.MenuItem;
  import android.view.Window;
+ import android.view.WindowManager;
  public class FdActivity extends Activity {
- private static final String TAG = "Sample::Activity";
+ private static final String TAG = "Sample-FD::Activity";
  private MenuItem mItemFace50;
  private MenuItem mItemFace40;
  private MenuItem mItemFace30;
  private MenuItem mItemFace20;
  private MenuItem mItemType;
+ private int mDetectorType = 0;
+ private String[] mDetectorName;
  private FdView mView;
  private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@@ -77,10 +79,6 @@ public class FdActivity extends Activity {
  }
  };
- private int mDetectorType = 0;
- private String[] mDetectorName;
  public FdActivity() {
  Log.i(TAG, "Instantiated new " + this.getClass());
  mDetectorName = new String[2];
@@ -91,26 +89,20 @@ public class FdActivity extends Activity {
  @Override
  protected void onPause() {
  Log.i(TAG, "onPause");
- super.onPause();
  if (mView != null)
  mView.releaseCamera();
+ super.onPause();
  }
  @Override
  protected void onResume() {
  Log.i(TAG, "onResume");
  super.onResume();
- if( mView != null && !mView.openCamera() ) {
- AlertDialog ad = new AlertDialog.Builder(this).create();
- ad.setCancelable(false); // This blocks the 'BACK' button
- ad.setMessage("Fatal error: can't open camera!");
- ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
- public void onClick(DialogInterface dialog, int which) {
- dialog.dismiss();
- finish();
- }
- });
- ad.show();
+ Log.i(TAG, "Trying to load OpenCV library");
+ if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
+ {
+ Log.e(TAG, "Cannot connect to OpenCV Manager");
  }
  }
@@ -120,12 +112,7 @@ public class FdActivity extends Activity {
  Log.i(TAG, "onCreate");
  super.onCreate(savedInstanceState);
  requestWindowFeature(Window.FEATURE_NO_TITLE);
- Log.i(TAG, "Trying to load OpenCV library");
- if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
- {
- Log.e(TAG, "Cannot connect to OpenCV Manager");
- }
+ getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
  }
  @Override
@@ -136,7 +123,6 @@ public class FdActivity extends Activity {
  mItemFace30 = menu.add("Face size 30%");
  mItemFace20 = menu.add("Face size 20%");
  mItemType = menu.add(mDetectorName[mDetectorType]);
  return true;
  }

==== changed file ====

@@ -22,7 +22,7 @@ import android.util.Log;
  import android.view.SurfaceHolder;
  class FdView extends SampleCvViewBase {
- private static final String TAG = "Sample::FdView";
+ private static final String TAG = "Sample-FD::View";
  private Mat mRgba;
  private Mat mGray;
  private File mCascadeFile;
@@ -39,25 +39,19 @@ class FdView extends SampleCvViewBase {
  private float mRelativeFaceSize = 0;
  private int mAbsoluteFaceSize = 0;
- public void setMinFaceSize(float faceSize)
- {
+ public void setMinFaceSize(float faceSize) {
  mRelativeFaceSize = faceSize;
  mAbsoluteFaceSize = 0;
  }
- public void setDetectorType(int type)
- {
- if (mDetectorType != type)
- {
+ public void setDetectorType(int type) {
+ if (mDetectorType != type) {
  mDetectorType = type;
- if (type == NATIVE_DETECTOR)
- {
+ if (type == NATIVE_DETECTOR) {
  Log.i(TAG, "Detection Based Tracker enabled");
  mNativeDetector.start();
- }
- else
- {
+ } else {
  Log.i(TAG, "Cascade detector enabled");
  mNativeDetector.stop();
  }
@@ -114,11 +108,9 @@ class FdView extends SampleCvViewBase {
  capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
  capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
- if (mAbsoluteFaceSize == 0)
- {
+ if (mAbsoluteFaceSize == 0) {
  int height = mGray.rows();
- if (Math.round(height * mRelativeFaceSize) > 0);
- {
+ if (Math.round(height * mRelativeFaceSize) > 0) {
  mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
  }
  mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
@@ -126,19 +118,16 @@ class FdView extends SampleCvViewBase {
  MatOfRect faces = new MatOfRect();
- if (mDetectorType == JAVA_DETECTOR)
- {
+ if (mDetectorType == JAVA_DETECTOR) {
  if (mJavaDetector != null)
- mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2 // TODO: objdetect.CV_HAAR_SCALE_IMAGE
- , new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
+ mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
+ new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
  }
- else if (mDetectorType == NATIVE_DETECTOR)
- {
+ else if (mDetectorType == NATIVE_DETECTOR) {
  if (mNativeDetector != null)
  mNativeDetector.detect(mGray, faces);
  }
- else
- {
+ else {
  Log.e(TAG, "Detection method is not selected!");
  }
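
One of the FdView edits above is a real bug fix rather than brace reformatting: the old guard "if (Math.round(height * mRelativeFaceSize) > 0);" ended in a stray semicolon, so the condition was a no-op and the block below it always ran. A small standalone sketch of the corrected logic:

    class FaceSizeGuardSketch {
        private float mRelativeFaceSize = 0.2f;  // e.g. 20% of the frame height
        private int mAbsoluteFaceSize = 0;

        void updateAbsoluteFaceSize(int frameHeight) {
            if (mAbsoluteFaceSize == 0) {
                // With the old stray ';' the assignment block was unconditional;
                // attaching the braces makes the > 0 check actually guard it.
                if (Math.round(frameHeight * mRelativeFaceSize) > 0) {
                    mAbsoluteFaceSize = Math.round(frameHeight * mRelativeFaceSize);
                }
            }
        }
    }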

==== changed file ====

@@ -34,9 +34,8 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
  releaseCamera();
  mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
  if (!mCamera.isOpened()) {
- mCamera.release();
- mCamera = null;
  Log.e(TAG, "Failed to open native camera");
+ releaseCamera();
  return false;
  }
@@ -93,6 +92,7 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
  public void surfaceDestroyed(SurfaceHolder holder) {
  Log.i(TAG, "surfaceDestroyed");
  releaseCamera();
+ Log.i(TAG, "surfaceDestroyed2");
  }
  protected abstract Bitmap processFrame(VideoCapture capture);

==== changed file ====

@@ -12,6 +12,7 @@ import android.util.Log;
  import android.view.Menu;
  import android.view.MenuItem;
  import android.view.Window;
+ import android.view.WindowManager;
  public class ImageManipulationsActivity extends Activity {
@@ -93,26 +94,20 @@ public class ImageManipulationsActivity extends Activity {
  @Override
  protected void onPause() {
  Log.i(TAG, "onPause");
- super.onPause();
  if (null != mView)
  mView.releaseCamera();
+ super.onPause();
  }
  @Override
  protected void onResume() {
  Log.i(TAG, "onResume");
  super.onResume();
- if( (null != mView) && !mView.openCamera() ) {
- AlertDialog ad = new AlertDialog.Builder(this).create();
- ad.setCancelable(false); // This blocks the 'BACK' button
- ad.setMessage("Fatal error: can't open camera!");
- ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
- public void onClick(DialogInterface dialog, int which) {
- dialog.dismiss();
- finish();
- }
- });
- ad.show();
+ Log.i(TAG, "Trying to load OpenCV library");
+ if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
+ {
+ Log.e(TAG, "Cannot connect to OpenCV Manager");
  }
  }
@@ -122,12 +117,7 @@ public class ImageManipulationsActivity extends Activity {
  Log.i(TAG, "onCreate");
  super.onCreate(savedInstanceState);
  requestWindowFeature(Window.FEATURE_NO_TITLE);
- Log.i(TAG, "Trying to load OpenCV library");
- if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
- {
- Log.e(TAG, "Cannot connect to OpenCV Manager");
- }
+ getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
  }
  @Override

==== changed file ====

@@ -34,8 +34,7 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
  releaseCamera();
  mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
  if (!mCamera.isOpened()) {
- mCamera.release();
- mCamera = null;
+ releaseCamera();
  Log.e(TAG, "Failed to open native camera");
  return false;
  }

==== changed file ====

@@ -35,7 +35,7 @@ class Sample0View extends SampleViewBase {
  rgba[i] = 0xff000000 + (y << 16) + (y << 8) + y;
  }
  } else if (view_mode == VIEW_MODE_RGBA) {
- for (int i = 0; i < getFrameHeight(); i++)
+ for (int i = 0; i < getFrameHeight(); i++) {
  for (int j = 0; j < getFrameWidth(); j++) {
  int index = i * getFrameWidth() + j;
  int supply_index = frameSize + (i >> 1) * getFrameWidth() + (j & ~1);
@@ -56,6 +56,7 @@ class Sample0View extends SampleViewBase {
  rgba[i * getFrameWidth() + j] = 0xff000000 + (b << 16) + (g << 8) + r;
  }
  }
+ }
  mBitmap.setPixels(rgba, 0/* offset */, getFrameWidth() /* stride */, 0, 0, getFrameWidth(), getFrameHeight());
  return mBitmap;

==== changed file ====

@@ -25,6 +25,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
  private byte[] mFrame;
  private boolean mThreadRun;
  private byte[] mBuffer;
+ private SurfaceTexture mSf;
  public SampleViewBase(Context context) {
@@ -43,8 +44,10 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
  }
  public void setPreview() throws IOException {
- if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB)
- mCamera.setPreviewTexture( new SurfaceTexture(10) );
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
+ mSf = new SurfaceTexture(10);
+ mCamera.setPreviewTexture( mSf );
+ }
  else
  mCamera.setPreviewDisplay(null);
  }
@@ -52,7 +55,6 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
  public boolean openCamera() {
  Log.i(TAG, "openCamera");
- releaseCamera();
  mCamera = Camera.open();
  if(mCamera == null) {
  Log.e(TAG, "Can't open camera!");
@@ -125,15 +127,15 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
  mFrame = new byte [size];
  mCamera.addCallbackBuffer(mBuffer);
- /* Notify that the preview is about to be started and deliver preview size */
- onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
  try {
  setPreview();
  } catch (IOException e) {
  Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
  }
+ /* Notify that the preview is about to be started and deliver preview size */
+ onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
  /* Now we can start a preview */
  mCamera.startPreview();
  }
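
The SampleViewBase changes keep the preview SurfaceTexture in a field (mSf) instead of handing a temporary object to setPreviewTexture(), and they notify the subclass of the preview size only after the preview target has been set. A rough sketch of just that part, assuming mCamera has already been opened elsewhere and onPreviewStarted() is the subclass hook used by the samples:

    import java.io.IOException;

    import android.graphics.SurfaceTexture;
    import android.hardware.Camera;
    import android.os.Build;
    import android.util.Log;

    class PreviewSetupSketch {
        private static final String TAG = "Sample::PreviewSketch";

        private Camera mCamera;         // assumed to be opened via Camera.open() elsewhere
        private SurfaceTexture mSf;     // field, so the texture is not collected while previewing

        public void setPreview() throws IOException {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
                mSf = new SurfaceTexture(10);     // dummy texture name, as in the samples
                mCamera.setPreviewTexture(mSf);
            } else {
                mCamera.setPreviewDisplay(null);
            }
        }

        public void startPreview(int previewWidth, int previewHeight) {
            try {
                setPreview();
            } catch (IOException e) {
                Log.e(TAG, "setPreviewDisplay/setPreviewTexture fails: " + e);
            }
            // Notify listeners of the preview size only after the preview target is set.
            onPreviewStarted(previewWidth, previewHeight);
            mCamera.startPreview();
        }

        protected void onPreviewStarted(int previewWidth, int previewHeight) {
            // Subclass hook in the real SampleViewBase; left empty in this sketch.
        }
    }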

==== changed file ====

@@ -12,6 +12,7 @@ import android.util.Log;
  import android.view.Menu;
  import android.view.MenuItem;
  import android.view.Window;
+ import android.view.WindowManager;
  public class Sample1Java extends Activity {
  private static final String TAG = "Sample::Activity";
@@ -45,6 +46,7 @@ public class Sample1Java extends Activity {
  ad.show();
  }
  } break;
  /** OpenCV loader cannot start Google Play **/
  case LoaderCallbackInterface.MARKET_ERROR:
  {
@@ -75,26 +77,20 @@ public class Sample1Java extends Activity {
  @Override
  protected void onPause() {
  Log.i(TAG, "onPause");
- super.onPause();
  if (null != mView)
  mView.releaseCamera();
+ super.onPause();
  }
  @Override
  protected void onResume() {
  Log.i(TAG, "onResume");
  super.onResume();
- if( (null != mView) && !mView.openCamera() ) {
- AlertDialog ad = new AlertDialog.Builder(this).create();
- ad.setCancelable(false); // This blocks the 'BACK' button
- ad.setMessage("Fatal error: can't open camera!");
- ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
- public void onClick(DialogInterface dialog, int which) {
- dialog.dismiss();
- finish();
- }
- });
- ad.show();
+ Log.i(TAG, "Trying to load OpenCV library");
+ if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
+ {
+ Log.e(TAG, "Cannot connect to OpenCV Manager");
  }
  }
@@ -104,12 +100,7 @@ public class Sample1Java extends Activity {
  Log.i(TAG, "onCreate");
  super.onCreate(savedInstanceState);
  requestWindowFeature(Window.FEATURE_NO_TITLE);
- Log.i(TAG, "Trying to load OpenCV library");
- if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
- {
- Log.e(TAG, "Cannot connect to OpenCV Manager");
- }
+ getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
  }
  @Override

==== changed file ====

@@ -25,6 +25,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
  private byte[] mFrame;
  private boolean mThreadRun;
  private byte[] mBuffer;
+ private SurfaceTexture mSf;
  public SampleViewBase(Context context) {
@@ -43,8 +44,10 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
  }
  public void setPreview() throws IOException {
- if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB)
- mCamera.setPreviewTexture( new SurfaceTexture(10) );
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
+ mSf = new SurfaceTexture(10);
+ mCamera.setPreviewTexture( mSf );
+ }
  else
  mCamera.setPreviewDisplay(null);
  }
@@ -124,15 +127,15 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
  mFrame = new byte [size];
  mCamera.addCallbackBuffer(mBuffer);
- /* Notify that the preview is about to be started and deliver preview size */
- onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
  try {
  setPreview();
  } catch (IOException e) {
  Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
  }
+ /* Notify that the preview is about to be started and deliver preview size */
+ onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
  /* Now we can start a preview */
  mCamera.startPreview();
  }

==== changed file ====

@@ -12,6 +12,7 @@ import android.util.Log;
  import android.view.Menu;
  import android.view.MenuItem;
  import android.view.Window;
+ import android.view.WindowManager;
  public class Sample2NativeCamera extends Activity {
  private static final String TAG = "Sample::Activity";
@@ -82,26 +83,20 @@ public class Sample2NativeCamera extends Activity {
  @Override
  protected void onPause() {
  Log.i(TAG, "onPause");
- super.onPause();
  if (null != mView)
  mView.releaseCamera();
+ super.onPause();
  }
  @Override
  protected void onResume() {
  Log.i(TAG, "onResume");
  super.onResume();
- if((null != mView) && !mView.openCamera() ) {
- AlertDialog ad = new AlertDialog.Builder(this).create();
- ad.setCancelable(false); // This blocks the 'BACK' button
- ad.setMessage("Fatal error: can't open camera!");
- ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
- public void onClick(DialogInterface dialog, int which) {
- dialog.dismiss();
- finish();
- }
- });
- ad.show();
+ Log.i(TAG, "Trying to load OpenCV library");
+ if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
+ {
+ Log.e(TAG, "Cannot connect to OpenCV Manager");
  }
  }
@@ -111,11 +106,7 @@ public class Sample2NativeCamera extends Activity {
  Log.i(TAG, "onCreate");
  super.onCreate(savedInstanceState);
  requestWindowFeature(Window.FEATURE_NO_TITLE);
- Log.i(TAG, "Trying to load OpenCV library");
- if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
- {
- Log.e(TAG, "Cannot connect to OpenCV Manager");
- }
+ getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
  }
  @Override

==== changed file ====

@@ -32,8 +32,7 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
  releaseCamera();
  mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
  if (!mCamera.isOpened()) {
- mCamera.release();
- mCamera = null;
+ releaseCamera();
  Log.e(TAG, "Failed to open native camera");
  return false;
  }

==== changed file ====

@@ -10,6 +10,7 @@ import android.content.DialogInterface;
  import android.os.Bundle;
  import android.util.Log;
  import android.view.Window;
+ import android.view.WindowManager;
  public class Sample3Native extends Activity {
  private static final String TAG = "Sample::Activity";
@@ -73,26 +74,20 @@ public class Sample3Native extends Activity {
  @Override
  protected void onPause() {
  Log.i(TAG, "onPause");
- super.onPause();
  if (null != mView)
  mView.releaseCamera();
+ super.onPause();
  }
  @Override
  protected void onResume() {
  Log.i(TAG, "onResume");
  super.onResume();
- if((null != mView) && !mView.openCamera() ) {
- AlertDialog ad = new AlertDialog.Builder(this).create();
- ad.setCancelable(false); // This blocks the 'BACK' button
- ad.setMessage("Fatal error: can't open camera!");
- ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
- public void onClick(DialogInterface dialog, int which) {
- dialog.dismiss();
- finish();
- }
- });
- ad.show();
+ Log.i(TAG, "Trying to load OpenCV library");
+ if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
+ {
+ Log.e(TAG, "Cannot connect to OpenCV Manager");
  }
  }
@@ -102,10 +97,6 @@ public class Sample3Native extends Activity {
  Log.i(TAG, "onCreate");
  super.onCreate(savedInstanceState);
  requestWindowFeature(Window.FEATURE_NO_TITLE);
- if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
- {
- Log.e(TAG, "Cannot connect to OpenCV Manager");
- }
+ getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
  }
  }

==== changed file ====

@@ -24,6 +24,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
  private byte[] mFrame;
  private boolean mThreadRun;
  private byte[] mBuffer;
+ private SurfaceTexture mSf;
  public SampleViewBase(Context context) {
@@ -42,8 +43,10 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
  }
  public void setPreview() throws IOException {
- if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB)
- mCamera.setPreviewTexture( new SurfaceTexture(10) );
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
+ mSf = new SurfaceTexture(10);
+ mCamera.setPreviewTexture( mSf );
+ }
  else
  mCamera.setPreviewDisplay(null);
  }
@@ -123,15 +126,15 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
  mFrame = new byte [size];
  mCamera.addCallbackBuffer(mBuffer);
- /* Notify that the preview is about to be started and deliver preview size */
- onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
  try {
  setPreview();
  } catch (IOException e) {
  Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
  }
+ /* Notify that the preview is about to be started and deliver preview size */
+ onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
  /* Now we can start a preview */
  mCamera.startPreview();
  }

==== changed file ====

@@ -12,6 +12,7 @@ import android.util.Log;
  import android.view.Menu;
  import android.view.MenuItem;
  import android.view.Window;
+ import android.view.WindowManager;
  public class Sample4Mixed extends Activity {
  private static final String TAG = "Sample::Activity";
@@ -81,26 +82,20 @@ public class Sample4Mixed extends Activity {
  @Override
  protected void onPause() {
  Log.i(TAG, "onPause");
- super.onPause();
  if (null != mView)
  mView.releaseCamera();
+ super.onPause();
  }
  @Override
  protected void onResume() {
  Log.i(TAG, "onResume");
  super.onResume();
- if((null != mView) && !mView.openCamera() ) {
- AlertDialog ad = new AlertDialog.Builder(this).create();
- ad.setCancelable(false); // This blocks the 'BACK' button
- ad.setMessage("Fatal error: can't open camera!");
- ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
- public void onClick(DialogInterface dialog, int which) {
- dialog.dismiss();
- finish();
- }
- });
- ad.show();
+ Log.i(TAG, "Trying to load OpenCV library");
+ if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
+ {
+ Log.e(TAG, "Cannot connect to OpenCV Manager");
  }
  }
@@ -109,14 +104,8 @@ public class Sample4Mixed extends Activity {
  public void onCreate(Bundle savedInstanceState) {
  super.onCreate(savedInstanceState);
  Log.i(TAG, "onCreate");
  requestWindowFeature(Window.FEATURE_NO_TITLE);
- Log.i(TAG, "Trying to load OpenCV library");
- if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
- {
- Log.e(TAG, "Cannot connect to OpenCV Manager");
- }
+ getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
  }
  public boolean onCreateOptionsMenu(Menu menu) {

==== changed file ====

@@ -24,6 +24,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
  private byte[] mFrame;
  private boolean mThreadRun;
  private byte[] mBuffer;
+ private SurfaceTexture mSf;
  public SampleViewBase(Context context) {
@@ -42,8 +43,10 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
  }
  public void setPreview() throws IOException {
- if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB)
- mCamera.setPreviewTexture( new SurfaceTexture(10) );
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
+ mSf = new SurfaceTexture(10);
+ mCamera.setPreviewTexture( mSf );
+ }
  else
  mCamera.setPreviewDisplay(null);
  }
@@ -123,15 +126,15 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
  mFrame = new byte [size];
  mCamera.addCallbackBuffer(mBuffer);
- /* Notify that the preview is about to be started and deliver preview size */
- onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
  try {
  setPreview();
  } catch (IOException e) {
  Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
  }
+ /* Notify that the preview is about to be started and deliver preview size */
+ onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
  /* Now we can start a preview */
  mCamera.startPreview();
  }