Fix Android samples for devices having front camera only (Nexus 7)
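The device named in the title, the Nexus 7, exposes only a single front-facing camera, so a sample that assumes the default back-facing native camera can fail when opening the capture. As a rough, illustrative sketch of one way to tolerate such devices (this is not code from the commit below; the CameraOpener class, the openFirstAvailableCamera() method, and the assumption that Highgui.CV_CAP_ANDROID + cameraId selects the native Android camera with that id are mine), a sample could probe a couple of camera indices before giving up:

import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;

public class CameraOpener {
    // Try native camera ids 0 and 1: on a front-camera-only tablet the only
    // camera is id 0, while on a typical phone id 1 is the front camera.
    public static VideoCapture openFirstAvailableCamera() {
        for (int cameraId = 0; cameraId < 2; cameraId++) {
            VideoCapture camera = new VideoCapture(Highgui.CV_CAP_ANDROID + cameraId);
            if (camera.isOpened())
                return camera;    // hand this capture to the frame-processing loop
            camera.release();     // clean up the failed attempt before retrying
        }
        return null;              // no native camera could be opened
    }
}

A base view such as SampleCvViewBase could call openFirstAvailableCamera() from its openCamera() method, keep the returned capture in mCamera, and release it in releaseCamera() as the diff below already does.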
@@ -13,9 +13,9 @@ import android.view.Window;
import android.view.WindowManager;
public class ColorBlobDetectionActivity extends Activity {
private static final String TAG = "OCVSample::Activity";
private static final String TAG = "Sample-ColorBlobDetection::Activity";
private ColorBlobDetectionView mView;
private ColorBlobDetectionView mView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override

@@ -27,6 +27,7 @@ public class ColorBlobDetectionActivity extends Activity {
// Create and set View
mView = new ColorBlobDetectionView(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();

@@ -34,13 +35,14 @@ public class ColorBlobDetectionActivity extends Activity {
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{

@@ -70,7 +72,7 @@ public class ColorBlobDetectionActivity extends Activity {
@Override
protected void onPause() {
Log.i(TAG, "onPause");
Log.i(TAG, "called onPause");
if (null != mView)
mView.releaseCamera();
super.onPause();

@@ -78,12 +80,11 @@ public class ColorBlobDetectionActivity extends Activity {
@Override
protected void onResume() {
Log.i(TAG, "onResume");
Log.i(TAG, "called onResume");
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) {
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}

@@ -91,7 +92,7 @@ public class ColorBlobDetectionActivity extends Activity {
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "onCreate");
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
@@ -22,29 +22,27 @@ import android.view.View;
import android.view.View.OnTouchListener;
public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchListener {
private static final String TAG = "OCVSample::View";
private Mat mRgba;
private boolean mIsColorSelected = false;
private Scalar mBlobColorRgba = new Scalar(255);
private Scalar mBlobColorHsv = new Scalar(255);
private ColorBlobDetector mDetector = new ColorBlobDetector();
private Mat mSpectrum = new Mat();
private static Size SPECTRUM_SIZE = new Size(200, 32);
// Logcat tag
private static final String TAG = "Sample-ColorBlobDetection::View";
private static final Scalar CONTOUR_COLOR = new Scalar(255,0,0,255);
private Mat mRgba;
private boolean mIsColorSelected = false;
private Scalar mBlobColorRgba = new Scalar(255);
private Scalar mBlobColorHsv = new Scalar(255);
private ColorBlobDetector mDetector = new ColorBlobDetector();
private Mat mSpectrum = new Mat();
private static Size SPECTRUM_SIZE = new Size(200, 32);
private static final Scalar CONTOUR_COLOR = new Scalar(255,0,0,255);
public ColorBlobDetectionView(Context context) {
super(context);
setOnTouchListener(this);
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "called surfaceCreated");
synchronized (this) {
// initialize Mat before usage
mRgba = new Mat();

@@ -53,6 +51,14 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
super.surfaceCreated(holder);
}
private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
Mat pointMatRgba = new Mat();
Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
return new Scalar(pointMatRgba.get(0, 0));
}
public boolean onTouch(View v, MotionEvent event) {
int cols = mRgba.cols();
int rows = mRgba.rows();

@@ -110,8 +116,8 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
mDetector.process(mRgba);
List<MatOfPoint> contours = mDetector.getContours();
Log.e(TAG, "Contours count: " + contours.size());
Core.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
Core.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
Mat colorLabel = mRgba.submat(2, 34, 2, 34);
colorLabel.setTo(mBlobColorRgba);

@@ -130,14 +136,6 @@ public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchL
return bmp;
}
private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
Mat pointMatRgba = new Mat();
Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
return new Scalar(pointMatRgba.get(0, 0));
}
@Override
public void run() {
super.run();
@@ -12,6 +12,16 @@ import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
public class ColorBlobDetector {
// Lower and Upper bounds for range checking in HSV color space
private Scalar mLowerBound = new Scalar(0);
private Scalar mUpperBound = new Scalar(0);
// Minimum contour area in percent for contours filtering
private static double mMinContourArea = 0.1;
// Color radius for range checking in HSV color space
private Scalar mColorRadius = new Scalar(25,50,50,0);
private Mat mSpectrum = new Mat();
private List<MatOfPoint> mContours = new ArrayList<MatOfPoint>();
public void setColorRadius(Scalar radius) {
mColorRadius = radius;
}

@@ -94,14 +104,4 @@ public class ColorBlobDetector {
public List<MatOfPoint> getContours() {
return mContours;
}
// Lower and Upper bounds for range checking in HSV color space
private Scalar mLowerBound = new Scalar(0);
private Scalar mUpperBound = new Scalar(0);
// Minimum contour area in percent for contours filtering
private static double mMinContourArea = 0.1;
// Color radius for range checking in HSV color space
private Scalar mColorRadius = new Scalar(25,50,50,0);
private Mat mSpectrum = new Mat();
private List<MatOfPoint> mContours = new ArrayList<MatOfPoint>();
}
@@ -14,7 +14,7 @@ import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "Sample::SurfaceView";
private static final String TAG = "OCVSample::BaseView";
private SurfaceHolder mHolder;
private VideoCapture mCamera;

@@ -26,76 +26,67 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
Log.i(TAG, "Instantiated new " + this.getClass());
}
public boolean openCamera() {
Log.i(TAG, "openCamera");
synchronized (this) {
public synchronized boolean openCamera() {
Log.i(TAG, "Opening Camera");
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
Log.e(TAG, "Failed to open native camera");
return false;
}
Log.e(TAG, "Can't open native camera");
return false;
}
return true;
}
public void releaseCamera() {
Log.i(TAG, "releaseCamera");
synchronized (this) {
if (mCamera != null) {
public synchronized void releaseCamera() {
Log.i(TAG, "Releasing Camera");
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
}
public void setupCamera(int width, int height) {
Log.i(TAG, "setupCamera("+width+", "+height+")");
synchronized (this) {
if (mCamera != null && mCamera.isOpened()) {
List<Size> sizes = mCamera.getSupportedPreviewSizes();
int mFrameWidth = width;
int mFrameHeight = height;
public synchronized void setupCamera(int width, int height) {
if (mCamera != null && mCamera.isOpened()) {
Log.i(TAG, "Setup Camera - " + width + "x" + height);
List<Size> sizes = mCamera.getSupportedPreviewSizes();
int mFrameWidth = width;
int mFrameHeight = height;
// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = (int) size.width;
mFrameHeight = (int) size.height;
minDiff = Math.abs(size.height - height);
}
// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = (int) size.width;
mFrameHeight = (int) size.height;
minDiff = Math.abs(size.height - height);
}
}
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceChanged");
Log.i(TAG, "called surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
Log.i(TAG, "called surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "surfaceDestroyed");
releaseCamera();
Log.i(TAG, "called surfaceDestroyed");
}
protected abstract Bitmap processFrame(VideoCapture capture);
public void run() {
Log.i(TAG, "Starting processing thread");
Log.i(TAG, "Started processing thread");
while (true) {
Bitmap bmp = null;

@@ -121,7 +112,6 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
bmp.recycle();
}
}
Log.i(TAG, "Finishing processing thread");
Log.i(TAG, "Finished processing thread");
}
}