Merge 2.4.3-rc

Andrey Kamaev
2012-10-24 19:56:27 +04:00
93 changed files with 53456 additions and 51646 deletions

View File

@@ -237,7 +237,7 @@ if(ANDROID)
set(lib_target_files ${ANDROID_LIB_PROJECT_FILES})
ocv_list_add_prefix(lib_target_files "${OpenCV_BINARY_DIR}/")
android_get_compatible_target(lib_target_sdk_target ${ANDROID_NATIVE_API_LEVEL} ${ANDROID_SDK_TARGET})
android_get_compatible_target(lib_target_sdk_target ${ANDROID_NATIVE_API_LEVEL} ${ANDROID_SDK_TARGET} 11)
configure_file("${CMAKE_CURRENT_SOURCE_DIR}/android_lib/${ANDROID_MANIFEST_FILE}" "${CMAKE_CURRENT_BINARY_DIR}/${ANDROID_MANIFEST_FILE}")

View File

@@ -118,7 +118,7 @@ class AsyncServiceHelper
}
else
{
Log.d(TAG, "Wating current installation process");
Log.d(TAG, "Waiting current installation process");
InstallCallbackInterface WaitQuery = new InstallCallbackInterface() {
private LoaderCallbackInterface mUserAppCallback = Callback;
public String getPackageName()
@@ -268,17 +268,21 @@ class AsyncServiceHelper
{
Log.d(TAG, "OpenCV package was not installed!");
mStatus = LoaderCallbackInterface.MARKET_ERROR;
Log.d(TAG, "Init finished with status " + mStatus);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(mStatus);
}
} catch (RemoteException e) {
e.printStackTrace();
mStatus = LoaderCallbackInterface.INIT_FAILED;
Log.d(TAG, "Init finished with status " + mStatus);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(mStatus);
}
Log.d(TAG, "Init finished with status " + mStatus);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(mStatus);
}
};

View File

@@ -28,7 +28,7 @@ public abstract class BaseLoaderCallback implements LoaderCallbackInterface {
/** OpenCV loader can not start Google Play Market. **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not installed! You can get it here");
Log.e(TAG, "Package installation failed!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Package installation failed!");
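
The status codes produced above (SUCCESS, MARKET_ERROR, INIT_FAILED, ...) are ultimately delivered to the application through its LoaderCallbackInterface. A minimal sketch of the typical client wiring with OpenCVLoader.initAsync() and BaseLoaderCallback follows; the activity and tag names are illustrative assumptions, not part of this change set.

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;

import android.app.Activity;
import android.util.Log;

public class SampleActivity extends Activity {
    private static final String TAG = "SampleActivity";

    // Receives the status that AsyncServiceHelper reports via onManagerConnected().
    private final BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            if (status == LoaderCallbackInterface.SUCCESS) {
                Log.i(TAG, "OpenCV loaded successfully");
                // Native-dependent initialization goes here.
            } else {
                // MARKET_ERROR, INIT_FAILED, etc. are handled by the base class (error dialogs).
                super.onManagerConnected(status);
            }
        }
    };

    @Override
    protected void onResume() {
        super.onResume();
        // Starts the asynchronous initialization handled by AsyncServiceHelper above.
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
    }
}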

View File

@@ -0,0 +1,335 @@
package org.opencv.android;
import java.util.List;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
/**
* This is a basic class, implementing the interaction with the Camera and the OpenCV library.
* Its main responsibility is to control when the camera can be enabled, process the frame,
* call an external listener to make any adjustments to the frame, and then draw the resulting
* frame to the screen.
* Clients shall implement CvCameraViewListener.
* TODO: add method to control the format in which the frames will be delivered to CvCameraViewListener
*/
public abstract class CameraBridgeViewBase extends SurfaceView implements SurfaceHolder.Callback {
private static final int MAX_UNSPECIFIED = -1;
protected int mFrameWidth;
protected int mFrameHeight;
protected int mMaxHeight;
protected int mMaxWidth;
protected int mPreviewFormat = Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA;
private Bitmap mCacheBitmap;
public CameraBridgeViewBase(Context context, AttributeSet attrs) {
super(context, attrs);
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
}
public interface CvCameraViewListener {
/**
* This method is invoked when the camera preview has started. After this method is invoked,
* the frames will start to be delivered to the client via the onCameraFrame() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when delivery of a frame needs to be done.
* The returned value is the modified frame that needs to be displayed on the screen.
* TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB, etc.)
*/
public Mat onCameraFrame(Mat inputFrame);
}
private static final int STOPPED = 0;
private static final int STARTED = 1;
private static final String TAG = "CameraBridge";
private CvCameraViewListener mListener;
private int mState = STOPPED;
private boolean mEnabled;
private boolean mSurfaceExist;
private Object mSyncObject = new Object();
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
synchronized(mSyncObject) {
if (!mSurfaceExist) {
mSurfaceExist = true;
checkCurrentState();
} else {
/** Surface changed. We need to stop camera and restart with new parameters */
/* Pretend that old surface has been destroyed */
mSurfaceExist = false;
checkCurrentState();
/* Now use new surface. Say we have it now */
mSurfaceExist = true;
checkCurrentState();
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
/* Do nothing. Wait until surfaceChanged delivered */
}
public void surfaceDestroyed(SurfaceHolder holder) {
synchronized(mSyncObject) {
mSurfaceExist = false;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can enable the camera connection.
* The actual onCameraViewStarted callback will be delivered only after both this method is called and the surface is available.
*/
public void enableView() {
synchronized(mSyncObject) {
mEnabled = true;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can disable the camera connection and stop
* the delivery of frames even though the surface view itself is not destroyed and still stays on the screen.
*/
public void disableView() {
synchronized(mSyncObject) {
mEnabled = false;
checkCurrentState();
}
}
public void setCvCameraViewListener(CvCameraViewListener listener) {
mListener = listener;
}
/**
* This method sets the maximum size that the camera frame is allowed to be. When selecting
* a size, the biggest size which is less than or equal to the set maximum will be selected.
* As an example, if setMaxFrameSize(200,200) is called and the supported sizes are 176x152 and 320x240,
* the 176x152 preview frame size will be selected.
* This method is useful when you need to restrict the size of the preview frame for some reason (for example, for video recording).
* @param maxWidth - the maximum width allowed for the camera frame
* @param maxHeight - the maximum height allowed for the camera frame
*/
public void setMaxFrameSize(int maxWidth, int maxHeight) {
mMaxWidth = maxWidth;
mMaxHeight = maxHeight;
}
public void SetCaptureFormat(int format)
{
mPreviewFormat = format;
}
/**
* Called when mSyncObject lock is held
*/
private void checkCurrentState() {
int targetState;
if (mEnabled && mSurfaceExist) {
targetState = STARTED;
} else {
targetState = STOPPED;
}
if (targetState != mState) {
/* The state change detected. Need to exit the current state and enter target state */
processExitState(mState);
mState = targetState;
processEnterState(mState);
}
}
private void processEnterState(int state) {
switch(state) {
case STARTED:
onEnterStartedState();
if (mListener != null) {
mListener.onCameraViewStarted(mFrameWidth, mFrameHeight);
}
break;
case STOPPED:
onEnterStoppedState();
if (mListener != null) {
mListener.onCameraViewStopped();
}
break;
};
}
private void processExitState(int state) {
switch(state) {
case STARTED:
onExitStartedState();
break;
case STOPPED:
onExitStoppedState();
break;
};
}
private void onEnterStoppedState() {
/* nothing to do */
}
private void onExitStoppedState() {
/* nothing to do */
}
// NOTE: The order of bitmap constructor and camera connection is important for android 4.1.x
// Bitmap must be constructed before surface
private void onEnterStartedState() {
/* Connect camera */
if (!connectCamera(getWidth(), getHeight())) {
AlertDialog ad = new AlertDialog.Builder(getContext()).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("It seems that you device does not support camera (or it is locked). Application will be closed.");
ad.setButton(DialogInterface.BUTTON_NEUTRAL, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
((Activity) getContext()).finish();
}
});
ad.show();
}
}
private void onExitStartedState() {
disconnectCamera();
if (mCacheBitmap != null) {
mCacheBitmap.recycle();
}
}
/**
* This method shall be called by the subclasses when they have a valid frame
* object and want it to be delivered to the external client (via the callback) and
* then displayed on the screen.
* @param frame - the current frame to be delivered
*/
protected void deliverAndDrawFrame(Mat frame) {
Mat modified;
if (mListener != null) {
modified = mListener.onCameraFrame(frame);
} else {
modified = frame;
}
boolean bmpValid = true;
if (modified != null) {
try {
Utils.matToBitmap(modified, mCacheBitmap);
} catch(Exception e) {
Log.e(TAG, "Mat type: " + modified);
Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmpValid = false;
}
}
if (bmpValid && mCacheBitmap != null) {
Canvas canvas = getHolder().lockCanvas();
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
canvas.drawBitmap(mCacheBitmap, (canvas.getWidth() - mCacheBitmap.getWidth()) / 2, (canvas.getHeight() - mCacheBitmap.getHeight()) / 2, null);
getHolder().unlockCanvasAndPost(canvas);
}
}
}
/**
* This method is invoked when the view is started; it shall perform the concrete operations needed to initialize the camera.
* CONTRACT: as a result of this method the variables mFrameWidth and mFrameHeight MUST be
* initialized with the size of the camera frames that will be delivered to the external processor.
* @param width - the width of this SurfaceView
* @param height - the height of this SurfaceView
*/
protected abstract boolean connectCamera(int width, int height);
/**
* Disconnects and releases the particular camera object connected to this surface view.
* Called when syncObject lock is held
*/
protected abstract void disconnectCamera();
// NOTE: On Android 4.1.x this function must be called before the SurfaceTexture constructor!
protected void AllocateCache()
{
mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
}
public interface ListItemAccessor {
public int getWidth(Object obj);
public int getHeight(Object obj);
};
/**
* This helper method can be called by subclasses to select the camera preview size.
* It goes over the list of the supported preview sizes and selects the maximum one which
* fits both the values set via setMaxFrameSize() and the surface frame allocated for this view.
* @param supportedSizes - the list of preview sizes supported by the camera
* @param accessor - accessor used to extract the width and height from a list item
* @param surfaceWidth - the width of the surface this view is drawn on
* @param surfaceHeight - the height of the surface this view is drawn on
* @return the selected frame size
*/
protected Size calculateCameraFrameSize(List<?> supportedSizes, ListItemAccessor accessor, int surfaceWidth, int surfaceHeight) {
int calcWidth = 0;
int calcHeight = 0;
int maxAllowedWidth = (mMaxWidth != MAX_UNSPECIFIED && mMaxWidth < surfaceWidth)? mMaxWidth : surfaceWidth;
int maxAllowedHeight = (mMaxHeight != MAX_UNSPECIFIED && mMaxHeight < surfaceHeight)? mMaxHeight : surfaceHeight;
for (Object size : supportedSizes) {
int width = accessor.getWidth(size);
int height = accessor.getHeight(size);
if (width <= maxAllowedWidth && height <= maxAllowedHeight) {
if (width >= calcWidth && height >= calcHeight) {
calcWidth = (int) width;
calcHeight = (int) height;
}
}
}
return new Size(calcWidth, calcHeight);
}
}
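
A client of this class only needs to implement CvCameraViewListener; the state handling and drawing above take care of the rest. A minimal sketch of such a listener (the class name and the grayscale processing are illustrative assumptions):

import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

// Illustrative listener: converts every delivered RGBA frame to grayscale and returns it for drawing.
public class GrayFrameListener implements CvCameraViewListener {
    private Mat mGray;

    public void onCameraViewStarted(int width, int height) {
        mGray = new Mat(); // allocated once the frame size is known
    }

    public void onCameraViewStopped() {
        if (mGray != null)
            mGray.release(); // free the native buffer when frames stop
    }

    public Mat onCameraFrame(Mat inputFrame) {
        // inputFrame is RGBA by default (CV_CAP_ANDROID_COLOR_FRAME_RGBA).
        Imgproc.cvtColor(inputFrame, mGray, Imgproc.COLOR_RGBA2GRAY);
        return mGray; // whatever is returned here is drawn by deliverAndDrawFrame()
    }
}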

View File

@@ -0,0 +1,242 @@
package org.opencv.android;
import java.io.IOException;
import java.util.List;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.os.Build;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;
/**
* This class is an implementation of the Bridge View between OpenCV and the Java Camera.
* This class relies on the functionality available in the base class and only implements
* the required functions:
* connectCamera - opens the Java camera and sets the PreviewCallback to be delivered;
* disconnectCamera - closes the camera and stops the preview.
* When a frame is delivered via the callback from the Camera, it is processed via OpenCV to be
* converted to RGBA32 and then passed to the external callback for modifications if required.
*/
public class JavaCameraView extends CameraBridgeViewBase implements PreviewCallback {
private static final int MAGIC_TEXTURE_ID = 10;
private static final String TAG = "JavaCameraView";
private Mat mBaseMat;
private byte mBuffer[];
private Thread mThread;
private boolean mStopThread;
public static class JavaCameraSizeAccessor implements ListItemAccessor {
public int getWidth(Object obj) {
Camera.Size size = (Camera.Size) obj;
return size.width;
}
public int getHeight(Object obj) {
Camera.Size size = (Camera.Size) obj;
return size.height;
}
}
private Camera mCamera;
public JavaCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
Log.d(TAG, "Java camera view ctor");
}
@TargetApi(11)
protected boolean initializeCamera(int width, int height) {
Log.d(TAG, "Initialize java camera");
synchronized (this) {
mCamera = null;
Log.d(TAG, "Trying to open camera with old open()");
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
boolean connected = false;
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(camIdx) + ")");
try {
mCamera = Camera.open(camIdx);
connected = true;
} catch (RuntimeException e) {
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
if (connected) break;
}
}
if (mCamera == null)
return false;
/* Now set camera parameters */
try {
Camera.Parameters params = mCamera.getParameters();
Log.d(TAG, "getSupportedPreviewSizes()");
List<android.hardware.Camera.Size> sizes = params.getSupportedPreviewSizes();
/* Select the size that fits surface considering maximum size allowed */
Size frameSize = calculateCameraFrameSize(sizes, new JavaCameraSizeAccessor(), width, height);
params.setPreviewFormat(ImageFormat.NV21);
params.setPreviewSize((int)frameSize.width, (int)frameSize.height);
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
params = mCamera.getParameters();
mFrameWidth = params.getPreviewSize().width;
mFrameHeight = params.getPreviewSize().height;
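// NV21 (set above) uses 12 bits per pixel: a full-resolution Y plane followed by an
// interleaved VU plane at half height. That is why the callback buffer below is sized
// via getBitsPerPixel()/8 and mBaseMat gets mFrameHeight + mFrameHeight/2 rows.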
int size = mFrameWidth * mFrameHeight;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
mCamera.addCallbackBuffer(mBuffer);
mCamera.setPreviewCallbackWithBuffer(this);
mBaseMat = new Mat(mFrameHeight + (mFrameHeight/2), mFrameWidth, CvType.CV_8UC1);
AllocateCache();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
SurfaceTexture tex = new SurfaceTexture(MAGIC_TEXTURE_ID);
getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewTexture(tex);
} else
mCamera.setPreviewDisplay(null);
} catch (IOException e) {
e.printStackTrace();
}
/* Finally we are ready to start the preview */
Log.d(TAG, "startPreview");
mCamera.startPreview();
}
return true;
}
protected void releaseCamera() {
synchronized (this) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
@Override
protected boolean connectCamera(int width, int height) {
/* 1. We need to instantiate camera
* 2. We need to start thread which will be getting frames
*/
/* First step - initialize camera connection */
Log.d(TAG, "Connecting to camera");
if (!initializeCamera(getWidth(), getHeight()))
return false;
/* now we can start update thread */
Log.d(TAG, "Starting processing thread");
mStopThread = false;
mThread = new Thread(new CameraWorker());
mThread.start();
return true;
}
protected void disconnectCamera() {
/* 1. We need to stop the thread which is updating the frames
* 2. Stop camera and release it
*/
Log.d(TAG, "Disconnecting from camera");
try {
mStopThread = true;
Log.d(TAG, "Notify thread");
synchronized (this) {
this.notify();
}
Log.d(TAG, "Wating for thread");
mThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
mThread = null;
}
/* Now release camera */
releaseCamera();
}
public void onPreviewFrame(byte[] frame, Camera arg1) {
Log.i(TAG, "Preview Frame received. Need to create MAT and deliver it to clients");
Log.i(TAG, "Frame size is " + frame.length);
synchronized (this)
{
mBaseMat.put(0, 0, frame);
this.notify();
}
if (mCamera != null)
mCamera.addCallbackBuffer(mBuffer);
}
private class CameraWorker implements Runnable {
public void run() {
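// Block until onPreviewFrame() has copied a new NV21 frame into mBaseMat and called notify(),
// then convert it to the requested output format and hand it to deliverAndDrawFrame().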
do {
synchronized (JavaCameraView.this) {
try {
JavaCameraView.this.wait();
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
if (!mStopThread) {
Mat frameMat = new Mat();
switch (mPreviewFormat) {
case Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA:
Imgproc.cvtColor(mBaseMat, frameMat, Imgproc.COLOR_YUV2RGBA_NV21, 4);
break;
case Highgui.CV_CAP_ANDROID_GREY_FRAME:
frameMat = mBaseMat.submat(0, mFrameHeight, 0, mFrameWidth);
break;
default:
Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!");
};
deliverAndDrawFrame(frameMat);
frameMat.release();
}
} while (!mStopThread);
Log.d(TAG, "Finish processing thread");
}
}
}
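
A hedged sketch of how JavaCameraView is typically driven from an activity lifecycle; the layout resource, view id and listener class are assumptions for illustration, not part of this commit:

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;

import android.app.Activity;
import android.os.Bundle;

public class CameraActivity extends Activity {
    private CameraBridgeViewBase mCameraView;

    private final BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            if (status == LoaderCallbackInterface.SUCCESS) {
                mCameraView.enableView(); // start the preview only after the native library is loaded
            } else {
                super.onManagerConnected(status);
            }
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.camera_layout);                              // assumed layout containing a JavaCameraView
        mCameraView = (CameraBridgeViewBase) findViewById(R.id.camera_view); // assumed view id
        mCameraView.setCvCameraViewListener(new GrayFrameListener());        // listener sketched earlier
    }

    @Override
    protected void onResume() {
        super.onResume();
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
    }

    @Override
    protected void onPause() {
        if (mCameraView != null)
            mCameraView.disableView(); // stops the worker thread and releases the camera
        super.onPause();
    }
}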

View File

@@ -0,0 +1,145 @@
package org.opencv.android;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;
import android.content.Context;
import android.util.AttributeSet;
import android.util.Log;
/**
* This class is an implementation of a bridge between SurfaceView and the native OpenCV camera.
* Due to the big amount of work done by the base class, this child is only responsible
* for creating the camera, destroying the camera, and delivering frames while the camera is enabled.
*/
public class NativeCameraView extends CameraBridgeViewBase {
public static final String TAG = "NativeCameraView";
private boolean mStopThread;
private Thread mThread;
private VideoCapture mCamera;
public NativeCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
}
@Override
protected boolean connectCamera(int width, int height) {
/* 1. We need to instantiate camera
* 2. We need to start thread which will be getting frames
*/
/* First step - initialize camera connection */
if (!initializeCamera(getWidth(), getHeight()))
return false;
/* now we can start update thread */
mThread = new Thread(new CameraWorker());
mThread.start();
return true;
}
@Override
protected void disconnectCamera() {
/* 1. We need to stop the thread which is updating the frames
* 2. Stop camera and release it
*/
try {
mStopThread = true;
mThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
mThread = null;
mStopThread = false;
}
/* Now release camera */
releaseCamera();
}
public static class OpenCvSizeAccessor implements ListItemAccessor {
public int getWidth(Object obj) {
Size size = (Size)obj;
return (int)size.width;
}
public int getHeight(Object obj) {
Size size = (Size)obj;
return (int)size.height;
}
}
private boolean initializeCamera(int width, int height) {
synchronized (this) {
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
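// Note: the constructor itself never returns null; mCamera.isOpened() is the more reliable
// check that the native capture actually opened.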
if (mCamera == null)
return false;
//TODO: improve error handling
java.util.List<Size> sizes = mCamera.getSupportedPreviewSizes();
/* Select the size that fits surface considering maximum size allowed */
Size frameSize = calculateCameraFrameSize(sizes, new OpenCvSizeAccessor(), width, height);
mFrameWidth = (int)frameSize.width;
mFrameHeight = (int)frameSize.height;
AllocateCache();
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, frameSize.width);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, frameSize.height);
}
Log.i(TAG, "Selected camera frame size = (" + mFrameWidth + ", " + mFrameHeight + ")");
return true;
}
private void releaseCamera() {
synchronized (this) {
if (mCamera != null) {
mCamera.release();
}
}
}
private class CameraWorker implements Runnable {
private Mat mRgba = new Mat();
private Mat mGray = new Mat();
public void run() {
do {
if (!mCamera.grab()) {
Log.e(TAG, "Camera frame grab failed");
break;
}
switch (mPreviewFormat) {
case Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA:
{
mCamera.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
deliverAndDrawFrame(mRgba);
} break;
case Highgui.CV_CAP_ANDROID_GREY_FRAME:
mCamera.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
deliverAndDrawFrame(mGray);
break;
default:
Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!");
}
} while (!mStopThread);
}
}
}
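
Because JavaCameraView and NativeCameraView both extend CameraBridgeViewBase, a client can switch implementations without touching its listener code. A small hedged sketch of choosing one at run time (the factory class is illustrative; passing a null AttributeSet relies on the SurfaceView constructor accepting it):

import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.JavaCameraView;
import org.opencv.android.NativeCameraView;

import android.content.Context;

public final class CameraViewFactory {
    private CameraViewFactory() {}

    // useNative selects the VideoCapture-based view; otherwise the android.hardware.Camera-based one.
    public static CameraBridgeViewBase create(Context context, boolean useNative) {
        CameraBridgeViewBase view = useNative
                ? new NativeCameraView(context, null)
                : new JavaCameraView(context, null);
        view.setMaxFrameSize(640, 480); // optional: cap the preview resolution
        return view;
    }
}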

View File

@@ -12,6 +12,11 @@ public class OpenCVLoader
*/
public static final String OPENCV_VERSION_2_4_2 = "2.4.2";
/**
* OpenCV Library version 2.4.3.
*/
public static final String OPENCV_VERSION_2_4_3 = "2.4.3";
/**
* Loads and initializes the OpenCV library from the current application package. Roughly, it's an analog of System.loadLibrary("opencv_java").
* @return Returns true if initialization of OpenCV was successful.
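
The method documented above is the synchronous, package-local loader (OpenCVLoader.initDebug() in the 2.4.x Java API); it is mainly intended for development builds, while release builds normally go through initAsync() and OpenCV Manager. A hedged sketch of the usual static-initializer pattern (class name illustrative):

import org.opencv.android.OpenCVLoader;

import android.util.Log;

public class NativeBridge {
    private static final String TAG = "NativeBridge";

    // Load opencv_java from the application package at class-load time (development/debug use).
    static {
        if (!OpenCVLoader.initDebug()) {
            Log.e(TAG, "Static OpenCV initialization failed");
        } else {
            Log.d(TAG, "OpenCV loaded from the application package");
        }
    }
}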