merged all the latest changes from 2.4 to trunk

commit 2fd1e2ea57
parent 020f9a6047
Author: Vadim Pisarevsky
Date:   2012-04-13 21:50:59 +00:00

416 changed files with 12852 additions and 6070 deletions

View File

@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>Tutorial 2 Advanced - 2. Mix Java+Native OpenCV</name>
<name>Tutorial 4 (Advanced) - Mix Java+Native OpenCV</name>
<comment></comment>
<projects>
</projects>

View File

@@ -8,19 +8,14 @@ import android.view.MenuItem;
import android.view.Window;
public class Sample4Mixed extends Activity {
private static final String TAG = "Sample::Activity";
public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_GRAY = 1;
public static final int VIEW_MODE_CANNY = 2;
public static final int VIEW_MODE_FEATURES = 5;
private static final String TAG = "Sample::Activity";
private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewGray;
private MenuItem mItemPreviewCanny;
private MenuItem mItemPreviewFeatures;
private Sample4View mView;
public static int viewMode = VIEW_MODE_RGBA;
public Sample4Mixed() {
Log.i(TAG, "Instantiated new " + this.getClass());
@@ -32,7 +27,8 @@ public class Sample4Mixed extends Activity {
super.onCreate(savedInstanceState);
Log.i(TAG, "onCreate");
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(new Sample4View(this));
mView = new Sample4View(this);
setContentView(mView);
}
public boolean onCreateOptionsMenu(Menu menu) {
@@ -46,14 +42,15 @@ public class Sample4Mixed extends Activity {
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "Menu Item selected " + item);
if (item == mItemPreviewRGBA)
viewMode = VIEW_MODE_RGBA;
else if (item == mItemPreviewGray)
viewMode = VIEW_MODE_GRAY;
else if (item == mItemPreviewCanny)
viewMode = VIEW_MODE_CANNY;
else if (item == mItemPreviewFeatures)
viewMode = VIEW_MODE_FEATURES;
if (item == mItemPreviewRGBA) {
mView.setViewMode(Sample4View.VIEW_MODE_RGBA);
} else if (item == mItemPreviewGray) {
mView.setViewMode(Sample4View.VIEW_MODE_GRAY);
} else if (item == mItemPreviewCanny) {
mView.setViewMode(Sample4View.VIEW_MODE_CANNY);
} else if (item == mItemPreviewFeatures) {
mView.setViewMode(Sample4View.VIEW_MODE_FEATURES);
}
return true;
}
}
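
The hunk above replaces the Activity's public static viewMode field (previously read directly by Sample4View) with an instance-level mode that the menu handler pushes into the view through setViewMode(). A minimal sketch of that pattern in plain Java; the class and method names here are illustrative, not code from the commit:

// The Activity no longer exposes a mutable static field that the view polls;
// it hands the selected mode to the view, which keeps it in a private field.
class ProcessingView {
    public static final int MODE_RGBA = 0;
    public static final int MODE_GRAY = 1;

    // written from the UI thread, read from the processing thread
    private volatile int mMode = MODE_RGBA;

    public void setMode(int mode) { mMode = mode; }
    public int getMode()          { return mMode; }
}

class ModeSwitchDemo {
    public static void main(String[] args) {
        ProcessingView view = new ProcessingView();
        view.setMode(ProcessingView.MODE_GRAY);   // what mView.setViewMode(...) does above
        System.out.println("current mode: " + view.getMode());
    }
}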

View File

@@ -11,87 +11,106 @@ import android.util.Log;
import android.view.SurfaceHolder;
class Sample4View extends SampleViewBase {
public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_GRAY = 1;
public static final int VIEW_MODE_CANNY = 2;
public static final int VIEW_MODE_FEATURES = 5;
private Mat mYuv;
private Mat mRgba;
private Mat mGraySubmat;
private Mat mIntermediateMat;
private int mViewMode;
private Bitmap mBitmap;
public Sample4View(Context context) {
super(context);
}
@Override
protected void onPreviewStared(int previewWidtd, int previewHeight) {
// initialize Mats before usage
mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
@Override
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
super.surfaceChanged(_holder, format, width, height);
mRgba = new Mat();
mIntermediateMat = new Mat();
mBitmap = Bitmap.createBitmap(previewWidtd, previewHeight, Bitmap.Config.ARGB_8888);
}
synchronized (this) {
// initialize Mats before usage
mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
@Override
protected void onPreviewStopped() {
if (mBitmap != null) {
mBitmap.recycle();
mBitmap = null;
}
// Explicitly deallocate Mats
if (mYuv != null)
mYuv.release();
if (mRgba != null)
mRgba.release();
if (mGraySubmat != null)
mGraySubmat.release();
if (mIntermediateMat != null)
mIntermediateMat.release();
mYuv = null;
mRgba = null;
mGraySubmat = null;
mIntermediateMat = null;
}
mRgba = new Mat();
mIntermediateMat = new Mat();
}
}
@Override
protected Bitmap processFrame(byte[] data) {
mYuv.put(0, 0, data);
switch (Sample4Mixed.viewMode) {
case Sample4Mixed.VIEW_MODE_GRAY:
final int viewMode = mViewMode;
switch (viewMode) {
case VIEW_MODE_GRAY:
Imgproc.cvtColor(mGraySubmat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
break;
case Sample4Mixed.VIEW_MODE_RGBA:
case VIEW_MODE_RGBA:
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4);
break;
case Sample4Mixed.VIEW_MODE_CANNY:
case VIEW_MODE_CANNY:
Imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100);
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
break;
case Sample4Mixed.VIEW_MODE_FEATURES:
case VIEW_MODE_FEATURES:
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4);
FindFeatures(mGraySubmat.getNativeObjAddr(), mRgba.getNativeObjAddr());
break;
}
Bitmap bmp = Bitmap.createBitmap(getFrameWidth(), getFrameHeight(), Bitmap.Config.ARGB_8888);
Bitmap bmp = mBitmap;
try {
Utils.matToBitmap(mRgba, bmp);
return bmp;
Utils.matToBitmap(mRgba, bmp);
} catch(Exception e) {
Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle();
return null;
bmp = null;
}
}
@Override
public void run() {
super.run();
synchronized (this) {
// Explicitly deallocate Mats
if (mYuv != null)
mYuv.release();
if (mRgba != null)
mRgba.release();
if (mGraySubmat != null)
mGraySubmat.release();
if (mIntermediateMat != null)
mIntermediateMat.release();
mYuv = null;
mRgba = null;
mGraySubmat = null;
mIntermediateMat = null;
}
return bmp;
}
public native void FindFeatures(long matAddrGr, long matAddrRgba);
static {
System.loadLibrary("opencv_java");
System.loadLibrary("mixed_sample");
}
public void setViewMode(int viewMode) {
mViewMode = viewMode;
}
}
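
Beyond the view-mode change, the Sample4View hunks above move the Mat allocation into onPreviewStared(), release the Mats when the processing thread exits, and reuse a single cached Bitmap instead of calling Bitmap.createBitmap() on every frame. The Mat layout relies on the NV21 preview format: the frame is height * 3/2 rows of single-byte pixels, and the first height rows are the Y plane, so a submat over them is a zero-copy grayscale view. A small sketch of that trick, not from the commit, assuming the OpenCV 2.4 Java API and a loadable opencv_java native library:

import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

class Nv21LayoutDemo {
    static { System.loadLibrary("opencv_java"); } // assumes the OpenCV native library is on java.library.path

    public static void main(String[] args) {
        int width = 640, height = 480;

        // full NV21 buffer: Y plane (height rows) followed by interleaved VU data (height/2 rows)
        Mat yuv  = new Mat(height + height / 2, width, CvType.CV_8UC1);
        Mat gray = yuv.submat(0, height, 0, width);   // shares data with yuv, no copy

        Mat rgba = new Mat();
        Imgproc.cvtColor(yuv, rgba, Imgproc.COLOR_YUV420sp2RGB, 4);  // full color conversion, 4-channel output
        System.out.println("gray: " + gray.rows() + "x" + gray.cols() + ", rgba: " + rgba.size());

        // release native memory explicitly, mirroring onPreviewStopped()/run() in the diff above
        rgba.release();
        gray.release();
        yuv.release();
    }
}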

View File

@@ -6,6 +6,7 @@ import java.util.List;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
@@ -23,6 +24,8 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
private int mFrameHeight;
private byte[] mFrame;
private boolean mThreadRun;
private byte[] mBuffer;
public SampleViewBase(Context context) {
super(context);
@@ -56,7 +59,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
@@ -67,12 +70,34 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer into which the current frame will be copied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStared(params.getPreviewSize().width, params.getPreviewSize().height);
/* Now we can start a preview */
mCamera.startPreview();
}
}
@@ -80,14 +105,17 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
mCamera = Camera.open();
mCamera.setPreviewCallback(new PreviewCallback() {
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
synchronized (SampleViewBase.this) {
mFrame = data;
SampleViewBase.this.notify();
System.arraycopy(data, 0, mFrame, 0, data.length);
SampleViewBase.this.notify();
}
camera.addCallbackBuffer(mBuffer);
}
});
(new Thread(this)).start();
}
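
The two hunks above switch SampleViewBase from setPreviewCallback() to setPreviewCallbackWithBuffer(): the buffer is sized from the preview format's bits per pixel (NV21 is 12 bpp, i.e. width * height * 3 / 2 bytes), allocated once, and handed back to the camera after each frame, so the camera stops allocating a fresh byte[] per frame. A compact sketch of that wiring against the android.hardware.Camera API used by these samples; the helper class is illustrative, not part of the commit:

import android.graphics.ImageFormat;
import android.hardware.Camera;

// Illustrative helper: the callback-with-buffer wiring adopted in the hunks above.
class BufferedPreview {
    static void start(Camera camera) {
        Camera.Parameters params = camera.getParameters();
        Camera.Size preview = params.getPreviewSize();

        // NV21 (the default preview format) is 12 bits per pixel -> width * height * 3 / 2 bytes
        int size = preview.width * preview.height
                 * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
        byte[] buffer = new byte[size];

        camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
            public void onPreviewFrame(byte[] data, Camera cam) {
                // copy or process 'data' here, then return the buffer so the next frame can reuse it
                cam.addCallbackBuffer(data);
            }
        });
        camera.addCallbackBuffer(buffer); // prime the queue; without a buffer no frames are delivered
        camera.startPreview();
    }
}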
@@ -102,10 +130,27 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
mCamera = null;
}
}
onPreviewStopped();
}
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
protected abstract Bitmap processFrame(byte[] data);
/**
* This method is called when the preview process is being started. It is called before the first frame is delivered and before processFrame is called.
* It receives the width and height of the preview frames and can be used to prepare any data needed during frame processing.
* @param previewWidth - the width of the preview frames that will be delivered via processFrame
* @param previewHeight - the height of the preview frames that will be delivered via processFrame
*/
protected abstract void onPreviewStared(int previewWidtd, int previewHeight);
/**
* This method is called when the preview is stopped. By the time it is called, the preview has stopped and all frame processing has completed.
* If the Bitmap object returned via processFrame is cached, this is a good time to recycle it.
* Any other resources used during the preview can be released.
*/
protected abstract void onPreviewStopped();
public void run() {
mThreadRun = true;
Log.i(TAG, "Starting processing thread");
@@ -127,12 +172,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null);
mHolder.unlockCanvasAndPost(canvas);
}
bmp.recycle();
}
}
}
static {
System.loadLibrary("opencv_java");
}
}
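
Taken together, the SampleViewBase hunks define a preview lifecycle for subclasses: onPreviewStared() runs before the first frame, processFrame() runs per frame and returns a Bitmap the subclass owns, and onPreviewStopped() runs after the preview ends. A minimal subclass sketch that honors that contract, assuming the OpenCV 2.4 Java packages and the SampleViewBase API shown in this diff; the class itself is hypothetical:

import android.content.Context;
import android.graphics.Bitmap;

import org.opencv.android.Utils;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

// Hypothetical subclass: converts each preview frame to a grayscale RGBA bitmap.
class GrayPreviewView extends SampleViewBase {
    private Mat mYuv;        // full NV21 frame (height * 3/2 rows)
    private Mat mGraySubmat; // Y plane viewed as a grayscale image, no copy
    private Mat mRgba;       // RGBA output
    private Bitmap mBitmap;  // cached output bitmap, recycled in onPreviewStopped()

    public GrayPreviewView(Context context) {
        super(context);
    }

    @Override
    protected void onPreviewStared(int previewWidth, int previewHeight) {
        // allocate per-preview resources once, before the first frame arrives
        mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
        mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
        mRgba = new Mat();
        mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
    }

    @Override
    protected Bitmap processFrame(byte[] data) {
        mYuv.put(0, 0, data);
        Imgproc.cvtColor(mGraySubmat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
        Utils.matToBitmap(mRgba, mBitmap);
        return mBitmap; // owned by this view, recycled below
    }

    @Override
    protected void onPreviewStopped() {
        // release the cached bitmap and the native Mat memory
        if (mBitmap != null) { mBitmap.recycle(); mBitmap = null; }
        if (mGraySubmat != null) { mGraySubmat.release(); mGraySubmat = null; }
        if (mYuv != null) { mYuv.release(); mYuv = null; }
        if (mRgba != null) { mRgba.release(); mRgba = null; }
    }
}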