merged all the latest changes from 2.4 to trunk

@@ -4,12 +4,11 @@ import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedList;
import java.util.List;

import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
@@ -81,11 +80,11 @@ class FdView extends SampleCvViewBase {
if (mCascade != null) {
int height = mGray.rows();
int faceSize = Math.round(height * FdActivity.minFaceSize);
List<Rect> faces = new LinkedList<Rect>();
MatOfRect faces = new MatOfRect();
mCascade.detectMultiScale(mGray, faces, 1.1, 2, 2 // TODO: objdetect.CV_HAAR_SCALE_IMAGE
, new Size(faceSize, faceSize), new Size());

for (Rect r : faces)
for (Rect r : faces.toArray())
Core.rectangle(mRgba, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 3);
}
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>Tutorial 1 Basic - 0. Android Camera</name>
<name>Tutorial 0 (Basic) - Android Camera</name>
<comment></comment>
<projects>
</projects>
@@ -10,13 +10,10 @@ import android.view.Window;
public class Sample0Base extends Activity {
private static final String TAG = "Sample::Activity";

public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_GRAY = 1;

private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewGray;
private Sample0View mView;

public static int viewMode = VIEW_MODE_RGBA;

public Sample0Base() {
Log.i(TAG, "Instantiated new " + this.getClass());
@@ -28,7 +25,8 @@ public class Sample0Base extends Activity {
Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(new Sample0View(this));
mView = new Sample0View(this);
setContentView(mView);
}

@Override
@@ -43,9 +41,9 @@ public class Sample0Base extends Activity {
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "Menu Item selected " + item);
if (item == mItemPreviewRGBA)
viewMode = VIEW_MODE_RGBA;
mView.setViewMode(Sample0View.VIEW_MODE_RGBA);
else if (item == mItemPreviewGray)
viewMode = VIEW_MODE_GRAY;
mView.setViewMode(Sample0View.VIEW_MODE_GRAY);
return true;
}
}
@@ -2,34 +2,52 @@ package org.opencv.samples.tutorial0;

import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;

class Sample0View extends SampleViewBase {

private static final String TAG = "Sample0View";
int mSize;
int[] mRGBA;
private Bitmap mBitmap;
private int mViewMode;

public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_GRAY = 1;


public Sample0View(Context context) {
super(context);
mSize = 0;
mViewMode = VIEW_MODE_RGBA;
}

@Override
protected Bitmap processFrame(byte[] data) {
int frameSize = getFrameWidth() * getFrameHeight();
int[] rgba = new int[frameSize];

int[] rgba = mRGBA;

int view_mode = Sample0Base.viewMode;
if (view_mode == Sample0Base.VIEW_MODE_GRAY) {
final int view_mode = mViewMode;
if (view_mode == VIEW_MODE_GRAY) {
for (int i = 0; i < frameSize; i++) {
int y = (0xff & ((int) data[i]));
rgba[i] = 0xff000000 + (y << 16) + (y << 8) + y;
}
} else if (view_mode == Sample0Base.VIEW_MODE_RGBA) {
} else if (view_mode == VIEW_MODE_RGBA) {
for (int i = 0; i < getFrameHeight(); i++)
for (int j = 0; j < getFrameWidth(); j++) {
int y = (0xff & ((int) data[i * getFrameWidth() + j]));
int u = (0xff & ((int) data[frameSize + (i >> 1) * getFrameWidth() + (j & ~1) + 0]));
int v = (0xff & ((int) data[frameSize + (i >> 1) * getFrameWidth() + (j & ~1) + 1]));
int index = i * getFrameWidth() + j;
int supply_index = frameSize + (i >> 1) * getFrameWidth() + (j & ~1);
int y = (0xff & ((int) data[index]));
int u = (0xff & ((int) data[supply_index + 0]));
int v = (0xff & ((int) data[supply_index + 1]));
y = y < 16 ? 16 : y;

int r = Math.round(1.164f * (y - 16) + 1.596f * (v - 128));
int g = Math.round(1.164f * (y - 16) - 0.813f * (v - 128) - 0.391f * (u - 128));
int b = Math.round(1.164f * (y - 16) + 2.018f * (u - 128));

float y_conv = 1.164f * (y - 16);
int r = Math.round(y_conv + 1.596f * (v - 128));
int g = Math.round(y_conv - 0.813f * (v - 128) - 0.391f * (u - 128));
int b = Math.round(y_conv + 2.018f * (u - 128));

r = r < 0 ? 0 : (r > 255 ? 255 : r);
g = g < 0 ? 0 : (g > 255 ? 255 : g);
@@ -38,9 +56,26 @@ class Sample0View extends SampleViewBase {
rgba[i * getFrameWidth() + j] = 0xff000000 + (b << 16) + (g << 8) + r;
}
}

Bitmap bmp = Bitmap.createBitmap(getFrameWidth(), getFrameHeight(), Bitmap.Config.ARGB_8888);
bmp.setPixels(rgba, 0/* offset */, getFrameWidth() /* stride */, 0, 0, getFrameWidth(), getFrameHeight());
return bmp;

mBitmap.setPixels(rgba, 0/* offset */, getFrameWidth() /* stride */, 0, 0, getFrameWidth(), getFrameHeight());
return mBitmap;
}

@Override
protected void onPreviewStared(int previewWidth, int previewHeight) {
/* Create a bitmap that will be used through to calculate the image to */
mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
mRGBA = new int[previewWidth * previewHeight];
}

@Override
protected void onPreviewStopped() {
mBitmap.recycle();
mBitmap = null;
mRGBA = null;
}

public void setViewMode(int viewMode) {
mViewMode = viewMode;
}
}
@@ -6,6 +6,7 @@ import java.util.List;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
@@ -23,6 +24,8 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
private int mFrameHeight;
private byte[] mFrame;
private boolean mThreadRun;
private byte[] mBuffer;


public SampleViewBase(Context context) {
super(context);
@@ -43,9 +46,10 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB)
mCamera.setPreviewTexture( new SurfaceTexture(10) );
else
mCamera.setPreviewDisplay(null);
}

mCamera.setPreviewDisplay(null);
}


public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "surfaceCreated");
if (mCamera != null) {
@@ -56,7 +60,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde

// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
@@ -67,12 +71,34 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
}

params.setPreviewSize(getFrameWidth(), getFrameHeight());

List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}

mCamera.setParameters(params);
try {
setPreview();

/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be coppied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);

try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}

/* Notify that the preview is about to be started and deliver preview size */
onPreviewStared(params.getPreviewSize().width, params.getPreviewSize().height);

/* Now we can start a preview */
mCamera.startPreview();
}
}
@@ -80,14 +106,17 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "surfaceCreated");
mCamera = Camera.open();
mCamera.setPreviewCallback(new PreviewCallback() {

mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
synchronized (SampleViewBase.this) {
mFrame = data;
SampleViewBase.this.notify();
System.arraycopy(data, 0, mFrame, 0, data.length);
SampleViewBase.this.notify();
}
camera.addCallbackBuffer(mBuffer);
}
});

(new Thread(this)).start();
}

@@ -102,10 +131,27 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
mCamera = null;
}
}
onPreviewStopped();
}

/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
protected abstract Bitmap processFrame(byte[] data);

/**
* This method is called when the preview process is beeing started. It is called before the first frame delivered and processFrame is called
* It is called with the width and height parameters of the preview process. It can be used to prepare the data needed during the frame processing.
* @param previewWidth - the width of the preview frames that will be delivered via processFrame
* @param previewHeight - the height of the preview frames that will be delivered via processFrame
*/
protected abstract void onPreviewStared(int previewWidtd, int previewHeight);

/**
* This method is called when preview is stopped. When this method is called the preview stopped and all the processing of frames already completed.
* If the Bitmap object returned via processFrame is cached - it is a good time to recycle it.
* Any other resourcses used during the preview can be released.
*/
protected abstract void onPreviewStopped();

public void run() {
mThreadRun = true;
Log.i(TAG, "Starting processing thread");
@@ -127,7 +173,6 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null);
mHolder.unlockCanvasAndPost(canvas);
}
bmp.recycle();
}
}
}
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>Tutorial 1 Basic - 1. Add OpenCV</name>
<name>Tutorial 1 (Basic) - Add OpenCV</name>
<comment></comment>
<projects>
</projects>
@@ -8,17 +8,12 @@ import android.view.MenuItem;
|
||||
import android.view.Window;
|
||||
|
||||
public class Sample1Java extends Activity {
|
||||
private static final String TAG = "Sample::Activity";
|
||||
|
||||
public static final int VIEW_MODE_RGBA = 0;
|
||||
public static final int VIEW_MODE_GRAY = 1;
|
||||
public static final int VIEW_MODE_CANNY = 2;
|
||||
private static final String TAG = "Sample::Activity";
|
||||
|
||||
private MenuItem mItemPreviewRGBA;
|
||||
private MenuItem mItemPreviewGray;
|
||||
private MenuItem mItemPreviewCanny;
|
||||
|
||||
public static int viewMode = VIEW_MODE_RGBA;
|
||||
private Sample1View mView;
|
||||
|
||||
public Sample1Java() {
|
||||
Log.i(TAG, "Instantiated new " + this.getClass());
|
||||
@@ -30,7 +25,8 @@ public class Sample1Java extends Activity {
|
||||
Log.i(TAG, "onCreate");
|
||||
super.onCreate(savedInstanceState);
|
||||
requestWindowFeature(Window.FEATURE_NO_TITLE);
|
||||
setContentView(new Sample1View(this));
|
||||
mView = new Sample1View(this);
|
||||
setContentView(mView);
|
||||
}
|
||||
|
||||
@Override
|
||||
@@ -45,12 +41,13 @@ public class Sample1Java extends Activity {
|
||||
@Override
|
||||
public boolean onOptionsItemSelected(MenuItem item) {
|
||||
Log.i(TAG, "Menu Item selected " + item);
|
||||
if (item == mItemPreviewRGBA)
|
||||
viewMode = VIEW_MODE_RGBA;
|
||||
else if (item == mItemPreviewGray)
|
||||
viewMode = VIEW_MODE_GRAY;
|
||||
else if (item == mItemPreviewCanny)
|
||||
viewMode = VIEW_MODE_CANNY;
|
||||
if (item == mItemPreviewRGBA) {
|
||||
mView.setViewMode(Sample1View.VIEW_MODE_RGBA);
|
||||
} else if (item == mItemPreviewGray) {
|
||||
mView.setViewMode(Sample1View.VIEW_MODE_GRAY);
|
||||
} else if (item == mItemPreviewCanny) {
|
||||
mView.setViewMode(Sample1View.VIEW_MODE_CANNY);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
@@ -14,64 +14,44 @@ import android.util.Log;
|
||||
import android.view.SurfaceHolder;
|
||||
|
||||
class Sample1View extends SampleViewBase {
|
||||
|
||||
public static final int VIEW_MODE_RGBA = 0;
|
||||
public static final int VIEW_MODE_GRAY = 1;
|
||||
public static final int VIEW_MODE_CANNY = 2;
|
||||
|
||||
private Mat mYuv;
|
||||
private Mat mRgba;
|
||||
private Mat mGraySubmat;
|
||||
private Mat mIntermediateMat;
|
||||
private Bitmap mBitmap;
|
||||
private int mViewMode;
|
||||
|
||||
public Sample1View(Context context) {
|
||||
super(context);
|
||||
mViewMode = VIEW_MODE_RGBA;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
|
||||
super.surfaceChanged(_holder, format, width, height);
|
||||
@Override
|
||||
protected void onPreviewStared(int previewWidth, int previewHeight) {
|
||||
synchronized (this) {
|
||||
// initialize Mats before usage
|
||||
mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
|
||||
mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
|
||||
|
||||
synchronized (this) {
|
||||
// initialize Mats before usage
|
||||
mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
|
||||
mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
|
||||
mRgba = new Mat();
|
||||
mIntermediateMat = new Mat();
|
||||
|
||||
mRgba = new Mat();
|
||||
mIntermediateMat = new Mat();
|
||||
}
|
||||
}
|
||||
mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Bitmap processFrame(byte[] data) {
|
||||
mYuv.put(0, 0, data);
|
||||
@Override
|
||||
protected void onPreviewStopped() {
|
||||
if(mBitmap != null) {
|
||||
mBitmap.recycle();
|
||||
}
|
||||
|
||||
switch (Sample1Java.viewMode) {
|
||||
case Sample1Java.VIEW_MODE_GRAY:
|
||||
Imgproc.cvtColor(mGraySubmat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
|
||||
break;
|
||||
case Sample1Java.VIEW_MODE_RGBA:
|
||||
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4);
|
||||
Core.putText(mRgba, "OpenCV + Android", new Point(10, 100), 3/* CV_FONT_HERSHEY_COMPLEX */, 2, new Scalar(255, 0, 0, 255), 3);
|
||||
break;
|
||||
case Sample1Java.VIEW_MODE_CANNY:
|
||||
Imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100);
|
||||
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
|
||||
break;
|
||||
}
|
||||
|
||||
Bitmap bmp = Bitmap.createBitmap(getFrameWidth(), getFrameHeight(), Bitmap.Config.ARGB_8888);
|
||||
|
||||
try {
|
||||
Utils.matToBitmap(mRgba, bmp);
|
||||
return bmp;
|
||||
} catch(Exception e) {
|
||||
Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
|
||||
bmp.recycle();
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
super.run();
|
||||
|
||||
synchronized (this) {
|
||||
synchronized (this) {
|
||||
// Explicitly deallocate Mats
|
||||
if (mYuv != null)
|
||||
mYuv.release();
|
||||
@@ -88,4 +68,41 @@ class Sample1View extends SampleViewBase {
|
||||
mIntermediateMat = null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Bitmap processFrame(byte[] data) {
|
||||
mYuv.put(0, 0, data);
|
||||
|
||||
final int viewMode = mViewMode;
|
||||
|
||||
switch (viewMode) {
|
||||
case VIEW_MODE_GRAY:
|
||||
Imgproc.cvtColor(mGraySubmat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
|
||||
break;
|
||||
case VIEW_MODE_RGBA:
|
||||
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4);
|
||||
Core.putText(mRgba, "OpenCV + Android", new Point(10, 100), 3/* CV_FONT_HERSHEY_COMPLEX */, 2, new Scalar(255, 0, 0, 255), 3);
|
||||
break;
|
||||
case VIEW_MODE_CANNY:
|
||||
Imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100);
|
||||
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
|
||||
break;
|
||||
}
|
||||
|
||||
Bitmap bmp = mBitmap;
|
||||
|
||||
try {
|
||||
Utils.matToBitmap(mRgba, bmp);
|
||||
} catch(Exception e) {
|
||||
Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
|
||||
bmp.recycle();
|
||||
bmp = null;
|
||||
}
|
||||
return bmp;
|
||||
}
|
||||
|
||||
public void setViewMode(int viewMode) {
|
||||
mViewMode = viewMode;
|
||||
}
|
||||
|
||||
}
|
||||
|
@@ -6,6 +6,7 @@ import java.util.List;
|
||||
import android.content.Context;
|
||||
import android.graphics.Bitmap;
|
||||
import android.graphics.Canvas;
|
||||
import android.graphics.ImageFormat;
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.hardware.Camera;
|
||||
import android.hardware.Camera.PreviewCallback;
|
||||
@@ -23,6 +24,8 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
private int mFrameHeight;
|
||||
private byte[] mFrame;
|
||||
private boolean mThreadRun;
|
||||
private byte[] mBuffer;
|
||||
|
||||
|
||||
public SampleViewBase(Context context) {
|
||||
super(context);
|
||||
@@ -49,6 +52,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
|
||||
Log.i(TAG, "surfaceCreated");
|
||||
if (mCamera != null) {
|
||||
|
||||
Camera.Parameters params = mCamera.getParameters();
|
||||
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
|
||||
mFrameWidth = width;
|
||||
@@ -56,7 +60,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
|
||||
// selecting optimal camera preview size
|
||||
{
|
||||
double minDiff = Double.MAX_VALUE;
|
||||
int minDiff = Integer.MAX_VALUE;
|
||||
for (Camera.Size size : sizes) {
|
||||
if (Math.abs(size.height - height) < minDiff) {
|
||||
mFrameWidth = size.width;
|
||||
@@ -67,12 +71,34 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
}
|
||||
|
||||
params.setPreviewSize(getFrameWidth(), getFrameHeight());
|
||||
|
||||
List<String> FocusModes = params.getSupportedFocusModes();
|
||||
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
|
||||
{
|
||||
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
|
||||
}
|
||||
|
||||
mCamera.setParameters(params);
|
||||
try {
|
||||
setPreview();
|
||||
|
||||
/* Now allocate the buffer */
|
||||
params = mCamera.getParameters();
|
||||
int size = params.getPreviewSize().width * params.getPreviewSize().height;
|
||||
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
|
||||
mBuffer = new byte[size];
|
||||
/* The buffer where the current frame will be coppied */
|
||||
mFrame = new byte [size];
|
||||
mCamera.addCallbackBuffer(mBuffer);
|
||||
|
||||
try {
|
||||
setPreview();
|
||||
} catch (IOException e) {
|
||||
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
|
||||
}
|
||||
|
||||
/* Notify that the preview is about to be started and deliver preview size */
|
||||
onPreviewStared(params.getPreviewSize().width, params.getPreviewSize().height);
|
||||
|
||||
/* Now we can start a preview */
|
||||
mCamera.startPreview();
|
||||
}
|
||||
}
|
||||
@@ -80,14 +106,17 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
public void surfaceCreated(SurfaceHolder holder) {
|
||||
Log.i(TAG, "surfaceCreated");
|
||||
mCamera = Camera.open();
|
||||
mCamera.setPreviewCallback(new PreviewCallback() {
|
||||
|
||||
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
|
||||
public void onPreviewFrame(byte[] data, Camera camera) {
|
||||
synchronized (SampleViewBase.this) {
|
||||
mFrame = data;
|
||||
SampleViewBase.this.notify();
|
||||
System.arraycopy(data, 0, mFrame, 0, data.length);
|
||||
SampleViewBase.this.notify();
|
||||
}
|
||||
camera.addCallbackBuffer(mBuffer);
|
||||
}
|
||||
});
|
||||
|
||||
(new Thread(this)).start();
|
||||
}
|
||||
|
||||
@@ -102,10 +131,27 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
mCamera = null;
|
||||
}
|
||||
}
|
||||
onPreviewStopped();
|
||||
}
|
||||
|
||||
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
|
||||
protected abstract Bitmap processFrame(byte[] data);
|
||||
|
||||
/**
|
||||
* This method is called when the preview process is beeing started. It is called before the first frame delivered and processFrame is called
|
||||
* It is called with the width and height parameters of the preview process. It can be used to prepare the data needed during the frame processing.
|
||||
* @param previewWidth - the width of the preview frames that will be delivered via processFrame
|
||||
* @param previewHeight - the height of the preview frames that will be delivered via processFrame
|
||||
*/
|
||||
protected abstract void onPreviewStared(int previewWidtd, int previewHeight);
|
||||
|
||||
/**
|
||||
* This method is called when preview is stopped. When this method is called the preview stopped and all the processing of frames already completed.
|
||||
* If the Bitmap object returned via processFrame is cached - it is a good time to recycle it.
|
||||
* Any other resourcses used during the preview can be released.
|
||||
*/
|
||||
protected abstract void onPreviewStopped();
|
||||
|
||||
public void run() {
|
||||
mThreadRun = true;
|
||||
Log.i(TAG, "Starting processing thread");
|
||||
@@ -127,7 +173,6 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null);
|
||||
mHolder.unlockCanvasAndPost(canvas);
|
||||
}
|
||||
bmp.recycle();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>Tutorial 1 Basic - 2. Use OpenCV Camera</name>
<name>Tutorial 2 (Basic) - Use OpenCV Camera</name>
<comment></comment>
<projects>
</projects>
@@ -1,13 +1,17 @@
|
||||
package org.opencv.samples.tutorial2;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.opencv.android.Utils;
|
||||
import org.opencv.core.Core;
|
||||
import org.opencv.core.Mat;
|
||||
import org.opencv.core.MatOfPoint;
|
||||
import org.opencv.core.Point;
|
||||
import org.opencv.core.Scalar;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
import org.opencv.highgui.Highgui;
|
||||
import org.opencv.highgui.VideoCapture;
|
||||
import org.opencv.imgproc.Imgproc;
|
||||
|
||||
import android.content.Context;
|
||||
import android.graphics.Bitmap;
|
||||
@@ -18,6 +22,10 @@ class Sample2View extends SampleCvViewBase {
|
||||
private Mat mRgba;
|
||||
private Mat mGray;
|
||||
private Mat mIntermediateMat;
|
||||
private Mat mIntermediateMat2;
|
||||
private Mat mEmpty;
|
||||
private Scalar lo, hi;
|
||||
private Scalar bl, wh;
|
||||
|
||||
public Sample2View(Context context) {
|
||||
super(context);
|
||||
@@ -32,11 +40,18 @@ class Sample2View extends SampleCvViewBase {
|
||||
mGray = new Mat();
|
||||
mRgba = new Mat();
|
||||
mIntermediateMat = new Mat();
|
||||
mIntermediateMat2 = new Mat();
|
||||
mEmpty = new Mat();
|
||||
lo = new Scalar(85, 100, 30);
|
||||
hi = new Scalar(130, 255, 255);
|
||||
bl = new Scalar(0, 0, 0, 255);
|
||||
wh = new Scalar(255, 255, 255, 255);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Bitmap processFrame(VideoCapture capture) {
|
||||
/**/
|
||||
switch (Sample2NativeCamera.viewMode) {
|
||||
case Sample2NativeCamera.VIEW_MODE_GRAY:
|
||||
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
|
||||
@@ -44,14 +59,39 @@ class Sample2View extends SampleCvViewBase {
|
||||
break;
|
||||
case Sample2NativeCamera.VIEW_MODE_RGBA:
|
||||
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
|
||||
Core.putText(mRgba, "OpenCV + Android", new Point(10, 100), 3/* CV_FONT_HERSHEY_COMPLEX */, 2, new Scalar(255, 0, 0, 255), 3);
|
||||
Core.putText(mRgba, "OpenCV + Android", new Point(10, 100), 3, 2, new Scalar(255, 0, 0, 255), 3);
|
||||
break;
|
||||
case Sample2NativeCamera.VIEW_MODE_CANNY:
|
||||
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
|
||||
/*capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
|
||||
Imgproc.Canny(mGray, mIntermediateMat, 80, 100);
|
||||
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
|
||||
break;
|
||||
*/
|
||||
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
|
||||
Imgproc.cvtColor(mRgba, mIntermediateMat, Imgproc.COLOR_RGB2HSV_FULL);
|
||||
Core.inRange(mIntermediateMat, lo, hi, mIntermediateMat2); // green
|
||||
Imgproc.dilate(mIntermediateMat2, mIntermediateMat2, mEmpty);
|
||||
//
|
||||
List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
|
||||
Mat hierarchy = new Mat();
|
||||
Imgproc.findContours(mIntermediateMat2, contours, hierarchy,Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
|
||||
Log.d("processFrame", "contours.size()" + contours.size());
|
||||
double maxArea = 0;
|
||||
int indexMaxArea = -1;
|
||||
for (int i = 0; i < contours.size(); i++) {
|
||||
double s = Imgproc.contourArea(contours.get(i));
|
||||
if(s > maxArea){
|
||||
indexMaxArea = i;
|
||||
maxArea = s;
|
||||
}
|
||||
}
|
||||
|
||||
mRgba.setTo(bl);
|
||||
Imgproc.drawContours(mRgba, contours, indexMaxArea, wh);
|
||||
//
|
||||
//Imgproc.cvtColor(mIntermediateMat2, mRgba, Imgproc.COLOR_GRAY2RGBA);
|
||||
break;
|
||||
}
|
||||
/**/
|
||||
|
||||
Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
|
||||
|
||||
@@ -78,6 +118,9 @@ class Sample2View extends SampleCvViewBase {
|
||||
if (mIntermediateMat != null)
|
||||
mIntermediateMat.release();
|
||||
|
||||
if (mIntermediateMat2 != null)
|
||||
mIntermediateMat2.release();
|
||||
|
||||
mRgba = null;
|
||||
mGray = null;
|
||||
mIntermediateMat = null;
|
||||
|
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>Tutorial 2 Advanced - 1. Add Native OpenCV</name>
<name>Tutorial 3 (Advanced) - Add Native OpenCV</name>
<comment></comment>
<projects>
</projects>
@@ -4,19 +4,40 @@ import android.content.Context;
|
||||
import android.graphics.Bitmap;
|
||||
|
||||
class Sample3View extends SampleViewBase {
|
||||
|
||||
private int mFrameSize;
|
||||
private Bitmap mBitmap;
|
||||
private int[] mRGBA;
|
||||
|
||||
public Sample3View(Context context) {
|
||||
super(context);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onPreviewStared(int previewWidtd, int previewHeight) {
|
||||
mFrameSize = previewWidtd * previewHeight;
|
||||
mRGBA = new int[mFrameSize];
|
||||
mBitmap = Bitmap.createBitmap(previewWidtd, previewHeight, Bitmap.Config.ARGB_8888);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onPreviewStopped() {
|
||||
if(mBitmap != null) {
|
||||
mBitmap.recycle();
|
||||
mBitmap = null;
|
||||
}
|
||||
mRGBA = null;
|
||||
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Bitmap processFrame(byte[] data) {
|
||||
int frameSize = getFrameWidth() * getFrameHeight();
|
||||
int[] rgba = new int[frameSize];
|
||||
int[] rgba = mRGBA;
|
||||
|
||||
FindFeatures(getFrameWidth(), getFrameHeight(), data, rgba);
|
||||
|
||||
Bitmap bmp = Bitmap.createBitmap(getFrameWidth(), getFrameHeight(), Bitmap.Config.ARGB_8888);
|
||||
Bitmap bmp = mBitmap;
|
||||
bmp.setPixels(rgba, 0/* offset */, getFrameWidth() /* stride */, 0, 0, getFrameWidth(), getFrameHeight());
|
||||
return bmp;
|
||||
}
|
||||
@@ -24,10 +45,6 @@ class Sample3View extends SampleViewBase {
|
||||
public native void FindFeatures(int width, int height, byte yuv[], int[] rgba);
|
||||
|
||||
static {
|
||||
try {
|
||||
System.loadLibrary("opencv_java");
|
||||
} catch(Exception e) {
|
||||
}
|
||||
System.loadLibrary("native_sample");
|
||||
}
|
||||
}
|
||||
|
@@ -6,6 +6,7 @@ import java.util.List;
|
||||
import android.content.Context;
|
||||
import android.graphics.Bitmap;
|
||||
import android.graphics.Canvas;
|
||||
import android.graphics.ImageFormat;
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.hardware.Camera;
|
||||
import android.hardware.Camera.PreviewCallback;
|
||||
@@ -23,6 +24,8 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
private int mFrameHeight;
|
||||
private byte[] mFrame;
|
||||
private boolean mThreadRun;
|
||||
private byte[] mBuffer;
|
||||
|
||||
|
||||
public SampleViewBase(Context context) {
|
||||
super(context);
|
||||
@@ -45,7 +48,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
else
|
||||
mCamera.setPreviewDisplay(null);
|
||||
}
|
||||
|
||||
|
||||
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
|
||||
Log.i(TAG, "surfaceCreated");
|
||||
if (mCamera != null) {
|
||||
@@ -56,7 +59,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
|
||||
// selecting optimal camera preview size
|
||||
{
|
||||
double minDiff = Double.MAX_VALUE;
|
||||
int minDiff = Integer.MAX_VALUE;
|
||||
for (Camera.Size size : sizes) {
|
||||
if (Math.abs(size.height - height) < minDiff) {
|
||||
mFrameWidth = size.width;
|
||||
@@ -67,12 +70,34 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
}
|
||||
|
||||
params.setPreviewSize(getFrameWidth(), getFrameHeight());
|
||||
|
||||
List<String> FocusModes = params.getSupportedFocusModes();
|
||||
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
|
||||
{
|
||||
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
|
||||
}
|
||||
|
||||
mCamera.setParameters(params);
|
||||
try {
|
||||
setPreview();
|
||||
|
||||
/* Now allocate the buffer */
|
||||
params = mCamera.getParameters();
|
||||
int size = params.getPreviewSize().width * params.getPreviewSize().height;
|
||||
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
|
||||
mBuffer = new byte[size];
|
||||
/* The buffer where the current frame will be coppied */
|
||||
mFrame = new byte [size];
|
||||
mCamera.addCallbackBuffer(mBuffer);
|
||||
|
||||
try {
|
||||
setPreview();
|
||||
} catch (IOException e) {
|
||||
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
|
||||
}
|
||||
|
||||
/* Notify that the preview is about to be started and deliver preview size */
|
||||
onPreviewStared(params.getPreviewSize().width, params.getPreviewSize().height);
|
||||
|
||||
/* Now we can start a preview */
|
||||
mCamera.startPreview();
|
||||
}
|
||||
}
|
||||
@@ -80,14 +105,17 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
public void surfaceCreated(SurfaceHolder holder) {
|
||||
Log.i(TAG, "surfaceCreated");
|
||||
mCamera = Camera.open();
|
||||
mCamera.setPreviewCallback(new PreviewCallback() {
|
||||
|
||||
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
|
||||
public void onPreviewFrame(byte[] data, Camera camera) {
|
||||
synchronized (SampleViewBase.this) {
|
||||
mFrame = data;
|
||||
SampleViewBase.this.notify();
|
||||
System.arraycopy(data, 0, mFrame, 0, data.length);
|
||||
SampleViewBase.this.notify();
|
||||
}
|
||||
camera.addCallbackBuffer(mBuffer);
|
||||
}
|
||||
});
|
||||
|
||||
(new Thread(this)).start();
|
||||
}
|
||||
|
||||
@@ -102,10 +130,27 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
mCamera = null;
|
||||
}
|
||||
}
|
||||
onPreviewStopped();
|
||||
}
|
||||
|
||||
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
|
||||
protected abstract Bitmap processFrame(byte[] data);
|
||||
|
||||
/**
|
||||
* This method is called when the preview process is beeing started. It is called before the first frame delivered and processFrame is called
|
||||
* It is called with the width and height parameters of the preview process. It can be used to prepare the data needed during the frame processing.
|
||||
* @param previewWidth - the width of the preview frames that will be delivered via processFrame
|
||||
* @param previewHeight - the height of the preview frames that will be delivered via processFrame
|
||||
*/
|
||||
protected abstract void onPreviewStared(int previewWidtd, int previewHeight);
|
||||
|
||||
/**
|
||||
* This method is called when preview is stopped. When this method is called the preview stopped and all the processing of frames already completed.
|
||||
* If the Bitmap object returned via processFrame is cached - it is a good time to recycle it.
|
||||
* Any other resourcses used during the preview can be released.
|
||||
*/
|
||||
protected abstract void onPreviewStopped();
|
||||
|
||||
public void run() {
|
||||
mThreadRun = true;
|
||||
Log.i(TAG, "Starting processing thread");
|
||||
@@ -127,7 +172,6 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null);
|
||||
mHolder.unlockCanvasAndPost(canvas);
|
||||
}
|
||||
bmp.recycle();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>Tutorial 2 Advanced - 2. Mix Java+Native OpenCV</name>
<name>Tutorial 4 (Advanced) - Mix Java+Native OpenCV</name>
<comment></comment>
<projects>
</projects>
@@ -8,19 +8,14 @@ import android.view.MenuItem;
|
||||
import android.view.Window;
|
||||
|
||||
public class Sample4Mixed extends Activity {
|
||||
private static final String TAG = "Sample::Activity";
|
||||
|
||||
public static final int VIEW_MODE_RGBA = 0;
|
||||
public static final int VIEW_MODE_GRAY = 1;
|
||||
public static final int VIEW_MODE_CANNY = 2;
|
||||
public static final int VIEW_MODE_FEATURES = 5;
|
||||
private static final String TAG = "Sample::Activity";
|
||||
|
||||
private MenuItem mItemPreviewRGBA;
|
||||
private MenuItem mItemPreviewGray;
|
||||
private MenuItem mItemPreviewCanny;
|
||||
private MenuItem mItemPreviewFeatures;
|
||||
private Sample4View mView;
|
||||
|
||||
public static int viewMode = VIEW_MODE_RGBA;
|
||||
|
||||
public Sample4Mixed() {
|
||||
Log.i(TAG, "Instantiated new " + this.getClass());
|
||||
@@ -32,7 +27,8 @@ public class Sample4Mixed extends Activity {
|
||||
super.onCreate(savedInstanceState);
|
||||
Log.i(TAG, "onCreate");
|
||||
requestWindowFeature(Window.FEATURE_NO_TITLE);
|
||||
setContentView(new Sample4View(this));
|
||||
mView = new Sample4View(this);
|
||||
setContentView(mView);
|
||||
}
|
||||
|
||||
public boolean onCreateOptionsMenu(Menu menu) {
|
||||
@@ -46,14 +42,15 @@ public class Sample4Mixed extends Activity {
|
||||
|
||||
public boolean onOptionsItemSelected(MenuItem item) {
|
||||
Log.i(TAG, "Menu Item selected " + item);
|
||||
if (item == mItemPreviewRGBA)
|
||||
viewMode = VIEW_MODE_RGBA;
|
||||
else if (item == mItemPreviewGray)
|
||||
viewMode = VIEW_MODE_GRAY;
|
||||
else if (item == mItemPreviewCanny)
|
||||
viewMode = VIEW_MODE_CANNY;
|
||||
else if (item == mItemPreviewFeatures)
|
||||
viewMode = VIEW_MODE_FEATURES;
|
||||
if (item == mItemPreviewRGBA) {
|
||||
mView.setViewMode(Sample4View.VIEW_MODE_RGBA);
|
||||
} else if (item == mItemPreviewGray) {
|
||||
mView.setViewMode(Sample4View.VIEW_MODE_GRAY);
|
||||
} else if (item == mItemPreviewCanny) {
|
||||
mView.setViewMode(Sample4View.VIEW_MODE_CANNY);
|
||||
} else if (item == mItemPreviewFeatures) {
|
||||
mView.setViewMode(Sample4View.VIEW_MODE_FEATURES);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
@@ -11,87 +11,106 @@ import android.util.Log;
|
||||
import android.view.SurfaceHolder;
|
||||
|
||||
class Sample4View extends SampleViewBase {
|
||||
|
||||
public static final int VIEW_MODE_RGBA = 0;
|
||||
public static final int VIEW_MODE_GRAY = 1;
|
||||
public static final int VIEW_MODE_CANNY = 2;
|
||||
public static final int VIEW_MODE_FEATURES = 5;
|
||||
|
||||
private Mat mYuv;
|
||||
private Mat mRgba;
|
||||
private Mat mGraySubmat;
|
||||
private Mat mIntermediateMat;
|
||||
|
||||
private int mViewMode;
|
||||
private Bitmap mBitmap;
|
||||
|
||||
public Sample4View(Context context) {
|
||||
super(context);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onPreviewStared(int previewWidtd, int previewHeight) {
|
||||
// initialize Mats before usage
|
||||
mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
|
||||
mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
|
||||
|
||||
@Override
|
||||
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
|
||||
super.surfaceChanged(_holder, format, width, height);
|
||||
mRgba = new Mat();
|
||||
mIntermediateMat = new Mat();
|
||||
|
||||
mBitmap = Bitmap.createBitmap(previewWidtd, previewHeight, Bitmap.Config.ARGB_8888);
|
||||
}
|
||||
|
||||
synchronized (this) {
|
||||
// initialize Mats before usage
|
||||
mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
|
||||
mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
|
||||
@Override
|
||||
protected void onPreviewStopped() {
|
||||
|
||||
if (mBitmap != null) {
|
||||
mBitmap.recycle();
|
||||
mBitmap = null;
|
||||
}
|
||||
|
||||
// Explicitly deallocate Mats
|
||||
if (mYuv != null)
|
||||
mYuv.release();
|
||||
if (mRgba != null)
|
||||
mRgba.release();
|
||||
if (mGraySubmat != null)
|
||||
mGraySubmat.release();
|
||||
if (mIntermediateMat != null)
|
||||
mIntermediateMat.release();
|
||||
|
||||
mYuv = null;
|
||||
mRgba = null;
|
||||
mGraySubmat = null;
|
||||
mIntermediateMat = null;
|
||||
|
||||
}
|
||||
|
||||
mRgba = new Mat();
|
||||
mIntermediateMat = new Mat();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Bitmap processFrame(byte[] data) {
|
||||
mYuv.put(0, 0, data);
|
||||
|
||||
switch (Sample4Mixed.viewMode) {
|
||||
case Sample4Mixed.VIEW_MODE_GRAY:
|
||||
final int viewMode = mViewMode;
|
||||
|
||||
switch (viewMode) {
|
||||
case VIEW_MODE_GRAY:
|
||||
Imgproc.cvtColor(mGraySubmat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
|
||||
break;
|
||||
case Sample4Mixed.VIEW_MODE_RGBA:
|
||||
case VIEW_MODE_RGBA:
|
||||
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4);
|
||||
break;
|
||||
case Sample4Mixed.VIEW_MODE_CANNY:
|
||||
case VIEW_MODE_CANNY:
|
||||
Imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100);
|
||||
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
|
||||
break;
|
||||
case Sample4Mixed.VIEW_MODE_FEATURES:
|
||||
case VIEW_MODE_FEATURES:
|
||||
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4);
|
||||
FindFeatures(mGraySubmat.getNativeObjAddr(), mRgba.getNativeObjAddr());
|
||||
break;
|
||||
}
|
||||
|
||||
Bitmap bmp = Bitmap.createBitmap(getFrameWidth(), getFrameHeight(), Bitmap.Config.ARGB_8888);
|
||||
Bitmap bmp = mBitmap;
|
||||
|
||||
try {
|
||||
Utils.matToBitmap(mRgba, bmp);
|
||||
return bmp;
|
||||
Utils.matToBitmap(mRgba, bmp);
|
||||
} catch(Exception e) {
|
||||
Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
|
||||
Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
|
||||
bmp.recycle();
|
||||
return null;
|
||||
bmp = null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
super.run();
|
||||
|
||||
synchronized (this) {
|
||||
// Explicitly deallocate Mats
|
||||
if (mYuv != null)
|
||||
mYuv.release();
|
||||
if (mRgba != null)
|
||||
mRgba.release();
|
||||
if (mGraySubmat != null)
|
||||
mGraySubmat.release();
|
||||
if (mIntermediateMat != null)
|
||||
mIntermediateMat.release();
|
||||
|
||||
mYuv = null;
|
||||
mRgba = null;
|
||||
mGraySubmat = null;
|
||||
mIntermediateMat = null;
|
||||
}
|
||||
return bmp;
|
||||
}
|
||||
|
||||
public native void FindFeatures(long matAddrGr, long matAddrRgba);
|
||||
|
||||
static {
|
||||
System.loadLibrary("opencv_java");
|
||||
System.loadLibrary("mixed_sample");
|
||||
}
|
||||
|
||||
public void setViewMode(int viewMode) {
|
||||
mViewMode = viewMode;
|
||||
}
|
||||
}
|
||||
|
@@ -6,6 +6,7 @@ import java.util.List;
|
||||
import android.content.Context;
|
||||
import android.graphics.Bitmap;
|
||||
import android.graphics.Canvas;
|
||||
import android.graphics.ImageFormat;
|
||||
import android.graphics.SurfaceTexture;
|
||||
import android.hardware.Camera;
|
||||
import android.hardware.Camera.PreviewCallback;
|
||||
@@ -23,6 +24,8 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
private int mFrameHeight;
|
||||
private byte[] mFrame;
|
||||
private boolean mThreadRun;
|
||||
private byte[] mBuffer;
|
||||
|
||||
|
||||
public SampleViewBase(Context context) {
|
||||
super(context);
|
||||
@@ -56,7 +59,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
|
||||
// selecting optimal camera preview size
|
||||
{
|
||||
double minDiff = Double.MAX_VALUE;
|
||||
int minDiff = Integer.MAX_VALUE;
|
||||
for (Camera.Size size : sizes) {
|
||||
if (Math.abs(size.height - height) < minDiff) {
|
||||
mFrameWidth = size.width;
|
||||
@@ -67,12 +70,34 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
}
|
||||
|
||||
params.setPreviewSize(getFrameWidth(), getFrameHeight());
|
||||
|
||||
List<String> FocusModes = params.getSupportedFocusModes();
|
||||
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
|
||||
{
|
||||
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
|
||||
}
|
||||
|
||||
mCamera.setParameters(params);
|
||||
|
||||
/* Now allocate the buffer */
|
||||
params = mCamera.getParameters();
|
||||
int size = params.getPreviewSize().width * params.getPreviewSize().height;
|
||||
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
|
||||
mBuffer = new byte[size];
|
||||
/* The buffer where the current frame will be coppied */
|
||||
mFrame = new byte [size];
|
||||
mCamera.addCallbackBuffer(mBuffer);
|
||||
|
||||
try {
|
||||
setPreview();
|
||||
} catch (IOException e) {
|
||||
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
|
||||
}
|
||||
setPreview();
|
||||
} catch (IOException e) {
|
||||
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
|
||||
}
|
||||
|
||||
/* Notify that the preview is about to be started and deliver preview size */
|
||||
onPreviewStared(params.getPreviewSize().width, params.getPreviewSize().height);
|
||||
|
||||
/* Now we can start a preview */
|
||||
mCamera.startPreview();
|
||||
}
|
||||
}
|
||||
@@ -80,14 +105,17 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
public void surfaceCreated(SurfaceHolder holder) {
|
||||
Log.i(TAG, "surfaceCreated");
|
||||
mCamera = Camera.open();
|
||||
mCamera.setPreviewCallback(new PreviewCallback() {
|
||||
|
||||
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
|
||||
public void onPreviewFrame(byte[] data, Camera camera) {
|
||||
synchronized (SampleViewBase.this) {
|
||||
mFrame = data;
|
||||
SampleViewBase.this.notify();
|
||||
System.arraycopy(data, 0, mFrame, 0, data.length);
|
||||
SampleViewBase.this.notify();
|
||||
}
|
||||
camera.addCallbackBuffer(mBuffer);
|
||||
}
|
||||
});
|
||||
|
||||
(new Thread(this)).start();
|
||||
}
|
||||
|
||||
@@ -102,10 +130,27 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
mCamera = null;
|
||||
}
|
||||
}
|
||||
onPreviewStopped();
|
||||
}
|
||||
|
||||
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
|
||||
protected abstract Bitmap processFrame(byte[] data);
|
||||
|
||||
/**
|
||||
* This method is called when the preview process is beeing started. It is called before the first frame delivered and processFrame is called
|
||||
* It is called with the width and height parameters of the preview process. It can be used to prepare the data needed during the frame processing.
|
||||
* @param previewWidth - the width of the preview frames that will be delivered via processFrame
|
||||
* @param previewHeight - the height of the preview frames that will be delivered via processFrame
|
||||
*/
|
||||
protected abstract void onPreviewStared(int previewWidtd, int previewHeight);
|
||||
|
||||
/**
|
||||
* This method is called when preview is stopped. When this method is called the preview stopped and all the processing of frames already completed.
|
||||
* If the Bitmap object returned via processFrame is cached - it is a good time to recycle it.
|
||||
* Any other resourcses used during the preview can be released.
|
||||
*/
|
||||
protected abstract void onPreviewStopped();
|
||||
|
||||
public void run() {
|
||||
mThreadRun = true;
|
||||
Log.i(TAG, "Starting processing thread");
|
||||
@@ -127,12 +172,7 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
|
||||
canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null);
|
||||
mHolder.unlockCanvasAndPost(canvas);
|
||||
}
|
||||
bmp.recycle();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static {
|
||||
System.loadLibrary("opencv_java");
|
||||
}
|
||||
}
|
@@ -12,7 +12,7 @@ ocv_check_dependencies(${OPENCV_C_SAMPLES_REQUIRED_DEPS})
if(BUILD_EXAMPLES AND OCV_DEPENDENCIES_FOUND)
project(c_samples)

if(CMAKE_COMPILER_IS_GNUCXX)
if(CMAKE_COMPILER_IS_GNUCXX AND NOT ENABLE_NOISY_WARNINGS)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-unused-function")
endif()
@@ -49,7 +49,7 @@ static void findCComp( IplImage* img )
}


int main( int argc, char** argv )
int main()
{
int i, j;
CvMemStorage* storage = cvCreateMemStorage(0);
@@ -24,7 +24,7 @@ void drawOptFlowMap(const CvMat* flow, CvMat* cflowmap, int step,
}
}

int main(int argc, char** argv)
int main()
{
CvCapture* capture = cvCreateCameraCapture(0);
CvMat* prevgray = 0, *gray = 0, *flow = 0, *cflow = 0;
@@ -192,7 +192,9 @@ void print_variable_importance( CvDTree* dtree, const char** var_desc )
}

printf( "Print variable importance information? (y/n) " );
scanf( "%1s", input );
int values_read = scanf( "%1s", input );
CV_Assert(values_read == 1);

if( input[0] != 'y' && input[0] != 'Y' )
return;

@@ -230,7 +232,9 @@ void interactive_classification( CvDTree* dtree, const char** var_desc )
const CvDTreeNode* node;

printf( "Start/Proceed with interactive mushroom classification (y/n): " );
scanf( "%1s", input );
int values_read = scanf( "%1s", input );
CV_Assert(values_read == 1);

if( input[0] != 'y' && input[0] != 'Y' )
break;
printf( "Enter 1-letter answers, '?' for missing/unknown value...\n" );
@@ -252,7 +256,8 @@ void interactive_classification( CvDTree* dtree, const char** var_desc )
const int* map = data->cat_map->data.i + data->cat_ofs->data.i[vi];

printf( "%s: ", var_desc[vi] );
scanf( "%1s", input );
values_read = scanf( "%1s", input );
CV_Assert(values_read == 1);

if( input[0] == '?' )
{
@@ -121,7 +121,7 @@ int main(int argc, char** argv)
print_result( rtrees.calc_error( &data, CV_TRAIN_ERROR), rtrees.calc_error( &data, CV_TEST_ERROR ), rtrees.get_var_importance() );

printf("======ERTREES=====\n");
ertrees.train( &data, CvRTParams( 10, 2, 0, false, 16, 0, true, 0, 100, 0, CV_TERMCRIT_ITER ));
ertrees.train( &data, CvRTParams( 18, 2, 0, false, 16, 0, true, 0, 100, 0, CV_TERMCRIT_ITER ));
print_result( ertrees.calc_error( &data, CV_TRAIN_ERROR), ertrees.calc_error( &data, CV_TEST_ERROR ), ertrees.get_var_importance() );

printf("======GBTREES=====\n");
@@ -20,7 +20,7 @@ if(BUILD_EXAMPLES AND OCV_DEPENDENCIES_FOUND)
ocv_include_directories("${OpenCV_SOURCE_DIR}/modules/gpu/include")
endif()

if(CMAKE_COMPILER_IS_GNUCXX)
if(CMAKE_COMPILER_IS_GNUCXX AND NOT ENABLE_NOISY_WARNINGS)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-unused-function")
endif()
@@ -273,7 +273,8 @@ int main(int argc, char** argv)
if( dir[dir.size()-1] != '\\' && dir[dir.size()-1] != '/' )
dir += "/";

system(("mkdir " + dir).c_str());
int result = system(("mkdir " + dir).c_str());
CV_Assert(result == 0);

for( int i = 0; ddms[i*4] != 0; i++ )
{
@@ -1,4 +1,4 @@
#include "opencv2/ml/ml.hpp"
#include "opencv2/legacy/legacy.hpp"
#include "opencv2/highgui/highgui.hpp"

using namespace cv;
@@ -82,7 +82,8 @@ int main(int argc, char** argv)
sprintf(test_file, "%s", argv[1]);
f = fopen(test_file, "r");
char vid[20];
fscanf(f, "%s\n", vid);
int values_read = fscanf(f, "%s\n", vid);
CV_Assert(values_read == 1);
cout << "Benchmarking against " << vid << endl;
live = 0;
}
@@ -133,7 +134,8 @@ int main(int argc, char** argv)
}
else
{
fscanf(f, "%d %f %f %f %f\n", &i, &w[0], &w[1], &w[2], &w[3]);
int values_read = fscanf(f, "%d %f %f %f %f\n", &i, &w[0], &w[1], &w[2], &w[3]);
CV_Assert(values_read == 5);
sprintf(img_file, "seqG/%04d.png", i);
image = imread(img_file, CV_LOAD_IMAGE_COLOR);
if (image.empty())
samples/cpp/opencv_version.cpp (new file, 28 lines)
@@ -0,0 +1,28 @@
#include "opencv2/core/core.hpp"
#include <iostream>

const char* keys =
{
"{ b |build |false | print complete build info }"
"{ h |help |false | print this help }"
};

int main(int argc, const char* argv[])
{
cv::CommandLineParser parser(argc, argv, keys);

if (parser.get<bool>("help"))
{
parser.printParams();
}
else if (parser.get<bool>("build"))
{
std::cout << cv::getBuildInformation() << std::endl;
}
else
{
std::cout << "OpenCV " << CV_VERSION << std::endl;
}

return 0;
}
@@ -11,7 +11,6 @@ const Scalar WHITE_COLOR = CV_RGB(255,255,255);
|
||||
const string winName = "points";
|
||||
const int testStep = 5;
|
||||
|
||||
|
||||
Mat img, imgDst;
|
||||
RNG rng;
|
||||
|
||||
@@ -19,16 +18,16 @@ vector<Point> trainedPoints;
|
||||
vector<int> trainedPointsMarkers;
|
||||
vector<Scalar> classColors;
|
||||
|
||||
#define NBC 0 // normal Bayessian classifier
|
||||
#define KNN 0 // k nearest neighbors classifier
|
||||
#define SVM 0 // support vectors machine
|
||||
#define DT 1 // decision tree
|
||||
#define BT 0 // ADA Boost
|
||||
#define GBT 0 // gradient boosted trees
|
||||
#define RF 0 // random forest
|
||||
#define ERT 0 // extremely randomized trees
|
||||
#define ANN 0 // artificial neural networks
|
||||
#define EM 0 // expectation-maximization
|
||||
#define _NBC_ 0 // normal Bayessian classifier
|
||||
#define _KNN_ 0 // k nearest neighbors classifier
|
||||
#define _SVM_ 0 // support vectors machine
|
||||
#define _DT_ 1 // decision tree
|
||||
#define _BT_ 0 // ADA Boost
|
||||
#define _GBT_ 0 // gradient boosted trees
|
||||
#define _RF_ 0 // random forest
|
||||
#define _ERT_ 0 // extremely randomized trees
|
||||
#define _ANN_ 0 // artificial neural networks
|
||||
#define _EM_ 0 // expectation-maximization
|
||||
|
||||
void on_mouse( int event, int x, int y, int /*flags*/, void* )
|
||||
{
|
||||
@@ -48,13 +47,13 @@ void on_mouse( int event, int x, int y, int /*flags*/, void* )
|
||||
}
|
||||
else if( event == CV_EVENT_RBUTTONUP )
|
||||
{
|
||||
#if BT
|
||||
#if _BT_
|
||||
if( classColors.size() < 2 )
|
||||
{
|
||||
#endif
|
||||
classColors.push_back( Scalar((uchar)rng(256), (uchar)rng(256), (uchar)rng(256)) );
|
||||
updateFlag = true;
|
||||
#if BT
|
||||
#if _BT_
|
||||
}
|
||||
else
|
||||
cout << "New class can not be added, because CvBoost can only be used for 2-class classification" << endl;
|
||||
@@ -98,7 +97,7 @@ void prepare_train_data( Mat& samples, Mat& classes )
|
||||
samples.convertTo( samples, CV_32FC1 );
|
||||
}
|
||||
|
||||
#if NBC
|
||||
#if _NBC_
|
||||
void find_decision_boundary_NBC()
|
||||
{
|
||||
img.copyTo( imgDst );
|
||||
@@ -125,7 +124,7 @@ void find_decision_boundary_NBC()
|
||||
#endif
|
||||
|
||||
|
||||
#if KNN
|
||||
#if _KNN_
|
||||
void find_decision_boundary_KNN( int K )
|
||||
{
|
||||
img.copyTo( imgDst );
|
||||
@@ -151,7 +150,7 @@ void find_decision_boundary_KNN( int K )
|
||||
}
|
||||
#endif
|
||||
|
||||
#if SVM
|
||||
#if _SVM_
|
||||
void find_decision_boundary_SVM( CvSVMParams params )
|
||||
{
|
||||
img.copyTo( imgDst );
|
||||
@@ -185,7 +184,7 @@ void find_decision_boundary_SVM( CvSVMParams params )
|
||||
}
|
||||
#endif
|
||||
|
||||
#if DT
|
||||
#if _DT_
|
||||
void find_decision_boundary_DT()
|
||||
{
|
||||
img.copyTo( imgDst );
|
||||
@@ -225,7 +224,7 @@ void find_decision_boundary_DT()
|
||||
}
|
||||
#endif
|
||||
|
||||
#if BT
|
||||
#if _BT_
|
||||
void find_decision_boundary_BT()
|
||||
{
|
||||
img.copyTo( imgDst );
|
||||
@@ -265,7 +264,7 @@ void find_decision_boundary_BT()
|
||||
|
||||
#endif
|
||||
|
||||
#if GBT
|
||||
#if _GBT_
|
||||
void find_decision_boundary_GBT()
|
||||
{
|
||||
img.copyTo( imgDst );
|
||||
@@ -305,7 +304,7 @@ void find_decision_boundary_GBT()
|
||||
|
||||
#endif
|
||||
|
||||
#if RF
|
||||
#if _RF_
|
||||
void find_decision_boundary_RF()
|
||||
{
|
||||
img.copyTo( imgDst );
|
||||
@@ -346,7 +345,7 @@ void find_decision_boundary_RF()
|
||||
|
||||
#endif
|
||||
|
||||
#if ERT
|
||||
#if _ERT_
|
||||
void find_decision_boundary_ERT()
|
||||
{
|
||||
img.copyTo( imgDst );
|
||||
@@ -390,7 +389,7 @@ void find_decision_boundary_ERT()
|
||||
}
|
||||
#endif
|
||||
|
||||
#if ANN
|
||||
#if _ANN_
|
||||
void find_decision_boundary_ANN( const Mat& layer_sizes )
|
||||
{
|
||||
img.copyTo( imgDst );
|
||||
@@ -435,7 +434,7 @@ void find_decision_boundary_ANN( const Mat& layer_sizes )
|
||||
}
|
||||
#endif
|
||||
|
||||
#if EM
|
||||
#if _EM_
|
||||
void find_decision_boundary_EM()
|
||||
{
|
||||
img.copyTo( imgDst );
|
||||
@@ -443,19 +442,12 @@ void find_decision_boundary_EM()
|
||||
Mat trainSamples, trainClasses;
|
||||
prepare_train_data( trainSamples, trainClasses );
|
||||
|
||||
CvEM em;
|
||||
CvEMParams params;
|
||||
params.covs = NULL;
|
||||
params.means = NULL;
|
||||
params.weights = NULL;
|
||||
params.probs = NULL;
|
||||
cv::EM em;
|
||||
cv::EM::Params params;
|
||||
params.nclusters = classColors.size();
|
||||
params.cov_mat_type = CvEM::COV_MAT_GENERIC;
|
||||
params.start_step = CvEM::START_AUTO_STEP;
|
||||
params.term_crit.max_iter = 10;
|
||||
params.term_crit.epsilon = 0.1;
|
||||
params.term_crit.type = CV_TERMCRIT_ITER | CV_TERMCRIT_EPS;
|
||||
|
||||
params.covMatType = cv::EM::COV_MAT_GENERIC;
|
||||
params.startStep = cv::EM::START_AUTO_STEP;
|
||||
params.termCrit = cv::TermCriteria(cv::TermCriteria::COUNT + cv::TermCriteria::COUNT, 10, 0.1);
|
||||
|
||||
// learn classifier
|
||||
em.train( trainSamples, Mat(), params, &trainClasses );
|
||||
@@ -509,12 +501,12 @@ int main()
|
||||
|
||||
if( key == 'r' ) // run
|
||||
{
|
||||
#if NBC
|
||||
#if _NBC_
|
||||
find_decision_boundary_NBC();
|
||||
cvNamedWindow( "NormalBayesClassifier", WINDOW_AUTOSIZE );
|
||||
imshow( "NormalBayesClassifier", imgDst );
|
||||
#endif
|
||||
#if KNN
|
||||
#if _KNN_
|
||||
int K = 3;
|
||||
find_decision_boundary_KNN( K );
|
||||
namedWindow( "kNN", WINDOW_AUTOSIZE );
|
||||
@@ -526,7 +518,7 @@ int main()
|
||||
imshow( "kNN2", imgDst );
|
||||
#endif
|
||||
|
||||
#if SVM
|
||||
#if _SVM_
|
||||
//(1)-(2)separable and not sets
|
||||
CvSVMParams params;
|
||||
params.svm_type = CvSVM::C_SVC;
|
||||
@@ -549,37 +541,37 @@ int main()
|
||||
imshow( "classificationSVM2", imgDst );
|
||||
#endif
|
||||
|
||||
#if DT
|
||||
#if _DT_
|
||||
find_decision_boundary_DT();
|
||||
namedWindow( "DT", WINDOW_AUTOSIZE );
|
||||
imshow( "DT", imgDst );
|
||||
#endif
|
||||
|
||||
#if BT
|
||||
#if _BT_
|
||||
find_decision_boundary_BT();
|
||||
namedWindow( "BT", WINDOW_AUTOSIZE );
|
||||
imshow( "BT", imgDst);
|
||||
#endif
|
||||
|
||||
#if GBT
|
||||
#if _GBT_
|
||||
find_decision_boundary_GBT();
|
||||
namedWindow( "GBT", WINDOW_AUTOSIZE );
|
||||
imshow( "GBT", imgDst);
|
||||
#endif
|
||||
|
||||
#if RF
|
||||
#if _RF_
|
||||
find_decision_boundary_RF();
|
||||
namedWindow( "RF", WINDOW_AUTOSIZE );
|
||||
imshow( "RF", imgDst);
|
||||
#endif
|
||||
|
||||
#if ERT
|
||||
#if _ERT_
|
||||
find_decision_boundary_ERT();
|
||||
namedWindow( "ERT", WINDOW_AUTOSIZE );
|
||||
imshow( "ERT", imgDst);
|
||||
#endif
|
||||
|
||||
#if ANN
|
||||
#if _ANN_
|
||||
Mat layer_sizes1( 1, 3, CV_32SC1 );
|
||||
layer_sizes1.at<int>(0) = 2;
|
||||
layer_sizes1.at<int>(1) = 5;
|
||||
@@ -589,7 +581,7 @@ int main()
|
||||
imshow( "ANN", imgDst );
|
||||
#endif
|
||||
|
||||
#if EM
|
||||
#if _EM_
|
||||
find_decision_boundary_EM();
|
||||
namedWindow( "EM", WINDOW_AUTOSIZE );
|
||||
imshow( "EM", imgDst );
|
||||
|
@@ -491,7 +491,8 @@ int main(int argc, char** argv)
if( outbarename )
{
cmd[6 + outbarename - outprefix] = '\0';
system(cmd);
int result = system(cmd);
CV_Assert(result == 0);
outbarename++;
}
else
@@ -21,7 +21,7 @@ if(BUILD_EXAMPLES AND OCV_DEPENDENCIES_FOUND)
ocv_include_directories(${CUDA_INCLUDE_DIRS})
endif()

if(CMAKE_COMPILER_IS_GNUCXX)
if(CMAKE_COMPILER_IS_GNUCXX AND NOT ENABLE_NOISY_WARNINGS)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-unused-function")
endif()
@@ -215,7 +215,6 @@ int main(int argc, const char* argv[])
switch (key)
{
case 27:
return 0;
break;

case 'A':