Added new Android sample (Java API + Native)

Andrey Kamaev 2011-06-29 13:17:02 +00:00
parent 4b61683071
commit d6b6734619
12 changed files with 384 additions and 0 deletions
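The sample's core mechanism is a thin JNI bridge: the Java side keeps OpenCV Mat objects, passes their native addresses across JNI as longs, and the C++ side casts them back to cv::Mat pointers. Below is a minimal sketch of that round trip, distilled from the files in this commit; FindFeatures, the "mixed_sample" library name, and getNativeObjAddr() come from the sample itself, while the wrapper class and onFrame() helper are illustrative scaffolding only.

package org.opencv.samples.s4;

import org.opencv.Mat;

// Abridged sketch; the full Sample4View class appears later in this commit.
class Sample4ViewSketch {
    // Declared in Java, implemented natively in jni_part.cpp. The C symbol,
    // Java_org_opencv_samples_s4_Sample4View_FindFeatures, encodes the declaring
    // package and class (Sample4View in the real sample).
    public native void FindFeatures(long matAddrGray, long matAddrRgba);

    static {
        // libmixed_sample.so, the LOCAL_MODULE built by Android.mk
        System.loadLibrary("mixed_sample");
    }

    void onFrame(Mat gray, Mat rgba) { // hypothetical helper, for illustration
        // Only the native Mat addresses cross the JNI boundary; pixel data is shared, not copied.
        FindFeatures(gray.getNativeObjAddr(), rgba.getNativeObjAddr());
    }
}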


@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
    <classpathentry kind="src" path="src"/>
    <classpathentry kind="src" path="gen"/>
    <classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
    <classpathentry kind="src" path="OpenCVJavaAPI_src"/>
    <classpathentry kind="output" path="bin"/>
</classpath>


@@ -0,0 +1,40 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
    <name>Sample 4 Mixed</name>
    <comment></comment>
    <projects>
    </projects>
    <buildSpec>
        <buildCommand>
            <name>com.android.ide.eclipse.adt.ResourceManagerBuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
        <buildCommand>
            <name>com.android.ide.eclipse.adt.PreCompilerBuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
        <buildCommand>
            <name>org.eclipse.jdt.core.javabuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
        <buildCommand>
            <name>com.android.ide.eclipse.adt.ApkBuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
    </buildSpec>
    <natures>
        <nature>com.android.ide.eclipse.adt.AndroidNature</nature>
        <nature>org.eclipse.jdt.core.javanature</nature>
    </natures>
    <linkedResources>
        <link>
            <name>OpenCVJavaAPI_src</name>
            <type>2</type>
            <locationURI>_android_OpenCVJavaAPI_583dbd7b/src</locationURI>
        </link>
    </linkedResources>
</projectDescription>


@@ -0,0 +1,5 @@
#Wed Jun 29 04:36:40 MSD 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5
org.eclipse.jdt.core.compiler.compliance=1.5
org.eclipse.jdt.core.compiler.source=1.5


@@ -0,0 +1,29 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
          package="org.opencv.samples.s4"
          android:versionCode="1"
          android:versionName="1.0">

    <supports-screens android:resizeable="true"
                      android:smallScreens="true"
                      android:normalScreens="true"
                      android:largeScreens="true"
                      android:anyDensity="true" />

    <application android:label="@string/app_name" android:icon="@drawable/icon">
        <activity android:name="Sample4Mixed"
                  android:label="@string/app_name">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>
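    <!-- The preview is driven directly through android.hardware.Camera (see Sample4View),
         hence the CAMERA permission and camera hardware features declared below. -->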
    <uses-sdk android:minSdkVersion="8" />

    <uses-permission android:name="android.permission.CAMERA"/>
    <uses-feature android:name="android.hardware.camera" />
    <uses-feature android:name="android.hardware.camera.autofocus" />
</manifest>


@@ -0,0 +1,12 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system use,
# "build.properties", and override values to adapt the script to your
# project structure.
# Project target.
target=android-8
android.library.reference.1=../../../android/build
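# The library reference above points at OpenCV's android/build directory, which is
# expected to contain the generated OpenCV Java API library project (see also the
# OpenCVJavaAPI_src link in the Eclipse project files above); adjust the relative
# path if the sample is moved.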


@@ -0,0 +1,17 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
OPENCV_CAMERA_MODULES:=off
OPENCV_MK_BUILD_PATH:=../../../../android/build/OpenCV.mk
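# Prefer OpenCV.mk from the OpenCV build tree if it is present at the relative
# path above; otherwise fall back to the copy installed with the prebuilt toolchain.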
ifeq ("$(wildcard $(OPENCV_MK_BUILD_PATH))","")
include $(TOOLCHAIN_PREBUILT_ROOT)/user/share/OpenCV/OpenCV.mk
else
include $(OPENCV_MK_BUILD_PATH)
endif
LOCAL_MODULE := mixed_sample
LOCAL_SRC_FILES := jni_part.cpp
LOCAL_LDLIBS += -llog -ldl
include $(BUILD_SHARED_LIBRARY)


@@ -0,0 +1,3 @@
APP_STL := gnustl_static
APP_CPPFLAGS := -frtti -fexceptions
APP_ABI := armeabi-v7a
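# gnustl_static supplies a full C++ STL, and -frtti/-fexceptions are needed because
# the OpenCV C++ headers rely on RTTI and exceptions; APP_ABI restricts the build
# to armeabi-v7a devices.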


@@ -0,0 +1,23 @@
#include <jni.h>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <vector>

using namespace std;
using namespace cv;

extern "C" {

// JNI naming convention: this implements the native method FindFeatures() declared
// in org.opencv.samples.s4.Sample4View. The jlong arguments carry native Mat
// addresses obtained via Mat.getNativeObjAddr() on the Java side.
JNIEXPORT void JNICALL Java_org_opencv_samples_s4_Sample4View_FindFeatures(JNIEnv* env, jobject thiz, jlong addrGray, jlong addrRgba)
{
    Mat* pMatGr  = (Mat*)addrGray;
    Mat* pMatRgb = (Mat*)addrRgba;
    vector<KeyPoint> v;

    // Detect FAST corners on the gray image and mark them on the RGBA frame.
    FastFeatureDetector detector(50);
    detector.detect(*pMatGr, v);
    for( size_t i = 0; i < v.size(); i++ )
        circle(*pMatRgb, Point(v[i].pt.x, v[i].pt.y), 10, Scalar(255,0,0,255));
}

}

Binary file not shown (added image, 5.6 KiB).


@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
    <string name="app_name">Sample 4: Mixed</string>
</resources>


@@ -0,0 +1,64 @@
package org.opencv.samples.s4;

import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;

public class Sample4Mixed extends Activity {
    private static final String TAG = "Sample4Mixed::Activity";

    public static final int VIEW_MODE_RGBA     = 0;
    public static final int VIEW_MODE_GRAY     = 1;
    public static final int VIEW_MODE_CANNY    = 2;
    public static final int VIEW_MODE_SOBEL    = 3;
    public static final int VIEW_MODE_BLUR     = 4;
    public static final int VIEW_MODE_FEATURES = 5;

    private MenuItem mItemPreviewRGBA;
    private MenuItem mItemPreviewGray;
    private MenuItem mItemPreviewCanny;
    private MenuItem mItemPreviewSobel;
    private MenuItem mItemPreviewBlur;
    private MenuItem mItemPreviewFeatures;

    public int viewMode;

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(new Sample4View(this));
        viewMode = VIEW_MODE_RGBA;
    }

    public boolean onCreateOptionsMenu(Menu menu) {
        mItemPreviewRGBA = menu.add("Preview RGBA");
        mItemPreviewGray = menu.add("Preview GRAY");
        mItemPreviewCanny = menu.add("Canny");
        mItemPreviewSobel = menu.add("Sobel");
        mItemPreviewBlur = menu.add("Blur");
        mItemPreviewFeatures = menu.add("Find features");
        return true;
    }

    public boolean onOptionsItemSelected(MenuItem item) {
        Log.i(TAG, "Menu Item selected " + item);
        if (item == mItemPreviewRGBA)
            viewMode = VIEW_MODE_RGBA;
        else if (item == mItemPreviewGray)
            viewMode = VIEW_MODE_GRAY;
        else if (item == mItemPreviewCanny)
            viewMode = VIEW_MODE_CANNY;
        else if (item == mItemPreviewSobel)
            viewMode = VIEW_MODE_SOBEL;
        else if (item == mItemPreviewBlur)
            viewMode = VIEW_MODE_BLUR;
        else if (item == mItemPreviewFeatures)
            viewMode = VIEW_MODE_FEATURES;
        return true;
    }
}


@@ -0,0 +1,179 @@
package org.opencv.samples.s4;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

import org.opencv.Mat;
import org.opencv.Size;
import org.opencv.core;
import org.opencv.imgproc;
import org.opencv.utils;

import java.util.List;

class Sample4View extends SurfaceView implements SurfaceHolder.Callback, Runnable {
    private static final String TAG = "Sample4Mixed::View";

    private Camera mCamera;
    private SurfaceHolder mHolder;
    private int mFrameWidth;
    private int mFrameHeight;
    private byte[] mFrame;
    private boolean mThreadRun;

    private Mat mYuv;
    private Mat mRgba;
    private Mat mGraySubmat;
    private Mat mIntermediateMat;

    public Sample4View(Context context) {
        super(context);
        mHolder = getHolder();
        mHolder.addCallback(this);
    }

    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
        if (mCamera != null) {
            Camera.Parameters params = mCamera.getParameters();
            List<Camera.Size> sizes = params.getSupportedPreviewSizes();
            mFrameWidth = width;
            mFrameHeight = height;

            // selecting optimal camera preview size
            {
                double minDiff = Double.MAX_VALUE;
                for (Camera.Size size : sizes) {
                    if (Math.abs(size.height - height) < minDiff) {
                        mFrameWidth = size.width;
                        mFrameHeight = size.height;
                        minDiff = Math.abs(size.height - height);
                    }
                }
            }

            params.setPreviewSize(mFrameWidth, mFrameHeight);
            mCamera.setParameters(params);
            mCamera.startPreview();

            // initialize all required Mats before usage to minimize number of auxiliary jni calls
            if (mYuv != null) mYuv.dispose();
            mYuv = new Mat(mFrameHeight + mFrameHeight / 2, mFrameWidth, Mat.CvType.CV_8UC1);

            if (mRgba != null) mRgba.dispose();
            mRgba = new Mat(mFrameHeight, mFrameWidth, Mat.CvType.CV_8UC4);

            if (mGraySubmat != null) mGraySubmat.dispose();
            mGraySubmat = mYuv.submat(0, mFrameHeight, 0, mFrameWidth);

            if (mIntermediateMat != null) mIntermediateMat.dispose();
            mIntermediateMat = new Mat(mFrameHeight, mFrameWidth, Mat.CvType.CV_8UC1);
        }
    }
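    // Note on the buffers above: the default Android preview format (NV21, a YUV 4:2:0
    // layout) packs W*H luma bytes followed by W*H/2 interleaved chroma bytes, i.e.
    // W*H*3/2 bytes per frame. Viewed as a single-channel Mat of (H + H/2) rows by W
    // columns, its first H rows are exactly the gray image, which is why
    // mGraySubmat = mYuv.submat(0, mFrameHeight, 0, mFrameWidth) needs no extra copy.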
    public void surfaceCreated(SurfaceHolder holder) {
        mCamera = Camera.open();
        mCamera.setPreviewCallback(new PreviewCallback() {
            public void onPreviewFrame(byte[] data, Camera camera) {
                synchronized (Sample4View.this) {
                    mFrame = data;
                    Sample4View.this.notify();
                }
            }
        });
        (new Thread(this)).start();
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        mThreadRun = false;
        if (mCamera != null) {
            synchronized (Sample4View.this) {
                mCamera.stopPreview();
                mCamera.setPreviewCallback(null);
                mCamera.release();
                mCamera = null;
            }
        }

        // Explicitly dispose Mats
        if (mYuv != null) {
            mYuv.dispose();
            mYuv = null;
        }
        if (mRgba != null) {
            mRgba.dispose();
            mRgba = null;
        }
        if (mGraySubmat != null) {
            mGraySubmat.dispose();
            mGraySubmat = null;
        }
        if (mIntermediateMat != null) {
            mIntermediateMat.dispose();
            mIntermediateMat = null;
        }
    }

    public void run() {
        mThreadRun = true;
        Log.i(TAG, "Starting thread");
        while (mThreadRun) {
            synchronized (this) {
                try {
                    this.wait();
                    mYuv.put(0, 0, mFrame);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }

            Sample4Mixed a = (Sample4Mixed) getContext();
            switch (a.viewMode) {
            case Sample4Mixed.VIEW_MODE_GRAY:
                imgproc.cvtColor(mGraySubmat, mRgba, imgproc.CV_GRAY2RGBA, 4);
                break;
            case Sample4Mixed.VIEW_MODE_RGBA:
                imgproc.cvtColor(mYuv, mRgba, imgproc.CV_YUV420i2RGB, 4);
                break;
            case Sample4Mixed.VIEW_MODE_CANNY:
                imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100);
                imgproc.cvtColor(mIntermediateMat, mRgba, imgproc.CV_GRAY2BGRA, 4);
                break;
            case Sample4Mixed.VIEW_MODE_SOBEL:
                imgproc.Sobel(mGraySubmat, mIntermediateMat, core.CV_8U, 1, 1);
                core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 8);
                imgproc.cvtColor(mIntermediateMat, mRgba, imgproc.CV_GRAY2BGRA, 4);
                break;
            case Sample4Mixed.VIEW_MODE_BLUR:
                imgproc.cvtColor(mYuv, mRgba, imgproc.CV_YUV420i2RGB, 4);
                imgproc.blur(mRgba, mRgba, new Size(15, 15));
                break;
            case Sample4Mixed.VIEW_MODE_FEATURES:
                imgproc.cvtColor(mYuv, mRgba, imgproc.CV_YUV420i2RGB, 4);
                FindFeatures(mGraySubmat.getNativeObjAddr(), mRgba.getNativeObjAddr());
                break;
            }

            Bitmap bmp = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
            utils.MatToBitmap(mRgba, bmp);

            Canvas canvas = mHolder.lockCanvas();
            canvas.drawBitmap(bmp, (canvas.getWidth() - mFrameWidth) / 2, (canvas.getHeight() - mFrameHeight) / 2, null);
            mHolder.unlockCanvasAndPost(canvas);
        }
    }

    public native void FindFeatures(long matAddrGr, long matAddrRgba);

    static {
        System.loadLibrary("mixed_sample");
    }
}