From 8e088d38a50f381285102fb687934b62cc261b3b Mon Sep 17 00:00:00 2001
From: Andrey Pavlenko
Date: Sat, 19 Sep 2015 16:16:13 +0300
Subject: [PATCH 1/5] draft implementation of alternative CameraBridge via GLES

a simple sample will look like:

```java
public class MainActivity extends Activity implements CameraGLSurfaceView.CameraTextureListener {
    CameraGLSurfaceView mView;
    ByteBuffer buf;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON, WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);

        mView = new CameraGLSurfaceView(this, null);
        mView.setCameraTextureListener(this);
        setContentView(mView);
        buf = ByteBuffer.allocateDirect(1920*1080*4);
    }

    @Override
    protected void onPause() {
        mView.onPause();
        super.onPause();
    }

    @Override
    protected void onResume() {
        super.onResume();
        mView.onResume();
    }

    @Override
    public void onCameraViewStarted(int width, int height) {
        // TODO Auto-generated method stub
    }

    @Override
    public void onCameraViewStopped() {
        // TODO Auto-generated method stub
    }

    @Override
    public boolean onCameraFrame(int texIn, int texOut, int width, int height) {
        Log.i("MAIN", "onCameraFrame");
        int w=width, h=height;

        /*
        // option 1:
        // just return 'false' to display texIn on screen
        return false;
        */

        /*
        // option 2:
        // fast copy texIn to texOut
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texOut);
        GLES20.glCopyTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, 0, 0, w, h, 0);
        return true;
        */

        // option 3:
        // read, modify and write back pixels
        GLES20.glReadPixels(0, 0, w, h, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
        buf.rewind();
        // red line
        for(int i=0; i= 21)
```
+            mRenderer = new Camera2Renderer(this);
+        else*/
+            mRenderer = new CameraRenderer(this);
+
+        setEGLContextClientVersion(2);
+        setRenderer(mRenderer);
+        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
+    }
+
+    public void setCameraTextureListener(CameraTextureListener texListener)
+    {
+        mTexListener = texListener;
+    }
+
+    public CameraTextureListener getCameraTextureListener()
+    {
+        return mTexListener;
+    }
+
+    @Override
+    public void surfaceCreated(SurfaceHolder holder) {
+        super.surfaceCreated(holder);
+    }
+
+    @Override
+    public void surfaceDestroyed(SurfaceHolder holder) {
+        super.surfaceDestroyed(holder);
+    }
+
+    @Override
+    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
+        super.surfaceChanged(holder, format, w, h);
+    }
+
+    @Override
+    public void onResume() {
+        super.onResume();
+        mRenderer.onResume();
+    }
+
+    @Override
+    public void onPause() {
+        mRenderer.onPause();
+        super.onPause();
+    }
+
+    @Override
+    public boolean onTouchEvent(MotionEvent e) {
+        if(e.getAction() == MotionEvent.ACTION_DOWN)
+            ((Activity)getContext()).openOptionsMenu();
+        return true;
+    }
+}
diff --git a/modules/java/generator/src/java/android+CameraRenderer.java b/modules/java/generator/src/java/android+CameraRenderer.java
new file mode 100644
index 000000000..ae8edda01
--- /dev/null
+++ b/modules/java/generator/src/java/android+CameraRenderer.java
@@ -0,0 +1,353 @@
+package org.opencv.android;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+import java.nio.IntBuffer;
+import
java.util.List; + +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.opengles.GL10; + +import org.opencv.android.CameraGLSurfaceView.CameraTextureListener; + +import android.opengl.GLES11Ext; +import android.opengl.GLES20; +import android.opengl.GLSurfaceView; +import android.os.Build; +import android.util.Log; +import android.annotation.TargetApi; +import android.graphics.SurfaceTexture; +import android.hardware.Camera; +import android.hardware.Camera.Size; + +@TargetApi(15) +public class CameraRenderer implements GLSurfaceView.Renderer, + SurfaceTexture.OnFrameAvailableListener { + + public static final String LOGTAG = "CameraRenderer"; + + // shaders + private final String vss = "" + + "attribute vec2 vPosition;\n" + + "attribute vec2 vTexCoord;\n" + "varying vec2 texCoord;\n" + + "void main() {\n" + " texCoord = vTexCoord;\n" + + " gl_Position = vec4 ( vPosition.x, vPosition.y, 0.0, 1.0 );\n" + + "}"; + + private final String fssOES = "" + + "#extension GL_OES_EGL_image_external : require\n" + + "precision mediump float;\n" + + "uniform samplerExternalOES sTexture;\n" + + "varying vec2 texCoord;\n" + + "void main() {\n" + + " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}"; + + private final String fss2D = "" + + "precision mediump float;\n" + + "uniform sampler2D sTexture;\n" + + "varying vec2 texCoord;\n" + + "void main() {\n" + + " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}"; + + // coord-s + private final float vertices[] = { + -1, -1, + -1, 1, + 1, -1, + 1, 1 }; + private final float texCoordOES[] = { + 0, 1, + 0, 0, + 1, 1, + 1, 0 }; + private final float texCoord2D[] = { + 0, 0, + 0, 1, + 1, 0, + 1, 1 }; + + private int[] texCamera = {0}, texFBO = {0}, texDraw = {0}; + private int[] FBO = {0}; + private int progOES, prog2D; + private int vPosOES, vTCOES, vPos2D, vTC2D; + + private FloatBuffer vert, texOES, tex2D; + + private Camera mCamera; + private boolean mPreviewStarted = false; + private int mPreviewWidth, mPreviewHeight; + + private SurfaceTexture mSTexture; + + private boolean mGLInit = false; + private boolean mUpdateST = false; + + private CameraGLSurfaceView mView; + + CameraRenderer(CameraGLSurfaceView view) { + mView = view; + int bytes = vertices.length * Float.SIZE / Byte.SIZE; + vert = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer(); + texOES = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer(); + tex2D = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer(); + vert.put(vertices).position(0); + texOES.put(texCoordOES).position(0); + tex2D.put(texCoord2D).position(0); + } + + public void onResume() { + //nothing + Log.i(LOGTAG, "onResume"); + } + + public void onPause() { + Log.i(LOGTAG, "onPause"); + mGLInit = false; + mUpdateST = false; + + if(mCamera != null) { + mCamera.stopPreview(); + mCamera.release(); + mCamera = null; + } + + if(mSTexture != null) { + mSTexture.release(); + mSTexture = null; + deleteTex(texCamera); + } + } + + public void onSurfaceCreated(GL10 unused, EGLConfig config) { + GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f); + + progOES = loadShader(vss, fssOES); + vPosOES = GLES20.glGetAttribLocation(progOES, "vPosition"); + vTCOES = GLES20.glGetAttribLocation(progOES, "vTexCoord"); + GLES20.glEnableVertexAttribArray(vPosOES); + GLES20.glEnableVertexAttribArray(vTCOES); + + prog2D = loadShader(vss, fss2D); + vPos2D = GLES20.glGetAttribLocation(prog2D, "vPosition"); + vTC2D = GLES20.glGetAttribLocation(prog2D, 
"vTexCoord"); + GLES20.glEnableVertexAttribArray(vPos2D); + GLES20.glEnableVertexAttribArray(vTC2D); + + initTexOES(texCamera); + mSTexture = new SurfaceTexture(texCamera[0]); + mSTexture.setOnFrameAvailableListener(this); + + mCamera = Camera.open(); + try { + mCamera.setPreviewTexture(mSTexture); + } catch (IOException ioe) { + } + + mGLInit = true; + } + + public void onDrawFrame(GL10 unused) { + if (!mGLInit) + return; + GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); + + synchronized (this) { + if (mUpdateST) { + mSTexture.updateTexImage(); + mUpdateST = false; + } + } + + CameraTextureListener texListener = mView.getCameraTextureListener(); + if(texListener != null) { + Log.d(LOGTAG, "haveUserCallback"); + // texCamera(OES) -> texFBO + drawTex(texCamera[0], true, FBO[0]); + + // call user code (texFBO -> texDraw) + boolean modified = texListener.onCameraFrame(texFBO[0], texDraw[0], mPreviewWidth, mPreviewHeight); + + if(modified) { + // texDraw -> screen + drawTex(texDraw[0], false, 0); + } else { + // texFBO -> screen + drawTex(texFBO[0], false, 0); + } + } else { + // texCamera(OES) -> screen + drawTex(texCamera[0], true, 0); + } + } + + public void onSurfaceChanged(GL10 unused, int width, int height) { + Log.i(LOGTAG, "onSurfaceChanged("+width+"x"+height+")"); + + if(mCamera == null) + return; + if(mPreviewStarted) { + mCamera.stopPreview(); + mPreviewStarted = false; + } + + Camera.Parameters param = mCamera.getParameters(); + List psize = param.getSupportedPreviewSizes(); + int bestWidth = 0, bestHeight = 0; + if (psize.size() > 0) { + float aspect = (float)width / height; + for (Size size : psize) { + int w = size.width, h = size.height; + Log.d(LOGTAG, "checking camera preview size: "+w+"x"+h); + if ( w <= width && h <= height && + w >= bestWidth && h >= bestHeight && + Math.abs(aspect - (float)w/h) < 0.2 ) { + bestWidth = w; + bestHeight = h; + } + } + if(bestWidth > 0 && bestHeight > 0) { + param.setPreviewSize(bestWidth, bestHeight); + Log.i(LOGTAG, "selected size: "+bestWidth+" x "+bestHeight); + + GLES20.glViewport(0, 0, bestWidth, bestWidth); + initFBO(bestWidth, bestHeight); + mPreviewWidth = bestWidth; + mPreviewHeight = bestHeight; + } + } + //param.set("orientation", "landscape"); + mCamera.setParameters(param); + mCamera.startPreview(); + mPreviewStarted = true; + } + + public synchronized void onFrameAvailable(SurfaceTexture st) { + mUpdateST = true; + mView.requestRender(); + } + + private void initTexOES(int[] tex) { + if(tex.length == 1) { + GLES20.glGenTextures(1, tex, 0); + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST); + } + } + + private void deleteTex(int[] tex) { + if(tex.length == 1) { + GLES20.glDeleteTextures(1, tex, 0); + } + } + + private static int loadShader(String vss, String fss) { + int vshader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER); + GLES20.glShaderSource(vshader, vss); + GLES20.glCompileShader(vshader); + int[] compiled = new int[1]; + GLES20.glGetShaderiv(vshader, GLES20.GL_COMPILE_STATUS, compiled, 0); + if (compiled[0] == 0) { + Log.e(LOGTAG, "Could not compile vertex shader"); + 
Log.v(LOGTAG, "Could not compile vertex shader:"+GLES20.glGetShaderInfoLog(vshader)); + GLES20.glDeleteShader(vshader); + vshader = 0; + } + + int fshader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER); + GLES20.glShaderSource(fshader, fss); + GLES20.glCompileShader(fshader); + GLES20.glGetShaderiv(fshader, GLES20.GL_COMPILE_STATUS, compiled, 0); + if (compiled[0] == 0) { + Log.e("Renderer", "Could not compile fragment shader"); + Log.v("Renderer", "Could not compile fragment shader:"+GLES20.glGetShaderInfoLog(fshader)); + GLES20.glDeleteShader(fshader); + fshader = 0; + } + + int program = GLES20.glCreateProgram(); + GLES20.glAttachShader(program, vshader); + GLES20.glAttachShader(program, fshader); + GLES20.glLinkProgram(program); + + return program; + } + + private void releaseFBO() + { + + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); + GLES20.glDeleteFramebuffers(1, FBO, 0); + + deleteTex(texFBO); + deleteTex(texDraw); + } + + private void initFBO(int width, int height) + { + Log.d(LOGTAG, "initFBO("+width+"x"+height+")"); + releaseFBO(); + + GLES20.glGenTextures(1, texDraw, 0); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texDraw[0]); + GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST); + + GLES20.glGenTextures(1, texFBO, 0); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texFBO[0]); + GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST); + + //int hFBO; + GLES20.glGenFramebuffers(1, FBO, 0); + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, FBO[0]); + GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, texFBO[0], 0); + Log.d(LOGTAG, "initFBO status: " + GLES20.glGetError()); + + if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE) + Log.e(LOGTAG, "initFBO failed: " + GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER)); + + //GLES20.glViewport(0, 0, width, height); + } + + // draw texture to FBO or to screen if fbo == 0 + private void drawTex(int tex, boolean isOES, int fbo) + { + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo); + GLES20.glViewport(0, 0, mPreviewWidth, mPreviewHeight); + GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); + + if(isOES) { + GLES20.glUseProgram(progOES); + GLES20.glVertexAttribPointer(vPosOES, 2, GLES20.GL_FLOAT, false, 4*2, vert); + GLES20.glVertexAttribPointer(vTCOES, 2, GLES20.GL_FLOAT, false, 4*2, texOES); + } else { + GLES20.glUseProgram(prog2D); + GLES20.glVertexAttribPointer(vPos2D, 2, GLES20.GL_FLOAT, false, 4*2, vert); + GLES20.glVertexAttribPointer(vTC2D, 2, GLES20.GL_FLOAT, false, 4*2, tex2D); + } + + 
GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + + if(isOES) { + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex); + GLES20.glUniform1i(GLES20.glGetUniformLocation(progOES, "sTexture"), 0); + } else { + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex); + GLES20.glUniform1i(GLES20.glGetUniformLocation(prog2D, "sTexture"), 0); + } + + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + GLES20.glFlush(); + } +} \ No newline at end of file From 15db8243efd24ea35ed8151425a399a9ad1a712a Mon Sep 17 00:00:00 2001 From: Andrey Pavlenko Date: Wed, 7 Oct 2015 11:28:57 +0300 Subject: [PATCH 2/5] refactored; added Camera2, notify callbacks, front/back maxCamera sizes; disable new stuff if target API < 21 --- modules/java/CMakeLists.txt | 11 + .../src/java/android+Camera2Renderer.java | 302 +++++++++++++ .../java/android+CameraGLRendererBase.java | 424 ++++++++++++++++++ .../src/java/android+CameraGLSurfaceView.java | 55 ++- .../src/java/android+CameraRenderer.java | 401 +++++------------ 5 files changed, 882 insertions(+), 311 deletions(-) create mode 100644 modules/java/generator/src/java/android+Camera2Renderer.java create mode 100644 modules/java/generator/src/java/android+CameraGLRendererBase.java diff --git a/modules/java/CMakeLists.txt b/modules/java/CMakeLists.txt index 17d190781..3906040df 100644 --- a/modules/java/CMakeLists.txt +++ b/modules/java/CMakeLists.txt @@ -181,6 +181,17 @@ else() list(REMOVE_ITEM handwrittren_lib_project_files_rel "${ANDROID_MANIFEST_FILE}") endif() +# Calc default SDK Target +android_get_compatible_target(android_sdk_target ${ANDROID_NATIVE_API_LEVEL} ${ANDROID_SDK_TARGET} 11) +string(REGEX REPLACE "android-" "" android_sdk_target_num ${android_sdk_target}) + +if( (ANDROID_SDK_TARGET AND ANDROID_SDK_TARGET LESS 21) OR (android_sdk_target_num LESS 21) ) + message(STATUS "[OpenCV for Android SDK]: A new OpenGL Camera Bridge (CameraGLSurfaceView, CameraGLRendererBase, CameraRenderer, Camera2Renderer) is disabled, because ANDROID_SDK_TARGET (${android_sdk_target_num}) < 21") + ocv_list_filterout(handwritten_java_sources "android\\\\+CameraGL") + ocv_list_filterout(handwritten_java_sources "android\\\\+Camera.?Renderer") +endif() + + # IMPORTANT: add dependencies to cmake (we should rerun cmake if any of these files is modified) add_cmake_dependencies(${scripts_gen_java} ${scripts_hdr_parser} ${opencv_public_headers}) diff --git a/modules/java/generator/src/java/android+Camera2Renderer.java b/modules/java/generator/src/java/android+Camera2Renderer.java new file mode 100644 index 000000000..408214057 --- /dev/null +++ b/modules/java/generator/src/java/android+Camera2Renderer.java @@ -0,0 +1,302 @@ +package org.opencv.android; + +import java.util.Arrays; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; +import android.annotation.TargetApi; +import android.content.Context; +import android.graphics.SurfaceTexture; +import android.hardware.camera2.CameraAccessException; +import android.hardware.camera2.CameraCaptureSession; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CameraDevice; +import android.hardware.camera2.CameraManager; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.params.StreamConfigurationMap; +import android.os.Handler; +import android.os.HandlerThread; +import android.util.Log; +import android.util.Size; +import android.view.Surface; + +@TargetApi(21) +public class Camera2Renderer extends CameraGLRendererBase { + + protected final String LOGTAG 
= "Camera2Renderer"; + private CameraDevice mCameraDevice; + private CameraCaptureSession mCaptureSession; + private CaptureRequest.Builder mPreviewRequestBuilder; + private String mCameraID; + private Size mPreviewSize = new Size(-1, -1); + + private HandlerThread mBackgroundThread; + private Handler mBackgroundHandler; + private Semaphore mCameraOpenCloseLock = new Semaphore(1); + + Camera2Renderer(CameraGLSurfaceView view) { + super(view); + } + + @Override + protected void doStart() { + Log.d(LOGTAG, "doStart"); + startBackgroundThread(); + super.doStart(); + } + + + @Override + protected void doStop() { + Log.d(LOGTAG, "doStop"); + super.doStop(); + stopBackgroundThread(); + } + + boolean cacPreviewSize(final int width, final int height) { + Log.i(LOGTAG, "cacPreviewSize: "+width+"x"+height); + if(mCameraID == null) { + Log.e(LOGTAG, "Camera isn't initialized!"); + return false; + } + CameraManager manager = (CameraManager) mView.getContext() + .getSystemService(Context.CAMERA_SERVICE); + try { + CameraCharacteristics characteristics = manager + .getCameraCharacteristics(mCameraID); + StreamConfigurationMap map = characteristics + .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); + int bestWidth = 0, bestHeight = 0; + float aspect = (float)width / height; + for (Size psize : map.getOutputSizes(SurfaceTexture.class)) { + int w = psize.getWidth(), h = psize.getHeight(); + Log.d(LOGTAG, "trying size: "+w+"x"+h); + if ( width >= w && height >= h && + bestWidth <= w && bestHeight <= h && + Math.abs(aspect - (float)w/h) < 0.2 ) { + bestWidth = w; + bestHeight = h; + } + } + Log.i(LOGTAG, "best size: "+bestWidth+"x"+bestHeight); + if( bestWidth == 0 || bestHeight == 0 || + mPreviewSize.getWidth() == bestWidth && + mPreviewSize.getHeight() == bestHeight ) + return false; + else { + mPreviewSize = new Size(bestWidth, bestHeight); + return true; + } + } catch (CameraAccessException e) { + Log.e(LOGTAG, "cacPreviewSize - Camera Access Exception"); + } catch (IllegalArgumentException e) { + Log.e(LOGTAG, "cacPreviewSize - Illegal Argument Exception"); + } catch (SecurityException e) { + Log.e(LOGTAG, "cacPreviewSize - Security Exception"); + } + return false; + } + + @Override + protected void openCamera(int id) { + Log.i(LOGTAG, "openCamera"); + CameraManager manager = (CameraManager) mView.getContext().getSystemService(Context.CAMERA_SERVICE); + try { + String camList[] = manager.getCameraIdList(); + if(camList.length == 0) { + Log.e(LOGTAG, "Error: camera isn't detected."); + return; + } + if(id == CameraBridgeViewBase.CAMERA_ID_ANY) { + mCameraID = camList[0]; + } else { + for (String cameraID : camList) { + CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraID); + if( id == CameraBridgeViewBase.CAMERA_ID_BACK && + characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK || + id == CameraBridgeViewBase.CAMERA_ID_FRONT && + characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) { + mCameraID = cameraID; + break; + } + } + } + if(mCameraID != null) { + if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) { + throw new RuntimeException( + "Time out waiting to lock camera opening."); + } + Log.i(LOGTAG, "Opening camera: " + mCameraID); + manager.openCamera(mCameraID, mStateCallback, mBackgroundHandler); + } + } catch (CameraAccessException e) { + Log.e(LOGTAG, "OpenCamera - Camera Access Exception"); + } catch (IllegalArgumentException e) { + Log.e(LOGTAG, 
"OpenCamera - Illegal Argument Exception"); + } catch (SecurityException e) { + Log.e(LOGTAG, "OpenCamera - Security Exception"); + } catch (InterruptedException e) { + Log.e(LOGTAG, "OpenCamera - Interrupted Exception"); + } + } + + @Override + protected void closeCamera() { + Log.i(LOGTAG, "closeCamera"); + try { + mCameraOpenCloseLock.acquire(); + if (null != mCaptureSession) { + mCaptureSession.close(); + mCaptureSession = null; + } + if (null != mCameraDevice) { + mCameraDevice.close(); + mCameraDevice = null; + } + } catch (InterruptedException e) { + throw new RuntimeException("Interrupted while trying to lock camera closing.", e); + } finally { + mCameraOpenCloseLock.release(); + } + } + + private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() { + + @Override + public void onOpened(CameraDevice cameraDevice) { + mCameraDevice = cameraDevice; + mCameraOpenCloseLock.release(); + createCameraPreviewSession(); + } + + @Override + public void onDisconnected(CameraDevice cameraDevice) { + cameraDevice.close(); + mCameraDevice = null; + mCameraOpenCloseLock.release(); + } + + @Override + public void onError(CameraDevice cameraDevice, int error) { + cameraDevice.close(); + mCameraDevice = null; + mCameraOpenCloseLock.release(); + } + + }; + + private void createCameraPreviewSession() { + int w=mPreviewSize.getWidth(), h=mPreviewSize.getHeight(); + Log.i(LOGTAG, "createCameraPreviewSession("+w+"x"+h+")"); + if(w<0 || h<0) + return; + try { + mCameraOpenCloseLock.acquire(); + if (null == mCameraDevice) { + mCameraOpenCloseLock.release(); + Log.e(LOGTAG, "createCameraPreviewSession: camera isn't opened"); + return; + } + if (null != mCaptureSession) { + mCameraOpenCloseLock.release(); + Log.e(LOGTAG, "createCameraPreviewSession: mCaptureSession is already started"); + return; + } + if(null == mSTexture) { + mCameraOpenCloseLock.release(); + Log.e(LOGTAG, "createCameraPreviewSession: preview SurfaceTexture is null"); + return; + } + mSTexture.setDefaultBufferSize(w, h); + + Surface surface = new Surface(mSTexture); + + mPreviewRequestBuilder = mCameraDevice + .createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); + mPreviewRequestBuilder.addTarget(surface); + + mCameraDevice.createCaptureSession(Arrays.asList(surface), + new CameraCaptureSession.StateCallback() { + @Override + public void onConfigured( CameraCaptureSession cameraCaptureSession) { + mCaptureSession = cameraCaptureSession; + try { + mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); + mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); + + mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, mBackgroundHandler); + Log.i(LOGTAG, "CameraPreviewSession has been started"); + } catch (CameraAccessException e) { + Log.e(LOGTAG, "createCaptureSession failed"); + } + mCameraOpenCloseLock.release(); + } + + @Override + public void onConfigureFailed( + CameraCaptureSession cameraCaptureSession) { + Log.e(LOGTAG, "createCameraPreviewSession failed"); + mCameraOpenCloseLock.release(); + } + }, mBackgroundHandler); + } catch (CameraAccessException e) { + Log.e(LOGTAG, "createCameraPreviewSession"); + } catch (InterruptedException e) { + throw new RuntimeException( + "Interrupted while createCameraPreviewSession", e); + } + finally { + //mCameraOpenCloseLock.release(); + } + } + + private void startBackgroundThread() { + Log.i(LOGTAG, "startBackgroundThread"); + 
stopBackgroundThread(); + mBackgroundThread = new HandlerThread("CameraBackground"); + mBackgroundThread.start(); + mBackgroundHandler = new Handler(mBackgroundThread.getLooper()); + } + + private void stopBackgroundThread() { + Log.i(LOGTAG, "stopBackgroundThread"); + if(mBackgroundThread == null) + return; + mBackgroundThread.quitSafely(); + try { + mBackgroundThread.join(); + mBackgroundThread = null; + mBackgroundHandler = null; + } catch (InterruptedException e) { + Log.e(LOGTAG, "stopBackgroundThread"); + } + } + + @Override + protected void setCameraPreviewSize(int width, int height) { + Log.i(LOGTAG, "setCameraPreviewSize("+width+"x"+height+")"); + if(mMaxCameraWidth > 0 && mMaxCameraWidth < width) width = mMaxCameraWidth; + if(mMaxCameraHeight > 0 && mMaxCameraHeight < height) height = mMaxCameraHeight; + try { + mCameraOpenCloseLock.acquire(); + + boolean needReconfig = cacPreviewSize(width, height); + mCameraWidth = mPreviewSize.getWidth(); + mCameraHeight = mPreviewSize.getHeight(); + + if( !needReconfig ) { + mCameraOpenCloseLock.release(); + return; + } + if (null != mCaptureSession) { + Log.d(LOGTAG, "closing existing previewSession"); + mCaptureSession.close(); + mCaptureSession = null; + } + mCameraOpenCloseLock.release(); + createCameraPreviewSession(); + } catch (InterruptedException e) { + mCameraOpenCloseLock.release(); + throw new RuntimeException("Interrupted while setCameraPreviewSize.", e); + } + } +} diff --git a/modules/java/generator/src/java/android+CameraGLRendererBase.java b/modules/java/generator/src/java/android+CameraGLRendererBase.java new file mode 100644 index 000000000..3971d0cb4 --- /dev/null +++ b/modules/java/generator/src/java/android+CameraGLRendererBase.java @@ -0,0 +1,424 @@ +package org.opencv.android; + +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.FloatBuffer; + +import javax.microedition.khronos.egl.EGLConfig; +import javax.microedition.khronos.opengles.GL10; + +import org.opencv.android.CameraGLSurfaceView.CameraTextureListener; + +import android.annotation.TargetApi; +import android.graphics.SurfaceTexture; +import android.opengl.GLES11Ext; +import android.opengl.GLES20; +import android.opengl.GLSurfaceView; +import android.util.Log; +import android.view.View; + +@TargetApi(15) +public abstract class CameraGLRendererBase implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener { + + protected final String LOGTAG = "CameraGLRendererBase"; + + // shaders + private final String vss = "" + + "attribute vec2 vPosition;\n" + + "attribute vec2 vTexCoord;\n" + "varying vec2 texCoord;\n" + + "void main() {\n" + " texCoord = vTexCoord;\n" + + " gl_Position = vec4 ( vPosition.x, vPosition.y, 0.0, 1.0 );\n" + + "}"; + + private final String fssOES = "" + + "#extension GL_OES_EGL_image_external : require\n" + + "precision mediump float;\n" + + "uniform samplerExternalOES sTexture;\n" + + "varying vec2 texCoord;\n" + + "void main() {\n" + + " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}"; + + private final String fss2D = "" + + "precision mediump float;\n" + + "uniform sampler2D sTexture;\n" + + "varying vec2 texCoord;\n" + + "void main() {\n" + + " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}"; + + // coord-s + private final float vertices[] = { + -1, -1, + -1, 1, + 1, -1, + 1, 1 }; + private final float texCoordOES[] = { + 0, 1, + 0, 0, + 1, 1, + 1, 0 }; + private final float texCoord2D[] = { + 0, 0, + 0, 1, + 1, 0, + 1, 1 }; + + private int[] texCamera = {0}, texFBO = {0}, texDraw = {0}; + 
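+    // texCamera: OES texture fed by the camera SurfaceTexture
+    // texFBO:    FBO color attachment handed to the user callback as texIn
+    // texDraw:   texture the user callback may fill (texOut) to be shown on screen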
private int[] FBO = {0}; + private int progOES = -1, prog2D = -1; + private int vPosOES, vTCOES, vPos2D, vTC2D; + + private FloatBuffer vert, texOES, tex2D; + + protected int mCameraWidth = -1, mCameraHeight = -1; + protected int mFBOWidth = -1, mFBOHeight = -1; + protected int mMaxCameraWidth = -1, mMaxCameraHeight = -1; + protected int mCameraIndex = CameraBridgeViewBase.CAMERA_ID_ANY; + + protected SurfaceTexture mSTexture; + + protected boolean mHaveSurface = false; + protected boolean mHaveFBO = false; + protected boolean mUpdateST = false; + protected boolean mEnabled = true; + protected boolean mIsStarted = false; + + protected CameraGLSurfaceView mView; + + protected abstract void openCamera(int id); + protected abstract void closeCamera(); + protected abstract void setCameraPreviewSize(int width, int height); // updates mCameraWidth & mCameraHeight + + public CameraGLRendererBase(CameraGLSurfaceView view) { + mView = view; + int bytes = vertices.length * Float.SIZE / Byte.SIZE; + vert = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer(); + texOES = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer(); + tex2D = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer(); + vert.put(vertices).position(0); + texOES.put(texCoordOES).position(0); + tex2D.put(texCoord2D).position(0); + } + + @Override + public synchronized void onFrameAvailable(SurfaceTexture surfaceTexture) { + //Log.i(LOGTAG, "onFrameAvailable"); + mUpdateST = true; + mView.requestRender(); + } + + @Override + public void onDrawFrame(GL10 gl) { + //Log.i(LOGTAG, "onDrawFrame start"); + + if (!mHaveFBO) + return; + + synchronized(this) { + if (mUpdateST) { + mSTexture.updateTexImage(); + mUpdateST = false; + } + + GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); + + CameraTextureListener texListener = mView.getCameraTextureListener(); + if(texListener != null) { + //Log.d(LOGTAG, "haveUserCallback"); + // texCamera(OES) -> texFBO + drawTex(texCamera[0], true, FBO[0]); + + // call user code (texFBO -> texDraw) + boolean modified = texListener.onCameraTexture(texFBO[0], texDraw[0], mCameraWidth, mCameraHeight); + + if(modified) { + // texDraw -> screen + drawTex(texDraw[0], false, 0); + } else { + // texFBO -> screen + drawTex(texFBO[0], false, 0); + } + } else { + Log.d(LOGTAG, "texCamera(OES) -> screen"); + // texCamera(OES) -> screen + drawTex(texCamera[0], true, 0); + } + //Log.i(LOGTAG, "onDrawFrame end"); + } + } + + @Override + public void onSurfaceChanged(GL10 gl, int surfaceWidth, int surfaceHeight) { + Log.i(LOGTAG, "onSurfaceChanged("+surfaceWidth+"x"+surfaceHeight+")"); + mHaveSurface = true; + updateState(); + setPreviewSize(surfaceWidth, surfaceHeight); + } + + @Override + public void onSurfaceCreated(GL10 gl, EGLConfig config) { + Log.i(LOGTAG, "onSurfaceCreated"); + initShaders(); + } + + private void initShaders() { + String strGLVersion = GLES20.glGetString(GLES20.GL_VERSION); + if (strGLVersion != null) + Log.i(LOGTAG, "OpenGL ES version: " + strGLVersion); + + GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f); + + progOES = loadShader(vss, fssOES); + vPosOES = GLES20.glGetAttribLocation(progOES, "vPosition"); + vTCOES = GLES20.glGetAttribLocation(progOES, "vTexCoord"); + GLES20.glEnableVertexAttribArray(vPosOES); + GLES20.glEnableVertexAttribArray(vTCOES); + + prog2D = loadShader(vss, fss2D); + vPos2D = GLES20.glGetAttribLocation(prog2D, "vPosition"); + vTC2D = GLES20.glGetAttribLocation(prog2D, "vTexCoord"); + 
GLES20.glEnableVertexAttribArray(vPos2D); + GLES20.glEnableVertexAttribArray(vTC2D); + } + + private void initSurfaceTexture() { + Log.d(LOGTAG, "initSurfaceTexture"); + deleteSurfaceTexture(); + initTexOES(texCamera); + mSTexture = new SurfaceTexture(texCamera[0]); + mSTexture.setOnFrameAvailableListener(this); + } + + private void deleteSurfaceTexture() { + Log.d(LOGTAG, "deleteSurfaceTexture"); + if(mSTexture != null) { + mSTexture.release(); + mSTexture = null; + deleteTex(texCamera); + } + } + + private void initTexOES(int[] tex) { + if(tex.length == 1) { + GLES20.glGenTextures(1, tex, 0); + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST); + } + } + + private static void deleteTex(int[] tex) { + if(tex.length == 1) { + GLES20.glDeleteTextures(1, tex, 0); + } + } + + private static int loadShader(String vss, String fss) { + Log.d("CameraGLRendererBase", "loadShader"); + int vshader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER); + GLES20.glShaderSource(vshader, vss); + GLES20.glCompileShader(vshader); + int[] compiled = new int[1]; + GLES20.glGetShaderiv(vshader, GLES20.GL_COMPILE_STATUS, compiled, 0); + if (compiled[0] == 0) { + Log.e("CameraGLRendererBase", "Could not compile vertex shader: "+GLES20.glGetShaderInfoLog(vshader)); + GLES20.glDeleteShader(vshader); + vshader = 0; + return 0; + } + + int fshader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER); + GLES20.glShaderSource(fshader, fss); + GLES20.glCompileShader(fshader); + GLES20.glGetShaderiv(fshader, GLES20.GL_COMPILE_STATUS, compiled, 0); + if (compiled[0] == 0) { + Log.e("CameraGLRendererBase", "Could not compile fragment shader:"+GLES20.glGetShaderInfoLog(fshader)); + GLES20.glDeleteShader(vshader); + GLES20.glDeleteShader(fshader); + fshader = 0; + return 0; + } + + int program = GLES20.glCreateProgram(); + GLES20.glAttachShader(program, vshader); + GLES20.glAttachShader(program, fshader); + GLES20.glLinkProgram(program); + Log.d("CameraGLRendererBase", "shaders were compiled OK"); + GLES20.glDeleteShader(vshader); + GLES20.glDeleteShader(fshader); + + return program; + } + + private void deleteFBO() + { + Log.d(LOGTAG, "deleteFBO("+mFBOWidth+"x"+mFBOHeight+")"); + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); + GLES20.glDeleteFramebuffers(1, FBO, 0); + + deleteTex(texFBO); + deleteTex(texDraw); + mFBOWidth = mFBOHeight = 0; + } + + private void initFBO(int width, int height) + { + Log.d(LOGTAG, "initFBO("+width+"x"+height+")"); + + deleteFBO(); + + GLES20.glGenTextures(1, texDraw, 0); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texDraw[0]); + GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST); + + 
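+        // texFBO is configured the same way and attached below as the FBO color buffer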
GLES20.glGenTextures(1, texFBO, 0); + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texFBO[0]); + GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); + GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST); + + //int hFBO; + GLES20.glGenFramebuffers(1, FBO, 0); + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, FBO[0]); + GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, texFBO[0], 0); + Log.d(LOGTAG, "initFBO error status: " + GLES20.glGetError()); + + int FBOstatus = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER); + if (FBOstatus != GLES20.GL_FRAMEBUFFER_COMPLETE) + Log.e(LOGTAG, "initFBO failed, status: " + FBOstatus); + + mFBOWidth = width; + mFBOHeight = height; + } + + // draw texture to FBO or to screen if fbo == 0 + private void drawTex(int tex, boolean isOES, int fbo) + { + GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo); + + if(fbo == 0) + GLES20.glViewport(0, 0, mView.getWidth(), mView.getHeight()); + else + GLES20.glViewport(0, 0, mFBOWidth, mFBOHeight); + + GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); + + if(isOES) { + GLES20.glUseProgram(progOES); + GLES20.glVertexAttribPointer(vPosOES, 2, GLES20.GL_FLOAT, false, 4*2, vert); + GLES20.glVertexAttribPointer(vTCOES, 2, GLES20.GL_FLOAT, false, 4*2, texOES); + } else { + GLES20.glUseProgram(prog2D); + GLES20.glVertexAttribPointer(vPos2D, 2, GLES20.GL_FLOAT, false, 4*2, vert); + GLES20.glVertexAttribPointer(vTC2D, 2, GLES20.GL_FLOAT, false, 4*2, tex2D); + } + + GLES20.glActiveTexture(GLES20.GL_TEXTURE0); + + if(isOES) { + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex); + GLES20.glUniform1i(GLES20.glGetUniformLocation(progOES, "sTexture"), 0); + } else { + GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex); + GLES20.glUniform1i(GLES20.glGetUniformLocation(prog2D, "sTexture"), 0); + } + + GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); + GLES20.glFlush(); + } + + public synchronized void enableView() { + Log.d(LOGTAG, "enableView"); + mEnabled = true; + updateState(); + } + + public synchronized void disableView() { + Log.d(LOGTAG, "disableView"); + mEnabled = false; + updateState(); + } + + protected void updateState() { + Log.d(LOGTAG, "updateState"); + Log.d(LOGTAG, "mEnabled="+mEnabled+", mHaveSurface="+mHaveSurface); + boolean willStart = mEnabled && mHaveSurface && mView.getVisibility() == View.VISIBLE; + if (willStart != mIsStarted) { + if(willStart) doStart(); + else doStop(); + } else { + Log.d(LOGTAG, "keeping State unchanged"); + } + Log.d(LOGTAG, "updateState end"); + } + + protected synchronized void doStart() { + Log.d(LOGTAG, "doStart"); + initSurfaceTexture(); + openCamera(mCameraIndex); + mIsStarted = true; + if(mCameraWidth>0 && mCameraHeight>0) + setPreviewSize(mCameraWidth, mCameraHeight); // start preview and call listener.onCameraViewStarted() + } + + + protected void doStop() { + Log.d(LOGTAG, "doStop"); + synchronized(this) { + mUpdateST = false; + mIsStarted = false; + mHaveFBO = false; + closeCamera(); + deleteSurfaceTexture(); + } + CameraTextureListener listener = mView.getCameraTextureListener(); + if(listener != null) 
listener.onCameraViewStopped(); + + } + + protected void setPreviewSize(int width, int height) { + synchronized(this) { + mHaveFBO = false; + mCameraWidth = width; + mCameraHeight = height; + setCameraPreviewSize(width, height); // can change mCameraWidth & mCameraHeight + initFBO(mCameraWidth, mCameraHeight); + mHaveFBO = true; + } + + CameraTextureListener listener = mView.getCameraTextureListener(); + if(listener != null) listener.onCameraViewStarted(mCameraWidth, mCameraHeight); + } + + public void setCameraIndex(int cameraIndex) { + disableView(); + mCameraIndex = cameraIndex; + enableView(); + } + + public void setMaxCameraPreviewSize(int maxWidth, int maxHeight) { + disableView(); + mMaxCameraWidth = maxWidth; + mMaxCameraHeight = maxHeight; + enableView(); + } + + public void onResume() { + Log.i(LOGTAG, "onResume"); + } + + public void onPause() { + Log.i(LOGTAG, "onPause"); + mHaveSurface = false; + updateState(); + mCameraWidth = mCameraHeight = -1; + } + +} diff --git a/modules/java/generator/src/java/android+CameraGLSurfaceView.java b/modules/java/generator/src/java/android+CameraGLSurfaceView.java index 45212081e..05f950b47 100644 --- a/modules/java/generator/src/java/android+CameraGLSurfaceView.java +++ b/modules/java/generator/src/java/android+CameraGLSurfaceView.java @@ -1,18 +1,18 @@ package org.opencv.android; -import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; -import org.opencv.core.Mat; +import org.opencv.R; -import android.app.Activity; import android.content.Context; +import android.content.res.TypedArray; import android.opengl.GLSurfaceView; import android.util.AttributeSet; -import android.view.MotionEvent; +import android.util.Log; import android.view.SurfaceHolder; -import android.widget.TextView; public class CameraGLSurfaceView extends GLSurfaceView { + private static final String LOGTAG = "CameraGLSurfaceView"; + public interface CameraTextureListener { /** * This method is invoked when camera preview has started. After this method is invoked @@ -29,24 +29,33 @@ public class CameraGLSurfaceView extends GLSurfaceView { public void onCameraViewStopped(); /** - * This method is invoked when delivery of the frame needs to be done. - * The returned values - is a modified frame which needs to be displayed on the screen. - * TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB and etc) + * This method is invoked when a new preview frame from Camera is ready. 
+ * @param texIn - the OpenGL texture ID that contains frame in RGBA format + * @param texOut - the OpenGL texture ID that can be used to store modified frame image t display + * @param width - the width of the frame + * @param height - the height of the frame + * @return `true` if `texOut` should be displayed, `false` - to show `texIn` */ - public boolean onCameraFrame(int texIn, int texOut, int width, int height); + public boolean onCameraTexture(int texIn, int texOut, int width, int height); }; private CameraTextureListener mTexListener; - private CameraRenderer mRenderer; + private CameraGLRendererBase mRenderer; public CameraGLSurfaceView(Context context, AttributeSet attrs) { super(context, attrs); - /*if(android.os.Build.VERSION.SDK_INT >= 21) + TypedArray styledAttrs = getContext().obtainStyledAttributes(attrs, R.styleable.CameraBridgeViewBase); + int cameraIndex = styledAttrs.getInt(R.styleable.CameraBridgeViewBase_camera_id, -1); + styledAttrs.recycle(); + + if(android.os.Build.VERSION.SDK_INT >= 21) mRenderer = new Camera2Renderer(this); - else*/ + else mRenderer = new CameraRenderer(this); + setCameraIndex(cameraIndex); + setEGLContextClientVersion(2); setRenderer(mRenderer); setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); @@ -62,6 +71,14 @@ public class CameraGLSurfaceView extends GLSurfaceView { return mTexListener; } + public void setCameraIndex(int cameraIndex) { + mRenderer.setCameraIndex(cameraIndex); + } + + public void setMaxCameraPreviewSize(int maxWidth, int maxHeight) { + mRenderer.setMaxCameraPreviewSize(maxWidth, maxHeight); + } + @Override public void surfaceCreated(SurfaceHolder holder) { super.surfaceCreated(holder); @@ -69,6 +86,7 @@ public class CameraGLSurfaceView extends GLSurfaceView { @Override public void surfaceDestroyed(SurfaceHolder holder) { + mRenderer.mHaveSurface = false; super.surfaceDestroyed(holder); } @@ -79,20 +97,23 @@ public class CameraGLSurfaceView extends GLSurfaceView { @Override public void onResume() { + Log.i(LOGTAG, "onResume"); super.onResume(); mRenderer.onResume(); } @Override public void onPause() { + Log.i(LOGTAG, "onPause"); mRenderer.onPause(); super.onPause(); } - @Override - public boolean onTouchEvent(MotionEvent e) { - if(e.getAction() == MotionEvent.ACTION_DOWN) - ((Activity)getContext()).openOptionsMenu(); - return true; + public void enableView() { + mRenderer.enableView(); + } + + public void disableView() { + mRenderer.disableView(); } } diff --git a/modules/java/generator/src/java/android+CameraRenderer.java b/modules/java/generator/src/java/android+CameraRenderer.java index ae8edda01..2d668ffa6 100644 --- a/modules/java/generator/src/java/android+CameraRenderer.java +++ b/modules/java/generator/src/java/android+CameraRenderer.java @@ -1,197 +1,132 @@ package org.opencv.android; import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.ByteOrder; -import java.nio.FloatBuffer; -import java.nio.IntBuffer; import java.util.List; -import javax.microedition.khronos.egl.EGLConfig; -import javax.microedition.khronos.opengles.GL10; - -import org.opencv.android.CameraGLSurfaceView.CameraTextureListener; - -import android.opengl.GLES11Ext; -import android.opengl.GLES20; -import android.opengl.GLSurfaceView; -import android.os.Build; -import android.util.Log; import android.annotation.TargetApi; -import android.graphics.SurfaceTexture; import android.hardware.Camera; import android.hardware.Camera.Size; +import android.os.Build; +import android.util.Log; @TargetApi(15) -public class CameraRenderer implements 
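For illustration, a minimal client of the refactored API could look like the sketch below. It relies only on members introduced by this patch series (the renamed `onCameraTexture()` callback, `setMaxCameraPreviewSize()`, and the existing lifecycle forwarding); the activity name and the 1280x720 cap are made up for the example.

```java
import org.opencv.android.CameraGLSurfaceView;

import android.app.Activity;
import android.os.Bundle;

public class PreviewActivity extends Activity implements CameraGLSurfaceView.CameraTextureListener {
    private CameraGLSurfaceView mView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mView = new CameraGLSurfaceView(this, null);
        mView.setCameraTextureListener(this);
        mView.setMaxCameraPreviewSize(1280, 720); // optional cap on the preview resolution
        setContentView(mView);
    }

    @Override
    protected void onPause() {
        mView.onPause();
        super.onPause();
    }

    @Override
    protected void onResume() {
        super.onResume();
        mView.onResume();
    }

    @Override
    public void onCameraViewStarted(int width, int height) {
        // allocate per-frame buffers here, using the reported preview size
    }

    @Override
    public void onCameraViewStopped() {
        // release per-frame buffers here
    }

    @Override
    public boolean onCameraTexture(int texIn, int texOut, int width, int height) {
        // return false to display texIn as-is; render or copy a processed
        // result into texOut and return true to display that instead
        return false;
    }
}
```

Returning false takes the texFBO -> screen path in CameraGLRendererBase.onDrawFrame(), so the unmodified camera frame is shown.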
GLSurfaceView.Renderer, - SurfaceTexture.OnFrameAvailableListener { +@SuppressWarnings("deprecation") +public class CameraRenderer extends CameraGLRendererBase { public static final String LOGTAG = "CameraRenderer"; - // shaders - private final String vss = "" - + "attribute vec2 vPosition;\n" - + "attribute vec2 vTexCoord;\n" + "varying vec2 texCoord;\n" - + "void main() {\n" + " texCoord = vTexCoord;\n" - + " gl_Position = vec4 ( vPosition.x, vPosition.y, 0.0, 1.0 );\n" - + "}"; - - private final String fssOES = "" - + "#extension GL_OES_EGL_image_external : require\n" - + "precision mediump float;\n" - + "uniform samplerExternalOES sTexture;\n" - + "varying vec2 texCoord;\n" - + "void main() {\n" - + " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}"; - - private final String fss2D = "" - + "precision mediump float;\n" - + "uniform sampler2D sTexture;\n" - + "varying vec2 texCoord;\n" - + "void main() {\n" - + " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}"; - - // coord-s - private final float vertices[] = { - -1, -1, - -1, 1, - 1, -1, - 1, 1 }; - private final float texCoordOES[] = { - 0, 1, - 0, 0, - 1, 1, - 1, 0 }; - private final float texCoord2D[] = { - 0, 0, - 0, 1, - 1, 0, - 1, 1 }; - - private int[] texCamera = {0}, texFBO = {0}, texDraw = {0}; - private int[] FBO = {0}; - private int progOES, prog2D; - private int vPosOES, vTCOES, vPos2D, vTC2D; - - private FloatBuffer vert, texOES, tex2D; - private Camera mCamera; private boolean mPreviewStarted = false; - private int mPreviewWidth, mPreviewHeight; - - private SurfaceTexture mSTexture; - - private boolean mGLInit = false; - private boolean mUpdateST = false; - - private CameraGLSurfaceView mView; CameraRenderer(CameraGLSurfaceView view) { - mView = view; - int bytes = vertices.length * Float.SIZE / Byte.SIZE; - vert = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer(); - texOES = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer(); - tex2D = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer(); - vert.put(vertices).position(0); - texOES.put(texCoordOES).position(0); - tex2D.put(texCoord2D).position(0); + super(view); } - public void onResume() { - //nothing - Log.i(LOGTAG, "onResume"); - } - - public void onPause() { - Log.i(LOGTAG, "onPause"); - mGLInit = false; - mUpdateST = false; - + @Override + protected synchronized void closeCamera() { + Log.i(LOGTAG, "closeCamera"); if(mCamera != null) { mCamera.stopPreview(); + mPreviewStarted = false; mCamera.release(); mCamera = null; } - - if(mSTexture != null) { - mSTexture.release(); - mSTexture = null; - deleteTex(texCamera); - } } - public void onSurfaceCreated(GL10 unused, EGLConfig config) { - GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f); + @Override + protected synchronized void openCamera(int id) { + Log.i(LOGTAG, "openCamera"); + closeCamera(); + if (id == CameraBridgeViewBase.CAMERA_ID_ANY) { + Log.d(LOGTAG, "Trying to open camera with old open()"); + try { + mCamera = Camera.open(); + } + catch (Exception e){ + Log.e(LOGTAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage()); + } - progOES = loadShader(vss, fssOES); - vPosOES = GLES20.glGetAttribLocation(progOES, "vPosition"); - vTCOES = GLES20.glGetAttribLocation(progOES, "vTexCoord"); - GLES20.glEnableVertexAttribArray(vPosOES); - GLES20.glEnableVertexAttribArray(vTCOES); + if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) { + boolean connected = false; + for (int 
camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) { + Log.d(LOGTAG, "Trying to open camera with new open(" + camIdx + ")"); + try { + mCamera = Camera.open(camIdx); + connected = true; + } catch (RuntimeException e) { + Log.e(LOGTAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage()); + } + if (connected) break; + } + } + } else { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) { + int localCameraIndex = mCameraIndex; + if (mCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK) { + Log.i(LOGTAG, "Trying to open BACK camera"); + Camera.CameraInfo cameraInfo = new Camera.CameraInfo(); + for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) { + Camera.getCameraInfo( camIdx, cameraInfo ); + if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) { + localCameraIndex = camIdx; + break; + } + } + } else if (mCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT) { + Log.i(LOGTAG, "Trying to open FRONT camera"); + Camera.CameraInfo cameraInfo = new Camera.CameraInfo(); + for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) { + Camera.getCameraInfo( camIdx, cameraInfo ); + if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { + localCameraIndex = camIdx; + break; + } + } + } + if (localCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK) { + Log.e(LOGTAG, "Back camera not found!"); + } else if (localCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT) { + Log.e(LOGTAG, "Front camera not found!"); + } else { + Log.d(LOGTAG, "Trying to open camera with new open(" + localCameraIndex + ")"); + try { + mCamera = Camera.open(localCameraIndex); + } catch (RuntimeException e) { + Log.e(LOGTAG, "Camera #" + localCameraIndex + "failed to open: " + e.getLocalizedMessage()); + } + } + } + } + if(mCamera == null) { + Log.e(LOGTAG, "Error: can't open camera"); + return; + } + Camera.Parameters params = mCamera.getParameters(); + List FocusModes = params.getSupportedFocusModes(); + if (FocusModes != null && FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) + { + params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); + } + mCamera.setParameters(params); - prog2D = loadShader(vss, fss2D); - vPos2D = GLES20.glGetAttribLocation(prog2D, "vPosition"); - vTC2D = GLES20.glGetAttribLocation(prog2D, "vTexCoord"); - GLES20.glEnableVertexAttribArray(vPos2D); - GLES20.glEnableVertexAttribArray(vTC2D); - - initTexOES(texCamera); - mSTexture = new SurfaceTexture(texCamera[0]); - mSTexture.setOnFrameAvailableListener(this); - - mCamera = Camera.open(); try { mCamera.setPreviewTexture(mSTexture); } catch (IOException ioe) { - } - - mGLInit = true; - } - - public void onDrawFrame(GL10 unused) { - if (!mGLInit) - return; - GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); - - synchronized (this) { - if (mUpdateST) { - mSTexture.updateTexImage(); - mUpdateST = false; - } - } - - CameraTextureListener texListener = mView.getCameraTextureListener(); - if(texListener != null) { - Log.d(LOGTAG, "haveUserCallback"); - // texCamera(OES) -> texFBO - drawTex(texCamera[0], true, FBO[0]); - - // call user code (texFBO -> texDraw) - boolean modified = texListener.onCameraFrame(texFBO[0], texDraw[0], mPreviewWidth, mPreviewHeight); - - if(modified) { - // texDraw -> screen - drawTex(texDraw[0], false, 0); - } else { - // texFBO -> screen - drawTex(texFBO[0], false, 0); - } - } else { - // texCamera(OES) -> screen - drawTex(texCamera[0], true, 0); + Log.e(LOGTAG, "setPreviewTexture() failed: " + ioe.getMessage()); } } - public void 
onSurfaceChanged(GL10 unused, int width, int height) { - Log.i(LOGTAG, "onSurfaceChanged("+width+"x"+height+")"); - - if(mCamera == null) + @Override + public synchronized void setCameraPreviewSize(int width, int height) { + Log.i(LOGTAG, "setCameraPreviewSize: "+width+"x"+height); + if(mCamera == null) { + Log.e(LOGTAG, "Camera isn't initialized!"); return; - if(mPreviewStarted) { - mCamera.stopPreview(); - mPreviewStarted = false; } + if(mMaxCameraWidth > 0 && mMaxCameraWidth < width) width = mMaxCameraWidth; + if(mMaxCameraHeight > 0 && mMaxCameraHeight < height) height = mMaxCameraHeight; + Camera.Parameters param = mCamera.getParameters(); List psize = param.getSupportedPreviewSizes(); int bestWidth = 0, bestHeight = 0; @@ -207,147 +142,25 @@ public class CameraRenderer implements GLSurfaceView.Renderer, bestHeight = h; } } - if(bestWidth > 0 && bestHeight > 0) { - param.setPreviewSize(bestWidth, bestHeight); - Log.i(LOGTAG, "selected size: "+bestWidth+" x "+bestHeight); - - GLES20.glViewport(0, 0, bestWidth, bestWidth); - initFBO(bestWidth, bestHeight); - mPreviewWidth = bestWidth; - mPreviewHeight = bestHeight; + if(bestWidth <= 0 || bestHeight <= 0) { + bestWidth = psize.get(0).width; + bestHeight = psize.get(0).height; + Log.e(LOGTAG, "Error: best size was not selected, using "+bestWidth+" x "+bestHeight); + } else { + Log.i(LOGTAG, "Selected best size: "+bestWidth+" x "+bestHeight); } + + if(mPreviewStarted) { + mCamera.stopPreview(); + mPreviewStarted = false; + } + mCameraWidth = bestWidth; + mCameraHeight = bestHeight; + param.setPreviewSize(bestWidth, bestHeight); } - //param.set("orientation", "landscape"); + param.set("orientation", "landscape"); mCamera.setParameters(param); mCamera.startPreview(); mPreviewStarted = true; } - - public synchronized void onFrameAvailable(SurfaceTexture st) { - mUpdateST = true; - mView.requestRender(); - } - - private void initTexOES(int[] tex) { - if(tex.length == 1) { - GLES20.glGenTextures(1, tex, 0); - GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]); - GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); - GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); - GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); - GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST); - } - } - - private void deleteTex(int[] tex) { - if(tex.length == 1) { - GLES20.glDeleteTextures(1, tex, 0); - } - } - - private static int loadShader(String vss, String fss) { - int vshader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER); - GLES20.glShaderSource(vshader, vss); - GLES20.glCompileShader(vshader); - int[] compiled = new int[1]; - GLES20.glGetShaderiv(vshader, GLES20.GL_COMPILE_STATUS, compiled, 0); - if (compiled[0] == 0) { - Log.e(LOGTAG, "Could not compile vertex shader"); - Log.v(LOGTAG, "Could not compile vertex shader:"+GLES20.glGetShaderInfoLog(vshader)); - GLES20.glDeleteShader(vshader); - vshader = 0; - } - - int fshader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER); - GLES20.glShaderSource(fshader, fss); - GLES20.glCompileShader(fshader); - GLES20.glGetShaderiv(fshader, GLES20.GL_COMPILE_STATUS, compiled, 0); - if (compiled[0] == 0) { - Log.e("Renderer", "Could not compile fragment shader"); - Log.v("Renderer", "Could not compile fragment shader:"+GLES20.glGetShaderInfoLog(fshader)); - 
GLES20.glDeleteShader(fshader); - fshader = 0; - } - - int program = GLES20.glCreateProgram(); - GLES20.glAttachShader(program, vshader); - GLES20.glAttachShader(program, fshader); - GLES20.glLinkProgram(program); - - return program; - } - - private void releaseFBO() - { - - GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); - GLES20.glDeleteFramebuffers(1, FBO, 0); - - deleteTex(texFBO); - deleteTex(texDraw); - } - - private void initFBO(int width, int height) - { - Log.d(LOGTAG, "initFBO("+width+"x"+height+")"); - releaseFBO(); - - GLES20.glGenTextures(1, texDraw, 0); - GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texDraw[0]); - GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null); - GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); - GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); - GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); - GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST); - - GLES20.glGenTextures(1, texFBO, 0); - GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texFBO[0]); - GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null); - GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); - GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); - GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); - GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST); - - //int hFBO; - GLES20.glGenFramebuffers(1, FBO, 0); - GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, FBO[0]); - GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, texFBO[0], 0); - Log.d(LOGTAG, "initFBO status: " + GLES20.glGetError()); - - if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE) - Log.e(LOGTAG, "initFBO failed: " + GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER)); - - //GLES20.glViewport(0, 0, width, height); - } - - // draw texture to FBO or to screen if fbo == 0 - private void drawTex(int tex, boolean isOES, int fbo) - { - GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo); - GLES20.glViewport(0, 0, mPreviewWidth, mPreviewHeight); - GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); - - if(isOES) { - GLES20.glUseProgram(progOES); - GLES20.glVertexAttribPointer(vPosOES, 2, GLES20.GL_FLOAT, false, 4*2, vert); - GLES20.glVertexAttribPointer(vTCOES, 2, GLES20.GL_FLOAT, false, 4*2, texOES); - } else { - GLES20.glUseProgram(prog2D); - GLES20.glVertexAttribPointer(vPos2D, 2, GLES20.GL_FLOAT, false, 4*2, vert); - GLES20.glVertexAttribPointer(vTC2D, 2, GLES20.GL_FLOAT, false, 4*2, tex2D); - } - - GLES20.glActiveTexture(GLES20.GL_TEXTURE0); - - if(isOES) { - GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex); - GLES20.glUniform1i(GLES20.glGetUniformLocation(progOES, "sTexture"), 0); - } else { - GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex); - GLES20.glUniform1i(GLES20.glGetUniformLocation(prog2D, "sTexture"), 0); - } - - GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); - GLES20.glFlush(); - } } \ No newline at end of file From a0411054919432b30e70aadba3bc647654ca570d Mon Sep 17 00:00:00 2001 From: Andrey Pavlenko Date: Wed, 7 
Oct 2015 18:59:43 +0300 Subject: [PATCH 3/5] refactoring Tutorial-4 using new OpenCV CameraGLSurfaceView --- samples/android/tutorial-4-opencl/.cproject | 4 +- .../android/tutorial-4-opencl/jni/Android.mk | 4 +- .../tutorial-4-opencl/jni/CLprocessor.cpp | 76 +++- .../tutorial-4-opencl/jni/GLrender.cpp | 375 ------------------ .../android/tutorial-4-opencl/jni/common.hpp | 2 +- samples/android/tutorial-4-opencl/jni/jni.c | 30 +- .../samples/tutorial4/Camera2Renderer.java | 281 ------------- .../samples/tutorial4/CameraRenderer.java | 75 ---- .../samples/tutorial4/MyGLRendererBase.java | 117 ------ .../samples/tutorial4/MyGLSurfaceView.java | 133 +++++-- ...{NativeGLRenderer.java => NativePart.java} | 12 +- .../samples/tutorial4/Tutorial4Activity.java | 13 +- 12 files changed, 188 insertions(+), 934 deletions(-) delete mode 100644 samples/android/tutorial-4-opencl/jni/GLrender.cpp delete mode 100644 samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/Camera2Renderer.java delete mode 100644 samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/CameraRenderer.java delete mode 100644 samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/MyGLRendererBase.java rename samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/{NativeGLRenderer.java => NativePart.java} (51%) diff --git a/samples/android/tutorial-4-opencl/.cproject b/samples/android/tutorial-4-opencl/.cproject index 9f3b5fd84..90fc6c97f 100644 --- a/samples/android/tutorial-4-opencl/.cproject +++ b/samples/android/tutorial-4-opencl/.cproject @@ -5,6 +5,7 @@ + @@ -12,7 +13,6 @@ - @@ -28,7 +28,7 @@ diff --git a/samples/android/tutorial-4-opencl/jni/Android.mk b/samples/android/tutorial-4-opencl/jni/Android.mk index fa44bf8c5..dacd0f665 100644 --- a/samples/android/tutorial-4-opencl/jni/Android.mk +++ b/samples/android/tutorial-4-opencl/jni/Android.mk @@ -21,7 +21,7 @@ endif LOCAL_C_INCLUDES += $(OPENCL_SDK)/include LOCAL_LDLIBS += -L$(OPENCL_SDK)/lib/$(TARGET_ARCH_ABI) -lOpenCL -LOCAL_MODULE := JNIrender -LOCAL_SRC_FILES := jni.c GLrender.cpp CLprocessor.cpp +LOCAL_MODULE := JNIpart +LOCAL_SRC_FILES := jni.c CLprocessor.cpp LOCAL_LDLIBS += -llog -lGLESv2 -lEGL include $(BUILD_SHARED_LIBRARY) \ No newline at end of file diff --git a/samples/android/tutorial-4-opencl/jni/CLprocessor.cpp b/samples/android/tutorial-4-opencl/jni/CLprocessor.cpp index a46cac424..b7bf530b9 100644 --- a/samples/android/tutorial-4-opencl/jni/CLprocessor.cpp +++ b/samples/android/tutorial-4-opencl/jni/CLprocessor.cpp @@ -2,6 +2,7 @@ #define CL_USE_DEPRECATED_OPENCL_1_1_APIS /*let's give a chance for OpenCL 1.1 devices*/ #include +#include #include #include @@ -82,7 +83,7 @@ cl::CommandQueue theQueue; cl::Program theProgB2B, theProgI2B, theProgI2I; bool haveOpenCL = false; -void initCL() +extern "C" void initCL() { dumpCLinfo(); @@ -144,14 +145,19 @@ void initCL() LOGD("initCL completed"); } -void closeCL() +extern "C" void closeCL() { } #define GL_TEXTURE_2D 0x0DE1 void procOCL_I2I(int texIn, int texOut, int w, int h) { - if(!haveOpenCL) return; + LOGD("Processing OpenCL Direct (image2d)"); + if(!haveOpenCL) + { + LOGE("OpenCL isn't initialized"); + return; + } LOGD("procOCL_I2I(%d, %d, %d, %d)", texIn, texOut, w, h); cl::ImageGL imgIn (theContext, CL_MEM_READ_ONLY, GL_TEXTURE_2D, 0, texIn); @@ -185,7 +191,12 @@ void procOCL_I2I(int texIn, int texOut, int w, int h) void procOCL_OCV(int texIn, int texOut, int w, int h) { - if(!haveOpenCL) return; + LOGD("Processing OpenCL via OpenCV"); + if(!haveOpenCL) + { + 
LOGE("OpenCL isn't initialized"); + return; + } int64_t t = getTimeMs(); cl::ImageGL imgIn (theContext, CL_MEM_READ_ONLY, GL_TEXTURE_2D, 0, texIn); @@ -219,3 +230,60 @@ void procOCL_OCV(int texIn, int texOut, int w, int h) cv::ocl::finish(); LOGD("uploading results to texture costs %d ms", getTimeInterval(t)); } + +void drawFrameProcCPU(int w, int h, int texOut) +{ + LOGD("Processing on CPU"); + int64_t t; + + // let's modify pixels in FBO texture in C++ code (on CPU) + const int BUFF_SIZE = 1<<24;//2k*2k*4; + static char tmpBuff[BUFF_SIZE]; + if(w*h > BUFF_SIZE) + { + LOGE("Internal temp buffer is too small, can't make CPU frame processing"); + return; + } + + // read + t = getTimeMs(); + // expecting FBO to be bound + glReadPixels(0, 0, w, h, GL_RGBA, GL_UNSIGNED_BYTE, tmpBuff); + LOGD("glReadPixels() costs %d ms", getTimeInterval(t)); + + // modify + t = getTimeMs(); + cv::Mat m(h, w, CV_8UC4, tmpBuff); + cv::Laplacian(m, m, CV_8U); + m *= 10; + LOGD("Laplacian() costs %d ms", getTimeInterval(t)); + + // write back + glActiveTexture(GL_TEXTURE0); + glBindTexture(GL_TEXTURE_2D, texOut); + t = getTimeMs(); + glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, GL_RGBA, GL_UNSIGNED_BYTE, tmpBuff); + LOGD("glTexSubImage2D() costs %d ms", getTimeInterval(t)); +} + + +enum ProcMode {PROC_MODE_NO_PROC=0, PROC_MODE_CPU=1, PROC_MODE_OCL_DIRECT=2, PROC_MODE_OCL_OCV=3}; + +extern "C" void processFrame(int tex1, int tex2, int w, int h, int mode) +{ + switch(mode) + { + //case PROC_MODE_NO_PROC: + case PROC_MODE_CPU: + drawFrameProcCPU(w, h, tex2); + break; + case PROC_MODE_OCL_DIRECT: + procOCL_I2I(tex1, tex2, w, h); + break; + case PROC_MODE_OCL_OCV: + procOCL_OCV(tex1, tex2, w, h); + break; + default: + LOGE("Unexpected processing mode: %d", mode); + } +} diff --git a/samples/android/tutorial-4-opencl/jni/GLrender.cpp b/samples/android/tutorial-4-opencl/jni/GLrender.cpp deleted file mode 100644 index 5c2862026..000000000 --- a/samples/android/tutorial-4-opencl/jni/GLrender.cpp +++ /dev/null @@ -1,375 +0,0 @@ -#include -#include - -#include -#include - -#include "common.hpp" - -float vertices[] = { - -1.0f, -1.0f, - -1.0f, 1.0f, - 1.0f, -1.0f, - 1.0f, 1.0f -}; -float texCoordOES[] = { - 0.0f, 1.0f, - 0.0f, 0.0f, - 1.0f, 1.0f, - 1.0f, 0.0f -}; -float texCoord2D[] = { - 0.0f, 0.0f, - 0.0f, 1.0f, - 1.0f, 0.0f, - 1.0f, 1.0f -}; - -const char vss[] = \ - "attribute vec2 vPosition;\n" \ - "attribute vec2 vTexCoord;\n" \ - "varying vec2 texCoord;\n" \ - "void main() {\n" \ - " texCoord = vTexCoord;\n" \ - " gl_Position = vec4 ( vPosition, 0.0, 1.0 );\n" \ - "}"; - -const char fssOES[] = \ - "#extension GL_OES_EGL_image_external : require\n" \ - "precision mediump float;\n" \ - "uniform samplerExternalOES sTexture;\n" \ - "varying vec2 texCoord;\n" \ - "void main() {\n" \ - " gl_FragColor = texture2D(sTexture,texCoord);\n" \ - "}"; - -const char fss2D[] = \ - "precision mediump float;\n" \ - "uniform sampler2D sTexture;\n" \ - "varying vec2 texCoord;\n" \ - "void main() {\n" \ - " gl_FragColor = texture2D(sTexture,texCoord);\n" \ - "}"; - -GLuint progOES = 0; -GLuint prog2D = 0; - -GLint vPosOES, vTCOES; -GLint vPos2D, vTC2D; - -GLuint FBOtex = 0, FBOtex2 = 0; -GLuint FBO = 0; - -GLuint texOES = 0; -int texWidth = 0, texHeight = 0; - -enum ProcMode {PROC_MODE_NO_PROC=0, PROC_MODE_CPU=1, PROC_MODE_OCL_DIRECT=2, PROC_MODE_OCL_OCV=3}; - -ProcMode procMode = PROC_MODE_NO_PROC; - -static inline void deleteTex(GLuint* tex) -{ - if(tex && *tex) - { - glDeleteTextures(1, tex); - *tex = 0; - } -} - -static void 
releaseFBO() -{ - if (FBO != 0) - { - glBindFramebuffer(GL_FRAMEBUFFER, 0); - glDeleteFramebuffers(1, &FBO); - FBO = 0; - } - deleteTex(&FBOtex); - deleteTex(&FBOtex2); - glDeleteProgram(prog2D); - prog2D = 0; -} - -static inline void logShaderCompileError(GLuint shader, bool isProgram = false) -{ - GLchar msg[512]; - msg[0] = 0; - GLsizei len; - if(isProgram) - glGetProgramInfoLog(shader, sizeof(msg)-1, &len, msg); - else - glGetShaderInfoLog(shader, sizeof(msg)-1, &len, msg); - LOGE("Could not compile shader/program: %s", msg); -} - -static int makeShaderProg(const char* vss, const char* fss) -{ - LOGD("makeShaderProg: setup GL_VERTEX_SHADER"); - GLuint vshader = glCreateShader(GL_VERTEX_SHADER); - const GLchar* text = vss; - glShaderSource(vshader, 1, &text, 0); - glCompileShader(vshader); - GLint compiled; - glGetShaderiv(vshader, GL_COMPILE_STATUS, &compiled); - if (!compiled) { - logShaderCompileError(vshader); - glDeleteShader(vshader); - vshader = 0; - } - - LOGD("makeShaderProg: setup GL_FRAGMENT_SHADER"); - GLuint fshader = glCreateShader(GL_FRAGMENT_SHADER); - text = fss; - glShaderSource(fshader, 1, &text, 0); - glCompileShader(fshader); - glGetShaderiv(fshader, GL_COMPILE_STATUS, &compiled); - if (!compiled) { - logShaderCompileError(fshader); - glDeleteShader(fshader); - fshader = 0; - } - - LOGD("makeShaderProg: glCreateProgram"); - GLuint program = glCreateProgram(); - glAttachShader(program, vshader); - glAttachShader(program, fshader); - glLinkProgram(program); - GLint linked; - glGetProgramiv(program, GL_LINK_STATUS, &linked); - if (!linked) - { - logShaderCompileError(program, true); - glDeleteProgram(program); - program = 0; - } - glValidateProgram(program); - GLint validated; - glGetProgramiv(program, GL_VALIDATE_STATUS, &validated); - if (!validated) - { - logShaderCompileError(program, true); - glDeleteProgram(program); - program = 0; - } - - if(vshader) glDeleteShader(vshader); - if(fshader) glDeleteShader(fshader); - - return program; -} - - -static void initFBO(int width, int height) -{ - LOGD("initFBO(%d, %d)", width, height); - releaseFBO(); - - glGenTextures(1, &FBOtex2); - glBindTexture(GL_TEXTURE_2D, FBOtex2); - glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST); - - glGenTextures(1, &FBOtex); - glBindTexture(GL_TEXTURE_2D, FBOtex); - glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST); - - //int hFBO; - glGenFramebuffers(1, &FBO); - glBindFramebuffer(GL_FRAMEBUFFER, FBO); - glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, FBOtex, 0); - LOGD("initFBO status: %d", glGetError()); - - if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) - LOGE("initFBO failed: %d", glCheckFramebufferStatus(GL_FRAMEBUFFER)); - - prog2D = makeShaderProg(vss, fss2D); - vPos2D = glGetAttribLocation(prog2D, "vPosition"); - vTC2D = glGetAttribLocation(prog2D, "vTexCoord"); - glEnableVertexAttribArray(vPos2D); - 
glEnableVertexAttribArray(vTC2D); -} - -void drawTex(int tex, GLenum texType, GLuint fbo) -{ - int64_t t = getTimeMs(); - //draw texture to FBO or to screen - glBindFramebuffer(GL_FRAMEBUFFER, fbo); - glViewport(0, 0, texWidth, texHeight); - - glClear(GL_COLOR_BUFFER_BIT); - - GLuint prog = texType == GL_TEXTURE_EXTERNAL_OES ? progOES : prog2D; - GLint vPos = texType == GL_TEXTURE_EXTERNAL_OES ? vPosOES : vPos2D; - GLint vTC = texType == GL_TEXTURE_EXTERNAL_OES ? vTCOES : vTC2D; - float* texCoord = texType == GL_TEXTURE_EXTERNAL_OES ? texCoordOES : texCoord2D; - glUseProgram(prog); - glVertexAttribPointer(vPos, 2, GL_FLOAT, false, 4*2, vertices); - glVertexAttribPointer(vTC, 2, GL_FLOAT, false, 4*2, texCoord); - - glActiveTexture(GL_TEXTURE0); - glBindTexture(texType, tex); - glUniform1i(glGetUniformLocation(prog, "sTexture"), 0); - - glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); - glFlush(); - LOGD("drawTex(%u) costs %d ms", tex, getTimeInterval(t)); -} - -void drawFrameOrig() -{ - drawTex(texOES, GL_TEXTURE_EXTERNAL_OES, 0); -} - -void procCPU(char* buff, int w, int h) -{ - int64_t t = getTimeMs(); - cv::Mat m(h, w, CV_8UC4, buff); - cv::Laplacian(m, m, CV_8U); - m *= 10; - LOGD("procCPU() costs %d ms", getTimeInterval(t)); -} - -void drawFrameProcCPU() -{ - int64_t t; - drawTex(texOES, GL_TEXTURE_EXTERNAL_OES, FBO); - - // let's modify pixels in FBO texture in C++ code (on CPU) - const int BUFF_SIZE = 1<<24;//2k*2k*4; - static char tmpBuff[BUFF_SIZE]; - if(texWidth*texHeight > BUFF_SIZE) - { - LOGE("Internal temp buffer is too small, can't make CPU frame processing"); - return; - } - - // read - t = getTimeMs(); - glReadPixels(0, 0, texWidth, texHeight, GL_RGBA, GL_UNSIGNED_BYTE, tmpBuff); - LOGD("glReadPixels() costs %d ms", getTimeInterval(t)); - - // modify - procCPU(tmpBuff, texWidth, texHeight); - - // write back - t = getTimeMs(); - glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, texWidth, texHeight, GL_RGBA, GL_UNSIGNED_BYTE, tmpBuff); - LOGD("glTexSubImage2D() costs %d ms", getTimeInterval(t)); - - // render to screen - drawTex(FBOtex, GL_TEXTURE_2D, 0); -} - -void procOCL_I2I(int texIn, int texOut, int w, int h); -void procOCL_OCV(int texIn, int texOut, int w, int h); -void drawFrameProcOCL() -{ - drawTex(texOES, GL_TEXTURE_EXTERNAL_OES, FBO); - - // modify pixels in FBO texture using OpenCL and CL-GL interop - procOCL_I2I(FBOtex, FBOtex2, texWidth, texHeight); - - // render to screen - drawTex(FBOtex2, GL_TEXTURE_2D, 0); -} - -void drawFrameProcOCLOCV() -{ - drawTex(texOES, GL_TEXTURE_EXTERNAL_OES, FBO); - - // modify pixels in FBO texture using OpenCL and CL-GL interop - procOCL_OCV(FBOtex, FBOtex2, texWidth, texHeight); - - // render to screen - drawTex(FBOtex2, GL_TEXTURE_2D, 0); -} - -extern "C" void drawFrame() -{ - LOGD("*** drawFrame() ***"); - int64_t t = getTimeMs(); - - switch(procMode) - { - case PROC_MODE_NO_PROC: drawFrameOrig(); break; - case PROC_MODE_CPU: drawFrameProcCPU(); break; - case PROC_MODE_OCL_DIRECT: drawFrameProcOCL(); break; - case PROC_MODE_OCL_OCV: drawFrameProcOCLOCV(); break; - default: drawFrameOrig(); - } - - glFinish(); - LOGD("*** drawFrame() costs %d ms ***", getTimeInterval(t)); -} - -void closeCL(); -extern "C" void closeGL() -{ - closeCL(); - LOGD("closeGL"); - deleteTex(&texOES); - - glUseProgram(0); - glDeleteProgram(progOES); - progOES = 0; - - releaseFBO(); -} - -void initCL(); -extern "C" int initGL() -{ - LOGD("initGL"); - - closeGL(); - - const char* vs = (const char*)glGetString(GL_VERSION); - LOGD("GL_VERSION = %s", vs); - - progOES = 
makeShaderProg(vss, fssOES); - vPosOES = glGetAttribLocation(progOES, "vPosition"); - vTCOES = glGetAttribLocation(progOES, "vTexCoord"); - glEnableVertexAttribArray(vPosOES); - glEnableVertexAttribArray(vTCOES); - - glClearColor(1.0f, 1.0f, 1.0f, 1.0f); - - texOES = 0; - glGenTextures(1, &texOES); - glBindTexture(GL_TEXTURE_EXTERNAL_OES, texOES); - glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); - glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST); - glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST); - - initCL(); - - return texOES; -} - -extern "C" void changeSize(int width, int height) -{ - const int MAX_W=1<<11, MAX_H=1<<11; - LOGD("changeSize: %dx%d", width, height); - texWidth = width <= MAX_W ? width : MAX_W; - texHeight = height <= MAX_H ? height : MAX_H; - initFBO(texWidth, texHeight); -} - -extern "C" void setProcessingMode(int mode) -{ - switch(mode) - { - case PROC_MODE_NO_PROC: procMode = PROC_MODE_NO_PROC; break; - case PROC_MODE_CPU: procMode = PROC_MODE_CPU; break; - case PROC_MODE_OCL_DIRECT: procMode = PROC_MODE_OCL_DIRECT; break; - case PROC_MODE_OCL_OCV: procMode = PROC_MODE_OCL_OCV; break; - } -} diff --git a/samples/android/tutorial-4-opencl/jni/common.hpp b/samples/android/tutorial-4-opencl/jni/common.hpp index 20b882a9f..2923803f2 100644 --- a/samples/android/tutorial-4-opencl/jni/common.hpp +++ b/samples/android/tutorial-4-opencl/jni/common.hpp @@ -1,5 +1,5 @@ #include -#define LOG_TAG "JNIRenderer" +#define LOG_TAG "JNIpart" //#define LOGD(...) #define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)) #define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)) diff --git a/samples/android/tutorial-4-opencl/jni/jni.c b/samples/android/tutorial-4-opencl/jni/jni.c index 7be35a000..0c48ab606 100644 --- a/samples/android/tutorial-4-opencl/jni/jni.c +++ b/samples/android/tutorial-4-opencl/jni/jni.c @@ -1,32 +1,20 @@ #include -int initGL(); -void closeGL(); -void changeSize(int width, int height); -void drawFrame(); -void setProcessingMode(int mode); +int initCL(); +void closeCL(); +void processFrame(int tex1, int tex2, int w, int h, int mode); -JNIEXPORT jint JNICALL Java_org_opencv_samples_tutorial4_NativeGLRenderer_initGL(JNIEnv * env, jclass cls) +JNIEXPORT jint JNICALL Java_org_opencv_samples_tutorial4_NativePart_initCL(JNIEnv * env, jclass cls) { - return initGL(); + return initCL(); } -JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_NativeGLRenderer_closeGL(JNIEnv * env, jclass cls) +JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_NativePart_closeCL(JNIEnv * env, jclass cls) { - closeGL(); + closeCL(); } -JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_NativeGLRenderer_changeSize(JNIEnv * env, jclass cls, jint width, jint height) +JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_NativePart_processFrame(JNIEnv * env, jclass cls, jint tex1, jint tex2, jint w, jint h, jint mode) { - changeSize(width, height); -} - -JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_NativeGLRenderer_drawFrame(JNIEnv * env, jclass cls) -{ - drawFrame(); -} - -JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_NativeGLRenderer_setProcessingMode(JNIEnv * env, jclass cls, jint mode) -{ - setProcessingMode(mode); + processFrame(tex1, tex2, w, h, mode); } diff --git 
a/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/Camera2Renderer.java b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/Camera2Renderer.java deleted file mode 100644 index 217268a78..000000000 --- a/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/Camera2Renderer.java +++ /dev/null @@ -1,281 +0,0 @@ -package org.opencv.samples.tutorial4; - -import java.util.Arrays; -import java.util.concurrent.Semaphore; -import java.util.concurrent.TimeUnit; - -import android.annotation.SuppressLint; -import android.content.Context; -import android.graphics.SurfaceTexture; -import android.hardware.camera2.CameraAccessException; -import android.hardware.camera2.CameraCaptureSession; -import android.hardware.camera2.CameraCharacteristics; -import android.hardware.camera2.CameraDevice; -import android.hardware.camera2.CameraManager; -import android.hardware.camera2.CaptureRequest; -import android.hardware.camera2.params.StreamConfigurationMap; -import android.os.Handler; -import android.os.HandlerThread; -import android.util.Log; -import android.util.Size; -import android.view.Surface; - -@SuppressLint("NewApi") public class Camera2Renderer extends MyGLRendererBase { - - protected final String LOGTAG = "Camera2Renderer"; - private CameraDevice mCameraDevice; - private CameraCaptureSession mCaptureSession; - private CaptureRequest.Builder mPreviewRequestBuilder; - private String mCameraID; - private Size mPreviewSize = new Size(1280, 720); - - private HandlerThread mBackgroundThread; - private Handler mBackgroundHandler; - private Semaphore mCameraOpenCloseLock = new Semaphore(1); - - Camera2Renderer(MyGLSurfaceView view) { - super(view); - } - - public void onResume() { - stopBackgroundThread(); - super.onResume(); - startBackgroundThread(); - } - - public void onPause() { - super.onPause(); - stopBackgroundThread(); - } - - boolean cacPreviewSize(final int width, final int height) { - Log.i(LOGTAG, "cacPreviewSize: "+width+"x"+height); - if(mCameraID == null) - return false; - CameraManager manager = (CameraManager) mView.getContext() - .getSystemService(Context.CAMERA_SERVICE); - try { - CameraCharacteristics characteristics = manager - .getCameraCharacteristics(mCameraID); - StreamConfigurationMap map = characteristics - .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); - int bestWidth = 0, bestHeight = 0; - float aspect = (float)width / height; - for (Size psize : map.getOutputSizes(SurfaceTexture.class)) { - int w = psize.getWidth(), h = psize.getHeight(); - Log.d(LOGTAG, "trying size: "+w+"x"+h); - if ( width >= w && height >= h && - bestWidth <= w && bestHeight <= h && - Math.abs(aspect - (float)w/h) < 0.2 ) { - bestWidth = w; - bestHeight = h; - } - } - Log.i(LOGTAG, "best size: "+bestWidth+"x"+bestHeight); - if( mPreviewSize.getWidth() == bestWidth && - mPreviewSize.getHeight() == bestHeight ) - return false; - else { - mPreviewSize = new Size(bestWidth, bestHeight); - return true; - } - } catch (CameraAccessException e) { - Log.e(LOGTAG, "cacPreviewSize - Camera Access Exception"); - } catch (IllegalArgumentException e) { - Log.e(LOGTAG, "cacPreviewSize - Illegal Argument Exception"); - } catch (SecurityException e) { - Log.e(LOGTAG, "cacPreviewSize - Security Exception"); - } - return false; - } - - protected void openCamera() { - Log.i(LOGTAG, "openCamera"); - //closeCamera(); - CameraManager manager = (CameraManager) mView.getContext() - .getSystemService(Context.CAMERA_SERVICE); - try { - for (String cameraID : 
manager.getCameraIdList()) { - CameraCharacteristics characteristics = manager - .getCameraCharacteristics(cameraID); - if (characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) - continue; - - mCameraID = cameraID; - break; - } - if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) { - throw new RuntimeException( - "Time out waiting to lock camera opening."); - } - manager.openCamera(mCameraID, mStateCallback, mBackgroundHandler); - } catch (CameraAccessException e) { - Log.e(LOGTAG, "OpenCamera - Camera Access Exception"); - } catch (IllegalArgumentException e) { - Log.e(LOGTAG, "OpenCamera - Illegal Argument Exception"); - } catch (SecurityException e) { - Log.e(LOGTAG, "OpenCamera - Security Exception"); - } catch (InterruptedException e) { - Log.e(LOGTAG, "OpenCamera - Interrupted Exception"); - } - } - - protected void closeCamera() { - Log.i(LOGTAG, "closeCamera"); - try { - mCameraOpenCloseLock.acquire(); - if (null != mCaptureSession) { - mCaptureSession.close(); - mCaptureSession = null; - } - if (null != mCameraDevice) { - mCameraDevice.close(); - mCameraDevice = null; - } - } catch (InterruptedException e) { - throw new RuntimeException( - "Interrupted while trying to lock camera closing.", e); - } finally { - mCameraOpenCloseLock.release(); - } - } - - private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() { - - @Override - public void onOpened(CameraDevice cameraDevice) { - mCameraDevice = cameraDevice; - mCameraOpenCloseLock.release(); - createCameraPreviewSession(); - } - - @Override - public void onDisconnected(CameraDevice cameraDevice) { - cameraDevice.close(); - mCameraDevice = null; - mCameraOpenCloseLock.release(); - } - - @Override - public void onError(CameraDevice cameraDevice, int error) { - cameraDevice.close(); - mCameraDevice = null; - mCameraOpenCloseLock.release(); - } - - }; - - private void createCameraPreviewSession() { - Log.i(LOGTAG, "createCameraPreviewSession"); - try { - mCameraOpenCloseLock.acquire(); - if (null == mCameraDevice) { - mCameraOpenCloseLock.release(); - Log.e(LOGTAG, "createCameraPreviewSession: camera isn't opened"); - return; - } - if (null != mCaptureSession) { - mCameraOpenCloseLock.release(); - Log.e(LOGTAG, "createCameraPreviewSession: mCaptureSession is already started"); - return; - } - if(null == mSTex) { - mCameraOpenCloseLock.release(); - Log.e(LOGTAG, "createCameraPreviewSession: preview SurfaceTexture is null"); - return; - } - Log.d(LOGTAG, "starting preview "+mPreviewSize.getWidth()+"x"+mPreviewSize.getHeight()); - mSTex.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight()); - - Surface surface = new Surface(mSTex); - - mPreviewRequestBuilder = mCameraDevice - .createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); - mPreviewRequestBuilder.addTarget(surface); - - mCameraDevice.createCaptureSession(Arrays.asList(surface), - new CameraCaptureSession.StateCallback() { - @Override - public void onConfigured( - CameraCaptureSession cameraCaptureSession) { - mCaptureSession = cameraCaptureSession; - try { - mPreviewRequestBuilder - .set(CaptureRequest.CONTROL_AF_MODE, - CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); - mPreviewRequestBuilder - .set(CaptureRequest.CONTROL_AE_MODE, - CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); - - mCaptureSession.setRepeatingRequest( - mPreviewRequestBuilder.build(), null, - mBackgroundHandler); - Log.i(LOGTAG, "CameraPreviewSession has been started"); - } catch 
(CameraAccessException e) { - Log.e(LOGTAG, "createCaptureSession failed"); - } - mCameraOpenCloseLock.release(); - } - - @Override - public void onConfigureFailed( - CameraCaptureSession cameraCaptureSession) { - Log.e(LOGTAG, "createCameraPreviewSession failed"); - mCameraOpenCloseLock.release(); - } - }, mBackgroundHandler); - } catch (CameraAccessException e) { - Log.e(LOGTAG, "createCameraPreviewSession"); - } catch (InterruptedException e) { - throw new RuntimeException( - "Interrupted while createCameraPreviewSession", e); - } - finally { - //mCameraOpenCloseLock.release(); - } - } - - private void startBackgroundThread() { - Log.i(LOGTAG, "startBackgroundThread"); - mBackgroundThread = new HandlerThread("CameraBackground"); - mBackgroundThread.start(); - mBackgroundHandler = new Handler(mBackgroundThread.getLooper()); - } - - private void stopBackgroundThread() { - Log.i(LOGTAG, "stopBackgroundThread"); - if(mBackgroundThread == null) - return; - mBackgroundThread.quitSafely(); - try { - mBackgroundThread.join(); - mBackgroundThread = null; - mBackgroundHandler = null; - } catch (InterruptedException e) { - Log.e(LOGTAG, "stopBackgroundThread"); - } - } - - @Override - protected void setCameraPreviewSize(int width, int height) { - Log.i(LOGTAG, "setCameraPreviewSize("+width+"x"+height+")"); - try { - mCameraOpenCloseLock.acquire(); - if( !cacPreviewSize(width, height) ) { - mCameraOpenCloseLock.release(); - return; - } - if (null != mCaptureSession) { - Log.d(LOGTAG, "closing existing previewSession"); - mCaptureSession.close(); - mCaptureSession = null; - } - mCameraOpenCloseLock.release(); - createCameraPreviewSession(); - } catch (InterruptedException e) { - mCameraOpenCloseLock.release(); - throw new RuntimeException( - "Interrupted while setCameraPreviewSize.", e); - } - } -} diff --git a/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/CameraRenderer.java b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/CameraRenderer.java deleted file mode 100644 index 692ab9884..000000000 --- a/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/CameraRenderer.java +++ /dev/null @@ -1,75 +0,0 @@ -package org.opencv.samples.tutorial4; - -import java.io.IOException; -import java.util.List; - -import android.hardware.Camera; -import android.hardware.Camera.Size; -import android.util.Log; - -@SuppressWarnings("deprecation") -public class CameraRenderer extends MyGLRendererBase { - - protected final String LOGTAG = "CameraRenderer"; - private Camera mCamera; - boolean mPreviewStarted = false; - - CameraRenderer(MyGLSurfaceView view) { - super(view); - } - - protected void closeCamera() { - Log.i(LOGTAG, "closeCamera"); - if(mCamera != null) { - mCamera.stopPreview(); - mPreviewStarted = false; - mCamera.release(); - mCamera = null; - } - } - - protected void openCamera() { - Log.i(LOGTAG, "openCamera"); - closeCamera(); - mCamera = Camera.open(); - try { - mCamera.setPreviewTexture(mSTex); - } catch (IOException ioe) { - Log.e(LOGTAG, "setPreviewTexture() failed: " + ioe.getMessage()); - } - } - - public void setCameraPreviewSize(int width, int height) { - Log.i(LOGTAG, "setCameraPreviewSize: "+width+"x"+height); - if(mCamera == null) - return; - if(mPreviewStarted) { - mCamera.stopPreview(); - mPreviewStarted = false; - } - Camera.Parameters param = mCamera.getParameters(); - List psize = param.getSupportedPreviewSizes(); - int bestWidth = 0, bestHeight = 0; - if (psize.size() > 0) { - float aspect = (float)width / height; - for (Size size : 
psize) { - int w = size.width, h = size.height; - Log.d("Renderer", "checking camera preview size: "+w+"x"+h); - if ( w <= width && h <= height && - w >= bestWidth && h >= bestHeight && - Math.abs(aspect - (float)w/h) < 0.2 ) { - bestWidth = w; - bestHeight = h; - } - } - if(bestWidth > 0 && bestHeight > 0) { - param.setPreviewSize(bestWidth, bestHeight); - Log.i(LOGTAG, "size: "+bestWidth+" x "+bestHeight); - } - } - param.set("orientation", "landscape"); - mCamera.setParameters(param); - mCamera.startPreview(); - mPreviewStarted = true; - } -} diff --git a/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/MyGLRendererBase.java b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/MyGLRendererBase.java deleted file mode 100644 index f3abe87b3..000000000 --- a/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/MyGLRendererBase.java +++ /dev/null @@ -1,117 +0,0 @@ -package org.opencv.samples.tutorial4; - -import javax.microedition.khronos.egl.EGLConfig; -import javax.microedition.khronos.opengles.GL10; - -import android.graphics.SurfaceTexture; -import android.opengl.GLES20; -import android.opengl.GLSurfaceView; -import android.os.Handler; -import android.os.Looper; -import android.util.Log; -import android.widget.TextView; - -public abstract class MyGLRendererBase implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener { - protected final String LOGTAG = "MyGLRendererBase"; - protected int frameCounter; - protected long lastNanoTime; - - protected SurfaceTexture mSTex; - protected MyGLSurfaceView mView; - protected TextView mFpsText; - - protected boolean mGLInit = false; - protected boolean mTexUpdate = false; - - MyGLRendererBase(MyGLSurfaceView view) { - mView = view; - } - - protected abstract void openCamera(); - protected abstract void closeCamera(); - protected abstract void setCameraPreviewSize(int width, int height); - - public void setFpsTextView(TextView fpsTV) - { - mFpsText = fpsTV; - } - - public void onResume() { - Log.i(LOGTAG, "onResume"); - frameCounter = 0; - lastNanoTime = System.nanoTime(); - } - - public void onPause() { - Log.i(LOGTAG, "onPause"); - mGLInit = false; - mTexUpdate = false; - closeCamera(); - if(mSTex != null) { - mSTex.release(); - mSTex = null; - NativeGLRenderer.closeGL(); - } - } - - @Override - public synchronized void onFrameAvailable(SurfaceTexture surfaceTexture) { - //Log.i(LOGTAG, "onFrameAvailable"); - mTexUpdate = true; - mView.requestRender(); - } - - @Override - public void onDrawFrame(GL10 gl) { - //Log.i(LOGTAG, "onDrawFrame"); - if (!mGLInit) - return; - - synchronized (this) { - if (mTexUpdate) { - mSTex.updateTexImage(); - mTexUpdate = false; - } - } - NativeGLRenderer.drawFrame(); - - // log FPS - frameCounter++; - if(frameCounter >= 10) - { - final int fps = (int) (frameCounter * 1e9 / (System.nanoTime() - lastNanoTime)); - Log.i(LOGTAG, "drawFrame() FPS: "+fps); - if(mFpsText != null) { - Runnable fpsUpdater = new Runnable() { - public void run() { - mFpsText.setText("FPS: " + fps); - } - }; - new Handler(Looper.getMainLooper()).post(fpsUpdater); - } - frameCounter = 0; - lastNanoTime = System.nanoTime(); - } - } - - @Override - public void onSurfaceChanged(GL10 gl, int surfaceWidth, int surfaceHeight) { - Log.i(LOGTAG, "onSurfaceChanged("+surfaceWidth+"x"+surfaceHeight+")"); - NativeGLRenderer.changeSize(surfaceWidth, surfaceHeight); - setCameraPreviewSize(surfaceWidth, surfaceHeight); - } - - @Override - public void onSurfaceCreated(GL10 gl, EGLConfig config) { - 
Log.i(LOGTAG, "onSurfaceCreated"); - String strGLVersion = GLES20.glGetString(GLES20.GL_VERSION); - if (strGLVersion != null) - Log.i(LOGTAG, "OpenGL ES version: " + strGLVersion); - - int hTex = NativeGLRenderer.initGL(); - mSTex = new SurfaceTexture(hTex); - mSTex.setOnFrameAvailableListener(this); - openCamera(); - mGLInit = true; - } -} diff --git a/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/MyGLSurfaceView.java b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/MyGLSurfaceView.java index 8556b4181..edaf34631 100644 --- a/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/MyGLSurfaceView.java +++ b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/MyGLSurfaceView.java @@ -1,59 +1,29 @@ package org.opencv.samples.tutorial4; +import org.opencv.android.CameraGLSurfaceView; + import android.app.Activity; import android.content.Context; -import android.opengl.GLSurfaceView; +import android.os.Handler; +import android.os.Looper; import android.util.AttributeSet; +import android.util.Log; import android.view.MotionEvent; import android.view.SurfaceHolder; import android.widget.TextView; +import android.widget.Toast; -public class MyGLSurfaceView extends GLSurfaceView { +public class MyGLSurfaceView extends CameraGLSurfaceView implements CameraGLSurfaceView.CameraTextureListener { - MyGLRendererBase mRenderer; + static final String LOGTAG = "MyGLSurfaceView"; + protected int procMode = NativePart.PROCESSING_MODE_NO_PROCESSING; + static final String[] procModeName = new String[] {"No Processing", "CPU", "OpenCL Direct", "OpenCL via OpenCV"}; + protected int frameCounter; + protected long lastNanoTime; + TextView mFpsText = null; public MyGLSurfaceView(Context context, AttributeSet attrs) { super(context, attrs); - - if(android.os.Build.VERSION.SDK_INT >= 21) - mRenderer = new Camera2Renderer(this); - else - mRenderer = new CameraRenderer(this); - - setEGLContextClientVersion(2); - setRenderer(mRenderer); - setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); - } - - public void setFpsTextView(TextView tv) { - mRenderer.setFpsTextView(tv); - } - - @Override - public void surfaceCreated(SurfaceHolder holder) { - super.surfaceCreated(holder); - } - - @Override - public void surfaceDestroyed(SurfaceHolder holder) { - super.surfaceDestroyed(holder); - } - - @Override - public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { - super.surfaceChanged(holder, format, w, h); - } - - @Override - public void onResume() { - super.onResume(); - mRenderer.onResume(); - } - - @Override - public void onPause() { - mRenderer.onPause(); - super.onPause(); } @Override @@ -62,4 +32,81 @@ public class MyGLSurfaceView extends GLSurfaceView { ((Activity)getContext()).openOptionsMenu(); return true; } + + @Override + public void surfaceCreated(SurfaceHolder holder) { + super.surfaceCreated(holder); + //NativePart.initCL(); + } + + @Override + public void surfaceDestroyed(SurfaceHolder holder) { + //NativePart.closeCL(); + super.surfaceDestroyed(holder); + } + + public void setProcessingMode(int newMode) { + if(newMode>=0 && newMode= 30) + { + final int fps = (int) (frameCounter * 1e9 / (System.nanoTime() - lastNanoTime)); + Log.i(LOGTAG, "drawFrame() FPS: "+fps); + if(mFpsText != null) { + Runnable fpsUpdater = new Runnable() { + public void run() { + mFpsText.setText("FPS: " + fps); + } + }; + new Handler(Looper.getMainLooper()).post(fpsUpdater); + } else { + Log.d(LOGTAG, "mFpsText == null"); + mFpsText = 
(TextView)((Activity) getContext()).findViewById(R.id.fps_text_view); + } + frameCounter = 0; + lastNanoTime = System.nanoTime(); + } + + + if(procMode == NativePart.PROCESSING_MODE_NO_PROCESSING) + return false; + + NativePart.processFrame(texIn, texOut, width, height, procMode); + return true; + } } diff --git a/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/NativeGLRenderer.java b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/NativePart.java similarity index 51% rename from samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/NativeGLRenderer.java rename to samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/NativePart.java index 8d9216c97..e5f11ba3a 100644 --- a/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/NativeGLRenderer.java +++ b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/NativePart.java @@ -1,10 +1,10 @@ package org.opencv.samples.tutorial4; -public class NativeGLRenderer { +public class NativePart { static { System.loadLibrary("opencv_java3"); - System.loadLibrary("JNIrender"); + System.loadLibrary("JNIpart"); } public static final int PROCESSING_MODE_NO_PROCESSING = 0; @@ -12,9 +12,7 @@ public class NativeGLRenderer { public static final int PROCESSING_MODE_OCL_DIRECT = 2; public static final int PROCESSING_MODE_OCL_OCV = 3; - public static native int initGL(); - public static native void closeGL(); - public static native void drawFrame(); - public static native void changeSize(int width, int height); - public static native void setProcessingMode(int mode); + public static native int initCL(); + public static native void closeCL(); + public static native void processFrame(int tex1, int tex2, int w, int h, int mode); } diff --git a/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/Tutorial4Activity.java b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/Tutorial4Activity.java index 56b416c80..0be55df65 100644 --- a/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/Tutorial4Activity.java +++ b/samples/android/tutorial-4-opencl/src/org/opencv/samples/tutorial4/Tutorial4Activity.java @@ -29,8 +29,8 @@ public class Tutorial4Activity extends Activity { //setContentView(mView); setContentView(R.layout.activity); mView = (MyGLSurfaceView) findViewById(R.id.my_gl_surface_view); + mView.setCameraTextureListener(mView); TextView tv = (TextView)findViewById(R.id.fps_text_view); - mView.setFpsTextView(tv); mProcMode = (TextView)findViewById(R.id.proc_mode_text_view); runOnUiThread(new Runnable() { public void run() { @@ -38,7 +38,8 @@ public class Tutorial4Activity extends Activity { } }); - NativeGLRenderer.setProcessingMode(NativeGLRenderer.PROCESSING_MODE_NO_PROCESSING); } + mView.setProcessingMode(NativePart.PROCESSING_MODE_NO_PROCESSING); + } @Override protected void onPause() { @@ -68,7 +69,7 @@ public class Tutorial4Activity extends Activity { mProcMode.setText("Processing mode: No Processing"); } }); - NativeGLRenderer.setProcessingMode(NativeGLRenderer.PROCESSING_MODE_NO_PROCESSING); + mView.setProcessingMode(NativePart.PROCESSING_MODE_NO_PROCESSING); return true; case R.id.cpu: runOnUiThread(new Runnable() { @@ -76,7 +77,7 @@ public class Tutorial4Activity extends Activity { mProcMode.setText("Processing mode: CPU"); } }); - NativeGLRenderer.setProcessingMode(NativeGLRenderer.PROCESSING_MODE_CPU); + mView.setProcessingMode(NativePart.PROCESSING_MODE_CPU); return true; case R.id.ocl_direct: runOnUiThread(new 
Runnable() { @@ -84,7 +85,7 @@ public class Tutorial4Activity extends Activity { mProcMode.setText("Processing mode: OpenCL direct"); } }); - NativeGLRenderer.setProcessingMode(NativeGLRenderer.PROCESSING_MODE_OCL_DIRECT); + mView.setProcessingMode(NativePart.PROCESSING_MODE_OCL_DIRECT); return true; case R.id.ocl_ocv: runOnUiThread(new Runnable() { @@ -92,7 +93,7 @@ public class Tutorial4Activity extends Activity { mProcMode.setText("Processing mode: OpenCL via OpenCV (TAPI)"); } }); - NativeGLRenderer.setProcessingMode(NativeGLRenderer.PROCESSING_MODE_OCL_OCV); + mView.setProcessingMode(NativePart.PROCESSING_MODE_OCL_OCV); return true; default: return false; From 23fea91e840960e666f00521b7ed77f847953d8a Mon Sep 17 00:00:00 2001 From: Andrey Pavlenko Date: Thu, 8 Oct 2015 12:21:05 +0300 Subject: [PATCH 4/5] minor fixes --- modules/java/CMakeLists.txt | 21 +++++++------- .../java/android+CameraGLRendererBase.java | 28 +++++++++++++++---- .../tutorial-4-opencl/jni/CLprocessor.cpp | 14 +++------- 3 files changed, 36 insertions(+), 27 deletions(-) diff --git a/modules/java/CMakeLists.txt b/modules/java/CMakeLists.txt index 3906040df..1a1512d36 100644 --- a/modules/java/CMakeLists.txt +++ b/modules/java/CMakeLists.txt @@ -179,19 +179,18 @@ if(NOT ANDROID) else() file(GLOB_RECURSE handwrittren_lib_project_files_rel RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}/android_lib/" "${CMAKE_CURRENT_SOURCE_DIR}/android_lib/*") list(REMOVE_ITEM handwrittren_lib_project_files_rel "${ANDROID_MANIFEST_FILE}") + + # calc default SDK Target + android_get_compatible_target(android_sdk_target ${ANDROID_NATIVE_API_LEVEL} ${ANDROID_SDK_TARGET} 11) + string(REGEX REPLACE "android-" "" android_sdk_target_num ${android_sdk_target}) + # filter out + if( (ANDROID_SDK_TARGET AND ANDROID_SDK_TARGET LESS 21) OR (android_sdk_target_num LESS 21) ) + message(STATUS "[OpenCV for Android SDK]: A new OpenGL Camera Bridge (CameraGLSurfaceView, CameraGLRendererBase, CameraRenderer, Camera2Renderer) is disabled, because ANDROID_SDK_TARGET (${android_sdk_target_num}) < 21") + ocv_list_filterout(handwritten_java_sources "android\\\\+CameraGL") + ocv_list_filterout(handwritten_java_sources "android\\\\+Camera.?Renderer") + endif() endif() -# Calc default SDK Target -android_get_compatible_target(android_sdk_target ${ANDROID_NATIVE_API_LEVEL} ${ANDROID_SDK_TARGET} 11) -string(REGEX REPLACE "android-" "" android_sdk_target_num ${android_sdk_target}) - -if( (ANDROID_SDK_TARGET AND ANDROID_SDK_TARGET LESS 21) OR (android_sdk_target_num LESS 21) ) - message(STATUS "[OpenCV for Android SDK]: A new OpenGL Camera Bridge (CameraGLSurfaceView, CameraGLRendererBase, CameraRenderer, Camera2Renderer) is disabled, because ANDROID_SDK_TARGET (${android_sdk_target_num}) < 21") - ocv_list_filterout(handwritten_java_sources "android\\\\+CameraGL") - ocv_list_filterout(handwritten_java_sources "android\\\\+Camera.?Renderer") -endif() - - # IMPORTANT: add dependencies to cmake (we should rerun cmake if any of these files is modified) add_cmake_dependencies(${scripts_gen_java} ${scripts_hdr_parser} ${opencv_public_headers}) diff --git a/modules/java/generator/src/java/android+CameraGLRendererBase.java b/modules/java/generator/src/java/android+CameraGLRendererBase.java index 3971d0cb4..60c37c304 100644 --- a/modules/java/generator/src/java/android+CameraGLRendererBase.java +++ b/modules/java/generator/src/java/android+CameraGLRendererBase.java @@ -219,9 +219,9 @@ public abstract class CameraGLRendererBase implements GLSurfaceView.Renderer, Su int vshader = 
GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER); GLES20.glShaderSource(vshader, vss); GLES20.glCompileShader(vshader); - int[] compiled = new int[1]; - GLES20.glGetShaderiv(vshader, GLES20.GL_COMPILE_STATUS, compiled, 0); - if (compiled[0] == 0) { + int[] status = new int[1]; + GLES20.glGetShaderiv(vshader, GLES20.GL_COMPILE_STATUS, status, 0); + if (status[0] == 0) { Log.e("CameraGLRendererBase", "Could not compile vertex shader: "+GLES20.glGetShaderInfoLog(vshader)); GLES20.glDeleteShader(vshader); vshader = 0; @@ -231,8 +231,8 @@ public abstract class CameraGLRendererBase implements GLSurfaceView.Renderer, Su int fshader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER); GLES20.glShaderSource(fshader, fss); GLES20.glCompileShader(fshader); - GLES20.glGetShaderiv(fshader, GLES20.GL_COMPILE_STATUS, compiled, 0); - if (compiled[0] == 0) { + GLES20.glGetShaderiv(fshader, GLES20.GL_COMPILE_STATUS, status, 0); + if (status[0] == 0) { Log.e("CameraGLRendererBase", "Could not compile fragment shader:"+GLES20.glGetShaderInfoLog(fshader)); GLES20.glDeleteShader(vshader); GLES20.glDeleteShader(fshader); @@ -244,9 +244,25 @@ public abstract class CameraGLRendererBase implements GLSurfaceView.Renderer, Su GLES20.glAttachShader(program, vshader); GLES20.glAttachShader(program, fshader); GLES20.glLinkProgram(program); - Log.d("CameraGLRendererBase", "shaders were compiled OK"); GLES20.glDeleteShader(vshader); GLES20.glDeleteShader(fshader); + GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0); + if (status[0] == 0) { + Log.e("CameraGLRendererBase", "Could not link shader program: "+GLES20.glGetProgramInfoLog(program)); + program = 0; + return 0; + } + GLES20.glValidateProgram(program); + GLES20.glGetProgramiv(program, GLES20.GL_VALIDATE_STATUS, status, 0); + if (status[0] == 0) + { + Log.e("CameraGLRendererBase", "Shader program validation error: "+GLES20.glGetProgramInfoLog(program)); + GLES20.glDeleteProgram(program); + program = 0; + return 0; + } + + Log.d("CameraGLRendererBase", "Shader program is built OK"); return program; } diff --git a/samples/android/tutorial-4-opencl/jni/CLprocessor.cpp b/samples/android/tutorial-4-opencl/jni/CLprocessor.cpp index b7bf530b9..b71dc1018 100644 --- a/samples/android/tutorial-4-opencl/jni/CLprocessor.cpp +++ b/samples/android/tutorial-4-opencl/jni/CLprocessor.cpp @@ -237,23 +237,17 @@ void drawFrameProcCPU(int w, int h, int texOut) int64_t t; // let's modify pixels in FBO texture in C++ code (on CPU) - const int BUFF_SIZE = 1<<24;//2k*2k*4; - static char tmpBuff[BUFF_SIZE]; - if(w*h > BUFF_SIZE) - { - LOGE("Internal temp buffer is too small, can't make CPU frame processing"); - return; - } + static cv::Mat m; + m.create(h, w, CV_8UC4); // read t = getTimeMs(); // expecting FBO to be bound - glReadPixels(0, 0, w, h, GL_RGBA, GL_UNSIGNED_BYTE, tmpBuff); + glReadPixels(0, 0, w, h, GL_RGBA, GL_UNSIGNED_BYTE, m.data); LOGD("glReadPixels() costs %d ms", getTimeInterval(t)); // modify t = getTimeMs(); - cv::Mat m(h, w, CV_8UC4, tmpBuff); cv::Laplacian(m, m, CV_8U); m *= 10; LOGD("Laplacian() costs %d ms", getTimeInterval(t)); @@ -262,7 +256,7 @@ void drawFrameProcCPU(int w, int h, int texOut) glActiveTexture(GL_TEXTURE0); glBindTexture(GL_TEXTURE_2D, texOut); t = getTimeMs(); - glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, GL_RGBA, GL_UNSIGNED_BYTE, tmpBuff); + glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, GL_RGBA, GL_UNSIGNED_BYTE, m.data); LOGD("glTexSubImage2D() costs %d ms", getTimeInterval(t)); } From 24ca6aaadab50aeab8cbe5e4a19a17ae75bd7a38 Mon Sep 
17 00:00:00 2001 From: Andrey Pavlenko Date: Thu, 8 Oct 2015 13:48:46 +0300 Subject: [PATCH 5/5] fix build with CMake; now it builds with the command: `cmake.exe -Wno-dev -GNinja -DCMAKE_MAKE_PROGRAM="path\to\ninja\ninja.exe" -DCMAKE_TOOLCHAIN_FILE=../opencv3/platforms/android/android.toolchain.cmake -DANDROID_ABI="armeabi-v7a with NEON" -DANDROID_SDK_TARGET=21 -DANDROID_NATIVE_API_LEVEL=14 -DCMAKE_BUILD_WITH_INSTALL_RPATH=ON -DBUILD_ANDROID_EXAMPLES=ON -DINSTALL_ANDROID_EXAMPLES=ON -DWITH_OPENCL=YES -DANDROID_OPENCL_SDK=path\to\OpenCL ../opencv` --- samples/android/tutorial-4-opencl/CMakeLists.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/samples/android/tutorial-4-opencl/CMakeLists.txt b/samples/android/tutorial-4-opencl/CMakeLists.txt index 96bacdeb4..e8036cfbd 100644 --- a/samples/android/tutorial-4-opencl/CMakeLists.txt +++ b/samples/android/tutorial-4-opencl/CMakeLists.txt @@ -15,11 +15,12 @@ else() endif() include_directories(${ANDROID_OPENCL_SDK}/include) -link_directories(${ANDROID_OPENCL_SDK}/lib/${ANDROID_ABI}) +link_directories(${ANDROID_OPENCL_SDK}/lib/${ANDROID_NDK_ABI_NAME}) add_android_project(${sample} "${CMAKE_CURRENT_SOURCE_DIR}" LIBRARY_DEPS ${OpenCV_BINARY_DIR} SDK_TARGET 21 ${ANDROID_SDK_TARGET} NATIVE_DEPS ${native_deps} -lGLESv2 -lEGL -lOpenCL + COPY_LIBS YES ) if(TARGET ${sample}) add_dependencies(opencv_android_examples ${sample})
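For orientation, below is a minimal sketch (not part of the patch series) of how an application can plug the refactored native processing into the new bridge: a `CameraGLSurfaceView.CameraTextureListener` that forwards each frame to `NativePart.processFrame()` and only claims the frame when a processing mode is active. `NativePart`, its `PROCESSING_MODE_*` constants, and the `processFrame(texIn, texOut, width, height, mode)` signature come from the patches above; the class name `NativeProcessingListener`, the `volatile` mode field, and the `setMode()` helper are illustrative assumptions.

```java
package org.opencv.samples.tutorial4;

import org.opencv.android.CameraGLSurfaceView;

// Hypothetical listener; the actual Tutorial-4 sample lets MyGLSurfaceView implement the interface itself.
public class NativeProcessingListener implements CameraGLSurfaceView.CameraTextureListener {
    // current processing mode, switchable from the UI thread
    private volatile int mMode = NativePart.PROCESSING_MODE_CPU;

    public void setMode(int mode) {
        mMode = mode;
    }

    @Override
    public void onCameraViewStarted(int width, int height) {
        // nothing to allocate here: all per-frame work happens in native code
    }

    @Override
    public void onCameraViewStopped() {
        // nothing to release here
    }

    @Override
    public boolean onCameraFrame(int texIn, int texOut, int width, int height) {
        if (mMode == NativePart.PROCESSING_MODE_NO_PROCESSING)
            return false; // let the bridge draw the camera texture (texIn) unmodified

        // texIn holds the current camera frame, texOut is the texture the bridge will display
        NativePart.processFrame(texIn, texOut, width, height, mMode);
        return true; // tell the bridge to draw texOut instead of texIn
    }
}
```

An instance would be registered with `setCameraTextureListener()` on the view, the same call the refactored `Tutorial4Activity` makes when it passes the view itself as the listener.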