Refactor Java rendering code.

Review URL: http://webrtc-codereview.appspot.com/25017

git-svn-id: http://webrtc.googlecode.com/svn/trunk@54 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
leozwang@google.com
2011-06-07 17:53:23 +00:00
parent ed7f027037
commit 7a60252e4f
3 changed files with 476 additions and 477 deletions

View File

@@ -1,262 +1,263 @@
/*
 * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.videoengine;

import java.util.concurrent.locks.ReentrantLock;

import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.opengles.GL10;

import android.app.ActivityManager;
import android.content.Context;
import android.content.pm.ConfigurationInfo;
import android.opengl.GLSurfaceView;
import android.util.Log;

// GLSurfaceView-based renderer that delegates the actual frame drawing to a
// native (C++) object via CreateOpenGLNative/DrawNative. All access to the
// native object is serialized with nativeFunctionLock because rendering runs
// on the GL thread while registration happens on another thread.
public class ViEAndroidGLES20 extends GLSurfaceView
    implements GLSurfaceView.Renderer {
  // True if onSurfaceCreated has been called.
  private boolean surfaceCreated = false;
  private boolean openGLCreated = false;
  // True if NativeFunctionsRegistered has been called.
  private boolean nativeFunctionsRegisted = false;
  private ReentrantLock nativeFunctionLock = new ReentrantLock();
  // Address of Native object that will do the drawing.
  private long nativeObject = 0;
  private int viewWidth = 0;
  private int viewHeight = 0;

  // Returns true if renderWindow is an instance of this GLES2 renderer.
  public static boolean UseOpenGL2(Object renderWindow) {
    return ViEAndroidGLES20.class.isInstance(renderWindow);
  }

  public ViEAndroidGLES20(Context context) {
    super(context);

    // Setup the context factory for 2.0 rendering.
    // See ContextFactory class definition below.
    setEGLContextFactory(new ContextFactory());

    // We need to choose an EGLConfig that matches the format of
    // our surface exactly. This is going to be done in our
    // custom config chooser. See ConfigChooser class definition below.
    // Use RGB 565 without an alpha channel.
    setEGLConfigChooser(new ConfigChooser(5, 6, 5, 0, 0, 0));

    this.setRenderer(this);
    this.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
  }

  // IsSupported
  // Return true if this device support Open GL ES 2.0 rendering.
  public static boolean IsSupported(Context context) {
    ActivityManager am =
        (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
    ConfigurationInfo info = am.getDeviceConfigurationInfo();
    if(info.reqGlEsVersion >= 0x20000) {
      // Open GL ES 2.0 is supported.
      return true;
    }
    return false;
  }

  // Called on the GL thread for every frame; lazily creates the native GL
  // resources on first draw, then forwards to the native drawing routine.
  public void onDrawFrame(GL10 gl) {
    nativeFunctionLock.lock();
    if(!nativeFunctionsRegisted || !surfaceCreated) {
      nativeFunctionLock.unlock();
      return;
    }

    if(!openGLCreated) {
      if(0 != CreateOpenGLNative(nativeObject, viewWidth, viewHeight)) {
        nativeFunctionLock.unlock(); // Failed to create OpenGL.
        return;
      }
      openGLCreated = true; // Created OpenGL successfully.
    }
    DrawNative(nativeObject); // Draw the new frame.
    nativeFunctionLock.unlock();
  }

  public void onSurfaceChanged(GL10 gl, int width, int height) {
    surfaceCreated = true;
    viewWidth = width;
    viewHeight = height;
    nativeFunctionLock.lock();
    if(nativeFunctionsRegisted) {
      if(CreateOpenGLNative(nativeObject, width, height) == 0)
        openGLCreated = true;
    }
    nativeFunctionLock.unlock();
  }

  public void onSurfaceCreated(GL10 gl, EGLConfig config) {
  }

  // Stores the address of the native renderer object. Must be called before
  // any frame can be drawn.
  public void RegisterNativeObject(long nativeObject) {
    nativeFunctionLock.lock();
    // BUG FIX: the parameter shadows the field; a bare
    // "nativeObject = nativeObject" was a no-op self-assignment.
    this.nativeObject = nativeObject;
    nativeFunctionsRegisted = true;
    nativeFunctionLock.unlock();
  }

  public void DeRegisterNativeObject() {
    nativeFunctionLock.lock();
    nativeFunctionsRegisted = false;
    openGLCreated = false;
    nativeObject = 0;
    nativeFunctionLock.unlock();
  }

  public void ReDraw() {
    if(surfaceCreated) {
      // Request the renderer to redraw using the render thread context.
      this.requestRender();
    }
  }

  // EGL Context factory used for creating EGL 2.0 context
  // on Android 2.1 (and later, though there are simpler ways in 2.2).
  // Code is from the NDK samples\hello-gl2\src\com\android\gl2jni.
  private static class ContextFactory
      implements GLSurfaceView.EGLContextFactory {
    private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
    public EGLContext createContext(EGL10 egl,
                                    EGLDisplay display,
                                    EGLConfig eglConfig) {
      int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
      // Create an Open GL ES 2.0 context.
      EGLContext context = egl.eglCreateContext(display,
                                                eglConfig,
                                                EGL10.EGL_NO_CONTEXT,
                                                attrib_list);
      checkEglError("ContextFactory eglCreateContext", egl);
      return context;
    }

    public void destroyContext(EGL10 egl, EGLDisplay display,
                               EGLContext context) {
      egl.eglDestroyContext(display, context);
    }
  }

  // Logs (and drains) any pending EGL errors.
  private static void checkEglError(String prompt, EGL10 egl) {
    int error;
    while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
      Log.e("*WEBRTC*", String.format("%s: EGL error: 0x%x", prompt, error));
    }
  }

  // Code is from the NDK samples\hello-gl2\src\com\android\gl2jni.
  private static class ConfigChooser
      implements GLSurfaceView.EGLConfigChooser {

    public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
      mRedSize = r;
      mGreenSize = g;
      mBlueSize = b;
      mAlphaSize = a;
      mDepthSize = depth;
      mStencilSize = stencil;
    }

    // This EGL config specification is used to specify 2.0 rendering.
    // We use a minimum size of 4 bits for red/green/blue, but will
    // perform actual matching in chooseConfig() below.
    private static int EGL_OPENGL_ES2_BIT = 4;
    private static int[] s_configAttribs2 =
    {
      EGL10.EGL_RED_SIZE, 4,
      EGL10.EGL_GREEN_SIZE, 4,
      EGL10.EGL_BLUE_SIZE, 4,
      EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
      EGL10.EGL_NONE
    };

    public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
      // Get the number of minimally matching EGL configurations.
      int[] num_config = new int[1];
      egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
      int numConfigs = num_config[0];
      if (numConfigs <= 0) {
        throw new IllegalArgumentException("No configs match configSpec");
      }
      // Allocate then read the array of minimally matching EGL configs.
      EGLConfig[] configs = new EGLConfig[numConfigs];
      egl.eglChooseConfig(display, s_configAttribs2, configs,
                          numConfigs, num_config);
      // Now return the "best" one.
      return chooseConfig(egl, display, configs);
    }

    public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
                                  EGLConfig[] configs) {
      for(EGLConfig config : configs) {
        int d = findConfigAttrib(egl, display, config,
                                 EGL10.EGL_DEPTH_SIZE, 0);
        int s = findConfigAttrib(egl, display, config,
                                 EGL10.EGL_STENCIL_SIZE, 0);

        // We need at least mDepthSize and mStencilSize bits.
        if (d < mDepthSize || s < mStencilSize)
          continue;

        // We want an *exact* match for red/green/blue/alpha.
        int r = findConfigAttrib(egl, display, config,
                                 EGL10.EGL_RED_SIZE, 0);
        int g = findConfigAttrib(egl, display, config,
                                 EGL10.EGL_GREEN_SIZE, 0);
        int b = findConfigAttrib(egl, display, config,
                                 EGL10.EGL_BLUE_SIZE, 0);
        int a = findConfigAttrib(egl, display, config,
                                 EGL10.EGL_ALPHA_SIZE, 0);

        if (r == mRedSize && g == mGreenSize &&
            b == mBlueSize && a == mAlphaSize)
          return config;
      }
      return null;
    }

    private int findConfigAttrib(EGL10 egl, EGLDisplay display,
                                 EGLConfig config, int attribute,
                                 int defaultValue) {
      if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
        return mValue[0];
      }
      return defaultValue;
    }

    // Subclasses can adjust these values:
    protected int mRedSize;
    protected int mGreenSize;
    protected int mBlueSize;
    protected int mAlphaSize;
    protected int mDepthSize;
    protected int mStencilSize;
    private int[] mValue = new int[1];
  }

  private native int CreateOpenGLNative(long nativeObject,
                                        int width, int height);
  private native void DrawNative(long nativeObject);
}

View File

@@ -1,50 +1,58 @@
package org.webrtc.videoengine; /*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
import android.content.Context; *
import android.view.SurfaceHolder; * Use of this source code is governed by a BSD-style license
import android.view.SurfaceView; * that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
public class ViERenderer { * in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
private static SurfaceHolder g_localRenderer; // View used for local rendering that Cameras can use for Video Overlay. */
public static SurfaceView CreateRenderer(Context context) package org.webrtc.videoengine;
{
return CreateRenderer(context,false); import android.content.Context;
} import android.view.SurfaceHolder;
public static SurfaceView CreateRenderer(Context context, boolean useOpenGLES2) import android.view.SurfaceView;
{
if(useOpenGLES2==true && ViEAndroidGLES20.IsSupported(context)) public class ViERenderer {
return new ViEAndroidGLES20(context);
else // View used for local rendering that Cameras can use for Video Overlay.
return new SurfaceView(context); private static SurfaceHolder g_localRenderer;
}
public static SurfaceView CreateRenderer(Context context) {
/* return CreateRenderer(context,false);
* Creates a SurfaceView to be used by Android Camera service to display a local preview. }
* This needs to be used on Android prior to version 2.1 in order to run the camera.
* Call this function before ViECapture::StartCapture. public static SurfaceView CreateRenderer(Context context,
* The created view needs to be added to a visible layout after a camera has been allocated (with the call ViECapture::AllocateCaptureDevice). boolean useOpenGLES2) {
* if(useOpenGLES2 == true && ViEAndroidGLES20.IsSupported(context))
* IE. return new ViEAndroidGLES20(context);
* CreateLocalRenderer else
* ViECapture::AllocateCaptureDevice return new SurfaceView(context);
* LinearLayout.addview }
* ViECapture::StartCapture
* // Creates a SurfaceView to be used by Android Camera
*/ // service to display a local preview.
public static SurfaceView CreateLocalRenderer(Context context) // This needs to be used on Android prior to version 2.1
{ // in order to run the camera.
SurfaceView localRender= new SurfaceView(context); // Call this function before ViECapture::StartCapture.
g_localRenderer=localRender.getHolder(); // The created view needs to be added to a visible layout
// after a camera has been allocated
g_localRenderer.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); // (with the call ViECapture::AllocateCaptureDevice).
return localRender; // IE.
} // CreateLocalRenderer
// ViECapture::AllocateCaptureDevice
public static SurfaceHolder GetLocalRenderer() // LinearLayout.addview
{ // ViECapture::StartCapture
return g_localRenderer; public static SurfaceView CreateLocalRenderer(Context context) {
} SurfaceView localRender = new SurfaceView(context);
g_localRenderer = localRender.getHolder();
} g_localRenderer.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
return localRender;
}
public static SurfaceHolder GetLocalRenderer() {
return g_localRenderer;
}
}

View File

@@ -1,165 +1,155 @@
/*
 * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.videoengine;

import java.nio.ByteBuffer;

import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.SurfaceHolder.Callback;

// Software renderer: frames are delivered as RGB565 pixels in a direct
// ByteBuffer, copied into a Bitmap and blitted onto the SurfaceView canvas.
public class ViESurfaceRenderer implements Callback {

  // the bitmap used for drawing.
  private Bitmap bitmap = null;
  private ByteBuffer byteBuffer;
  private SurfaceHolder surfaceHolder;
  // Rect of the source bitmap to draw
  private Rect srcRect = new Rect();
  // Rect of the destination canvas to draw to
  private Rect dstRect = new Rect();
  private int dstHeight = 0;
  private int dstWidth = 0;
  // Destination rectangle expressed as fractions of the surface size,
  // set by SetCoordinates().
  private float dstTopScale = 0;
  private float dstBottomScale = 1;
  private float dstLeftScale = 0;
  private float dstRightScale = 1;

  public ViESurfaceRenderer(SurfaceView view) {
    surfaceHolder = view.getHolder();
    if(surfaceHolder == null)
      return;

    // Seed dstRect/dstWidth/dstHeight from the current surface frame, if
    // the surface is already available.
    Canvas canvas = surfaceHolder.lockCanvas();
    if(canvas != null) {
      Rect dst = surfaceHolder.getSurfaceFrame();
      if(dst != null) {
        dstRect = dst;
        dstHeight = dstRect.bottom - dstRect.top;
        dstWidth = dstRect.right - dstRect.left;
      }
      surfaceHolder.unlockCanvasAndPost(canvas);
    }
    surfaceHolder.addCallback(this);
  }

  // Recomputes the destination rectangle when the surface size changes,
  // keeping the fractional coordinates set via SetCoordinates().
  public void surfaceChanged(SurfaceHolder holder, int format,
                             int in_width, int in_height) {
    dstHeight = in_height;
    dstWidth = in_width;
    dstRect.left = (int)(dstLeftScale * dstWidth);
    dstRect.top = (int)(dstTopScale * dstHeight);
    dstRect.bottom = (int)(dstBottomScale * dstHeight);
    dstRect.right = (int)(dstRightScale * dstWidth);
  }

  public void surfaceCreated(SurfaceHolder holder) {
    // TODO(leozwang) Auto-generated method stub
  }

  public void surfaceDestroyed(SurfaceHolder holder) {
    // TODO(leozwang) Auto-generated method stub
  }

  // Allocates the RGB565 bitmap used for drawing and sets the source rect
  // to cover it entirely.
  public Bitmap CreateBitmap(int width, int height) {
    if (bitmap == null) {
      try {
        android.os.Process.setThreadPriority(
            android.os.Process.THREAD_PRIORITY_DISPLAY);
      }
      catch (Exception e) {
        // Best effort only: raising thread priority may be denied;
        // rendering still works at default priority.
      }
    }
    bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
    srcRect.left = 0;
    srcRect.top = 0;
    srcRect.bottom = height;
    srcRect.right = width;

    return bitmap;
  }

  // Allocates the bitmap plus a direct byte buffer (2 bytes/pixel for
  // RGB565) that native code fills with frame data. Returns null on
  // allocation failure.
  public ByteBuffer CreateByteBuffer(int width, int height) {
    if (bitmap == null) {
      try {
        android.os.Process.setThreadPriority(
            android.os.Process.THREAD_PRIORITY_DISPLAY);
      }
      catch (Exception e) {
        // Best effort only: raising thread priority may be denied.
      }
    }

    try {
      bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
      byteBuffer = ByteBuffer.allocateDirect(width * height * 2);
      srcRect.left = 0;
      srcRect.top = 0;
      srcRect.bottom = height;
      srcRect.right = width;
    }
    catch (Exception ex) {
      Log.e("*WEBRTC*", "Failed to CreateByteBuffer");
      bitmap = null;
      byteBuffer = null;
    }

    return byteBuffer;
  }

  // Sets the destination rectangle as fractions (0..1) of the surface size.
  public void SetCoordinates(float left, float top,
                             float right, float bottom) {
    dstLeftScale = left;
    dstTopScale = top;
    dstRightScale = right;
    dstBottomScale = bottom;

    dstRect.left = (int)(dstLeftScale * dstWidth);
    dstRect.top = (int)(dstTopScale * dstHeight);
    dstRect.bottom = (int)(dstBottomScale * dstHeight);
    dstRect.right = (int)(dstRightScale * dstWidth);
  }

  // Copies the pixel buffer into the bitmap and draws it. No-op until
  // CreateByteBuffer has succeeded (bitmap and byteBuffer are allocated
  // together, so a non-null byteBuffer implies a non-null bitmap).
  public void DrawByteBuffer() {
    if(byteBuffer == null)
      return;
    byteBuffer.rewind();
    bitmap.copyPixelsFromBuffer(byteBuffer);
    DrawBitmap();
  }

  public void DrawBitmap() {
    if(bitmap == null)
      return;
    Canvas canvas = surfaceHolder.lockCanvas();
    if(canvas != null) {
      canvas.drawBitmap(bitmap, srcRect, dstRect, null);
      surfaceHolder.unlockCanvasAndPost(canvas);
    }
  }
}