Refactor the Java rendering code: reformat ViEAndroidGLES20, ViERenderer and ViESurfaceRenderer to Java style conventions.

Review URL: http://webrtc-codereview.appspot.com/25017

git-svn-id: http://webrtc.googlecode.com/svn/trunk@54 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
leozwang@google.com 2011-06-07 17:53:23 +00:00
parent ed7f027037
commit 7a60252e4f
3 changed files with 476 additions and 477 deletions

View File

@@ -1,262 +1,263 @@
package org.webrtc.videoengine;
import java.util.concurrent.locks.ReentrantLock;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.opengles.GL10;
import android.app.ActivityManager;
import android.content.Context;
import android.content.pm.ConfigurationInfo;
import android.opengl.GLSurfaceView;
import android.util.Log;
public class ViEAndroidGLES20 extends GLSurfaceView
implements GLSurfaceView.Renderer
{
private boolean _surfaceCreated=false; // True if onSurfaceCreated has been called.
private boolean _openGLCreated=false;
private boolean _nativeFunctionsRegisted=false; // True if NativeFunctionsRegistered has been called.
private ReentrantLock _nativeFunctionLock = new ReentrantLock();
private long _nativeObject=0; // Address of Native object that will do the drawing.
private int _viewWidth=0;
private int _viewHeight=0;
public static boolean UseOpenGL2(Object renderWindow)
{
return ViEAndroidGLES20.class.isInstance(renderWindow);
}
public ViEAndroidGLES20(Context context) {
super(context);
/* Setup the context factory for 2.0 rendering.
* See ContextFactory class definition below
*/
setEGLContextFactory(new ContextFactory());
/* We need to choose an EGLConfig that matches the format of
* our surface exactly. This is going to be done in our
* custom config chooser. See ConfigChooser class definition
* below.
*/
setEGLConfigChooser( new ConfigChooser(5, 6, 5, 0, 0, 0) ); // Use RGB 565 without an alpha channel.
this.setRenderer(this);
this.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
/* IsSupported
* Return true if this device support Open GL ES 2.0 rendering.
*/
public static boolean IsSupported(Context context)
{
ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
ConfigurationInfo info = am.getDeviceConfigurationInfo();
if(info.reqGlEsVersion >= 0x20000) // Open GL ES 2.0 is supported.
{
return true;
}
return false;
}
public void onDrawFrame(GL10 gl) {
_nativeFunctionLock.lock();
if(!_nativeFunctionsRegisted || !_surfaceCreated)
{
_nativeFunctionLock.unlock();
return;
}
if(!_openGLCreated)
{
if(0!=CreateOpenGLNative(_nativeObject,_viewWidth,_viewHeight))
{
return; // Failed to create OpenGL
}
_openGLCreated=true; // Created OpenGL successfully
}
DrawNative(_nativeObject); // Draw the new frame
_nativeFunctionLock.unlock();
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
_surfaceCreated=true;
_viewWidth=width;
_viewHeight=height;
_nativeFunctionLock.lock();
if(_nativeFunctionsRegisted)
{
if(CreateOpenGLNative(_nativeObject,width,height)==0)
_openGLCreated=true;
}
_nativeFunctionLock.unlock();
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
}
public void RegisterNativeObject(long nativeObject)
{
_nativeFunctionLock.lock();
_nativeObject=nativeObject;
_nativeFunctionsRegisted=true;
_nativeFunctionLock.unlock();
}
public void DeRegisterNativeObject()
{
_nativeFunctionLock.lock();
_nativeFunctionsRegisted=false;
_openGLCreated=false;
_nativeObject=0;
_nativeFunctionLock.unlock();
}
public void ReDraw()
{
if(_surfaceCreated)
this.requestRender(); // Request the renderer to redraw using the render thread context.
}
/*
* EGL Context factory used for creating EGL 2.0 context on Android 2.1(and later, though there are simpler ways in 2.2)
* Code is from the NDK samples\hello-gl2\src\com\android\gl2jni.
*/
private static class ContextFactory implements GLSurfaceView.EGLContextFactory {
private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
//checkEglError("Before eglCreateContext", egl);
int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE }; // Create an Open GL ES 2.0 context
EGLContext context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);
checkEglError("ContextFactory eglCreateContext", egl);
return context;
}
public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
egl.eglDestroyContext(display, context);
}
}
private static void checkEglError(String prompt, EGL10 egl) {
int error;
while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
Log.e("*WEBRTC*", String.format("%s: EGL error: 0x%x", prompt, error));
}
}
/* Code is from the NDK samples\hello-gl2\src\com\android\gl2jni.*/
private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser {
public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
mRedSize = r;
mGreenSize = g;
mBlueSize = b;
mAlphaSize = a;
mDepthSize = depth;
mStencilSize = stencil;
}
/* This EGL config specification is used to specify 2.0 rendering.
* We use a minimum size of 4 bits for red/green/blue, but will
* perform actual matching in chooseConfig() below.
*/
private static int EGL_OPENGL_ES2_BIT = 4;
private static int[] s_configAttribs2 =
{
EGL10.EGL_RED_SIZE, 4,
EGL10.EGL_GREEN_SIZE, 4,
EGL10.EGL_BLUE_SIZE, 4,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_NONE
};
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
/* Get the number of minimally matching EGL configurations
*/
int[] num_config = new int[1];
egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
int numConfigs = num_config[0];
if (numConfigs <= 0) {
throw new IllegalArgumentException("No configs match configSpec");
}
/* Allocate then read the array of minimally matching EGL configs
*/
EGLConfig[] configs = new EGLConfig[numConfigs];
egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config);
/* Now return the "best" one
*/
return chooseConfig(egl, display, configs);
}
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
EGLConfig[] configs) {
for(EGLConfig config : configs) {
int d = findConfigAttrib(egl, display, config,
EGL10.EGL_DEPTH_SIZE, 0);
int s = findConfigAttrib(egl, display, config,
EGL10.EGL_STENCIL_SIZE, 0);
// We need at least mDepthSize and mStencilSize bits
if (d < mDepthSize || s < mStencilSize)
continue;
// We want an *exact* match for red/green/blue/alpha
int r = findConfigAttrib(egl, display, config,
EGL10.EGL_RED_SIZE, 0);
int g = findConfigAttrib(egl, display, config,
EGL10.EGL_GREEN_SIZE, 0);
int b = findConfigAttrib(egl, display, config,
EGL10.EGL_BLUE_SIZE, 0);
int a = findConfigAttrib(egl, display, config,
EGL10.EGL_ALPHA_SIZE, 0);
if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize)
return config;
}
return null;
}
private int findConfigAttrib(EGL10 egl, EGLDisplay display,
EGLConfig config, int attribute, int defaultValue) {
if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
return mValue[0];
}
return defaultValue;
}
// Subclasses can adjust these values:
protected int mRedSize;
protected int mGreenSize;
protected int mBlueSize;
protected int mAlphaSize;
protected int mDepthSize;
protected int mStencilSize;
private int[] mValue = new int[1];
}
private native int CreateOpenGLNative(long nativeObject,int width, int height);
private native void DrawNative(long nativeObject);
}
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.videoengine;
import java.util.concurrent.locks.ReentrantLock;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.opengles.GL10;
import android.app.ActivityManager;
import android.content.Context;
import android.content.pm.ConfigurationInfo;
import android.opengl.GLSurfaceView;
import android.util.Log;
// GLSurfaceView that delegates all frame drawing to a native renderer
// via the CreateOpenGLNative()/DrawNative() JNI methods.
public class ViEAndroidGLES20 extends GLSurfaceView
        implements GLSurfaceView.Renderer {
    // True once onSurfaceChanged has supplied valid view dimensions.
    private boolean surfaceCreated = false;
    // True once CreateOpenGLNative has succeeded on the render thread.
    private boolean openGLCreated = false;
    // True if RegisterNativeObject has been called.
    private boolean nativeFunctionsRegisted = false;
    // Guards nativeObject and the flags above, which are accessed from both
    // the GL render thread and the threads calling (De)RegisterNativeObject.
    private ReentrantLock nativeFunctionLock = new ReentrantLock();
    // Address of the native object that will do the drawing.
    private long nativeObject = 0;
    private int viewWidth = 0;
    private int viewHeight = 0;

    // Returns true if renderWindow is a ViEAndroidGLES20 view.
    public static boolean UseOpenGL2(Object renderWindow) {
        return ViEAndroidGLES20.class.isInstance(renderWindow);
    }

    public ViEAndroidGLES20(Context context) {
        super(context);
        // Setup the context factory for 2.0 rendering.
        // See ContextFactory class definition below.
        setEGLContextFactory(new ContextFactory());
        // We need to choose an EGLConfig that matches the format of our
        // surface exactly: RGB 565 without an alpha channel. This is done
        // in our custom config chooser, see ConfigChooser below.
        setEGLConfigChooser(new ConfigChooser(5, 6, 5, 0, 0, 0));
        this.setRenderer(this);
        // Only redraw when ReDraw() explicitly requests it.
        this.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    }

    // Returns true if this device supports OpenGL ES 2.0 rendering.
    public static boolean IsSupported(Context context) {
        ActivityManager am =
            (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
        ConfigurationInfo info = am.getDeviceConfigurationInfo();
        // 0x20000 encodes OpenGL ES major version 2.
        return info.reqGlEsVersion >= 0x20000;
    }

    // GLSurfaceView.Renderer callback: lazily creates the native GL state,
    // then asks the native side to draw the current frame.
    public void onDrawFrame(GL10 gl) {
        nativeFunctionLock.lock();
        // try/finally guarantees the lock is released on every exit path;
        // the previous version leaked it when CreateOpenGLNative failed,
        // deadlocking all later callers.
        try {
            if (!nativeFunctionsRegisted || !surfaceCreated) {
                return;
            }
            if (!openGLCreated) {
                if (0 != CreateOpenGLNative(nativeObject, viewWidth, viewHeight)) {
                    return; // Failed to create OpenGL
                }
                openGLCreated = true; // Created OpenGL successfully
            }
            DrawNative(nativeObject); // Draw the new frame
        } finally {
            nativeFunctionLock.unlock();
        }
    }

    public void onSurfaceChanged(GL10 gl, int width, int height) {
        surfaceCreated = true;
        viewWidth = width;
        viewHeight = height;
        nativeFunctionLock.lock();
        try {
            if (nativeFunctionsRegisted) {
                if (CreateOpenGLNative(nativeObject, width, height) == 0) {
                    openGLCreated = true;
                }
            }
        } finally {
            nativeFunctionLock.unlock();
        }
    }

    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    }

    // Stores the address of the native renderer object.
    public void RegisterNativeObject(long nativeObject) {
        nativeFunctionLock.lock();
        // Bug fix: this used to read "nativeObject = nativeObject" — a
        // self-assignment of the parameter that left the field at 0.
        this.nativeObject = nativeObject;
        nativeFunctionsRegisted = true;
        nativeFunctionLock.unlock();
    }

    public void DeRegisterNativeObject() {
        nativeFunctionLock.lock();
        nativeFunctionsRegisted = false;
        openGLCreated = false;
        nativeObject = 0;
        nativeFunctionLock.unlock();
    }

    public void ReDraw() {
        if (surfaceCreated) {
            // Request the renderer to redraw using the render thread context.
            this.requestRender();
        }
    }

    // EGL Context factory used for creating an EGL 2.0 context
    // on Android 2.1 and later (there are simpler ways in 2.2).
    // Code is from the NDK samples\hello-gl2\src\com\android\gl2jni.
    private static class ContextFactory
            implements GLSurfaceView.EGLContextFactory {
        // EGL_CONTEXT_CLIENT_VERSION attribute key; not exposed by EGL10.
        private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
        public EGLContext createContext(EGL10 egl,
                                        EGLDisplay display,
                                        EGLConfig eglConfig) {
            //checkEglError("Before eglCreateContext", egl);
            // Create an Open GL ES 2.0 context.
            int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
            EGLContext context = egl.eglCreateContext(display,
                                                      eglConfig,
                                                      EGL10.EGL_NO_CONTEXT,
                                                      attrib_list);
            checkEglError("ContextFactory eglCreateContext", egl);
            return context;
        }
        public void destroyContext(EGL10 egl, EGLDisplay display,
                                   EGLContext context) {
            egl.eglDestroyContext(display, context);
        }
    }

    // Logs and drains every pending EGL error.
    private static void checkEglError(String prompt, EGL10 egl) {
        int error;
        while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
            Log.e("*WEBRTC*", String.format("%s: EGL error: 0x%x", prompt, error));
        }
    }

    // Code is from the NDK samples\hello-gl2\src\com\android\gl2jni
    private static class ConfigChooser
            implements GLSurfaceView.EGLConfigChooser {
        public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
            mRedSize = r;
            mGreenSize = g;
            mBlueSize = b;
            mAlphaSize = a;
            mDepthSize = depth;
            mStencilSize = stencil;
        }
        // This EGL config specification is used to specify 2.0 rendering.
        // We use a minimum size of 4 bits for red/green/blue, but will
        // perform actual matching in chooseConfig() below.
        private static final int EGL_OPENGL_ES2_BIT = 4;
        private static final int[] s_configAttribs2 =
        {
            EGL10.EGL_RED_SIZE, 4,
            EGL10.EGL_GREEN_SIZE, 4,
            EGL10.EGL_BLUE_SIZE, 4,
            EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
            EGL10.EGL_NONE
        };
        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
            // Get the number of minimally matching EGL configurations.
            int[] num_config = new int[1];
            egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
            int numConfigs = num_config[0];
            if (numConfigs <= 0) {
                throw new IllegalArgumentException("No configs match configSpec");
            }
            // Allocate then read the array of minimally matching EGL configs.
            EGLConfig[] configs = new EGLConfig[numConfigs];
            egl.eglChooseConfig(display, s_configAttribs2, configs,
                                numConfigs, num_config);
            // Now return the "best" one.
            return chooseConfig(egl, display, configs);
        }
        // Returns the first config with at least the requested depth/stencil
        // bits and exactly the requested RGBA sizes, or null if none matches.
        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
                                      EGLConfig[] configs) {
            for (EGLConfig config : configs) {
                int d = findConfigAttrib(egl, display, config,
                                         EGL10.EGL_DEPTH_SIZE, 0);
                int s = findConfigAttrib(egl, display, config,
                                         EGL10.EGL_STENCIL_SIZE, 0);
                // We need at least mDepthSize and mStencilSize bits.
                if (d < mDepthSize || s < mStencilSize) {
                    continue;
                }
                // We want an *exact* match for red/green/blue/alpha.
                int r = findConfigAttrib(egl, display, config,
                                         EGL10.EGL_RED_SIZE, 0);
                int g = findConfigAttrib(egl, display, config,
                                         EGL10.EGL_GREEN_SIZE, 0);
                int b = findConfigAttrib(egl, display, config,
                                         EGL10.EGL_BLUE_SIZE, 0);
                int a = findConfigAttrib(egl, display, config,
                                         EGL10.EGL_ALPHA_SIZE, 0);
                if (r == mRedSize && g == mGreenSize &&
                    b == mBlueSize && a == mAlphaSize) {
                    return config;
                }
            }
            return null;
        }
        // Reads a single attribute of config, or defaultValue on failure.
        private int findConfigAttrib(EGL10 egl, EGLDisplay display,
                                     EGLConfig config, int attribute,
                                     int defaultValue) {
            if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
                return mValue[0];
            }
            return defaultValue;
        }
        // Subclasses can adjust these values:
        protected int mRedSize;
        protected int mGreenSize;
        protected int mBlueSize;
        protected int mAlphaSize;
        protected int mDepthSize;
        protected int mStencilSize;
        // Scratch buffer for eglGetConfigAttrib results.
        private int[] mValue = new int[1];
    }

    // Implemented in the native library.
    private native int CreateOpenGLNative(long nativeObject,
                                          int width, int height);
    private native void DrawNative(long nativeObject);
}

View File

@@ -1,50 +1,58 @@
package org.webrtc.videoengine;
import android.content.Context;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
// Pre-refactor version (the "before" side of this diff).
// Factory for the SurfaceViews the video engine renders into.
public class ViERenderer {
private static SurfaceHolder g_localRenderer; // View used for local rendering that Cameras can use for Video Overlay.
// Creates a plain (non-OpenGL) renderer view.
public static SurfaceView CreateRenderer(Context context)
{
return CreateRenderer(context,false);
}
// Creates a renderer view; uses OpenGL ES 2.0 when requested and supported
// by the device, otherwise falls back to a plain SurfaceView.
public static SurfaceView CreateRenderer(Context context, boolean useOpenGLES2)
{
if(useOpenGLES2==true && ViEAndroidGLES20.IsSupported(context))
return new ViEAndroidGLES20(context);
else
return new SurfaceView(context);
}
/*
* Creates a SurfaceView to be used by Android Camera service to display a local preview.
* This needs to be used on Android prior to version 2.1 in order to run the camera.
* Call this function before ViECapture::StartCapture.
* The created view needs to be added to a visible layout after a camera has been allocated (with the call ViECapture::AllocateCaptureDevice).
*
* IE.
* CreateLocalRenderer
* ViECapture::AllocateCaptureDevice
* LinearLayout.addview
* ViECapture::StartCapture
*
*/
public static SurfaceView CreateLocalRenderer(Context context)
{
SurfaceView localRender= new SurfaceView(context);
g_localRenderer=localRender.getHolder();
g_localRenderer.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
return localRender;
}
// Returns the holder created by the last CreateLocalRenderer call,
// or null if none has been created yet.
public static SurfaceHolder GetLocalRenderer()
{
return g_localRenderer;
}
}
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.videoengine;
import android.content.Context;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
// Factory for the SurfaceViews the video engine renders into.
public class ViERenderer {
    // View used for local rendering that Cameras can use for Video Overlay.
    // Null until CreateLocalRenderer has been called.
    private static SurfaceHolder g_localRenderer;

    // Creates a plain (non-OpenGL) renderer view.
    public static SurfaceView CreateRenderer(Context context) {
        return CreateRenderer(context, false);
    }

    // Creates a renderer view; uses OpenGL ES 2.0 when requested and
    // supported by the device, otherwise falls back to a plain SurfaceView.
    public static SurfaceView CreateRenderer(Context context,
                                             boolean useOpenGLES2) {
        // "useOpenGLES2 == true" simplified to the bare boolean.
        if (useOpenGLES2 && ViEAndroidGLES20.IsSupported(context)) {
            return new ViEAndroidGLES20(context);
        }
        return new SurfaceView(context);
    }

    // Creates a SurfaceView to be used by the Android Camera
    // service to display a local preview.
    // This needs to be used on Android prior to version 2.1
    // in order to run the camera.
    // Call this function before ViECapture::StartCapture.
    // The created view needs to be added to a visible layout
    // after a camera has been allocated
    // (with the call ViECapture::AllocateCaptureDevice).
    // IE.
    //   CreateLocalRenderer
    //   ViECapture::AllocateCaptureDevice
    //   LinearLayout.addview
    //   ViECapture::StartCapture
    public static SurfaceView CreateLocalRenderer(Context context) {
        SurfaceView localRender = new SurfaceView(context);
        g_localRenderer = localRender.getHolder();
        g_localRenderer.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        return localRender;
    }

    // Returns the holder created by the last CreateLocalRenderer call,
    // or null if none has been created yet.
    public static SurfaceHolder GetLocalRenderer() {
        return g_localRenderer;
    }
}

View File

@@ -1,165 +1,155 @@
package org.webrtc.videoengine;
import java.nio.ByteBuffer;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.SurfaceHolder.Callback;
// Pre-refactor version (the "before" side of this diff).
// Software renderer: copies pixels from a direct ByteBuffer into a Bitmap
// and blits the bitmap onto a SurfaceView's canvas.
public class ViESurfaceRenderer implements Callback {
private Bitmap _bitmap=null; // the bitmap used for drawing.
// Direct buffer the producer fills with pixel data; allocated as
// width*height*2 bytes, matching the RGB_565 bitmap.
private ByteBuffer _byteBuffer;
private SurfaceHolder _surfaceHolder;
private Rect _srcRect=new Rect(); // Rect of the source bitmap to draw
private Rect _dstRect=new Rect(); // Rect of the destination canvas to draw to
private int _dstHeight=0;
private int _dstWidth=0;
// Fractional [0..1] placement of _dstRect within the surface.
private float _dstTopScale=0;
private float _dstBottomScale=1;
private float _dstLeftScale=0;
private float _dstRightScale=1;
public ViESurfaceRenderer(SurfaceView view)
{
_surfaceHolder=view.getHolder();
if(_surfaceHolder==null)
return;
// Seed the destination rect from the current surface frame, if any.
Canvas canvas=_surfaceHolder.lockCanvas();
if(canvas!=null)
{
Rect dst=_surfaceHolder.getSurfaceFrame();
if(dst!=null)
{
_dstRect=dst;
_dstHeight=_dstRect.bottom-_dstRect.top;
_dstWidth=_dstRect.right-_dstRect.left;
}
_surfaceHolder.unlockCanvasAndPost(canvas);
}
_surfaceHolder.addCallback(this);
}
// SurfaceHolder.Callback: recompute the destination rect for the new size.
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
_dstHeight=height;
_dstWidth=width;
_dstRect.left=(int)(_dstLeftScale*_dstWidth);
_dstRect.top=(int)(_dstTopScale*_dstHeight);
_dstRect.bottom=(int)(_dstBottomScale*_dstHeight);
_dstRect.right=(int) (_dstRightScale*_dstWidth);
}
public void surfaceCreated(SurfaceHolder holder) {
// TODO Auto-generated method stub
}
public void surfaceDestroyed(SurfaceHolder holder) {
// TODO Auto-generated method stub
}
// (Re)creates the RGB_565 drawing bitmap and resets the source rect.
public Bitmap CreateBitmap(int width, int height)
{
if (_bitmap == null)
{
// Best-effort priority bump for smoother display; failures are
// deliberately ignored (rendering works at default priority too).
try {
android.os.Process
.setThreadPriority(android.os.Process.THREAD_PRIORITY_DISPLAY);
} catch (Exception e) {
}
}
_bitmap=Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
_srcRect.left=0;
_srcRect.top=0;
_srcRect.bottom=height;
_srcRect.right=width;
return _bitmap;
}
// (Re)creates the bitmap plus a matching direct byte buffer
// (2 bytes per RGB_565 pixel). Returns null on failure.
public ByteBuffer CreateByteBuffer(int width, int height)
{
if (_bitmap == null)
{
try {
android.os.Process
.setThreadPriority(android.os.Process.THREAD_PRIORITY_DISPLAY);
} catch (Exception e) {
}
}
try {
_bitmap=Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
_byteBuffer=ByteBuffer.allocateDirect(width*height*2);
_srcRect.left=0;
_srcRect.top=0;
_srcRect.bottom=height;
_srcRect.right=width;
}
catch (Exception ex) {
Log.e("*WEBRTC*", "Failed to CreateByteBuffer");
_bitmap=null;
_byteBuffer=null;
}
return _byteBuffer;
}
// Sets the fractional [0..1] placement of the video inside the surface
// and updates the destination rect accordingly.
public void SetCoordinates(
float left,
float top,
float right,
float bottom)
{
_dstLeftScale=left;
_dstTopScale=top;
_dstRightScale=right;
_dstBottomScale=bottom;
_dstRect.left=(int)(_dstLeftScale*_dstWidth);
_dstRect.top=(int)(_dstTopScale*_dstHeight);
_dstRect.bottom=(int)(_dstBottomScale*_dstHeight);
_dstRect.right=(int) (_dstRightScale*_dstWidth);
}
// Copies the byte buffer into the bitmap and draws it.
// No-op until CreateByteBuffer has succeeded.
public void DrawByteBuffer()
{
if(_byteBuffer==null)
return;
_byteBuffer.rewind();
_bitmap.copyPixelsFromBuffer(_byteBuffer);
DrawBitmap();
}
// Blits the bitmap (_srcRect) onto the surface canvas (_dstRect).
public void DrawBitmap()
{
if(_bitmap==null)
return;
Canvas canvas=_surfaceHolder.lockCanvas();
if(canvas!=null)
{
canvas.drawBitmap(_bitmap, _srcRect, _dstRect, null);
_surfaceHolder.unlockCanvasAndPost(canvas);
}
}
}
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.videoengine;
import java.nio.ByteBuffer;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.SurfaceHolder.Callback;
// Software renderer: copies pixels from a direct ByteBuffer into a Bitmap
// and blits the bitmap onto a SurfaceView's canvas.
public class ViESurfaceRenderer implements Callback {
    // the bitmap used for drawing.
    private Bitmap bitmap = null;
    // Direct buffer the producer fills with pixel data; allocated as
    // width*height*2 bytes, matching the RGB_565 bitmap.
    private ByteBuffer byteBuffer;
    private SurfaceHolder surfaceHolder;
    // Rect of the source bitmap to draw
    private Rect srcRect = new Rect();
    // Rect of the destination canvas to draw to
    private Rect dstRect = new Rect();
    private int dstHeight = 0;
    private int dstWidth = 0;
    // Fractional [0..1] placement of dstRect within the surface.
    private float dstTopScale = 0;
    private float dstBottomScale = 1;
    private float dstLeftScale = 0;
    private float dstRightScale = 1;

    public ViESurfaceRenderer(SurfaceView view) {
        surfaceHolder = view.getHolder();
        if (surfaceHolder == null) {
            return;
        }
        // Seed the destination rect from the current surface frame, if any.
        Canvas canvas = surfaceHolder.lockCanvas();
        if (canvas != null) {
            Rect dst = surfaceHolder.getSurfaceFrame();
            if (dst != null) {
                dstRect = dst;
                dstHeight = dstRect.bottom - dstRect.top;
                dstWidth = dstRect.right - dstRect.left;
            }
            surfaceHolder.unlockCanvasAndPost(canvas);
        }
        surfaceHolder.addCallback(this);
    }

    // SurfaceHolder.Callback: recompute the destination rect for the new size.
    public void surfaceChanged(SurfaceHolder holder, int format,
                               int in_width, int in_height) {
        dstHeight = in_height;
        dstWidth = in_width;
        updateDstRect();
    }

    public void surfaceCreated(SurfaceHolder holder) {
        // TODO(leozwang) Auto-generated method stub
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        // TODO(leozwang) Auto-generated method stub
    }

    // Recomputes dstRect from the current scale factors and surface size.
    private void updateDstRect() {
        dstRect.left = (int)(dstLeftScale * dstWidth);
        dstRect.top = (int)(dstTopScale * dstHeight);
        dstRect.bottom = (int)(dstBottomScale * dstHeight);
        dstRect.right = (int)(dstRightScale * dstWidth);
    }

    // Points srcRect at the full width x height source area.
    private void setSrcRect(int width, int height) {
        srcRect.left = 0;
        srcRect.top = 0;
        srcRect.bottom = height;
        srcRect.right = width;
    }

    // Best-effort priority bump for smoother display; failures are
    // deliberately ignored (rendering works at default priority too).
    private static void raiseThreadPriority() {
        try {
            android.os.Process.setThreadPriority(
                android.os.Process.THREAD_PRIORITY_DISPLAY);
        }
        catch (Exception ignored) {
        }
    }

    // (Re)creates the RGB_565 drawing bitmap and resets the source rect.
    public Bitmap CreateBitmap(int width, int height) {
        if (bitmap == null) {
            raiseThreadPriority();
        }
        bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
        setSrcRect(width, height);
        return bitmap;
    }

    // (Re)creates the bitmap plus a matching direct byte buffer
    // (2 bytes per RGB_565 pixel). Returns null on failure.
    public ByteBuffer CreateByteBuffer(int width, int height) {
        if (bitmap == null) {
            raiseThreadPriority();
        }
        try {
            bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
            byteBuffer = ByteBuffer.allocateDirect(width * height * 2);
            setSrcRect(width, height);
        }
        catch (Exception ex) {
            Log.e("*WEBRTC*", "Failed to CreateByteBuffer");
            bitmap = null;
            byteBuffer = null;
        }
        return byteBuffer;
    }

    // Sets the fractional [0..1] placement of the video inside the surface
    // and updates the destination rect accordingly.
    public void SetCoordinates(float left, float top,
                               float right, float bottom) {
        dstLeftScale = left;
        dstTopScale = top;
        dstRightScale = right;
        dstBottomScale = bottom;
        updateDstRect();
    }

    // Copies the byte buffer into the bitmap and draws it.
    // No-op until CreateByteBuffer has succeeded.
    public void DrawByteBuffer() {
        if (byteBuffer == null) {
            return;
        }
        byteBuffer.rewind();
        bitmap.copyPixelsFromBuffer(byteBuffer);
        DrawBitmap();
    }

    // Blits the bitmap (srcRect) onto the surface canvas (dstRect).
    public void DrawBitmap() {
        if (bitmap == null) {
            return;
        }
        Canvas canvas = surfaceHolder.lockCanvas();
        if (canvas != null) {
            canvas.drawBitmap(bitmap, srcRect, dstRect, null);
            surfaceHolder.unlockCanvasAndPost(canvas);
        }
    }
}