git-svn-id: http://webrtc.googlecode.com/svn/trunk@4 4adac7df-926f-26a2-2b94-8c16560cd09d

niklase@google.com
2011-05-30 11:22:19 +00:00
parent 01813fe945
commit 77ae29bc81
1153 changed files with 404089 additions and 0 deletions

ViEAndroidGLES20.java
View File

@@ -0,0 +1,262 @@
package org.webrtc.videoengine;
import java.util.concurrent.locks.ReentrantLock;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.opengles.GL10;
import android.app.ActivityManager;
import android.content.Context;
import android.content.pm.ConfigurationInfo;
import android.opengl.GLSurfaceView;
import android.util.Log;
public class ViEAndroidGLES20 extends GLSurfaceView
implements GLSurfaceView.Renderer
{
private boolean _surfaceCreated=false; // True once the surface exists (set in onSurfaceChanged).
private boolean _openGLCreated=false;
private boolean _nativeFunctionsRegistered=false; // True if RegisterNativeObject has been called.
private ReentrantLock _nativeFunctionLock = new ReentrantLock();
private long _nativeObject=0; // Address of Native object that will do the drawing.
private int _viewWidth=0;
private int _viewHeight=0;
public static boolean UseOpenGL2(Object renderWindow)
{
return ViEAndroidGLES20.class.isInstance(renderWindow);
}
public ViEAndroidGLES20(Context context) {
super(context);
/* Setup the context factory for 2.0 rendering.
* See ContextFactory class definition below
*/
setEGLContextFactory(new ContextFactory());
/* We need to choose an EGLConfig that matches the format of
* our surface exactly. This is going to be done in our
* custom config chooser. See ConfigChooser class definition
* below.
*/
setEGLConfigChooser( new ConfigChooser(5, 6, 5, 0, 0, 0) ); // Use RGB 565 without an alpha channel.
this.setRenderer(this);
this.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
/* IsSupported
* Returns true if this device supports OpenGL ES 2.0 rendering.
*/
public static boolean IsSupported(Context context)
{
ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
ConfigurationInfo info = am.getDeviceConfigurationInfo();
return info.reqGlEsVersion >= 0x20000; // True if OpenGL ES 2.0 or later is supported.
}
public void onDrawFrame(GL10 gl) {
_nativeFunctionLock.lock();
if(!_nativeFunctionsRegistered || !_surfaceCreated)
{
_nativeFunctionLock.unlock();
return;
}
if(!_openGLCreated)
{
if(0!=CreateOpenGLNative(_nativeObject,_viewWidth,_viewHeight))
{
_nativeFunctionLock.unlock(); // Unlock before bailing out; otherwise the lock is never released.
return; // Failed to create OpenGL.
}
_openGLCreated=true; // Created OpenGL successfully.
}
DrawNative(_nativeObject); // Draw the new frame
_nativeFunctionLock.unlock();
}
public void onSurfaceChanged(GL10 gl, int width, int height) {
_surfaceCreated=true;
_viewWidth=width;
_viewHeight=height;
_nativeFunctionLock.lock();
if(_nativeFunctionsRegistered)
{
if(CreateOpenGLNative(_nativeObject,width,height)==0)
_openGLCreated=true;
}
_nativeFunctionLock.unlock();
}
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
}
public void RegisterNativeObject(long nativeObject)
{
_nativeFunctionLock.lock();
_nativeObject=nativeObject;
_nativeFunctionsRegistered=true;
_nativeFunctionLock.unlock();
}
public void DeRegisterNativeObject()
{
_nativeFunctionLock.lock();
_nativeFunctionsRegistered=false;
_openGLCreated=false;
_nativeObject=0;
_nativeFunctionLock.unlock();
}
public void ReDraw()
{
if(_surfaceCreated)
this.requestRender(); // Request the renderer to redraw using the render thread context.
}
/*
* EGL context factory used for creating an EGL ES 2.0 context on Android 2.1
* and later (2.2 and later offer simpler ways).
* Code is from the NDK sample hello-gl2 (com.android.gl2jni).
*/
private static class ContextFactory implements GLSurfaceView.EGLContextFactory {
private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
//checkEglError("Before eglCreateContext", egl);
int[] attrib_list = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE }; // Create an OpenGL ES 2.0 context.
EGLContext context = egl.eglCreateContext(display, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list);
checkEglError("ContextFactory eglCreateContext", egl);
return context;
}
public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
egl.eglDestroyContext(display, context);
}
}
private static void checkEglError(String prompt, EGL10 egl) {
int error;
while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
Log.e("*WEBRTC*", String.format("%s: EGL error: 0x%x", prompt, error));
}
}
/* Code is from the NDK sample hello-gl2 (com.android.gl2jni). */
private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser {
public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
mRedSize = r;
mGreenSize = g;
mBlueSize = b;
mAlphaSize = a;
mDepthSize = depth;
mStencilSize = stencil;
}
/* This EGL config specification is used to specify 2.0 rendering.
* We use a minimum size of 4 bits for red/green/blue, but will
* perform actual matching in chooseConfig() below.
*/
private static final int EGL_OPENGL_ES2_BIT = 4;
private static final int[] s_configAttribs2 =
{
EGL10.EGL_RED_SIZE, 4,
EGL10.EGL_GREEN_SIZE, 4,
EGL10.EGL_BLUE_SIZE, 4,
EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL10.EGL_NONE
};
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
/* Get the number of minimally matching EGL configurations
*/
int[] num_config = new int[1];
egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
int numConfigs = num_config[0];
if (numConfigs <= 0) {
throw new IllegalArgumentException("No configs match configSpec");
}
/* Allocate then read the array of minimally matching EGL configs
*/
EGLConfig[] configs = new EGLConfig[numConfigs];
egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config);
/* Now return the "best" one
*/
return chooseConfig(egl, display, configs);
}
public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
EGLConfig[] configs) {
for(EGLConfig config : configs) {
int d = findConfigAttrib(egl, display, config,
EGL10.EGL_DEPTH_SIZE, 0);
int s = findConfigAttrib(egl, display, config,
EGL10.EGL_STENCIL_SIZE, 0);
// We need at least mDepthSize and mStencilSize bits
if (d < mDepthSize || s < mStencilSize)
continue;
// We want an *exact* match for red/green/blue/alpha
int r = findConfigAttrib(egl, display, config,
EGL10.EGL_RED_SIZE, 0);
int g = findConfigAttrib(egl, display, config,
EGL10.EGL_GREEN_SIZE, 0);
int b = findConfigAttrib(egl, display, config,
EGL10.EGL_BLUE_SIZE, 0);
int a = findConfigAttrib(egl, display, config,
EGL10.EGL_ALPHA_SIZE, 0);
if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize)
return config;
}
return null;
}
private int findConfigAttrib(EGL10 egl, EGLDisplay display,
EGLConfig config, int attribute, int defaultValue) {
if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
return mValue[0];
}
return defaultValue;
}
// Subclasses can adjust these values:
protected int mRedSize;
protected int mGreenSize;
protected int mBlueSize;
protected int mAlphaSize;
protected int mDepthSize;
protected int mStencilSize;
private int[] mValue = new int[1];
}
private native int CreateOpenGLNative(long nativeObject,int width, int height);
private native void DrawNative(long nativeObject);
}
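The view stays idle until the native renderer registers itself; only then does onDrawFrame create the native GL state and draw. A minimal sketch of the expected call order as driven by the embedding code (nativeRendererAddress is a hypothetical jlong obtained from the C++ renderer object):

ViEAndroidGLES20 view = new ViEAndroidGLES20(context);
// ... add the view to a visible layout ...
view.RegisterNativeObject(nativeRendererAddress); // hypothetical address from C++
view.ReDraw(); // schedules onDrawFrame on the GL thread (RENDERMODE_WHEN_DIRTY)
// ... the native side calls ReDraw() whenever a new frame is ready ...
view.DeRegisterNativeObject(); // before the native object is destroyed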

ViERenderer.java
View File

@@ -0,0 +1,50 @@
package org.webrtc.videoengine;
import android.content.Context;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class ViERenderer {
private static SurfaceHolder g_localRenderer; // Holder of the view used for local rendering; the Android camera service can use it for video overlay.
public static SurfaceView CreateRenderer(Context context)
{
return CreateRenderer(context,false);
}
public static SurfaceView CreateRenderer(Context context, boolean useOpenGLES2)
{
if(useOpenGLES2 && ViEAndroidGLES20.IsSupported(context))
return new ViEAndroidGLES20(context);
else
return new SurfaceView(context);
}
/*
* Creates a SurfaceView to be used by the Android Camera service to display a local preview.
* This is required on Android versions prior to 2.1 in order to run the camera.
* Call this function before ViECapture::StartCapture.
* The created view needs to be added to a visible layout after a camera has been allocated
* (with the call ViECapture::AllocateCaptureDevice), i.e.:
*
* CreateLocalRenderer
* ViECapture::AllocateCaptureDevice
* LinearLayout.addView
* ViECapture::StartCapture
*
* (A usage sketch follows this class.)
*/
public static SurfaceView CreateLocalRenderer(Context context)
{
SurfaceView localRender= new SurfaceView(context);
g_localRenderer=localRender.getHolder();
g_localRenderer.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
return localRender;
}
public static SurfaceHolder GetLocalRenderer()
{
return g_localRenderer;
}
}
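A minimal sketch of the ordering described in the comment above; the ViECapture calls live in native code and are shown as comments only (context and layout are assumed to exist):

SurfaceView localView = ViERenderer.CreateLocalRenderer(context);
// native side: ViECapture::AllocateCaptureDevice(...)
layout.addView(localView); // the preview must sit in a visible layout
// native side: ViECapture::StartCapture(...)
SurfaceView remoteView = ViERenderer.CreateRenderer(context, true); // GLES2 view when supported
layout.addView(remoteView);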

ViESurfaceRenderer.java
View File

@@ -0,0 +1,165 @@
package org.webrtc.videoengine;
import java.nio.ByteBuffer;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.SurfaceHolder.Callback;
public class ViESurfaceRenderer implements Callback {
private Bitmap _bitmap=null; // the bitmap used for drawing.
private ByteBuffer _byteBuffer;
private SurfaceHolder _surfaceHolder;
private Rect _srcRect=new Rect(); // Rect of the source bitmap to draw
private Rect _dstRect=new Rect(); // Rect of the destination canvas to draw to
private int _dstHeight=0;
private int _dstWidth=0;
private float _dstTopScale=0;
private float _dstBottomScale=1;
private float _dstLeftScale=0;
private float _dstRightScale=1;
public ViESurfaceRenderer(SurfaceView view)
{
_surfaceHolder=view.getHolder();
if(_surfaceHolder==null)
return;
Canvas canvas=_surfaceHolder.lockCanvas();
if(canvas!=null)
{
Rect dst=_surfaceHolder.getSurfaceFrame();
if(dst!=null)
{
_dstRect=dst;
_dstHeight=_dstRect.bottom-_dstRect.top;
_dstWidth=_dstRect.right-_dstRect.left;
}
_surfaceHolder.unlockCanvasAndPost(canvas);
}
_surfaceHolder.addCallback(this);
}
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
_dstHeight=height;
_dstWidth=width;
_dstRect.left=(int)(_dstLeftScale*_dstWidth);
_dstRect.top=(int)(_dstTopScale*_dstHeight);
_dstRect.bottom=(int)(_dstBottomScale*_dstHeight);
_dstRect.right=(int) (_dstRightScale*_dstWidth);
}
public void surfaceCreated(SurfaceHolder holder) {
// TODO Auto-generated method stub
}
public void surfaceDestroyed(SurfaceHolder holder) {
// TODO Auto-generated method stub
}
public Bitmap CreateBitmap(int width, int height)
{
if (_bitmap == null) // First call: try to raise the thread priority for smoother rendering.
{
try {
android.os.Process
.setThreadPriority(android.os.Process.THREAD_PRIORITY_DISPLAY);
} catch (Exception e) {
// Ignore; rendering also works at the default priority.
}
}
_bitmap=Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
_srcRect.left=0;
_srcRect.top=0;
_srcRect.bottom=height;
_srcRect.right=width;
return _bitmap;
}
public ByteBuffer CreateByteBuffer(int width, int height)
{
if (_bitmap == null)
{
try {
android.os.Process
.setThreadPriority(android.os.Process.THREAD_PRIORITY_DISPLAY);
} catch (Exception e) {
}
}
try {
_bitmap=Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
_byteBuffer=ByteBuffer.allocateDirect(width*height*2);
_srcRect.left=0;
_srcRect.top=0;
_srcRect.bottom=height;
_srcRect.right=width;
}
catch (Exception ex) {
Log.e("*WEBRTC*", "Failed to CreateByteBuffer");
_bitmap=null;
_byteBuffer=null;
}
return _byteBuffer;
}
public void SetCoordinates(
float left,
float top,
float right,
float bottom)
{
_dstLeftScale=left;
_dstTopScale=top;
_dstRightScale=right;
_dstBottomScale=bottom;
_dstRect.left=(int)(_dstLeftScale*_dstWidth);
_dstRect.top=(int)(_dstTopScale*_dstHeight);
_dstRect.bottom=(int)(_dstBottomScale*_dstHeight);
_dstRect.right=(int) (_dstRightScale*_dstWidth);
}
public void DrawByteBuffer()
{
if(_byteBuffer==null)
return;
_byteBuffer.rewind();
_bitmap.copyPixelsFromBuffer(_byteBuffer);
DrawBitmap();
}
public void DrawBitmap()
{
if(_bitmap==null)
return;
Canvas canvas=_surfaceHolder.lockCanvas();
if(canvas!=null)
{
canvas.drawBitmap(_bitmap, _srcRect, _dstRect, null);
_surfaceHolder.unlockCanvasAndPost(canvas);
}
}
}
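The native side drives this class over JNI; a sketch of that sequence written as plain Java for illustration (surfaceView, width and height are assumed to exist):

ViESurfaceRenderer renderer = new ViESurfaceRenderer(surfaceView);
renderer.SetCoordinates(0, 0, 1, 1); // fill the whole surface
ByteBuffer buffer = renderer.CreateByteBuffer(width, height); // RGB565, 2 bytes per pixel
// native code converts the I420 frame into the direct buffer, then:
renderer.DrawByteBuffer(); // copies the buffer into the bitmap and blits it to the canvas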

video_render_android_impl.cc
View File

@@ -0,0 +1,404 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video_render_android_impl.h"
#include "critical_section_wrapper.h"
#include "event_wrapper.h"
#include "thread_wrapper.h"
#include "tick_util.h"
#ifdef ANDROID_LOG
#include <stdio.h>
#include <android/log.h>
#undef WEBRTC_TRACE
#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
#else
#include "trace.h"
#endif
namespace webrtc {
JavaVM* VideoRenderAndroid::g_jvm = NULL;
WebRtc_Word32 VideoRenderAndroid::SetAndroidEnvVariables(void* javaVM)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1, "%s", __FUNCTION__);
g_jvm = (JavaVM*) javaVM;
return 0;
}
VideoRenderAndroid::VideoRenderAndroid(
const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool /*fullscreen*/):
_id(id),
_critSect(*CriticalSectionWrapper::CreateCriticalSection()),
_renderType(videoRenderType),
_ptrWindow((jobject)(window)),
_streamsMap(),
_javaShutDownFlag(false),
_javaShutdownEvent(*EventWrapper::Create()),
_javaRenderEvent(*EventWrapper::Create()),
_lastJavaRenderEvent(0),
_javaRenderJniEnv(NULL),
_javaRenderThread(NULL)
{
}
VideoRenderAndroid::~VideoRenderAndroid()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"VideoRenderAndroid dtor");
if (_javaRenderThread)
StopRender();
for (MapItem* item = _streamsMap.First(); item != NULL; item
= _streamsMap.Next(item))
{ // Delete streams
delete static_cast<AndroidStream*> (item->GetItem());
}
delete &_javaShutdownEvent;
delete &_javaRenderEvent;
delete &_critSect;
}
WebRtc_Word32 VideoRenderAndroid::ChangeUniqueId(const WebRtc_Word32 id)
{
CriticalSectionScoped cs(_critSect);
_id = id;
return 0;
}
WebRtc_Word32 VideoRenderAndroid::ChangeWindow(void* /*window*/)
{
return -1;
}
VideoRenderCallback*
VideoRenderAndroid::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
const float left, const float top,
const float right,
const float bottom)
{
WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(_critSect);
AndroidStream* renderStream = NULL;
MapItem* item = _streamsMap.Find(streamId);
if (item)
{
renderStream = (AndroidStream*) (item->GetItem());
if (NULL != renderStream)
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1,
"%s: Render stream already exists", __FUNCTION__);
return renderStream;
}
}
renderStream = CreateAndroidRenderChannel(streamId, zOrder, left, top,
right, bottom, *this);
if (renderStream)
{
_streamsMap.Insert(streamId, renderStream);
}
else
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
return NULL;
}
return renderStream;
}
WebRtc_Word32 VideoRenderAndroid::DeleteIncomingRenderStream(
const WebRtc_UWord32 streamId)
{
WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(_critSect);
MapItem* item = _streamsMap.Find(streamId);
if (item)
{
delete (AndroidStream*) item->GetItem();
_streamsMap.Erase(streamId);
}
else
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
return -1;
}
return 0;
}
WebRtc_Word32 VideoRenderAndroid::GetIncomingRenderStreamProperties(
const WebRtc_UWord32 streamId,
WebRtc_UWord32& zOrder,
float& left,
float& top,
float& right,
float& bottom) const
{
WEBRTC_TRACE(
kTraceModuleCall,
kTraceVideoRenderer,
_id,
"%s: streamId - %d zOrder - %d left - %f top - %f right - %f bottom - %f",
__FUNCTION__, streamId, zOrder, left, top, right, bottom);
return -1;
}
WebRtc_Word32 VideoRenderAndroid::StartRender()
{
CriticalSectionScoped cs(_critSect);
if (_javaRenderThread)
{
// StartRender is called when this stream should start rendering.
// However, StopRender is not called when the streams stop rendering,
// so the thread is only deleted when the renderer is removed.
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s, Render thread already exist", __FUNCTION__);
return 0;
}
_javaRenderThread = ThreadWrapper::CreateThread(JavaRenderThreadFun, this,
kRealtimePriority,
"AndroidRenderThread");
if (!_javaRenderThread)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No thread", __FUNCTION__);
return -1;
}
unsigned int tId = 0;
if (_javaRenderThread->Start(tId))
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"%s: thread started: %u", __FUNCTION__, tId);
}
else
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not start send thread", __FUNCTION__);
return -1;
}
return 0;
}
WebRtc_Word32 VideoRenderAndroid::StopRender()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:", __FUNCTION__);
{
CriticalSectionScoped cs(_critSect);
if (!_javaRenderThread)
{
WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id,
"%s, no renderer", __FUNCTION__);
return -1;
}
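// Ask the render thread to shut down and wake it. The wait below happens
// outside the critical section so the thread can run, detach from the
// JVM and signal _javaShutdownEvent.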
_javaShutDownFlag = true;
_javaRenderEvent.Set();
}
_javaShutdownEvent.Wait(3000);
CriticalSectionScoped cs(_critSect);
_javaRenderThread->SetNotAlive();
if (_javaRenderThread->Stop())
{
delete _javaRenderThread;
_javaRenderThread = NULL;
}
else
{
assert(false);
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"%s: Not able to stop thread, leaking", __FUNCTION__);
_javaRenderThread = NULL;
}
return 0;
}
void VideoRenderAndroid::ReDraw()
{
CriticalSectionScoped cs(_critSect);
if (_lastJavaRenderEvent < TickTime::MillisecondTimestamp() - 20) // Allow a redraw if more than 20 ms have passed since the last one.
{
_lastJavaRenderEvent = TickTime::MillisecondTimestamp();
_javaRenderEvent.Set();
}
}
bool VideoRenderAndroid::JavaRenderThreadFun(void* obj)
{
return static_cast<VideoRenderAndroid*> (obj)->JavaRenderThreadProcess();
}
bool VideoRenderAndroid::JavaRenderThreadProcess()
{
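// One pass of the render thread: wait up to 1 s for a redraw request,
// lazily attach this thread to the JVM, deliver the latest frame of every
// stream to Java, and detach and exit when shutdown has been requested.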
_javaRenderEvent.Wait(1000);
CriticalSectionScoped cs(_critSect);
if (!_javaRenderJniEnv)
{
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&_javaRenderJniEnv, NULL);
// Get the JNI env for this thread
if ((res < 0) || !_javaRenderJniEnv)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, _javaRenderJniEnv);
return false;
}
}
for (MapItem* item = _streamsMap.First(); item != NULL; item
= _streamsMap.Next(item))
{
static_cast<AndroidStream*> (item->GetItem())->DeliverFrame(
_javaRenderJniEnv);
}
if (_javaShutDownFlag)
{
if (g_jvm->DetachCurrentThread() < 0)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
else
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"%s: Java thread detached", __FUNCTION__);
}
_javaRenderJniEnv = NULL;
_javaShutDownFlag = false;
_javaShutdownEvent.Set();
return false; // Do not run this thread again.
}
return true;
}
VideoRenderType VideoRenderAndroid::RenderType()
{
return _renderType;
}
RawVideoType VideoRenderAndroid::PerferedVideoType()
{
return kVideoI420;
}
bool VideoRenderAndroid::FullScreen()
{
return false;
}
WebRtc_Word32 VideoRenderAndroid::GetGraphicsMemory(
WebRtc_UWord64& /*totalGraphicsMemory*/,
WebRtc_UWord64& /*availableGraphicsMemory*/) const
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderAndroid::GetScreenResolution(
WebRtc_UWord32& /*screenWidth*/,
WebRtc_UWord32& /*screenHeight*/) const
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
WebRtc_UWord32 VideoRenderAndroid::RenderFrameRate(const WebRtc_UWord32 /*streamId*/)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderAndroid::SetStreamCropping(
const WebRtc_UWord32 /*streamId*/,
const float /*left*/,
const float /*top*/,
const float /*right*/,
const float /*bottom*/)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderAndroid::SetTransparentBackground(const bool enable)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderAndroid::ConfigureRenderer(
const WebRtc_UWord32 streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderAndroid::SetText(
const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
const float left, const float top,
const float right, const float bottom)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderAndroid::SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey,
const float left, const float top,
const float right,
const float bottom)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
} //namespace webrtc

video_render_android_impl.h
View File

@@ -0,0 +1,162 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
#include <jni.h>
#include "i_video_render.h"
#include "map_wrapper.h"
namespace webrtc {
//#define ANDROID_LOG
class CriticalSectionWrapper;
class EventWrapper;
class ThreadWrapper;
// The object a module user uses to send new frames to the Java renderer.
// Base class for Android render streams.
class AndroidStream: public VideoRenderCallback
{
public:
/*
* DeliverFrame is called from a thread connected to the Java VM.
* Used for Delivering frame for rendering.
*/
virtual void DeliverFrame(JNIEnv* jniEnv)=0;
virtual ~AndroidStream()
{
};
};
class VideoRenderAndroid: IVideoRender
{
public:
static WebRtc_Word32 SetAndroidEnvVariables(void* javaVM);
VideoRenderAndroid(const WebRtc_Word32 id,
const VideoRenderType videoRenderType, void* window,
const bool fullscreen);
virtual ~VideoRenderAndroid();
virtual WebRtc_Word32 Init()=0;
virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
virtual WebRtc_Word32 ChangeWindow(void* window);
virtual VideoRenderCallback
* AddIncomingRenderStream(const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
const float left, const float top,
const float right, const float bottom);
virtual WebRtc_Word32
DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
virtual WebRtc_Word32
GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
WebRtc_UWord32& zOrder,
float& left, float& top,
float& right, float& bottom) const;
virtual WebRtc_Word32 StartRender();
virtual WebRtc_Word32 StopRender();
virtual void ReDraw();
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual VideoRenderType RenderType();
virtual RawVideoType PerferedVideoType();
virtual bool FullScreen();
virtual WebRtc_Word32
GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
WebRtc_UWord64& availableGraphicsMemory) const;
virtual WebRtc_Word32
GetScreenResolution(WebRtc_UWord32& screenWidth,
WebRtc_UWord32& screenHeight) const;
virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
const float left, const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
const unsigned int zOrder,
const float left, const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
const float left, const float top,
const float right, const float bottom);
virtual WebRtc_Word32 SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey, const float left,
const float top, const float right,
const float bottom);
protected:
virtual AndroidStream
* CreateAndroidRenderChannel(WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
const float left, const float top,
const float right, const float bottom,
VideoRenderAndroid& renderer) = 0;
WebRtc_Word32 _id;
CriticalSectionWrapper& _critSect;
VideoRenderType _renderType;
jobject _ptrWindow;
static JavaVM* g_jvm;
private:
static bool JavaRenderThreadFun(void* obj);
bool JavaRenderThreadProcess();
MapWrapper _streamsMap; // Map with streams to render.
bool _javaShutDownFlag; // True if the _javaRenderThread thread shall be detached from the JVM.
EventWrapper& _javaShutdownEvent;
EventWrapper& _javaRenderEvent;
WebRtc_Word64 _lastJavaRenderEvent; // Timestamp (ms) of the last render event, used to throttle ReDraw.
JNIEnv* _javaRenderJniEnv; // JNIEnv for the java render thread.
ThreadWrapper* _javaRenderThread;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_

View File

@@ -0,0 +1,496 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video_render_android_native_opengl2.h"
#include "critical_section_wrapper.h"
#include "vplib.h"
#include "tick_util.h"
#ifdef ANDROID_LOG
#include <stdio.h>
#include <android/log.h>
#undef WEBRTC_TRACE
#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
#else
#include "trace.h"
#endif
namespace webrtc {
AndroidNativeOpenGl2Renderer::AndroidNativeOpenGl2Renderer(
const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen) :
VideoRenderAndroid(id, videoRenderType, window, fullscreen),
_javaRenderObj(NULL),
_javaRenderClass(NULL)
{
}
bool AndroidNativeOpenGl2Renderer::UseOpenGL2(void* window)
{
if (!g_jvm)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"RendererAndroid():UseOpenGL No JVM set.");
return false;
}
bool isAttached = false;
JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
{
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env)
{
WEBRTC_TRACE(
kTraceError,
kTraceVideoRenderer,
-1,
"RendererAndroid(): Could not attach thread to JVM (%d, %p)",
res, env);
return false;
}
isAttached = true;
}
// get the renderer class
jclass javaRenderClassLocal =
env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
if (!javaRenderClassLocal)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"%s: could not find ViEAndroidRenderer class",
__FUNCTION__);
return false;
}
// get the method ID for UseOpenGL
jmethodID cidUseOpenGL = env->GetStaticMethodID(javaRenderClassLocal,
"UseOpenGL2",
"(Ljava/lang/Object;)Z");
if (cidUseOpenGL == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"%s: could not get UseOpenGL ID", __FUNCTION__);
return false;
}
jboolean res = env->CallStaticBooleanMethod(javaRenderClassLocal,
cidUseOpenGL, (jobject) window);
// Detach this thread if it was attached
if (isAttached)
{
if (g_jvm->DetachCurrentThread() < 0)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
return res;
}
AndroidNativeOpenGl2Renderer::~AndroidNativeOpenGl2Renderer()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"AndroidNativeOpenGl2Renderer dtor");
if (g_jvm)
{
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
{
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
env = NULL;
}
else
{
isAttached = true;
}
}
if (env)
{
env->DeleteGlobalRef(_javaRenderObj);
env->DeleteGlobalRef(_javaRenderClass);
}
if (isAttached)
{
if (g_jvm->DetachCurrentThread() < 0)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"%s: Could not detach thread from JVM",
__FUNCTION__);
}
}
}
}
WebRtc_Word32 AndroidNativeOpenGl2Renderer::Init()
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
if (!g_jvm)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"(%s): Not a valid Java VM pointer.", __FUNCTION__);
return -1;
}
if (!_ptrWindow)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"(%s): No window have been provided.", __FUNCTION__);
return -1;
}
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
{
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
return -1;
}
isAttached = true;
}
// get the ViEAndroidGLES20 class
jclass javaRenderClassLocal =
env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
if (!javaRenderClassLocal)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not find ViEAndroidGLES20", __FUNCTION__);
return -1;
}
// create a global reference to the class (to tell JNI that we are referencing it after this function has returned)
_javaRenderClass
= reinterpret_cast<jclass> (env->NewGlobalRef(javaRenderClassLocal));
if (!_javaRenderClass)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not create Java SurfaceHolder class reference",
__FUNCTION__);
return -1;
}
// Delete local class ref, we only use the global ref
env->DeleteLocalRef(javaRenderClassLocal);
// create a reference to the object (to tell JNI that we are referencing it
// after this function has returned)
_javaRenderObj = env->NewGlobalRef(_ptrWindow);
if (!_javaRenderObj)
{
WEBRTC_TRACE(
kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not create Java SurfaceRender object reference",
__FUNCTION__);
return -1;
}
// Detach this thread if it was attached
if (isAttached)
{
if (g_jvm->DetachCurrentThread() < 0)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done",
__FUNCTION__);
return 0;
}
AndroidStream*
AndroidNativeOpenGl2Renderer::CreateAndroidRenderChannel(
WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d",
__FUNCTION__, streamId);
AndroidNativeOpenGl2Channel* stream =
new AndroidNativeOpenGl2Channel(streamId, g_jvm, renderer,
_javaRenderObj);
if (stream && stream->Init(zOrder, left, top, right, bottom) == 0)
return stream;
else
{
delete stream;
}
return NULL;
}
AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel(WebRtc_UWord32 streamId,
JavaVM* jvm,
VideoRenderAndroid& renderer,jobject javaRenderObj):
_id(streamId),
_renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
_renderer(renderer), _jvm(jvm), _javaRenderObj(javaRenderObj),
_registerNativeCID(NULL), _deRegisterNativeCID(NULL),
_openGLRenderer(streamId)
{
}
AndroidNativeOpenGl2Channel::~AndroidNativeOpenGl2Channel()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"AndroidNativeOpenGl2Channel dtor");
delete &_renderCritSect;
if (_jvm)
{
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
{
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = _jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
env = NULL;
}
else
{
isAttached = true;
}
}
if (env && _deRegisterNativeCID)
{
env->CallVoidMethod(_javaRenderObj, _deRegisterNativeCID);
}
if (isAttached)
{
if (_jvm->DetachCurrentThread() < 0)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"%s: Could not detach thread from JVM",
__FUNCTION__);
}
}
}
}
WebRtc_Word32 AndroidNativeOpenGl2Channel::Init(WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s: AndroidNativeOpenGl2Channel", __FUNCTION__);
if (!_jvm)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Not a valid Java VM pointer", __FUNCTION__);
return -1;
}
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
{
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = _jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__, res, env);
return -1;
}
isAttached = true;
}
jclass javaRenderClass =
env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
if (!javaRenderClass)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not find ViESurfaceRenderer", __FUNCTION__);
return -1;
}
// get the method ID for the ReDraw function
_redrawCid = env->GetMethodID(javaRenderClass, "ReDraw", "()V");
if (_redrawCid == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not get ReDraw ID", __FUNCTION__);
return -1;
}
_registerNativeCID = env->GetMethodID(javaRenderClass,
"RegisterNativeObject", "(J)V");
if (_registerNativeCID == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not get RegisterNativeObject ID", __FUNCTION__);
return -1;
}
_deRegisterNativeCID = env->GetMethodID(javaRenderClass,
"DeRegisterNativeObject", "()V");
if (_deRegisterNativeCID == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: could not get DeRegisterNativeObject ID",
__FUNCTION__);
return -1;
}
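// Bind the Java native methods DrawNative and CreateOpenGLNative to their
// C++ implementations. The jlong argument carries the address of this
// channel object, as passed to Java via RegisterNativeObject.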
JNINativeMethod nativeFunctions[2] = {
{ "DrawNative",
"(J)V",
(void*) &AndroidNativeOpenGl2Channel::DrawNativeStatic },
{ "CreateOpenGLNative",
"(JII)I",
(void*) &AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic } };
if (env->RegisterNatives(javaRenderClass, nativeFunctions, 2) == 0)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1,
"%s: Registered native functions", __FUNCTION__);
}
else
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"%s: Failed to register native functions", __FUNCTION__);
return -1;
}
env->CallVoidMethod(_javaRenderObj, _registerNativeCID, (jlong) this);
// Detach this thread if it was attached
if (isAttached)
{
if (_jvm->DetachCurrentThread() < 0)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
if (_openGLRenderer.SetCoordinates(zOrder, left, top, right, bottom) != 0)
{
return -1;
}
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s: AndroidNativeOpenGl2Channel done", __FUNCTION__);
return 0;
}
WebRtc_Word32 AndroidNativeOpenGl2Channel::RenderFrame(const WebRtc_UWord32 /*streamId*/,
VideoFrame& videoFrame)
{
// WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
_renderCritSect.Enter();
_bufferToRender.SwapFrame(videoFrame);
_renderCritSect.Leave();
_renderer.ReDraw();
return 0;
}
/* Implements AndroidStream.
* Calls the Java object to render the buffer in _bufferToRender.
*/
void AndroidNativeOpenGl2Channel::DeliverFrame(JNIEnv* jniEnv)
{
//TickTime timeNow=TickTime::Now();
//Draw the Surface
jniEnv->CallVoidMethod(_javaRenderObj, _redrawCid);
//WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s: time to deliver %lld" ,__FUNCTION__,(TickTime::Now()-timeNow).Milliseconds());
}
/*
* JNI callback from the Java class. Called when the renderer wants to render a frame. Called from the GL render thread.
* Method: DrawNative
* Signature: (J)V
*/
void JNICALL AndroidNativeOpenGl2Channel::DrawNativeStatic
(JNIEnv * env, jobject, jlong context)
{
AndroidNativeOpenGl2Channel* renderChannel=reinterpret_cast<AndroidNativeOpenGl2Channel*>(context);
renderChannel->DrawNative();
}
void AndroidNativeOpenGl2Channel::DrawNative()
{
_openGLRenderer.Render(_bufferToRender);
}
/*
* JNI callback from the Java class. Called when the GLSurfaceView has created a surface. Called from the GL render thread.
* Method: CreateOpenGLNativeStatic
* Signature: (JII)I
*/
jint JNICALL AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic(JNIEnv * env,
jobject,
jlong context,
jint width,
jint height)
{
AndroidNativeOpenGl2Channel* renderChannel =
reinterpret_cast<AndroidNativeOpenGl2Channel*> (context);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1, "%s:", __FUNCTION__);
return renderChannel->CreateOpenGLNative(width, height);
}
jint AndroidNativeOpenGl2Channel::CreateOpenGLNative(int width, int height)
{
return _openGLRenderer.Setup(width, height);
}
} //namespace webrtc

video_render_android_native_opengl2.h
View File

@@ -0,0 +1,93 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
#include <jni.h>
#include "video_render_defines.h"
#include "video_render_android_impl.h"
#include "video_render_opengles20.h"
namespace webrtc {
class CriticalSectionWrapper;
class AndroidNativeOpenGl2Channel: public AndroidStream
{
public:
AndroidNativeOpenGl2Channel(WebRtc_UWord32 streamId, JavaVM* jvm, VideoRenderAndroid& renderer, jobject javaRenderObj);
~AndroidNativeOpenGl2Channel();
WebRtc_Word32 Init(WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom);
//Implement VideoRenderCallback
virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, VideoFrame& videoFrame);
//Implements AndroidStream
virtual void DeliverFrame(JNIEnv* jniEnv);
private:
static jint CreateOpenGLNativeStatic(JNIEnv * env,jobject, jlong context, jint width, jint height);
jint CreateOpenGLNative(int width, int height);
static void DrawNativeStatic(JNIEnv * env,jobject, jlong context);
void DrawNative();
WebRtc_UWord32 _id;
CriticalSectionWrapper& _renderCritSect;
VideoFrame _bufferToRender;
VideoRenderAndroid& _renderer;
JavaVM* _jvm;
jobject _javaRenderObj;
jmethodID _redrawCid;
jmethodID _registerNativeCID;
jmethodID _deRegisterNativeCID;
VideoRenderOpenGles20 _openGLRenderer;
};
class AndroidNativeOpenGl2Renderer: private VideoRenderAndroid
{
public:
AndroidNativeOpenGl2Renderer(const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen);
~AndroidNativeOpenGl2Renderer();
static bool UseOpenGL2(void* window);
WebRtc_Word32 Init();
virtual AndroidStream* CreateAndroidRenderChannel(WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer);
private:
jobject _javaRenderObj;
jclass _javaRenderClass;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_

video_render_android_surface_view.cc
View File

@@ -0,0 +1,466 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video_render_android_surface_view.h"
#include "critical_section_wrapper.h"
#include "vplib.h"
#include "tick_util.h"
#ifdef ANDROID_NDK_8_OR_ABOVE
#include <android/bitmap.h>
#endif
#ifdef ANDROID_LOG
#include <stdio.h>
#include <android/log.h>
#undef WEBRTC_TRACE
#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
#else
#include "trace.h"
#endif
namespace webrtc {
AndroidSurfaceViewRenderer::AndroidSurfaceViewRenderer(const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen)
:
VideoRenderAndroid(id,videoRenderType,window,fullscreen),
_javaRenderObj(NULL),
_javaRenderClass(NULL)
{
}
AndroidSurfaceViewRenderer::~AndroidSurfaceViewRenderer()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "AndroidSurfaceViewRenderer dtor");
if(g_jvm)
{
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK)
{
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env);
env=NULL;
}
else
{
isAttached = true;
}
}
if (env)
{
env->DeleteGlobalRef(_javaRenderObj);
env->DeleteGlobalRef(_javaRenderClass);
}
if (isAttached)
{
if (g_jvm->DetachCurrentThread() < 0)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__);
}
}
}
}
WebRtc_Word32
AndroidSurfaceViewRenderer::Init()
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
if (!g_jvm)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "(%s): Not a valid Java VM pointer.", __FUNCTION__);
return -1;
}
if(!_ptrWindow)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "(%s): No window has been provided.", __FUNCTION__);
return -1;
}
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK)
{
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env);
return -1;
}
isAttached = true;
}
// get the ViESurfaceRender class
jclass javaRenderClassLocal = env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
if (!javaRenderClassLocal)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not find ViESurfaceRenderer", __FUNCTION__);
return -1;
}
// create a global reference to the class (to tell JNI that we are referencing it after this function has returned)
_javaRenderClass = reinterpret_cast<jclass>(env->NewGlobalRef(javaRenderClassLocal));
if (!_javaRenderClass)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not create Java ViESurfaceRenderer class reference", __FUNCTION__);
return -1;
}
// Delete local class ref, we only use the global ref
env->DeleteLocalRef(javaRenderClassLocal);
// get the method ID for the constructor
jmethodID cid = env->GetMethodID(_javaRenderClass, "<init>", "(Landroid/view/SurfaceView;)V");
if (cid == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get constructor ID", __FUNCTION__);
return -1; /* exception thrown */
}
// construct the object
jobject javaRenderObjLocal = env->NewObject(_javaRenderClass, cid, _ptrWindow);
if (!javaRenderObjLocal)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not create Java Render", __FUNCTION__);
return -1;
}
// create a reference to the object (to tell JNI that we are referencing it
// after this function has returned)
_javaRenderObj = env->NewGlobalRef(javaRenderObjLocal);
if (!_javaRenderObj)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not create Java SurfaceRender object reference", __FUNCTION__);
return -1;
}
// Detach this thread if it was attached
if (isAttached)
{
if (g_jvm->DetachCurrentThread() < 0)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__);
}
}
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done", __FUNCTION__);
return 0;
}
AndroidStream*
AndroidSurfaceViewRenderer::CreateAndroidRenderChannel(WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d", __FUNCTION__,streamId);
AndroidSurfaceViewChannel* stream=new AndroidSurfaceViewChannel(streamId,g_jvm,renderer,_javaRenderObj);
if(stream && stream->Init(zOrder,left,top,right,bottom)==0)
return stream;
else
delete stream;
return NULL;
}
AndroidSurfaceViewChannel::AndroidSurfaceViewChannel(WebRtc_UWord32 streamId,JavaVM* jvm,VideoRenderAndroid& renderer,jobject javaRenderObj)
:
_id(streamId),
_renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
_renderer(renderer),
_jvm(jvm),
_javaRenderObj(javaRenderObj),
_bitmapWidth(0),
_bitmapHeight(0)
{
}
AndroidSurfaceViewChannel::~AndroidSurfaceViewChannel()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "AndroidSurfaceViewChannel dtor");
delete &_renderCritSect;
if(_jvm)
{
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK)
{
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = _jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env);
env=NULL;
}
else
{
isAttached = true;
}
}
#ifdef ANDROID_NDK_8_OR_ABOVE
if (env)
env->DeleteGlobalRef(_javaBitmapObj);
#else
if (env)
env->DeleteGlobalRef(_javaByteBufferObj);
#endif
if (isAttached)
{
if (_jvm->DetachCurrentThread() < 0)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__);
}
}
}
}
WebRtc_Word32
AndroidSurfaceViewChannel::Init(WebRtc_Word32 /*zOrder*/,
const float left,
const float top,
const float right,
const float bottom)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: AndroidSurfaceViewChannel", __FUNCTION__);
if (!_jvm)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer,_id, "%s: Not a valid Java VM pointer", __FUNCTION__);
return -1;
}
if((top>1 || top<0) || (right>1 || right<0) || (bottom>1 || bottom<0) || (left>1 || left<0))
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Wrong coordinates",
__FUNCTION__);
return -1;
}
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK)
{
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = _jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env);
return -1;
}
isAttached = true;
}
jclass javaRenderClass = env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
if (!javaRenderClass)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not find ViESurfaceRenderer", __FUNCTION__);
return -1;
}
#ifdef ANDROID_NDK_8_OR_ABOVE
// get the method ID for CreateBitmap
_createBitmapCid = env->GetMethodID(javaRenderClass, "CreateBitmap", "(II)Landroid/graphics/Bitmap;");
if (_createBitmapCid == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get CreateBitmap ID", __FUNCTION__);
return -1; /* exception thrown */
}
// get the method ID for the DrawBitmap function
_drawBitmapCid = env->GetMethodID(javaRenderClass, "DrawBitmap", "()V");
if (_drawBitmapCid == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get DrawBitmap ID", __FUNCTION__);
return -1; /* exception thrown */
}
#else
// get the method ID for CreateByteBuffer
_createByteBufferCid = env->GetMethodID(javaRenderClass, "CreateByteBuffer", "(II)Ljava/nio/ByteBuffer;");
if (_createByteBufferCid == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get CreateByteBuffer ID", __FUNCTION__);
return -1; /* exception thrown */
}
// get the method ID for the DrawByteBuffer function
_drawByteBufferCid = env->GetMethodID(javaRenderClass, "DrawByteBuffer", "()V");
if (_drawByteBufferCid == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get DrawByteBuffer ID", __FUNCTION__);
return -1; /* exception thrown */
}
#endif
// get the method ID for the SetCoordinates function
_setCoordinatesCid = env->GetMethodID(javaRenderClass, "SetCoordinates", "(FFFF)V");
if (_setCoordinatesCid == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get SetCoordinates ID", __FUNCTION__);
return -1; /* exception thrown */
}
env->CallVoidMethod(_javaRenderObj,_setCoordinatesCid,left,top,right,bottom);
// Detach this thread if it was attached
if (isAttached)
{
if (_jvm->DetachCurrentThread() < 0)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__);
}
}
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: AndroidSurfaceViewChannel done", __FUNCTION__);
return 0;
}
WebRtc_Word32 AndroidSurfaceViewChannel::RenderFrame(const WebRtc_UWord32 /*streamId*/, VideoFrame& videoFrame)
{
// WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
_renderCritSect.Enter();
_bufferToRender.SwapFrame(videoFrame);
_renderCritSect.Leave();
_renderer.ReDraw();
return 0;
}
/*Implements AndroidStream
* Calls the Java object and render the buffer in _bufferToRender
*/
void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv)
{
_renderCritSect.Enter();
// TickTime timeNow=TickTime::Now();
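// Two delivery paths: when ANDROID_NDK_8_OR_ABOVE is defined the frame is
// converted straight into the locked pixels of the Java Bitmap; otherwise
// it is converted into a direct ByteBuffer that the Java side copies into
// its own bitmap in DrawByteBuffer.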
#ifdef ANDROID_NDK_8_OR_ABOVE
if(_bitmapWidth!=_bufferToRender.Width() || _bitmapHeight!=_bufferToRender.Height())
{
// Create the bitmap to write to
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Creating bitmap %u %u", __FUNCTION__,_bufferToRender.Width(),_bufferToRender.Height());
if(_javaBitmapObj)
{
jniEnv->DeleteGlobalRef(_javaBitmapObj);
_javaBitmapObj=NULL;
}
jobject javaBitmap=jniEnv->CallObjectMethod(_javaRenderObj,_createBitmapCid,_bufferToRender.Width(),_bufferToRender.Height());
_javaBitmapObj = jniEnv->NewGlobalRef(javaBitmap);
if (!_javaBitmapObj)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not create Java Bitmap object reference", __FUNCTION__);
_renderCritSect.Leave();
return;
}
else
{
_bitmapWidth=_bufferToRender.Width();
_bitmapHeight=_bufferToRender.Height();
}
}
void* pixels;
if (_javaBitmapObj && AndroidBitmap_lockPixels(jniEnv, _javaBitmapObj, &pixels) >= 0)
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Locked bitmap", __FUNCTION__);
// Convert I420 straight into the Java bitmap.
const int conversionResult=ConvertI420ToRGB565( (unsigned char* )_bufferToRender.Buffer(), (unsigned char* ) pixels, _bitmapWidth, _bitmapHeight);
if(conversionResult<=0)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion failed.", __FUNCTION__);
}
AndroidBitmap_unlockPixels(jniEnv, _javaBitmapObj);
//Draw the Surface
jniEnv->CallVoidMethod(_javaRenderObj,_drawBitmapCid);
}
else
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not lock bitmap", __FUNCTION__);
}
_renderCritSect.Leave();
#else
if(_bitmapWidth!=_bufferToRender.Width() || _bitmapHeight!=_bufferToRender.Height())
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s: New render size %d %d",__FUNCTION__, _bufferToRender.Width(), _bufferToRender.Height());
if(_javaByteBufferObj)
{
jniEnv->DeleteGlobalRef(_javaByteBufferObj);
_javaByteBufferObj=NULL;
_directBuffer=NULL;
}
jobject javaByteBufferObj=jniEnv->CallObjectMethod(_javaRenderObj,_createByteBufferCid,_bufferToRender.Width(),_bufferToRender.Height());
_javaByteBufferObj = jniEnv->NewGlobalRef(javaByteBufferObj);
if (!_javaByteBufferObj)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not create Java ByteBuffer object reference", __FUNCTION__);
_renderCritSect.Leave();
return;
}
else
{
_directBuffer=(unsigned char*) jniEnv->GetDirectBufferAddress(_javaByteBufferObj);
_bitmapWidth=_bufferToRender.Width();
_bitmapHeight=_bufferToRender.Height();
}
}
if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight)
{
const int conversionResult=ConvertI420ToRGB565Android((unsigned char* )_bufferToRender.Buffer(), _directBuffer, _bitmapWidth, _bitmapHeight);
if(conversionResult<=0)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion failed.", __FUNCTION__);
_renderCritSect.Leave();
return;
}
}
_renderCritSect.Leave();
//Draw the Surface
jniEnv->CallVoidMethod(_javaRenderObj,_drawByteBufferCid);
#endif
//WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s: time to deliver %lld" ,__FUNCTION__,(TickTime::Now()-timeNow).Milliseconds());
}
} //namespace webrtc

video_render_android_surface_view.h
View File

@@ -0,0 +1,96 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
#include <jni.h>
#include "video_render_defines.h"
#include "video_render_android_impl.h"
namespace webrtc {
class CriticalSectionWrapper;
class AndroidSurfaceViewChannel: public AndroidStream
{
public:
AndroidSurfaceViewChannel(WebRtc_UWord32 streamId,
JavaVM* jvm,
VideoRenderAndroid& renderer,
jobject javaRenderObj);
~AndroidSurfaceViewChannel();
WebRtc_Word32 Init(WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom);
//Implement VideoRenderCallback
virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
VideoFrame& videoFrame);
//Implements AndroidStream
virtual void DeliverFrame(JNIEnv* jniEnv);
private:
WebRtc_UWord32 _id;
CriticalSectionWrapper& _renderCritSect;
VideoFrame _bufferToRender;
VideoRenderAndroid& _renderer;
JavaVM* _jvm;
jobject _javaRenderObj;
#ifdef ANDROID_NDK_8_OR_ABOVE
jclass _javaBitmapClass;
jmethodID _createBitmapCid;
jobject _javaBitmapObj;
jmethodID _drawBitmapCid;
#else
jobject _javaByteBufferObj;
unsigned char* _directBuffer;
jmethodID _createByteBufferCid;
jmethodID _drawByteBufferCid;
#endif
jmethodID _setCoordinatesCid;
unsigned int _bitmapWidth;
unsigned int _bitmapHeight;
};
class AndroidSurfaceViewRenderer: private VideoRenderAndroid
{
public:
AndroidSurfaceViewRenderer(const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen);
~AndroidSurfaceViewRenderer();
WebRtc_Word32 Init();
virtual AndroidStream* CreateAndroidRenderChannel(WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer);
private:
jobject _javaRenderObj;
jclass _javaRenderClass;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_


@@ -0,0 +1,446 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <stdio.h>
#include <stdlib.h>
#include "video_render_opengles20.h"
//#define ANDROID_LOG
#ifdef ANDROID_LOG
#include <stdio.h>
#include <android/log.h>
#undef WEBRTC_TRACE
#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
#else
#include "trace.h"
#endif
namespace webrtc {
const char VideoRenderOpenGles20::g_indices[] = { 0, 3, 2, 0, 2, 1 };
const char VideoRenderOpenGles20::g_vertextShader[] = {
"attribute vec4 aPosition;\n"
"attribute vec2 aTextureCoord;\n"
"varying vec2 vTextureCoord;\n"
"void main() {\n"
" gl_Position = aPosition;\n"
" vTextureCoord = aTextureCoord;\n"
"}\n" };
// The fragment shader.
// Do YUV to RGB565 conversion.
const char VideoRenderOpenGles20::g_fragmentShader[] = {
"precision mediump float;\n"
"uniform sampler2D Ytex;\n"
"uniform sampler2D Utex,Vtex;\n"
"varying vec2 vTextureCoord;\n"
"void main(void) {\n"
" float nx,ny,r,g,b,y,u,v;\n"
" mediump vec4 txl,ux,vx;"
" nx=vTextureCoord[0];\n"
" ny=vTextureCoord[1];\n"
" y=texture2D(Ytex,vec2(nx,ny)).r;\n"
" u=texture2D(Utex,vec2(nx,ny)).r;\n"
" v=texture2D(Vtex,vec2(nx,ny)).r;\n"
//" y = v;\n"
" y=1.1643*(y-0.0625);\n"
" u=u-0.5;\n"
" v=v-0.5;\n"
" r=y+1.5958*v;\n"
" g=y-0.39173*u-0.81290*v;\n"
" b=y+2.017*u;\n"
" gl_FragColor=vec4(r,g,b,1.0);\n"
"}\n" };
VideoRenderOpenGles20::VideoRenderOpenGles20(WebRtc_Word32 id) :
_id(id),
_textureWidth(-1),
_textureHeight(-1)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d",
__FUNCTION__, (int) _id);
const GLfloat vertices[20] = {
// X, Y, Z, U, V
-1, -1, 0, 0, 1, // Bottom Left
1, -1, 0, 1, 1, //Bottom Right
1, 1, 0, 1, 0, //Top Right
-1, 1, 0, 0, 0 }; //Top Left
memcpy(_vertices, vertices, sizeof(_vertices));
}
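// The fragment shader above implements the BT.601 limited-range YUV to RGB
// transform. As a CPU reference (an illustrative sketch, not used by the
// renderer), the same per-pixel math is:
//
//   static void YuvToRgb(float y, float u, float v,
//                        float& r, float& g, float& b)
//   {
//       y = 1.1643f * (y - 0.0625f); // expand the 16..235 luma range
//       u = u - 0.5f;
//       v = v - 0.5f;
//       r = y + 1.5958f * v;
//       g = y - 0.39173f * u - 0.81290f * v;
//       b = y + 2.017f * u;
//   }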
VideoRenderOpenGles20::~VideoRenderOpenGles20()
{
}
WebRtc_Word32 VideoRenderOpenGles20::Setup(WebRtc_Word32 width,
WebRtc_Word32 height)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s: width %d, height %d", __FUNCTION__, (int) width,
(int) height);
printGLString("Version", GL_VERSION);
printGLString("Vendor", GL_VENDOR);
printGLString("Renderer", GL_RENDERER);
printGLString("Extensions", GL_EXTENSIONS);
int maxTextureImageUnits[2];
int maxTextureSize[2];
glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, maxTextureImageUnits);
glGetIntegerv(GL_MAX_TEXTURE_SIZE, maxTextureSize);
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s: number of textures %d, size %d", __FUNCTION__,
(int) maxTextureImageUnits[0], (int) maxTextureSize[0]);
_program = createProgram(g_vertextShader, g_fragmentShader);
if (!_program)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not create program", __FUNCTION__);
return -1;
}
int positionHandle = glGetAttribLocation(_program, "aPosition");
checkGlError("glGetAttribLocation aPosition");
if (positionHandle == -1)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not get aPosition handle", __FUNCTION__);
return -1;
}
int textureHandle = glGetAttribLocation(_program, "aTextureCoord");
checkGlError("glGetAttribLocation aTextureCoord");
if (textureHandle == -1)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not get aTextureCoord handle", __FUNCTION__);
return -1;
}
// Set the vertex position array in the shader.
// _vertices contains 4 vertices with 5 coordinates each: 3 (x,y,z) for position and 2 (u,v) for the texture.
glVertexAttribPointer(positionHandle, 3, GL_FLOAT, false, 5
* sizeof(GLfloat), _vertices);
checkGlError("glVertexAttribPointer aPosition");
glEnableVertexAttribArray(positionHandle);
checkGlError("glEnableVertexAttribArray positionHandle");
// Set the texture coordinate array in the shader.
// _vertices contains 4 vertices with 5 coordinates each: 3 (x,y,z) for position and 2 (u,v) for the texture.
glVertexAttribPointer(textureHandle, 2, GL_FLOAT, false, 5
* sizeof(GLfloat), &_vertices[3]);
checkGlError("glVertexAttribPointer aTextureCoord");
glEnableVertexAttribArray(textureHandle);
checkGlError("glEnableVertexAttribArray textureHandle");
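// Layout of _vertices (20 floats = 4 vertices x 5 floats), for reference:
//
//   | x y z u v | x y z u v | x y z u v | x y z u v |
//
// aPosition reads 3 floats from offset 0 and aTextureCoord reads 2 floats
// from &_vertices[3]; both step between vertices with a stride of
// 5 * sizeof(GLfloat).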
glUseProgram(_program);
int i = glGetUniformLocation(_program, "Ytex");
checkGlError("glGetUniformLocation");
glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */
checkGlError("glUniform1i Ytex");
i = glGetUniformLocation(_program, "Utex");
checkGlError("glGetUniformLocation Utex");
glUniform1i(i, 1); /* Bind Utex to texture unit 1 */
checkGlError("glUniform1i Utex");
i = glGetUniformLocation(_program, "Vtex");
checkGlError("glGetUniformLocation");
glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */
checkGlError("glUniform1i");
glViewport(0, 0, width, height);
checkGlError("glViewport");
return 0;
}
/*
* SetCoordinates
* Sets the coordinates where the stream shall be rendered. Values must be between 0 and 1.
*/
WebRtc_Word32 VideoRenderOpenGles20::SetCoordinates(WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
if ((top > 1 || top < 0) || (right > 1 || right < 0) || (bottom > 1
|| bottom < 0) || (left > 1 || left < 0))
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Wrong coordinates", __FUNCTION__);
return -1;
}
/*
// X, Y, Z, U, V
-1, -1, 0, 0, 1, // Bottom Left
1, -1, 0, 1, 1, //Bottom Right
1, 1, 0, 1, 0, //Top Right
-1, 1, 0, 0, 0 }; //Top Left
*/
// Bottom Left
_vertices[0] = (left * 2) - 1;
_vertices[1] = -1 * (2 * bottom) + 1;
_vertices[2] = zOrder;
//Bottom Right
_vertices[5] = (right * 2) - 1;
_vertices[6] = -1 * (2 * bottom) + 1;
_vertices[7] = zOrder;
//Top Right
_vertices[10] = (right * 2) - 1;
_vertices[11] = -1 * (2 * top) + 1;
_vertices[12] = zOrder;
//Top Left
_vertices[15] = (left * 2) - 1;
_vertices[16] = -1 * (2 * top) + 1;
_vertices[17] = zOrder;
return 0;
}
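// Worked example (illustrative only): left = 0.25, right = 0.75, top = 0,
// bottom = 1 renders the stream in the middle half of the window:
//
//   _vertices[0] = (0.25f * 2) - 1 = -0.5f;      // bottom left x
//   _vertices[1] = -1 * (2 * 1.0f) + 1 = -1.0f;  // bottom left y
//
// i.e. the window-relative [0,1] range maps to OpenGL clip space [-1,1]
// with the y axis flipped.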
WebRtc_Word32 VideoRenderOpenGles20::Render(const VideoFrame& frameToRender)
{
if (frameToRender.Length() == 0)
{
return -1;
}
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d",
__FUNCTION__, (int) _id);
//glClearColor(0.0f, 0.0f, 1.0f, 1.0f);
//glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
glUseProgram(_program);
checkGlError("glUseProgram");
if (_textureWidth != (GLsizei) frameToRender.Width() || _textureHeight
!= (GLsizei) frameToRender.Height())
{
SetupTextures(frameToRender);
}
else
{
UpdateTextures(frameToRender);
}
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, g_indices);
checkGlError("glDrawElements");
return 0;
}
GLuint VideoRenderOpenGles20::loadShader(GLenum shaderType,
const char* pSource)
{
GLuint shader = glCreateShader(shaderType);
if (shader)
{
glShaderSource(shader, 1, &pSource, NULL);
glCompileShader(shader);
GLint compiled = 0;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
if (!compiled)
{
GLint infoLen = 0;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
if (infoLen)
{
char* buf = (char*) malloc(infoLen);
if (buf)
{
glGetShaderInfoLog(shader, infoLen, NULL, buf);
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not compile shader %d: %s",
__FUNCTION__, shaderType, buf);
free(buf);
}
glDeleteShader(shader);
shader = 0;
}
}
}
return shader;
}
GLuint VideoRenderOpenGles20::createProgram(const char* pVertexSource,
const char* pFragmentSource)
{
GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
if (!vertexShader)
{
return 0;
}
GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
if (!pixelShader)
{
return 0;
}
GLuint program = glCreateProgram();
if (program)
{
glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
glLinkProgram(program);
GLint linkStatus = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
if (linkStatus != GL_TRUE)
{
GLint bufLength = 0;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
if (bufLength)
{
char* buf = (char*) malloc(bufLength);
if (buf)
{
glGetProgramInfoLog(program, bufLength, NULL, buf);
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not link program: %s",
__FUNCTION__, buf);
free(buf);
}
}
glDeleteProgram(program);
program = 0;
}
}
return program;
}
void VideoRenderOpenGles20::printGLString(const char *name, GLenum s)
{
const char *v = (const char *) glGetString(s);
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "GL %s = %s\n",
name, v);
}
void VideoRenderOpenGles20::checkGlError(const char* op)
{
#ifdef ANDROID_LOG
for (GLint error = glGetError(); error; error = glGetError())
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "after %s() glError (0x%x)\n", op, error);
}
#else
return;
#endif
}
void VideoRenderOpenGles20::SetupTextures(const VideoFrame& frameToRender)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s: width %d, height %d length %u", __FUNCTION__,
frameToRender.Width(), frameToRender.Height(),
frameToRender.Length());
const GLsizei width = frameToRender.Width();
const GLsizei height = frameToRender.Height();
glGenTextures(3, _textureIds); //Generate the Y, U and V texture
GLuint currentTextureId = _textureIds[0]; // Y
glActiveTexture( GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, currentTextureId);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
GL_LUMINANCE, GL_UNSIGNED_BYTE,
(const GLvoid*) frameToRender.Buffer());
currentTextureId = _textureIds[1]; // U
glActiveTexture( GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, currentTextureId);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
const WebRtc_UWord8* uComponent = frameToRender.Buffer() + width * height;
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0,
GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) uComponent);
currentTextureId = _textureIds[2]; // V
glActiveTexture( GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, currentTextureId);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
const WebRtc_UWord8* vComponent = uComponent + (width * height) / 4;
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0,
GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) vComponent);
checkGlError("SetupTextures");
_textureWidth = width;
_textureHeight = height;
}
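// I420 plane layout assumed above: a full-size Y plane followed by
// quarter-size U and V planes. For a 640x480 frame:
//
//   const WebRtc_UWord8* yPlane = frameToRender.Buffer();    // 307200 bytes
//   const WebRtc_UWord8* uPlane = yPlane + 640 * 480;        // 76800 bytes
//   const WebRtc_UWord8* vPlane = uPlane + (640 * 480) / 4;  // 76800 bytes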
void VideoRenderOpenGles20::UpdateTextures(const VideoFrame& frameToRender)
{
const GLsizei width = frameToRender.Width();
const GLsizei height = frameToRender.Height();
GLuint currentTextureId = _textureIds[0]; // Y
glActiveTexture( GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, currentTextureId);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE,
GL_UNSIGNED_BYTE, (const GLvoid*) frameToRender.Buffer());
currentTextureId = _textureIds[1]; // U
glActiveTexture( GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, currentTextureId);
const WebRtc_UWord8* uComponent = frameToRender.Buffer() + width * height;
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2,
GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) uComponent);
currentTextureId = _textureIds[2]; // V
glActiveTexture( GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, currentTextureId);
const WebRtc_UWord8* vComponent = uComponent + (width * height) / 4;
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2,
GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) vComponent);
checkGlError("UpdateTextures");
}
} //namespace webrtc


@@ -0,0 +1,61 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
#include "video_render_defines.h"
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
namespace webrtc
{
class VideoRenderOpenGles20
{
public:
VideoRenderOpenGles20(WebRtc_Word32 id);
~VideoRenderOpenGles20();
WebRtc_Word32 Setup(WebRtc_Word32 width, WebRtc_Word32 height);
WebRtc_Word32 Render(const VideoFrame& frameToRender);
WebRtc_Word32 SetCoordinates(WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom);
private:
void printGLString(const char *name, GLenum s);
void checkGlError(const char* op);
GLuint loadShader(GLenum shaderType, const char* pSource);
GLuint createProgram(const char* pVertexSource, const char* pFragmentSource);
void SetupTextures(const VideoFrame& frameToRender);
void UpdateTextures(const VideoFrame& frameToRender);
WebRtc_Word32 _id;
GLuint _textureIds[3]; // Texture id of Y,U and V texture.
GLuint _program;
GLuint _vPositionHandle;
GLsizei _textureWidth;
GLsizei _textureHeight;
GLfloat _vertices[20];
static const char g_indices[];
static const char g_vertextShader[];
static const char g_fragmentShader[];
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_


@@ -0,0 +1,205 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video_render_external_impl.h"
namespace webrtc {
VideoRenderExternalImpl::VideoRenderExternalImpl(
const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen) :
_id(id), _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
_window(window), _fullscreen(fullscreen)
{
}
VideoRenderExternalImpl::~VideoRenderExternalImpl()
{
delete &_critSect;
}
WebRtc_Word32 VideoRenderExternalImpl::Init()
{
return 0;
}
WebRtc_Word32 VideoRenderExternalImpl::ChangeUniqueId(const WebRtc_Word32 id)
{
CriticalSectionScoped cs(_critSect);
_id = id;
return 0;
}
WebRtc_Word32 VideoRenderExternalImpl::ChangeWindow(void* window)
{
CriticalSectionScoped cs(_critSect);
return 0;
}
VideoRenderCallback*
VideoRenderExternalImpl::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(_critSect);
return this;
}
WebRtc_Word32 VideoRenderExternalImpl::DeleteIncomingRenderStream(
const WebRtc_UWord32 streamId)
{
CriticalSectionScoped cs(_critSect);
return 0;
}
WebRtc_Word32 VideoRenderExternalImpl::GetIncomingRenderStreamProperties(
const WebRtc_UWord32 streamId,
WebRtc_UWord32& zOrder,
float& left,
float& top,
float& right,
float& bottom) const
{
CriticalSectionScoped cs(_critSect);
zOrder = 0;
left = 0;
top = 0;
right = 0;
bottom = 0;
return 0;
}
WebRtc_Word32 VideoRenderExternalImpl::StartRender()
{
CriticalSectionScoped cs(_critSect);
return 0;
}
WebRtc_Word32 VideoRenderExternalImpl::StopRender()
{
CriticalSectionScoped cs(_critSect);
return 0;
}
VideoRenderType VideoRenderExternalImpl::RenderType()
{
return kRenderExternal;
}
RawVideoType VideoRenderExternalImpl::PerferedVideoType()
{
return kVideoI420;
}
bool VideoRenderExternalImpl::FullScreen()
{
CriticalSectionScoped cs(_critSect);
return _fullscreen;
}
WebRtc_Word32 VideoRenderExternalImpl::GetGraphicsMemory(
WebRtc_UWord64& totalGraphicsMemory,
WebRtc_UWord64& availableGraphicsMemory) const
{
totalGraphicsMemory = 0;
availableGraphicsMemory = 0;
return -1;
}
WebRtc_Word32 VideoRenderExternalImpl::GetScreenResolution(
WebRtc_UWord32& screenWidth,
WebRtc_UWord32& screenHeight) const
{
CriticalSectionScoped cs(_critSect);
screenWidth = 0;
screenHeight = 0;
return 0;
}
WebRtc_UWord32 VideoRenderExternalImpl::RenderFrameRate(
const WebRtc_UWord32 streamId)
{
CriticalSectionScoped cs(_critSect);
return 0;
}
WebRtc_Word32 VideoRenderExternalImpl::SetStreamCropping(
const WebRtc_UWord32 streamId,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(_critSect);
return 0;
}
WebRtc_Word32 VideoRenderExternalImpl::ConfigureRenderer(
const WebRtc_UWord32 streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(_critSect);
return 0;
}
WebRtc_Word32 VideoRenderExternalImpl::SetTransparentBackground(
const bool enable)
{
CriticalSectionScoped cs(_critSect);
return 0;
}
WebRtc_Word32 VideoRenderExternalImpl::SetText(
const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(_critSect);
return 0;
}
WebRtc_Word32 VideoRenderExternalImpl::SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(_critSect);
return 0;
}
// VideoRenderCallback
WebRtc_Word32 VideoRenderExternalImpl::RenderFrame(
const WebRtc_UWord32 streamId,
VideoFrame& videoFrame)
{
return 0;
}
} //namespace webrtc


@@ -0,0 +1,134 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
#include "i_video_render.h"
#include "critical_section_wrapper.h"
#include "module_common_types.h"
namespace webrtc {
// Class definitions
class VideoRenderExternalImpl: IVideoRender, public VideoRenderCallback
{
public:
/*
* Constructor/destructor
*/
VideoRenderExternalImpl(const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window, const bool fullscreen);
virtual ~VideoRenderExternalImpl();
virtual WebRtc_Word32 Init();
virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
virtual WebRtc_Word32 ChangeWindow(void* window);
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
virtual VideoRenderCallback
* AddIncomingRenderStream(const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
const float left, const float top,
const float right, const float bottom);
virtual WebRtc_Word32
DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
virtual WebRtc_Word32
GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
WebRtc_UWord32& zOrder,
float& left, float& top,
float& right, float& bottom) const;
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
virtual WebRtc_Word32 StartRender();
virtual WebRtc_Word32 StopRender();
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual VideoRenderType RenderType();
virtual RawVideoType PerferedVideoType();
virtual bool FullScreen();
virtual WebRtc_Word32
GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
WebRtc_UWord64& availableGraphicsMemory) const;
virtual WebRtc_Word32
GetScreenResolution(WebRtc_UWord32& screenWidth,
WebRtc_UWord32& screenHeight) const;
virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
const float left, const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
const unsigned int zOrder,
const float left, const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
const float left, const float top,
const float right, const float bottom);
virtual WebRtc_Word32 SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey, const float left,
const float top, const float right,
const float bottom);
// VideoRenderCallback
virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
VideoFrame& videoFrame);
private:
WebRtc_Word32 _id;
CriticalSectionWrapper& _critSect;
void* _window;
bool _fullscreen;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_


@@ -0,0 +1,133 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
#include "video_render.h"
namespace webrtc {
// Class definitions
class IVideoRender
{
public:
/*
* Constructor/destructor
*/
virtual ~IVideoRender()
{
};
virtual WebRtc_Word32 Init() = 0;
virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id) = 0;
virtual WebRtc_Word32 ChangeWindow(void* window) = 0;
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
virtual VideoRenderCallback
* AddIncomingRenderStream(const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
const float left,
const float top,
const float right,
const float bottom) = 0;
virtual WebRtc_Word32
DeleteIncomingRenderStream(const WebRtc_UWord32 streamId) = 0;
virtual WebRtc_Word32
GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
WebRtc_UWord32& zOrder,
float& left,
float& top,
float& right,
float& bottom) const = 0;
// Implemented in common code?
//virtual WebRtc_UWord32 GetNumIncomingRenderStreams() const = 0;
//virtual bool HasIncomingRenderStream(const WebRtc_UWord16 streamId) const = 0;
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
virtual WebRtc_Word32 StartRender() = 0;
virtual WebRtc_Word32 StopRender() = 0;
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual VideoRenderType RenderType() = 0;
virtual RawVideoType PerferedVideoType() = 0;
virtual bool FullScreen() = 0;
// TODO: This should be handled in platform-specific code only
virtual WebRtc_Word32
GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
WebRtc_UWord64& availableGraphicsMemory) const = 0;
virtual WebRtc_Word32
GetScreenResolution(WebRtc_UWord32& screenWidth,
WebRtc_UWord32& screenHeight) const = 0;
virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId) = 0;
virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
const float left,
const float top,
const float right,
const float bottom) = 0;
virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom) = 0;
virtual WebRtc_Word32 SetTransparentBackground(const bool enable) = 0;
virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
const float left,
const float top,
const float right,
const float bottom) = 0;
virtual WebRtc_Word32 SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey,
const float left,
const float top,
const float right,
const float bottom) = 0;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_


@@ -0,0 +1,412 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "incoming_video_stream.h"
#include "critical_section_wrapper.h"
#include "event_wrapper.h"
#include "trace.h"
#include "thread_wrapper.h"
#include "video_render_frames.h"
#include "tick_util.h"
#include "map_wrapper.h"
#include "vplib.h"
#include <cassert>
// Platform specifics
#if defined(_WIN32)
#include <windows.h>
#elif defined(WEBRTC_LINUX)
#include <ctime>
#include <sys/time.h>
#else
#include <sys/time.h>
#endif
namespace webrtc {
IncomingVideoStream::IncomingVideoStream(const WebRtc_Word32 moduleId,
const WebRtc_UWord32 streamId) :
_moduleId(moduleId),
_streamId(streamId),
_streamCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_bufferCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_threadCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_ptrIncomingRenderThread(),
_deliverBufferEvent(*EventWrapper::Create()),
_running(false),
_ptrExternalCallback(NULL),
_ptrRenderCallback(NULL),
_renderBuffers(*(new VideoRenderFrames)),
_callbackVideoType(kVideoI420),
_callbackWidth(0),
_callbackHeight(0),
_incomingRate(0),
_lastRateCalculationTimeMs(0),
_numFramesSinceLastCalculation(0),
_lastRenderedFrame(),
_startImage(),
_timeoutImage(),
_timeoutTime(),
_mirrorFramesEnabled(false),
_mirroring(),
_transformedVideoFrame()
{
WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, _moduleId,
"%s created for stream %d", __FUNCTION__, streamId);
}
IncomingVideoStream::~IncomingVideoStream()
{
WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, _moduleId,
"%s deleted for stream %d", __FUNCTION__, _streamId);
Stop();
// _ptrIncomingRenderThread - Delete in stop
delete &_renderBuffers;
delete &_streamCritsect;
delete &_bufferCritsect;
delete &_threadCritsect;
delete &_deliverBufferEvent;
}
WebRtc_Word32 IncomingVideoStream::ChangeModuleId(const WebRtc_Word32 id)
{
CriticalSectionScoped cs(_streamCritsect);
_moduleId = id;
return 0;
}
VideoRenderCallback*
IncomingVideoStream::ModuleCallback()
{
CriticalSectionScoped cs(_streamCritsect);
return this;
}
WebRtc_Word32 IncomingVideoStream::RenderFrame(const WebRtc_UWord32 streamId,
VideoFrame& videoFrame)
{
CriticalSectionScoped csS(_streamCritsect);
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, _moduleId,
"%s for stream %d, render time: %u", __FUNCTION__, _streamId,
videoFrame.RenderTimeMs());
if (!_running)
{
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, _moduleId,
"%s: Not running", __FUNCTION__);
return -1;
}
if (true == _mirrorFramesEnabled)
{
_transformedVideoFrame.VerifyAndAllocate(videoFrame.Length());
if (_mirroring.mirrorXAxis)
{
MirrorI420UpDown(videoFrame.Buffer(),
_transformedVideoFrame.Buffer(),
videoFrame.Width(), videoFrame.Height());
_transformedVideoFrame.SetLength(videoFrame.Length());
_transformedVideoFrame.SetWidth(videoFrame.Width());
_transformedVideoFrame.SetHeight(videoFrame.Height());
videoFrame.SwapFrame(_transformedVideoFrame);
}
if (_mirroring.mirrorYAxis)
{
MirrorI420LeftRight(videoFrame.Buffer(),
_transformedVideoFrame.Buffer(),
videoFrame.Width(), videoFrame.Height());
_transformedVideoFrame.SetLength(videoFrame.Length());
_transformedVideoFrame.SetWidth(videoFrame.Width());
_transformedVideoFrame.SetHeight(videoFrame.Height());
videoFrame.SwapFrame(_transformedVideoFrame);
}
}
// Rate statistics
_numFramesSinceLastCalculation++;
WebRtc_Word64 nowMs = TickTime::MillisecondTimestamp();
if (nowMs >= _lastRateCalculationTimeMs + KFrameRatePeriodMs)
{
_incomingRate = (WebRtc_UWord32) (1000 * _numFramesSinceLastCalculation
/ (nowMs - _lastRateCalculationTimeMs));
_numFramesSinceLastCalculation = 0;
_lastRateCalculationTimeMs = nowMs;
}
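// Example: 30 frames received over a 1000 ms period gives
// 1000 * 30 / 1000 = 30 fps. KFrameRatePeriodMs keeps the measurement
// window at roughly one second so short bursts do not skew the estimate.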
// Insert frame
CriticalSectionScoped csB(_bufferCritsect);
if (_renderBuffers.AddFrame(&videoFrame) == 1)
_deliverBufferEvent.Set();
return 0;
}
WebRtc_Word32 IncomingVideoStream::SetStartImage(const VideoFrame& videoFrame)
{
CriticalSectionScoped csS(_threadCritsect);
return _startImage.CopyFrame(videoFrame);
}
WebRtc_Word32 IncomingVideoStream::SetTimeoutImage(const VideoFrame& videoFrame,
const WebRtc_UWord32 timeout)
{
CriticalSectionScoped csS(_threadCritsect);
_timeoutTime = timeout;
return _timeoutImage.CopyFrame(videoFrame);
}
WebRtc_Word32 IncomingVideoStream::SetRenderCallback(VideoRenderCallback* renderCallback)
{
CriticalSectionScoped cs(_streamCritsect);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId,
"%s(%x) for stream %d", __FUNCTION__, renderCallback,
_streamId);
_ptrRenderCallback = renderCallback;
return 0;
}
WebRtc_Word32 IncomingVideoStream::EnableMirroring(const bool enable,
const bool mirrorXAxis,
const bool mirrorYAxis)
{
CriticalSectionScoped cs(_streamCritsect);
_mirrorFramesEnabled = enable;
_mirroring.mirrorXAxis = mirrorXAxis;
_mirroring.mirrorYAxis = mirrorYAxis;
return 0;
}
WebRtc_Word32 IncomingVideoStream::SetExternalCallback(VideoRenderCallback* externalCallback)
{
CriticalSectionScoped cs(_streamCritsect);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId,
"%s(%x) for stream %d", __FUNCTION__, externalCallback,
_streamId);
_ptrExternalCallback = externalCallback;
_callbackVideoType = kVideoI420;
_callbackWidth = 0;
_callbackHeight = 0;
return 0;
}
WebRtc_Word32 IncomingVideoStream::Start()
{
CriticalSectionScoped csS(_streamCritsect);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId,
"%s for stream %d", __FUNCTION__, _streamId);
if (_running)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _moduleId,
"%s: Already running", __FUNCTION__);
return 0;
}
CriticalSectionScoped csT(_threadCritsect);
assert(_ptrIncomingRenderThread == NULL);
_ptrIncomingRenderThread
= ThreadWrapper::CreateThread(IncomingVideoStreamThreadFun, this,
kRealtimePriority,
"IncomingVideoStreamThread");
if (!_ptrIncomingRenderThread)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _moduleId,
"%s: No thread", __FUNCTION__);
return -1;
}
unsigned int tId = 0;
if (_ptrIncomingRenderThread->Start(tId))
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId,
"%s: thread started: %u", __FUNCTION__, tId);
}
else
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _moduleId,
"%s: Could not start send thread", __FUNCTION__);
return -1;
}
_deliverBufferEvent.StartTimer(false, KEventStartupTimeMS);
_running = true;
return 0;
}
WebRtc_Word32 IncomingVideoStream::Stop()
{
CriticalSectionScoped csStream(_streamCritsect);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _moduleId,
"%s for stream %d", __FUNCTION__, _streamId);
if (!_running)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _moduleId,
"%s: Not running", __FUNCTION__);
return 0;
}
_threadCritsect.Enter();
if (_ptrIncomingRenderThread)
{
ThreadWrapper* ptrThread = _ptrIncomingRenderThread;
_ptrIncomingRenderThread = NULL;
ptrThread->SetNotAlive();
#ifndef _WIN32
_deliverBufferEvent.StopTimer();
#endif
_threadCritsect.Leave();
if (ptrThread->Stop())
{
delete ptrThread;
}
else
{
assert(false);
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _moduleId,
"%s: Not able to stop thread, leaking", __FUNCTION__);
}
}
else
{
_threadCritsect.Leave();
}
_running = false;
return 0;
}
WebRtc_Word32 IncomingVideoStream::Reset()
{
CriticalSectionScoped csStream(_streamCritsect);
CriticalSectionScoped csBuffer(_bufferCritsect);
_renderBuffers.ReleaseAllFrames();
return 0;
}
WebRtc_UWord32 IncomingVideoStream::StreamId() const
{
CriticalSectionScoped csStream(_streamCritsect);
return _streamId;
}
WebRtc_UWord32 IncomingVideoStream::IncomingRate() const
{
CriticalSectionScoped cs(_streamCritsect);
return _incomingRate;
}
bool IncomingVideoStream::IncomingVideoStreamThreadFun(void* obj)
{
return static_cast<IncomingVideoStream*> (obj)->IncomingVideoStreamProcess();
}
bool IncomingVideoStream::IncomingVideoStreamProcess()
{
if (kEventError != _deliverBufferEvent.Wait(KEventMaxWaitTimeMs))
{
if (_ptrIncomingRenderThread == NULL)
{
// Terminating
return false;
}
_threadCritsect.Enter();
VideoFrame* ptrFrameToRender = NULL;
// Get a new frame to render and the time for the frame after this one.
_bufferCritsect.Enter();
ptrFrameToRender = _renderBuffers.FrameToRender();
WebRtc_UWord32 waitTime = _renderBuffers.TimeToNextFrameRelease();
_bufferCritsect.Leave();
// Set timer for next frame to render
if (waitTime > KEventMaxWaitTimeMs)
{
waitTime = KEventMaxWaitTimeMs;
}
_deliverBufferEvent.StartTimer(false, waitTime);
if (!ptrFrameToRender)
{
if (_ptrRenderCallback)
{
if (_lastRenderedFrame.RenderTimeMs() == 0
&& _startImage.Size()) // And we have not rendered anything and have a start image
{
_tempFrame.CopyFrame(_startImage); // Copy the start image in case the renderer modifies the render buffer.
_ptrRenderCallback->RenderFrame(_streamId, _tempFrame);
}
else if (_timeoutImage.Size()
&& _lastRenderedFrame.RenderTimeMs() + _timeoutTime
< TickTime::MillisecondTimestamp()) // We have rendered something a long time ago and have a timeout image
{
_tempFrame.CopyFrame(_timeoutImage); // Copy the timeoutImage if the renderer modifies the render buffer.
_ptrRenderCallback->RenderFrame(_streamId, _tempFrame);
}
}
// No frame
_threadCritsect.Leave();
return true;
}
// Send frame for rendering
if (_ptrExternalCallback)
{
WEBRTC_TRACE(kTraceStream,
kTraceVideoRenderer,
_moduleId,
"%s: executing external renderer callback to deliver frame, render time: %u",
__FUNCTION__, ptrFrameToRender->RenderTimeMs());
_ptrExternalCallback->RenderFrame(_streamId, *ptrFrameToRender);
}
else
{
if (_ptrRenderCallback)
{
WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, _moduleId,
"%s: Render frame, time: %u", __FUNCTION__,
ptrFrameToRender->RenderTimeMs());
_ptrRenderCallback->RenderFrame(_streamId, *ptrFrameToRender);
}
}
// Release critsect before calling the module user
_threadCritsect.Leave();
// We're done with this frame, delete it.
if (ptrFrameToRender)
{
CriticalSectionScoped cs(_bufferCritsect);
_lastRenderedFrame.SwapFrame(*ptrFrameToRender);
_renderBuffers.ReturnFrame(ptrFrameToRender);
}
}
return true;
}
WebRtc_Word32 IncomingVideoStream::GetLastRenderedFrame(VideoFrame& videoFrame) const
{
CriticalSectionScoped cs(_bufferCritsect);
return videoFrame.CopyFrame(_lastRenderedFrame);
}
} //namespace webrtc


@@ -0,0 +1,140 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_INCOMING_VIDEO_STREAM_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_INCOMING_VIDEO_STREAM_H_
#include "video_render.h"
#include "map_wrapper.h"
namespace webrtc {
class CriticalSectionWrapper;
class EventWrapper;
class ThreadWrapper;
class VideoRenderCallback;
class VideoRenderFrames;
struct VideoMirroring
{
bool mirrorXAxis;
bool mirrorYAxis;
VideoMirroring() :
mirrorXAxis(false), mirrorYAxis(false)
{
}
};
// Class definitions
class IncomingVideoStream: public VideoRenderCallback
{
public:
/*
* VideoRenderer constructor/destructor
*/
IncomingVideoStream(const WebRtc_Word32 moduleId,
const WebRtc_UWord32 streamId);
~IncomingVideoStream();
WebRtc_Word32 ChangeModuleId(const WebRtc_Word32 id);
// Get callback used to deliver frames to the module
VideoRenderCallback* ModuleCallback();
virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
VideoFrame& videoFrame);
// Set callback to the platform-dependent code
WebRtc_Word32 SetRenderCallback(VideoRenderCallback* renderCallback);
// Callback for file recording, snapshot, ...
WebRtc_Word32 SetExternalCallback(VideoRenderCallback* renderObject);
/*
* Start/Stop
*/
WebRtc_Word32 Start();
WebRtc_Word32 Stop();
// Clear all buffers
WebRtc_Word32 Reset();
/*
* Properties
*/
WebRtc_UWord32 StreamId() const;
WebRtc_UWord32 IncomingRate() const;
/*
*
*/
WebRtc_Word32 GetLastRenderedFrame(VideoFrame& videoFrame) const;
WebRtc_Word32 SetStartImage(const VideoFrame& videoFrame);
WebRtc_Word32 SetTimeoutImage(const VideoFrame& videoFrame,
const WebRtc_UWord32 timeout);
WebRtc_Word32 EnableMirroring(const bool enable,
const bool mirrorXAxis,
const bool mirrorYAxis);
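/*
 * Typical call sequence (an illustrative sketch; the callback object is
 * assumed to come from the platform-specific renderer):
 *
 *   IncomingVideoStream stream(moduleId, streamId);
 *   stream.SetRenderCallback(platformRenderCallback);
 *   stream.Start();  // spawns the render thread
 *   // ... frames are delivered through RenderFrame(streamId, frame) ...
 *   stream.Stop();
 */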
protected:
static bool IncomingVideoStreamThreadFun(void* obj);
bool IncomingVideoStreamProcess();
private:
// Enums
enum
{
KEventStartupTimeMS = 10
};
enum
{
KEventMaxWaitTimeMs = 100
};
enum
{
KFrameRatePeriodMs = 1000
};
WebRtc_Word32 _moduleId;
WebRtc_UWord32 _streamId;
CriticalSectionWrapper& _streamCritsect; // Critsects listed in the order they may be entered
CriticalSectionWrapper& _threadCritsect;
CriticalSectionWrapper& _bufferCritsect;
ThreadWrapper* _ptrIncomingRenderThread;
EventWrapper& _deliverBufferEvent;
bool _running;
VideoRenderCallback* _ptrExternalCallback;
VideoRenderCallback* _ptrRenderCallback;
VideoRenderFrames& _renderBuffers;
RawVideoType _callbackVideoType;
WebRtc_UWord32 _callbackWidth;
WebRtc_UWord32 _callbackHeight;
WebRtc_UWord32 _incomingRate;
WebRtc_Word64 _lastRateCalculationTimeMs;
WebRtc_UWord16 _numFramesSinceLastCalculation;
VideoFrame _lastRenderedFrame;
VideoFrame _tempFrame;
VideoFrame _startImage;
VideoFrame _timeoutImage;
WebRtc_UWord32 _timeoutTime;
bool _mirrorFramesEnabled;
VideoMirroring _mirroring;
VideoFrame _transformedVideoFrame;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_INCOMING_VIDEO_STREAM_H_


@@ -0,0 +1,271 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video_render_linux_impl.h"
#include "critical_section_wrapper.h"
#include "trace.h"
#include "video_x11_render.h"
#include <X11/Xlib.h>
namespace webrtc {
VideoRenderLinuxImpl::VideoRenderLinuxImpl(
const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window, const bool fullscreen) :
_id(id),
_renderLinuxCritsect(
*CriticalSectionWrapper::CreateCriticalSection()),
_ptrWindow(window), _fullscreen(fullscreen), _ptrX11Render(NULL),
_renderType(videoRenderType)
{
}
VideoRenderLinuxImpl::~VideoRenderLinuxImpl()
{
if (_ptrX11Render)
delete _ptrX11Render;
delete &_renderLinuxCritsect;
}
WebRtc_Word32 VideoRenderLinuxImpl::Init()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(_renderLinuxCritsect);
_ptrX11Render = new VideoX11Render((Window) _ptrWindow);
if (!_ptrX11Render)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s",
"Failed to create instance of VideoX11Render object");
return -1;
}
int retVal = _ptrX11Render->Init();
if (retVal == -1)
{
return -1;
}
return 0;
}
WebRtc_Word32 VideoRenderLinuxImpl::ChangeUniqueId(const WebRtc_Word32 id)
{
CriticalSectionScoped cs(_renderLinuxCritsect);
_id = id;
return 0;
}
WebRtc_Word32 VideoRenderLinuxImpl::ChangeWindow(void* window)
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(_renderLinuxCritsect);
_ptrWindow = window;
if (_ptrX11Render)
{
return _ptrX11Render->ChangeWindow((Window) window);
}
return -1;
}
VideoRenderCallback* VideoRenderLinuxImpl::AddIncomingRenderStream(
const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(_renderLinuxCritsect);
VideoRenderCallback* renderCallback = NULL;
if (_ptrX11Render)
{
VideoX11Channel* renderChannel =
_ptrX11Render->CreateX11RenderChannel(streamId, zOrder, left,
top, right, bottom);
if (!renderChannel)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"Render channel creation failed for stream id: %d",
streamId);
return NULL;
}
renderCallback = (VideoRenderCallback *) renderChannel;
}
else
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"_ptrX11Render is NULL");
return NULL;
}
return renderCallback;
}
WebRtc_Word32 VideoRenderLinuxImpl::DeleteIncomingRenderStream(
const WebRtc_UWord32 streamId)
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(_renderLinuxCritsect);
if (_ptrX11Render)
{
return _ptrX11Render->DeleteX11RenderChannel(streamId);
}
return -1;
}
WebRtc_Word32 VideoRenderLinuxImpl::GetIncomingRenderStreamProperties(
const WebRtc_UWord32 streamId,
WebRtc_UWord32& zOrder,
float& left,
float& top,
float& right,
float& bottom) const
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(_renderLinuxCritsect);
if (_ptrX11Render)
{
return _ptrX11Render->GetIncomingStreamProperties(streamId, zOrder,
left, top, right,
bottom);
}
return -1;
}
WebRtc_Word32 VideoRenderLinuxImpl::StartRender()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
__FUNCTION__);
return 0;
}
WebRtc_Word32 VideoRenderLinuxImpl::StopRender()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
__FUNCTION__);
return 0;
}
VideoRenderType VideoRenderLinuxImpl::RenderType()
{
return kRenderX11;
}
RawVideoType VideoRenderLinuxImpl::PerferedVideoType()
{
return kVideoI420;
}
bool VideoRenderLinuxImpl::FullScreen()
{
return false;
}
WebRtc_Word32 VideoRenderLinuxImpl::GetGraphicsMemory(
WebRtc_UWord64& /*totalGraphicsMemory*/,
WebRtc_UWord64& /*availableGraphicsMemory*/) const
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Linux", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderLinuxImpl::GetScreenResolution(
WebRtc_UWord32& /*screenWidth*/,
WebRtc_UWord32& /*screenHeight*/) const
{
return -1;
}
WebRtc_UWord32 VideoRenderLinuxImpl::RenderFrameRate(const WebRtc_UWord32 /*streamId*/)
{
return -1;
}
WebRtc_Word32 VideoRenderLinuxImpl::SetStreamCropping(
const WebRtc_UWord32 /*streamId*/,
const float /*left*/,
const float /*top*/,
const float /*right*/,
const float /*bottom*/)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Linux", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderLinuxImpl::SetTransparentBackground(const bool /*enable*/)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Linux", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderLinuxImpl::ConfigureRenderer(
const WebRtc_UWord32 streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Linux", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderLinuxImpl::SetText(
const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
const float left, const float top,
const float right,
const float bottom)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Linux", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderLinuxImpl::SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey,
const float left,
const float top,
const float right,
const float bottom)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Linux", __FUNCTION__);
return -1;
}
} //namespace webrtc


@@ -0,0 +1,136 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_
#include "i_video_render.h"
namespace webrtc {
class CriticalSectionWrapper;
class VideoX11Render;
// Class definitions
class VideoRenderLinuxImpl: IVideoRender
{
public:
/*
* Constructor/destructor
*/
VideoRenderLinuxImpl(const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window, const bool fullscreen);
virtual ~VideoRenderLinuxImpl();
virtual WebRtc_Word32 Init();
virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
virtual WebRtc_Word32 ChangeWindow(void* window);
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
virtual VideoRenderCallback
* AddIncomingRenderStream(const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
const float left, const float top,
const float right, const float bottom);
virtual WebRtc_Word32
DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
virtual WebRtc_Word32
GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
WebRtc_UWord32& zOrder,
float& left, float& top,
float& right, float& bottom) const;
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
virtual WebRtc_Word32 StartRender();
virtual WebRtc_Word32 StopRender();
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual VideoRenderType RenderType();
virtual RawVideoType PerferedVideoType();
virtual bool FullScreen();
virtual WebRtc_Word32
GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
WebRtc_UWord64& availableGraphicsMemory) const;
virtual WebRtc_Word32
GetScreenResolution(WebRtc_UWord32& screenWidth,
WebRtc_UWord32& screenHeight) const;
virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
const float left, const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
const unsigned int zOrder,
const float left, const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
const float left, const float top,
const float right, const float bottom);
virtual WebRtc_Word32 SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey, const float left,
const float top, const float right,
const float bottom);
private:
WebRtc_Word32 _id;
CriticalSectionWrapper& _renderLinuxCritsect;
void* _ptrWindow;
bool _fullscreen;
// X11 Render
VideoX11Render* _ptrX11Render;
VideoRenderType _renderType;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_


@@ -0,0 +1,317 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video_x11_channel.h"
#include "critical_section_wrapper.h"
#include "trace.h"
namespace webrtc {
#define DISP_MAX 128
static Display *dispArray[DISP_MAX];
static int dispCount = 0;
VideoX11Channel::VideoX11Channel(WebRtc_Word32 id) :
_crit(*CriticalSectionWrapper::CreateCriticalSection()),
_videoInterpolator(NULL), _display(NULL), _xvport(), _shminfo(),
_image(NULL), _window(NULL), _width(DEFAULT_RENDER_FRAME_WIDTH),
_height(DEFAULT_RENDER_FRAME_HEIGHT), _outWidth(0), _outHeight(0),
_xPos(0), _yPos(0), _prepared(false), _dispCount(0), _buffer(NULL),
_Id(id)
{
}
VideoX11Channel::~VideoX11Channel()
{
if (_prepared)
{
_crit.Enter();
RemoveRenderer();
_crit.Leave();
}
delete &_crit;
if (_videoInterpolator)
{
delete _videoInterpolator;
}
}
WebRtc_Word32 VideoX11Channel::RenderFrame(const WebRtc_UWord32 streamId,
VideoFrame& videoFrame)
{
CriticalSectionScoped cs(_crit);
if (_width != (WebRtc_Word32) videoFrame.Width() || _height
!= (WebRtc_Word32) videoFrame.Height())
{
if (FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
{
return -1;
}
}
return DeliverFrame(videoFrame.Buffer(), videoFrame.Length(),
videoFrame.TimeStamp());
}
WebRtc_Word32 VideoX11Channel::FrameSizeChange(WebRtc_Word32 width,
WebRtc_Word32 height,
WebRtc_Word32 /*numberOfStreams */)
{
CriticalSectionScoped cs(_crit);
if (_prepared)
{
RemoveRenderer();
}
if (CreateLocalRenderer(width, height) == -1)
{
return -1;
}
return 0;
}
WebRtc_Word32 VideoX11Channel::DeliverFrame(unsigned char* buffer,
WebRtc_Word32 bufferSize,
WebRtc_UWord32 /*timeStamp90kHz*/)
{
CriticalSectionScoped cs(_crit);
if (!_prepared)
{
return 0;
}
if (!dispArray[_dispCount])
{
return -1;
}
unsigned char *pBuf = buffer;
// convert to RGB32
ConvertI420ToARGB(pBuf, _buffer, _width, _height, 0);
// put image in window
XShmPutImage(_display, _window, _gc, _image, 0, 0, _xPos, _yPos, _width,
_height, True);
// very important for the image to update properly!
XSync(_display, false);
return 0;
}
WebRtc_Word32 VideoX11Channel::GetFrameSize(WebRtc_Word32& width,
WebRtc_Word32& height)
{
width = _width;
height = _height;
return 0;
}
WebRtc_Word32 VideoX11Channel::Init(Window window, float left, float top,
float right, float bottom)
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(_crit);
_window = window;
_left = left;
_right = right;
_top = top;
_bottom = bottom;
_display = XOpenDisplay(NULL); // Use default display
if (!_window || !_display)
{
return -1;
}
if (dispCount < DISP_MAX)
{
dispArray[dispCount] = _display;
_dispCount = dispCount;
dispCount++;
}
else
{
return -1;
}
if ((1 < left || left < 0) || (1 < top || top < 0) || (1 < right || right
< 0) || (1 < bottom || bottom < 0))
{
return -1;
}
// calculate position and size of rendered video
int x, y;
unsigned int winWidth, winHeight, borderwidth, depth;
Window rootret;
if (XGetGeometry(_display, _window, &rootret, &x, &y, &winWidth,
&winHeight, &borderwidth, &depth) == 0)
{
return -1;
}
_xPos = (WebRtc_Word32) (winWidth * left);
_yPos = (WebRtc_Word32) (winHeight * top);
_outWidth = (WebRtc_Word32) (winWidth * (right - left));
_outHeight = (WebRtc_Word32) (winHeight * (bottom - top));
if (_outWidth % 2)
_outWidth++; // the renderer wants sizes that are multiples of two
if (_outHeight % 2)
_outHeight++;
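// Worked example (illustrative only): a 640x480 window with left = 0.25,
// top = 0.25, right = 0.75, bottom = 0.75 gives
//   _xPos = 160, _yPos = 120, _outWidth = 320, _outHeight = 240
// i.e. the stream occupies the centered middle of the window.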
if (CreateLocalRenderer(winWidth, winHeight) == -1)
{
return -1;
}
return 0;
}
WebRtc_Word32 VideoX11Channel::ChangeWindow(Window window)
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(_crit);
// Stop the rendering, if we are rendering...
RemoveRenderer();
_window = window;
// calculate position and size of rendered video
int x, y;
unsigned int winWidth, winHeight, borderwidth, depth;
Window rootret;
if (XGetGeometry(_display, _window, &rootret, &x, &y, &winWidth,
&winHeight, &borderwidth, &depth) == 0)
{
return -1;
}
_xPos = (int) (winWidth * _left);
_yPos = (int) (winHeight * _top);
_outWidth = (int) (winWidth * (_right - _left));
_outHeight = (int) (winHeight * (_bottom - _top));
if (_outWidth % 2)
_outWidth++; // the renderer wants sizes that are multiples of two
if (_outHeight % 2)
_outHeight++;
// Prepare rendering using the new window geometry
if (CreateLocalRenderer(_width, _height) == -1)
{
return -1;
}
return 0;
}
WebRtc_Word32 VideoX11Channel::ReleaseWindow()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(_crit);
return RemoveRenderer();
}
WebRtc_Word32 VideoX11Channel::CreateLocalRenderer(WebRtc_Word32 width,
WebRtc_Word32 height)
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
__FUNCTION__);
CriticalSectionScoped cs(_crit);
if (!_window || !_display)
{
return -1;
}
if (_prepared)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _Id,
"Renderer already prepared, exits.");
return -1;
}
_width = width;
_height = height;
// create a graphics context in the window
_gc = XCreateGC(_display, _window, 0, 0);
// create shared memory image
_image = XShmCreateImage(_display, CopyFromParent, 24, ZPixmap, NULL,
&_shminfo, _width, _height); // the image must be created with the same dimensions as the incoming frames.
_shminfo.shmid = shmget(IPC_PRIVATE, (_image->bytes_per_line
* _image->height), IPC_CREAT | 0777);
_shminfo.shmaddr = _image->data = (char*) shmat(_shminfo.shmid, 0, 0);
_buffer = (unsigned char*) _image->data;
_shminfo.readOnly = False;
// attach image to display
if (!XShmAttach(_display, &_shminfo))
{
//printf("XShmAttach failed !\n");
return -1;
}
_prepared = true;
return 0;
}
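// The shared segment above is bytes_per_line * height bytes. For a depth-24
// ZPixmap, bytes_per_line is typically width * 4, so a 640x480 image needs
// 640 * 4 * 480 = 1228800 bytes (about 1.2 MB) of shared memory.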
WebRtc_Word32 VideoX11Channel::RemoveRenderer()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
__FUNCTION__);
if (!_prepared)
{
return 0;
}
_prepared = false;
// free and close the X window and XShm resources
XShmDetach(_display, &_shminfo);
XDestroyImage(_image);
shmdt(_shminfo.shmaddr);
return 0;
}
WebRtc_Word32 VideoX11Channel::GetStreamProperties(WebRtc_UWord32& zOrder,
float& left, float& top,
float& right,
float& bottom) const
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
__FUNCTION__);
zOrder = 0; // no z-order support yet
left = _left;
top = _top;
right = _right;
bottom = _bottom;
return 0;
}
} //namespace webrtc


@@ -0,0 +1,104 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
#include "video_render_defines.h"
#include "vplib.h"
#include <sys/ipc.h>
#include <sys/shm.h>
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <X11/Xatom.h>
#include <X11/extensions/XShm.h>
#include <X11/extensions/Xv.h>
#include <X11/extensions/Xvlib.h>
namespace webrtc {
class CriticalSectionWrapper;
#define DEFAULT_RENDER_FRAME_WIDTH 352
#define DEFAULT_RENDER_FRAME_HEIGHT 288
typedef char* VideoInterpolator;
class VideoX11Channel: public VideoRenderCallback
{
public:
VideoX11Channel(WebRtc_Word32 id);
virtual ~VideoX11Channel();
virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
VideoFrame& videoFrame);
WebRtc_Word32 FrameSizeChange(WebRtc_Word32 width, WebRtc_Word32 height,
WebRtc_Word32 numberOfStreams);
WebRtc_Word32 DeliverFrame(unsigned char* buffer, WebRtc_Word32 bufferSize,
WebRtc_UWord32 /*timeStamp90kHz*/);
WebRtc_Word32 GetFrameSize(WebRtc_Word32& width, WebRtc_Word32& height);
WebRtc_Word32 Init(Window window, float left, float top, float right,
float bottom);
WebRtc_Word32 ChangeWindow(Window window);
WebRtc_Word32
GetStreamProperties(WebRtc_UWord32& zOrder, float& left,
float& top, float& right, float& bottom) const;
WebRtc_Word32 ReleaseWindow();
bool IsPrepared()
{
return _prepared;
}
private:
WebRtc_Word32
CreateLocalRenderer(WebRtc_Word32 width, WebRtc_Word32 height);
WebRtc_Word32 RemoveRenderer();
//FIXME: is there a better place for this method? GetWidthHeight is no longer supported by vplib.
int GetWidthHeight(VideoType type, int bufferSize, int& width,
int& height);
CriticalSectionWrapper& _crit;
VideoInterpolator* _videoInterpolator;
Display* _display;
WebRtc_Word32 _xvport;
XShmSegmentInfo _shminfo;
XImage* _image;
Window _window;
GC _gc;
WebRtc_Word32 _width; // incoming frame width
WebRtc_Word32 _height; // incoming frame height
WebRtc_Word32 _outWidth; // render frame width
WebRtc_Word32 _outHeight; // render frame height
WebRtc_Word32 _xPos; // position within window
WebRtc_Word32 _yPos;
bool _prepared; // true if ready to use
WebRtc_Word32 _dispCount;
unsigned char* _buffer;
float _top;
float _left;
float _right;
float _bottom;
WebRtc_Word32 _Id;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_

View File

@@ -0,0 +1,154 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video_x11_render.h"
#include "video_x11_channel.h"
#include "critical_section_wrapper.h"
#include "trace.h"
namespace webrtc {
VideoX11Render::VideoX11Render(Window window) :
_window(window),
_critSect(*CriticalSectionWrapper::CreateCriticalSection())
{
}
VideoX11Render::~VideoX11Render()
{
delete &_critSect;
}
WebRtc_Word32 VideoX11Render::Init()
{
CriticalSectionScoped cs(_critSect);
_streamIdToX11ChannelMap.clear();
return 0;
}
WebRtc_Word32 VideoX11Render::ChangeWindow(Window window)
{
CriticalSectionScoped cs(_critSect);
VideoX11Channel* renderChannel = NULL;
std::map<int, VideoX11Channel*>::iterator iter =
_streamIdToX11ChannelMap.begin();
while (iter != _streamIdToX11ChannelMap.end())
{
renderChannel = iter->second;
if (renderChannel)
{
renderChannel->ChangeWindow(window);
}
iter++;
}
_window = window;
return 0;
}
VideoX11Channel* VideoX11Render::CreateX11RenderChannel(
WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(_critSect);
VideoX11Channel* renderChannel = NULL;
std::map<int, VideoX11Channel*>::iterator iter =
_streamIdToX11ChannelMap.find(streamId);
if (iter == _streamIdToX11ChannelMap.end())
{
renderChannel = new VideoX11Channel(streamId);
if (!renderChannel)
{
WEBRTC_TRACE(
kTraceError,
kTraceVideoRenderer,
-1,
"Failed to create VideoX11Channel for streamId : %d",
streamId);
return NULL;
}
renderChannel->Init(_window, left, top, right, bottom);
_streamIdToX11ChannelMap[streamId] = renderChannel;
}
else
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1,
"Render Channel already exists for streamId: %d", streamId);
renderChannel = iter->second;
}
return renderChannel;
}
WebRtc_Word32 VideoX11Render::DeleteX11RenderChannel(WebRtc_Word32 streamId)
{
CriticalSectionScoped cs(_critSect);
std::map<int, VideoX11Channel*>::iterator iter =
_streamIdToX11ChannelMap.find(streamId);
if (iter != _streamIdToX11ChannelMap.end())
{
VideoX11Channel *renderChannel = iter->second;
if (renderChannel)
{
renderChannel->ReleaseWindow();
delete renderChannel;
renderChannel = NULL;
}
_streamIdToX11ChannelMap.erase(iter);
return 0;
}
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"No VideoX11Channel object exists for stream id: %d",
streamId);
return -1;
}
WebRtc_Word32 VideoX11Render::GetIncomingStreamProperties(
WebRtc_Word32 streamId,
WebRtc_UWord32& zOrder,
float& left,
float& top,
float& right,
float& bottom)
{
CriticalSectionScoped cs(_critSect);
std::map<int, VideoX11Channel*>::iterator iter =
_streamIdToX11ChannelMap.find(streamId);
if (iter != _streamIdToX11ChannelMap.end())
{
VideoX11Channel *renderChannel = iter->second;
if (renderChannel)
{
return renderChannel->GetStreamProperties(zOrder, left, top, right, bottom);
}
}
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"No VideoX11Channel object exists for stream id: %d",
streamId);
return -1;
}
} //namespace webrtc
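
A minimal caller sketch for the two classes above (hedged: the X11 window
handle, stream id and coordinates are illustrative, and error handling is
omitted):

#include "video_x11_channel.h"
#include "video_x11_render.h"

using namespace webrtc;

// Map stream 0 onto the whole window; the four floats are normalized
// fractions of the window size, as used by CreateX11RenderChannel().
void RenderStreamIntoWindow(Window window)
{
    VideoX11Render render(window);
    render.Init();
    VideoX11Channel* channel =
        render.CreateX11RenderChannel(0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
    if (channel)
    {
        // channel implements VideoRenderCallback: frames handed to
        // RenderFrame() are drawn into the window.
        render.DeleteX11RenderChannel(0);
    }
}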


@@ -0,0 +1,58 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
#include "video_render_defines.h"
#include <X11/Xlib.h>
#include <map>
namespace webrtc {
class CriticalSectionWrapper;
class VideoX11Channel;
class VideoX11Render
{
public:
VideoX11Render(Window window);
~VideoX11Render();
WebRtc_Word32 Init();
WebRtc_Word32 ChangeWindow(Window window);
VideoX11Channel* CreateX11RenderChannel(WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom);
WebRtc_Word32 DeleteX11RenderChannel(WebRtc_Word32 streamId);
WebRtc_Word32 GetIncomingStreamProperties(WebRtc_Word32 streamId,
WebRtc_UWord32& zOrder,
float& left, float& top,
float& right, float& bottom);
private:
Window _window;
CriticalSectionWrapper& _critSect;
std::map<int, VideoX11Channel*> _streamIdToX11ChannelMap;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_


@@ -0,0 +1,33 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
//
// cocoa_full_screen_window.h
//
//
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
#import <Cocoa/Cocoa.h>
//#define GRAB_ALL_SCREENS 1
@interface CocoaFullScreenWindow : NSObject {
NSWindow* _window;
}
-(id)init;
-(void)grabFullScreen;
-(void)releaseFullScreen;
@end
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_


@@ -0,0 +1,87 @@
//
// CocoaFullScreenWindow.m
//
#import "cocoa_full_screen_window.h"
#include "trace.h"
using namespace webrtc;
@implementation CocoaFullScreenWindow
-(id)init{
WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, 0, "%s:%d", __FUNCTION__, __LINE__);
self = [super init];
if(!self){
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d COULD NOT CREATE INSTANCE", __FUNCTION__, __LINE__);
return nil;
}
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, 0, "%s:%d Created instance", __FUNCTION__, __LINE__);
return self;
}
-(NSWindow*)window{
return _window;
}
-(void)grabFullScreen{
WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, 0, "%s:%d", __FUNCTION__, __LINE__);
#ifdef GRAB_ALL_SCREENS
if(CGCaptureAllDisplays() != kCGErrorSuccess)
#else
if(CGDisplayCapture(kCGDirectMainDisplay) != kCGErrorSuccess)
#endif
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not capture the display", __FUNCTION__, __LINE__);
}
// get the shielding window level
int windowLevel = CGShieldingWindowLevel();
// get the screen rect of main display
NSRect screenRect = [[NSScreen mainScreen]frame];
_window = [[NSWindow alloc]initWithContentRect:screenRect
styleMask:NSBorderlessWindowMask
backing:NSBackingStoreBuffered
defer:NO
screen:[NSScreen mainScreen]];
[_window setLevel:windowLevel];
[_window setBackgroundColor:[NSColor blackColor]];
[_window makeKeyAndOrderFront:nil];
}
-(void)releaseFullScreen
{
WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, 0, "%s:%d", __FUNCTION__, __LINE__);
[_window orderOut:self];
#ifdef GRAB_ALL_SCREENS
if(CGReleaseAllDisplays() != kCGErrorSuccess)
#else
if(CGDisplayRelease(kCGDirectMainDisplay) != kCGErrorSuccess)
#endif
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not release the displays", __FUNCTION__, __LINE__);
}
}
- (void) dealloc
{
WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, 0, "%s:%d", __FUNCTION__, __LINE__);
[self releaseFullScreen];
[super dealloc];
}
@end


@@ -0,0 +1,35 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
//
// cocoa_render_view.h
//
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
#import <Cocoa/Cocoa.h>
#import <OpenGL/gl.h>
#import <OpenGL/glu.h>
#import <OpenGL/OpenGL.h>
@interface CocoaRenderView : NSOpenGLView {
NSOpenGLContext* _nsOpenGLContext;
}
-(void)initCocoaRenderView:(NSOpenGLPixelFormat*)fmt;
-(void)initCocoaRenderViewFullScreen:(NSOpenGLPixelFormat*)fmt;
@end
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_


@@ -0,0 +1,56 @@
//
// CocoaRenderView.mm
//
#import <Cocoa/Cocoa.h>
#import <AppKit/AppKit.h>
#import "cocoa_render_view.h"
#include "trace.h"
using namespace webrtc;
@implementation CocoaRenderView
-(void)initCocoaRenderView:(NSOpenGLPixelFormat*)fmt{
WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, 0, "%s:%d", __FUNCTION__, __LINE__);
self = [super initWithFrame:[self frame] pixelFormat:[fmt autorelease]];
if (self == nil){
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not create instance", __FUNCTION__, __LINE__);
return;
}
WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, 0, "%s:%d Created instance", __FUNCTION__, __LINE__);
_nsOpenGLContext = [self openGLContext];
}
-(NSOpenGLContext*)nsOpenGLContext{
return _nsOpenGLContext;
}
-(void)initCocoaRenderViewFullScreen:(NSOpenGLPixelFormat*)fmt{
WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, 0, "%s:%d", __FUNCTION__, __LINE__);
NSRect screenRect = [[NSScreen mainScreen]frame];
// [_windowRef setFrame:screenRect];
// [_windowRef setBounds:screenRect];
self = [super initWithFrame:screenRect pixelFormat:[fmt autorelease]];
if (self == nil){
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not create instance", __FUNCTION__, __LINE__);
return;
}
WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, 0, "%s:%d Created instance", __FUNCTION__, __LINE__);
_nsOpenGLContext = [self openGLContext];
}
@end

File diff suppressed because it is too large.


@@ -0,0 +1,185 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "engine_configurations.h"
#if defined(CARBON_RENDERING)
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
#include "video_render_defines.h"
#define NEW_HIVIEW_PARENT_EVENT_HANDLER 1
#define NEW_HIVIEW_EVENT_HANDLER 1
#define USE_STRUCT_RGN
#include <AGL/agl.h>
#include <Carbon/Carbon.h>
#include <OpenGL/OpenGL.h>
#include <OpenGL/glu.h>
#include <OpenGL/glext.h>
#include <list>
#include <map>
class VideoRenderAGL;
namespace webrtc {
class CriticalSectionWrapper;
class EventWrapper;
class ThreadWrapper;
class VideoChannelAGL : public VideoRenderCallback
{
public:
VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner);
virtual ~VideoChannelAGL();
virtual int FrameSizeChange(int width, int height, int numberOfStreams);
virtual int DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int timeStamp90kHz);
virtual int UpdateSize(int width, int height);
int SetStreamSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
int SetStreamCropSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
int RenderOffScreenBuffer();
int IsUpdated(bool& isUpdated);
virtual int UpdateStretchSize(int stretchHeight, int stretchWidth);
virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, VideoFrame& videoFrame);
private:
AGLContext _aglContext;
int _id;
VideoRenderAGL* _owner;
int _width;
int _height;
int _stretchedWidth;
int _stretchedHeight;
float _startHeight;
float _startWidth;
float _stopWidth;
float _stopHeight;
int _xOldWidth;
int _yOldHeight;
int _oldStretchedHeight;
int _oldStretchedWidth;
unsigned char* _buffer;
int _bufferSize;
int _incommingBufferSize;
bool _bufferIsUpdated;
bool _sizeInitialized;
int _numberOfStreams;
bool _bVideoSizeStartedChanging;
GLenum _pixelFormat;
GLenum _pixelDataType;
unsigned int _texture;
};
class VideoRenderAGL
{
public:
VideoRenderAGL(WindowRef windowRef, bool fullscreen, int iId);
VideoRenderAGL(HIViewRef windowRef, bool fullscreen, int iId);
~VideoRenderAGL();
int Init();
VideoChannelAGL* CreateAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
VideoChannelAGL* ConfigureAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
int DeleteAGLChannel(int channel);
int DeleteAllAGLChannels();
int StopThread();
bool IsFullScreen();
bool HasChannels();
bool HasChannel(int channel);
int GetChannels(std::list<int>& channelList);
void LockAGLCntx();
void UnlockAGLCntx();
static int GetOpenGLVersion(int& aglMajor, int& aglMinor);
// ********** new module functions ************ //
int ChangeWindow(void* newWindowRef);
WebRtc_Word32 ChangeUniqueID(WebRtc_Word32 id);
WebRtc_Word32 StartRender();
WebRtc_Word32 StopRender();
WebRtc_Word32 DeleteAGLChannel(const WebRtc_UWord32 streamID);
WebRtc_Word32 GetChannelProperties(const WebRtc_UWord16 streamId,
WebRtc_UWord32& zOrder,
float& left,
float& top,
float& right,
float& bottom);
protected:
static bool ScreenUpdateThreadProc(void* obj);
bool ScreenUpdateProcess();
int GetWindowRect(Rect& rect);
private:
int CreateMixingContext();
int RenderOffScreenBuffers();
int SwapAndDisplayBuffers();
int UpdateClipping();
int CalculateVisibleRegion(ControlRef control, RgnHandle &visibleRgn, bool clipChildren);
bool CheckValidRegion(RgnHandle rHandle);
void ParentWindowResized(WindowRef window);
// Carbon GUI event handlers
static pascal OSStatus sHandleWindowResized (EventHandlerCallRef nextHandler, EventRef theEvent, void* userData);
static pascal OSStatus sHandleHiViewResized (EventHandlerCallRef nextHandler, EventRef theEvent, void* userData);
HIViewRef _hiviewRef;
WindowRef _windowRef;
bool _fullScreen;
int _id;
webrtc::CriticalSectionWrapper& _renderCritSec;
webrtc::ThreadWrapper* _screenUpdateThread;
webrtc::EventWrapper* _screenUpdateEvent;
bool _isHIViewRef;
AGLContext _aglContext;
int _windowWidth;
int _windowHeight;
int _lastWindowWidth;
int _lastWindowHeight;
int _lastHiViewWidth;
int _lastHiViewHeight;
int _currentParentWindowHeight;
int _currentParentWindowWidth;
Rect _currentParentWindowBounds;
bool _windowHasResized;
Rect _lastParentWindowBounds;
Rect _currentHIViewBounds;
Rect _lastHIViewBounds;
Rect _windowRect;
std::map<int, VideoChannelAGL*> _aglChannels;
std::multimap<int, int> _zOrderToChannel;
EventHandlerRef _hiviewEventHandlerRef;
EventHandlerRef _windowEventHandlerRef;
HIRect _currentViewBounds;
HIRect _lastViewBounds;
bool _renderingIsPaused;
unsigned int _threadID;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
#endif // CARBON_RENDERING


@@ -0,0 +1,299 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "engine_configurations.h"
#if defined(CARBON_RENDERING)
#include "video_render_mac_carbon_impl.h"
#include "critical_section_wrapper.h"
#include "video_render_agl.h"
#include "trace.h"
#include <AGL/agl.h>
namespace webrtc {
VideoRenderMacCarbonImpl::VideoRenderMacCarbonImpl(const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen) :
_id(id),
_renderMacCarbonCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_fullScreen(fullscreen),
_ptrWindow(window)
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
}
VideoRenderMacCarbonImpl::~VideoRenderMacCarbonImpl()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Destructor %s:%d", __FUNCTION__, __LINE__);
delete &_renderMacCarbonCritsect;
}
WebRtc_Word32
VideoRenderMacCarbonImpl::Init()
{
CriticalSectionScoped cs(_renderMacCarbonCritsect);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
if (!_ptrWindow)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s:%d No window pointer", __FUNCTION__, __LINE__);
return -1;
}
// We don't know if the user passed us a WindowRef or a HIViewRef, so test.
bool referenceIsValid = false;
// Check if it's a valid WindowRef
//WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d _ptrWindowRef before WindowRef cast: %x", __FUNCTION__, __LINE__, _ptrWindowRef);
WindowRef* windowRef = static_cast<WindowRef*>(_ptrWindow);
//WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d _ptrWindowRef after cast: %x", __FUNCTION__, __LINE__, _ptrWindowRef);
if (IsValidWindowPtr(*windowRef))
{
_ptrCarbonRender = new VideoRenderAGL(*windowRef, _fullScreen, _id);
referenceIsValid = true;
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Successfully initialized CarbonRenderer with WindowRef:%x", __FUNCTION__, __LINE__, *windowRef);
}
else
{
HIViewRef* hiviewRef = static_cast<HIViewRef*>(_ptrWindow);
if (HIViewIsValid(*hiviewRef))
{
_ptrCarbonRender = new VideoRenderAGL(*hiviewRef, _fullScreen, _id);
referenceIsValid = true;
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Successfully initialized CarbonRenderer with HIViewRef:%x", __FUNCTION__, __LINE__, *hiviewRef);
}
}
if(!referenceIsValid)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Invalid WindowRef/HIViewRef Returning -1", __FUNCTION__, __LINE__);
return -1;
}
if(!_ptrCarbonRender)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to create an instance of VideoRenderAGL. Returning -1", __FUNCTION__, __LINE__);
return -1;
}
int retVal = _ptrCarbonRender->Init();
if (retVal == -1)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to init CarbonRenderer", __FUNCTION__, __LINE__);
return -1;
}
return 0;
}
WebRtc_Word32
VideoRenderMacCarbonImpl::ChangeUniqueId(const WebRtc_Word32 id)
{
// Currently disabled; the code below is unreachable until this is supported.
return -1;
CriticalSectionScoped cs(_renderMacCarbonCritsect);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
_id = id;
if(_ptrCarbonRender)
{
_ptrCarbonRender->ChangeUniqueID(_id);
}
return 0;
}
WebRtc_Word32
VideoRenderMacCarbonImpl::ChangeWindow(void* window)
{
// Currently disabled; the code below is unreachable until this is supported.
return -1;
CriticalSectionScoped cs(_renderMacCarbonCritsect);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s changing window to %p", __FUNCTION__, window);
if (window == NULL)
{
return -1;
}
_ptrWindow = window;
return 0;
}
VideoRenderCallback*
VideoRenderMacCarbonImpl::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(_renderMacCarbonCritsect);
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
VideoChannelAGL* AGLChannel = NULL;
if(!_ptrWindow)
{
WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id, "%s, no window", __FUNCTION__);
}
if(!AGLChannel)
{
AGLChannel = _ptrCarbonRender->CreateAGLChannel(streamId, zOrder, left, top, right, bottom);
}
return AGLChannel;
}
WebRtc_Word32
VideoRenderMacCarbonImpl::DeleteIncomingRenderStream(const WebRtc_UWord32 streamId)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
CriticalSectionScoped cs(_renderMacCarbonCritsect);
_ptrCarbonRender->DeleteAGLChannel(streamId);
return 0;
}
WebRtc_Word32
VideoRenderMacCarbonImpl::GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
WebRtc_UWord32& zOrder,
float& left,
float& top,
float& right,
float& bottom) const
{
// Currently disabled; the forwarding call below is unreachable.
return -1;
return _ptrCarbonRender->GetChannelProperties(streamId, zOrder, left, top, right, bottom);
}
WebRtc_Word32
VideoRenderMacCarbonImpl::StartRender()
{
return _ptrCarbonRender->StartRender();
}
WebRtc_Word32
VideoRenderMacCarbonImpl::StopRender()
{
return _ptrCarbonRender->StopRender();
}
VideoRenderType
VideoRenderMacCarbonImpl::RenderType()
{
return kRenderCarbon;
}
RawVideoType
VideoRenderMacCarbonImpl::PerferedVideoType()
{
return kVideoI420;
}
bool
VideoRenderMacCarbonImpl::FullScreen()
{
return false;
}
WebRtc_Word32
VideoRenderMacCarbonImpl::GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
WebRtc_UWord64& availableGraphicsMemory) const
{
totalGraphicsMemory = 0;
availableGraphicsMemory = 0;
return 0;
}
WebRtc_Word32
VideoRenderMacCarbonImpl::GetScreenResolution(WebRtc_UWord32& screenWidth,
WebRtc_UWord32& screenHeight) const
{
CriticalSectionScoped cs(_renderMacCarbonCritsect);
// Not implemented for Carbon; report zero resolution.
screenWidth = 0;
screenHeight = 0;
return 0;
}
WebRtc_UWord32
VideoRenderMacCarbonImpl::RenderFrameRate(const WebRtc_UWord32 streamId)
{
CriticalSectionScoped cs(_renderMacCarbonCritsect);
return 0;
}
WebRtc_Word32
VideoRenderMacCarbonImpl::SetStreamCropping(const WebRtc_UWord32 streamId,
const float left,
const float top,
const float right,
const float bottom)
{
return 0;
}
WebRtc_Word32 VideoRenderMacCarbonImpl::ConfigureRenderer(const WebRtc_UWord32 streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
return 0;
}
WebRtc_Word32
VideoRenderMacCarbonImpl::SetTransparentBackground(const bool enable)
{
return 0;
}
WebRtc_Word32 VideoRenderMacCarbonImpl::SetText(const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
const float left,
const float top,
const float right,
const float bottom)
{
return 0;
}
WebRtc_Word32 VideoRenderMacCarbonImpl::SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey,
const float left,
const float top,
const float right,
const float bottom)
{
return 0;
}
} //namespace webrtc
#endif // CARBON_RENDERING


@@ -0,0 +1,148 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "engine_configurations.h"
#if defined(CARBON_RENDERING)
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
#include "i_video_render.h"
namespace webrtc {
class CriticalSectionWrapper;
class VideoRenderAGL;
// Class definitions
class VideoRenderMacCarbonImpl : IVideoRender
{
public:
/*
* Constructor/destructor
*/
VideoRenderMacCarbonImpl(const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen);
virtual ~VideoRenderMacCarbonImpl();
virtual WebRtc_Word32 Init();
virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
virtual WebRtc_Word32 ChangeWindow(void* window);
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
virtual VideoRenderCallback* AddIncomingRenderStream(const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
const float left,
const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
virtual WebRtc_Word32 GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
WebRtc_UWord32& zOrder,
float& left,
float& top,
float& right,
float& bottom) const;
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
virtual WebRtc_Word32 StartRender();
virtual WebRtc_Word32 StopRender();
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual VideoRenderType RenderType();
virtual RawVideoType PerferedVideoType();
virtual bool FullScreen();
virtual WebRtc_Word32 GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
WebRtc_UWord64& availableGraphicsMemory) const;
virtual WebRtc_Word32 GetScreenResolution(WebRtc_UWord32& screenWidth,
WebRtc_UWord32& screenHeight) const;
virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
const float left,
const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
const float left,
const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey,
const float left,
const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 FullScreenRender(void* window, const bool enable)
{
// not supported in Carbon at this time
return -1;
}
private:
WebRtc_Word32 _id;
CriticalSectionWrapper& _renderMacCarbonCritsect;
bool _fullScreen;
void* _ptrWindow;
VideoRenderAGL* _ptrCarbonRender;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
#endif // CARBON_RENDERING


@@ -0,0 +1,271 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "engine_configurations.h"
#if defined(COCOA_RENDERING)
#import "cocoa_render_view.h"
#include "video_render_mac_cocoa_impl.h"
#include "critical_section_wrapper.h"
#include "video_render_nsopengl.h"
#include "trace.h"
namespace webrtc {
VideoRenderMacCocoaImpl::VideoRenderMacCocoaImpl(const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen) :
_id(id),
_renderMacCocoaCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
_fullScreen(fullscreen),
_ptrWindow(window),
_ptrCocoaRender(NULL)
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
}
VideoRenderMacCocoaImpl::~VideoRenderMacCocoaImpl()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Destructor %s:%d", __FUNCTION__, __LINE__);
delete &_renderMacCocoaCritsect;
if (_ptrCocoaRender)
{
delete _ptrCocoaRender;
_ptrCocoaRender = NULL;
}
}
WebRtc_Word32
VideoRenderMacCocoaImpl::Init()
{
CriticalSectionScoped cs(_renderMacCocoaCritsect);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
// Cast _ptrWindow from void* to CocoaRenderView*; the window handed in is
// expected to be a CocoaRenderView (an NSOpenGLView subclass).
if (!_ptrWindow)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s:%d No window pointer", __FUNCTION__, __LINE__);
return -1;
}
_ptrCocoaRender = new VideoRenderNSOpenGL((CocoaRenderView*)_ptrWindow, _fullScreen, _id);
int retVal = _ptrCocoaRender->Init();
if (retVal == -1)
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Failed to init %s:%d", __FUNCTION__, __LINE__);
return -1;
}
return 0;
}
WebRtc_Word32
VideoRenderMacCocoaImpl::ChangeUniqueId(const WebRtc_Word32 id)
{
CriticalSectionScoped cs(_renderMacCocoaCritsect);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
_id = id;
if(_ptrCocoaRender)
{
_ptrCocoaRender->ChangeUniqueID(_id);
}
return 0;
}
WebRtc_Word32
VideoRenderMacCocoaImpl::ChangeWindow(void* window)
{
CriticalSectionScoped cs(_renderMacCocoaCritsect);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s changing window to %p", __FUNCTION__, window);
if (window == NULL)
{
return -1;
}
_ptrWindow = window;
_ptrCocoaRender->ChangeWindow((CocoaRenderView*)_ptrWindow);
return 0;
}
VideoRenderCallback*
VideoRenderMacCocoaImpl::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
CriticalSectionScoped cs(_renderMacCocoaCritsect);
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
VideoChannelNSOpenGL* nsOpenGLChannel = NULL;
if(!_ptrWindow)
{
WEBRTC_TRACE(kTraceModuleCall, kTraceVideoRenderer, _id, "%s, no window", __FUNCTION__);
}
if(!nsOpenGLChannel)
{
nsOpenGLChannel = _ptrCocoaRender->CreateNSGLChannel(streamId, zOrder, left, top, right, bottom);
}
return nsOpenGLChannel;
}
WebRtc_Word32
VideoRenderMacCocoaImpl::DeleteIncomingRenderStream(const WebRtc_UWord32 streamId)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
CriticalSectionScoped cs(_renderMacCocoaCritsect);
_ptrCocoaRender->DeleteNSGLChannel(streamId);
return 0;
}
WebRtc_Word32
VideoRenderMacCocoaImpl::GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
WebRtc_UWord32& zOrder,
float& left,
float& top,
float& right,
float& bottom) const
{
return _ptrCocoaRender->GetChannelProperties(streamId, zOrder, left, top, right, bottom);
}
WebRtc_Word32
VideoRenderMacCocoaImpl::StartRender()
{
return _ptrCocoaRender->StartRender();
}
WebRtc_Word32
VideoRenderMacCocoaImpl::StopRender()
{
return _ptrCocoaRender->StopRender();
}
VideoRenderType
VideoRenderMacCocoaImpl::RenderType()
{
return kRenderCocoa;
}
RawVideoType
VideoRenderMacCocoaImpl::PerferedVideoType()
{
return kVideoI420;
}
bool
VideoRenderMacCocoaImpl::FullScreen()
{
return false;
}
WebRtc_Word32
VideoRenderMacCocoaImpl::GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
WebRtc_UWord64& availableGraphicsMemory) const
{
totalGraphicsMemory = 0;
availableGraphicsMemory = 0;
return 0;
}
WebRtc_Word32
VideoRenderMacCocoaImpl::GetScreenResolution(WebRtc_UWord32& screenWidth,
WebRtc_UWord32& screenHeight) const
{
CriticalSectionScoped cs(_renderMacCocoaCritsect);
NSScreen* mainScreen = [NSScreen mainScreen];
NSRect frame = [mainScreen frame];
screenWidth = frame.size.width;
screenHeight = frame.size.height;
return 0;
}
WebRtc_UWord32
VideoRenderMacCocoaImpl::RenderFrameRate(const WebRtc_UWord32 streamId)
{
CriticalSectionScoped cs(_renderMacCocoaCritsect);
return 0;
}
WebRtc_Word32
VideoRenderMacCocoaImpl::SetStreamCropping(const WebRtc_UWord32 streamId,
const float left,
const float top,
const float right,
const float bottom)
{
return 0;
}
WebRtc_Word32 VideoRenderMacCocoaImpl::ConfigureRenderer(const WebRtc_UWord32 streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
return 0;
}
WebRtc_Word32
VideoRenderMacCocoaImpl::SetTransparentBackground(const bool enable)
{
return 0;
}
WebRtc_Word32 VideoRenderMacCocoaImpl::SetText(const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
const float left,
const float top,
const float right,
const float bottom)
{
return _ptrCocoaRender->SetText(textId, text, textLength, textColorRef, backgroundColorRef, left, top, right, bottom);
}
WebRtc_Word32 VideoRenderMacCocoaImpl::SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey,
const float left,
const float top,
const float right,
const float bottom)
{
return 0;
}
WebRtc_Word32 VideoRenderMacCocoaImpl::FullScreenRender(void* window, const bool enable)
{
return -1;
}
} //namespace webrtc
#endif // COCOA_RENDERING


@@ -0,0 +1,143 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "engine_configurations.h"
#if defined(COCOA_RENDERING)
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
#include "i_video_render.h"
namespace webrtc {
class CriticalSectionWrapper;
class VideoRenderNSOpenGL;
// Class definitions
class VideoRenderMacCocoaImpl : IVideoRender
{
public:
/*
* Constructor/destructor
*/
VideoRenderMacCocoaImpl(const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen);
virtual ~VideoRenderMacCocoaImpl();
virtual WebRtc_Word32 Init();
virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
virtual WebRtc_Word32 ChangeWindow(void* window);
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
virtual VideoRenderCallback* AddIncomingRenderStream(const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
const float left,
const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
virtual WebRtc_Word32 GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
WebRtc_UWord32& zOrder,
float& left,
float& top,
float& right,
float& bottom) const;
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
virtual WebRtc_Word32 StartRender();
virtual WebRtc_Word32 StopRender();
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual VideoRenderType RenderType();
virtual RawVideoType PerferedVideoType();
virtual bool FullScreen();
virtual WebRtc_Word32 GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
WebRtc_UWord64& availableGraphicsMemory) const;
virtual WebRtc_Word32 GetScreenResolution(WebRtc_UWord32& screenWidth,
WebRtc_UWord32& screenHeight) const;
virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
const float left,
const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
const float left,
const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey,
const float left,
const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 FullScreenRender(void* window, const bool enable);
private:
WebRtc_Word32 _id;
CriticalSectionWrapper& _renderMacCocoaCritsect;
bool _fullScreen;
void* _ptrWindow;
VideoRenderNSOpenGL* _ptrCocoaRender;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
#endif // COCOA_RENDERING

File diff suppressed because it is too large.


@@ -0,0 +1,191 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "engine_configurations.h"
#if defined(COCOA_RENDERING)
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
#include <Cocoa/Cocoa.h>
#include <QuickTime/QuickTime.h>
#include <OpenGL/OpenGL.h>
#include <OpenGL/glu.h>
#include <OpenGL/glext.h>
#include <list>
#include <map>
#include "video_render_defines.h"
#import "cocoa_render_view.h"
#import "cocoa_full_screen_window.h"
class Trace;
namespace webrtc {
class EventWrapper;
class ThreadWrapper;
class VideoRenderNSOpenGL;
class CriticalSectionWrapper;
class VideoChannelNSOpenGL : public VideoRenderCallback
{
public:
VideoChannelNSOpenGL(NSOpenGLContext *nsglContext, int iId, VideoRenderNSOpenGL* owner);
virtual ~VideoChannelNSOpenGL();
// A new frame is delivered
virtual int DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int timeStamp90kHz);
// Called when the incoming frame size and/or number of streams in mix changes
virtual int FrameSizeChange(int width, int height, int numberOfStreams);
virtual int UpdateSize(int width, int height);
// Setup
int SetStreamSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
int SetStreamCropSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
// Called when it's time to render the last frame for the channel
int RenderOffScreenBuffer();
// Sets isUpdated to true if a new buffer has been delivered to the texture
int IsUpdated(bool& isUpdated);
virtual int UpdateStretchSize(int stretchHeight, int stretchWidth);
// ********** new module functions ************ //
virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, VideoFrame& videoFrame);
// ********** new module helper functions ***** //
int ChangeContext(NSOpenGLContext *nsglContext);
WebRtc_Word32 GetChannelProperties(float& left,
float& top,
float& right,
float& bottom);
private:
NSOpenGLContext* _nsglContext;
int _id;
VideoRenderNSOpenGL* _owner;
WebRtc_Word32 _width;
WebRtc_Word32 _height;
float _startWidth;
float _startHeight;
float _stopWidth;
float _stopHeight;
int _stretchedWidth;
int _stretchedHeight;
int _oldStretchedHeight;
int _oldStretchedWidth;
int _xOldWidth;
int _yOldHeight;
unsigned char* _buffer;
int _bufferSize;
int _incommingBufferSize;
bool _bufferIsUpdated;
int _numberOfStreams;
GLenum _pixelFormat;
GLenum _pixelDataType;
unsigned int _texture;
bool _bVideoSizeStartedChanging;
};
class VideoRenderNSOpenGL
{
public: // methods
VideoRenderNSOpenGL(CocoaRenderView *windowRef, bool fullScreen, int iId);
~VideoRenderNSOpenGL();
static int GetOpenGLVersion(int& nsglMajor, int& nsglMinor);
// Allocates textures
int Init();
VideoChannelNSOpenGL* CreateNSGLChannel(int streamID, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
VideoChannelNSOpenGL* ConfigureNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
int DeleteNSGLChannel(int channel);
int DeleteAllNSGLChannels();
int StopThread();
bool IsFullScreen();
bool HasChannels();
bool HasChannel(int channel);
int GetChannels(std::list<int>& channelList);
void LockAGLCntx();
void UnlockAGLCntx();
// ********** new module functions ************ //
int ChangeWindow(CocoaRenderView* newWindowRef);
WebRtc_Word32 ChangeUniqueID(WebRtc_Word32 id);
WebRtc_Word32 StartRender();
WebRtc_Word32 StopRender();
WebRtc_Word32 DeleteNSGLChannel(const WebRtc_UWord32 streamID);
WebRtc_Word32 GetChannelProperties(const WebRtc_UWord16 streamId,
WebRtc_UWord32& zOrder,
float& left,
float& top,
float& right,
float& bottom);
WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
const float left,
const float top,
const float right,
const float bottom);
// ********** new module helper functions ***** //
int configureNSOpenGLEngine();
int configureNSOpenGLView();
int setRenderTargetWindow();
int setRenderTargetFullScreen();
protected: // methods
static bool ScreenUpdateThreadProc(void* obj);
bool ScreenUpdateProcess();
int GetWindowRect(Rect& rect);
private: // methods
int CreateMixingContext();
int RenderOffScreenBuffers();
int DisplayBuffers();
private: // variables
CocoaRenderView* _windowRef;
bool _fullScreen;
int _id;
CriticalSectionWrapper& _nsglContextCritSec;
ThreadWrapper* _screenUpdateThread;
EventWrapper* _screenUpdateEvent;
NSOpenGLContext* _nsglContext;
NSOpenGLContext* _nsglFullScreenContext;
CocoaFullScreenWindow* _fullScreenWindow;
Rect _windowRect; // The size of the window
int _windowWidth;
int _windowHeight;
std::map<int, VideoChannelNSOpenGL*> _nsglChannels;
std::multimap<int, int> _zOrderToChannel;
unsigned int _threadID;
bool _renderingIsPaused;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
#endif // COCOA_RENDERING


@@ -0,0 +1,177 @@
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'includes': [
'../../../../common_settings.gypi', # Common settings
],
'targets': [
{
'target_name': 'video_render_module',
'type': '<(library)',
'dependencies': [
'../../../../common_video/vplib/main/source/vplib.gyp:webrtc_vplib',
'../../../../system_wrappers/source/system_wrappers.gyp:system_wrappers',
'../../../utility/source/utility.gyp:webrtc_utility',
],
'include_dirs': [
'.',
'../interface',
'../../../interface',
],
'direct_dependent_settings': {
'include_dirs': [
'../interface',
'../../../interface',
],
},
'sources': [
# interfaces
'../interface/video_render.h',
'../interface/video_render_defines.h',
# headers
'incoming_video_stream.h',
'video_render_frames.h',
'video_render_impl.h',
'i_video_render.h',
# Linux
'linux/video_render_linux_impl.h',
'linux/video_x11_channel.h',
'linux/video_x11_render.h',
# Mac
'mac/cocoa_full_screen_window.h',
'mac/cocoa_render_view.h',
'mac/video_render_agl.h',
'mac/video_render_mac_carbon_impl.h',
'mac/video_render_mac_cocoa_impl.h',
'mac/video_render_nsopengl.h',
# Windows
'windows/i_video_render_win.h',
'windows/video_render_direct3d9.h',
'windows/video_render_directdraw.h',
'windows/video_render_windows_impl.h',
# External
'external/video_render_external_impl.h',
# PLATFORM INDEPENDENT SOURCE FILES
'incoming_video_stream.cc',
'video_render_frames.cc',
'video_render_impl.cc',
# PLATFORM SPECIFIC SOURCE FILES - Will be filtered below
# Linux
'linux/video_render_linux_impl.cc',
'linux/video_x11_channel.cc',
'linux/video_x11_render.cc',
# Mac
'mac/video_render_nsopengl.cc',
'mac/video_render_mac_cocoa_impl.cc',
'mac/video_render_agl.cc',
'mac/video_render_mac_carbon_impl.cc',
'mac/cocoa_render_view.mm',
'mac/cocoa_full_screen_window.mm',
# Windows
'windows/video_render_direct3d9.cc',
'windows/video_render_directdraw.cc',
'windows/video_render_windows_impl.cc',
# External
'external/video_render_external_impl.cc',
],
'conditions': [
# DEFINE PLATFORM SPECIFIC SOURCE FILES
['OS!="linux" or build_with_chromium==1', {
'sources!': [
'linux/video_render_linux_impl.h',
'linux/video_x11_channel.h',
'linux/video_x11_render.h',
'linux/video_render_linux_impl.cc',
'linux/video_x11_channel.cc',
'linux/video_x11_render.cc',
],
}],
['OS!="mac" or build_with_chromium==1', {
'sources!': [
'mac/cocoa_full_screen_window.h',
'mac/cocoa_render_view.h',
'mac/video_render_agl.h',
'mac/video_render_mac_carbon_impl.h',
'mac/video_render_mac_cocoa_impl.h',
'mac/video_render_nsopengl.h',
'mac/video_render_nsopengl.cc',
'mac/video_render_mac_cocoa_impl.cc',
'mac/video_render_agl.cc',
'mac/video_render_mac_carbon_impl.cc',
'mac/cocoa_render_view.mm',
'mac/cocoa_full_screen_window.mm',
],
}],
['OS!="win" or build_with_chromium==1', {
'sources!': [
'windows/i_video_render_win.h',
'windows/video_render_direct3d9.h',
'windows/video_render_directdraw.h',
'windows/video_render_windows_impl.h',
'windows/video_render_direct3d9.cc',
'windows/video_render_directdraw.cc',
'windows/video_render_windows_impl.cc',
],
}],
# DEFINE PLATFORM SPECIFIC INCLUDE AND CFLAGS
['OS=="mac"', {
'xcode_settings': {
'OTHER_CPLUSPLUSFLAGS': '-x objective-c++'
},
}],
] # conditions
}, # video_render_module
{
'target_name': 'video_render_module_test',
'type': 'executable',
'dependencies': [
'video_render_module',
'../../../utility/source/utility.gyp:webrtc_utility',
'../../../../system_wrappers/source/system_wrappers.gyp:system_wrappers',
'../../../../common_video/vplib/main/source/vplib.gyp:webrtc_vplib',
],
'include_dirs': [
],
'sources': [
# sources
'../test/testAPI/testAPI.cpp',
], # source
'conditions': [
# DEFINE PLATFORM SPECIFIC INCLUDE AND CFLAGS
['OS=="mac" or OS=="linux"', {
'cflags': [
'-Wno-write-strings',
],
'ldflags': [
'-lpthread -lm',
],
}],
['OS=="linux"', {
'libraries': [
'-lrt',
'-lXext',
'-lX11',
],
}],
['OS=="mac"', {
'xcode_settings': {
'OTHER_CPLUSPLUSFLAGS': '-x objective-c++',
'OTHER_LDFLAGS': [
'-framework Foundation -framework AppKit -framework Cocoa -framework OpenGL',
],
},
}],
] # conditions
}, # video_render_module_test
], # targets
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:


@@ -0,0 +1,209 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video_render_frames.h"
#include "module_common_types.h"
#include "tick_util.h"
#include "trace.h"
#include <cassert>
namespace webrtc {
VideoRenderFrames::VideoRenderFrames() :
_incomingFrames(), _renderDelayMs(10)
{
}
VideoRenderFrames::~VideoRenderFrames()
{
ReleaseAllFrames();
}
WebRtc_Word32 VideoRenderFrames::AddFrame(VideoFrame* ptrNewFrame)
{
const WebRtc_Word64 timeNow = TickTime::MillisecondTimestamp();
if (ptrNewFrame->RenderTimeMs() + KOldRenderTimestampMS < timeNow)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
"%s: too old frame.", __FUNCTION__);
return -1;
}
if (ptrNewFrame->RenderTimeMs() > timeNow + KFutureRenderTimestampMS)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
"%s: frame too long into the future.", __FUNCTION__);
return -1;
}
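// Example: with timeNow = 100000 ms, the two checks above drop frames whose
// render time is before 99500 ms or after 110000 ms.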
// Get an empty frame
VideoFrame* ptrFrameToAdd = NULL;
if (!_emptyFrames.Empty())
{
ListItem* item = _emptyFrames.First();
if (item)
{
ptrFrameToAdd = static_cast<VideoFrame*> (item->GetItem());
_emptyFrames.Erase(item);
}
}
if (!ptrFrameToAdd)
{
if (_emptyFrames.GetSize() + _incomingFrames.GetSize()
> KMaxNumberOfFrames)
{
// Already allocated too many frames...
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer,
-1, "%s: too many frames, limit: %d", __FUNCTION__,
KMaxNumberOfFrames);
return -1;
}
// Allocate new memory
WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, -1,
"%s: allocating buffer %d", __FUNCTION__,
_emptyFrames.GetSize() + _incomingFrames.GetSize());
ptrFrameToAdd = new VideoFrame();
if (!ptrFrameToAdd)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
"%s: could not create new frame for", __FUNCTION__);
return -1;
}
}
ptrFrameToAdd->VerifyAndAllocate(ptrNewFrame->Length());
ptrFrameToAdd->SwapFrame(const_cast<VideoFrame&> (*ptrNewFrame)); // Swap buffers instead of copying; a copy would be costly.
_incomingFrames.PushBack(ptrFrameToAdd);
return _incomingFrames.GetSize();
}
VideoFrame*
VideoRenderFrames::FrameToRender()
{
VideoFrame* ptrRenderFrame = NULL;
while (!_incomingFrames.Empty())
{
ListItem* item = _incomingFrames.First();
if (item)
{
VideoFrame* ptrOldestFrameInList =
static_cast<VideoFrame*> (item->GetItem());
if (ptrOldestFrameInList->RenderTimeMs()
<= TickTime::MillisecondTimestamp() + _renderDelayMs)
{
// This is the oldest one so far and it's ok to render
if (ptrRenderFrame)
{
// A previously selected frame is older than this one; recycle it.
ptrRenderFrame->SetWidth(0);
ptrRenderFrame->SetHeight(0);
ptrRenderFrame->SetLength(0);
ptrRenderFrame->SetRenderTime(0);
ptrRenderFrame->SetTimeStamp(0);
_emptyFrames.PushFront(ptrRenderFrame);
}
ptrRenderFrame = ptrOldestFrameInList;
_incomingFrames.Erase(item);
}
else
{
// We can't release this one yet, we're done here.
break;
}
}
else
{
assert(false);
}
}
return ptrRenderFrame;
}
WebRtc_Word32 VideoRenderFrames::ReturnFrame(VideoFrame* ptrOldFrame)
{
ptrOldFrame->SetWidth(0);
ptrOldFrame->SetHeight(0);
ptrOldFrame->SetRenderTime(0);
ptrOldFrame->SetLength(0);
_emptyFrames.PushBack(ptrOldFrame);
return 0;
}
WebRtc_Word32 VideoRenderFrames::ReleaseAllFrames()
{
while (!_incomingFrames.Empty())
{
ListItem* item = _incomingFrames.First();
if (!item)
{
break; // guard against a corrupt list; avoids Erase(NULL)
}
VideoFrame* ptrFrame =
static_cast<VideoFrame*> (item->GetItem());
assert(ptrFrame != NULL);
ptrFrame->Free();
delete ptrFrame;
_incomingFrames.Erase(item);
}
while (!_emptyFrames.Empty())
{
ListItem* item = _emptyFrames.First();
if (!item)
{
break; // guard against a corrupt list; avoids Erase(NULL)
}
VideoFrame* ptrFrame =
static_cast<VideoFrame*> (item->GetItem());
assert(ptrFrame != NULL);
ptrFrame->Free();
delete ptrFrame;
_emptyFrames.Erase(item);
}
return 0;
}
const WebRtc_Word32 KEventMaxWaitTimeMs = 200;
WebRtc_UWord32 VideoRenderFrames::TimeToNextFrameRelease()
{
WebRtc_Word64 timeToRelease = 0;
ListItem* item = _incomingFrames.First();
if (item)
{
VideoFrame* oldestFrame =
static_cast<VideoFrame*> (item->GetItem());
timeToRelease = oldestFrame->RenderTimeMs() - _renderDelayMs
- TickTime::MillisecondTimestamp();
if (timeToRelease < 0)
{
timeToRelease = 0;
}
}
else
{
timeToRelease = KEventMaxWaitTimeMs;
}
return (WebRtc_UWord32) timeToRelease;
}
// Set the render delay used when scheduling frame release.
WebRtc_Word32 VideoRenderFrames::SetRenderDelay(
const WebRtc_UWord32 renderDelay)
{
_renderDelayMs = renderDelay;
return 0;
}
} //namespace webrtc
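
A sketch of the consumer loop this queue is written for (hedged: the waiting
primitive and the drawing step are placeholders, not part of this class):

#include "video_render_frames.h"

using namespace webrtc;

// One pass of a render loop: wait until the oldest queued frame is due,
// fetch it, draw it, then hand the buffer back for reuse.
void RenderOnePass(VideoRenderFrames& frames)
{
    const WebRtc_UWord32 waitMs = frames.TimeToNextFrameRelease();
    // ... block on an event, or sleep, for waitMs ...
    VideoFrame* frame = frames.FrameToRender();
    if (frame)
    {
        // ... hand *frame to the platform-specific renderer ...
        frames.ReturnFrame(frame); // recycle the buffer into _emptyFrames
    }
}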


@@ -0,0 +1,78 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_
#include "list_wrapper.h"
#include "video_render.h"
namespace webrtc {
// Class definitions
class VideoRenderFrames
{
public:
VideoRenderFrames();
~VideoRenderFrames();
/*
* Add a frame to the render queue
*/
WebRtc_Word32 AddFrame(VideoFrame* ptrNewFrame);
/*
* Get a frame for rendering, if it's time to render.
*/
VideoFrame* FrameToRender();
/*
* Return an old frame
*/
WebRtc_Word32 ReturnFrame(VideoFrame* ptrOldFrame);
/*
* Releases all frames
*/
WebRtc_Word32 ReleaseAllFrames();
/*
* Returns the number of ms to next frame to render
*/
WebRtc_UWord32 TimeToNextFrameRelease();
/*
* Sets the estimated delay in the renderer
*/
WebRtc_Word32 SetRenderDelay(const WebRtc_UWord32 renderDelay);
private:
enum
{
KMaxNumberOfFrames = 300
}; // 10 seconds for 30 fps.
enum
{
KOldRenderTimestampMS = 500
}; //Don't render frames with timestamp older than 500ms from now.
enum
{
KFutureRenderTimestampMS = 10000
}; //Don't render frames with timestamp more than 10s into the future.
ListWrapper _incomingFrames; // Sorted oldest video frame first
ListWrapper _emptyFrames; // Empty frames
WebRtc_UWord32 _renderDelayMs; // Set render delay
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_

File diff suppressed because it is too large.


@@ -0,0 +1,236 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
#include "engine_configurations.h"
#include "video_render.h"
#include "map_wrapper.h"
//#include "video_render_defines.h"
namespace webrtc {
class CriticalSectionWrapper;
class IncomingVideoStream;
class IVideoRender;
class MapWrapper;
// Class definitions
class ModuleVideoRenderImpl: public VideoRender
{
public:
/*
* VideoRenderer constructor/destructor
*/
ModuleVideoRenderImpl(const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window, const bool fullscreen);
virtual ~ModuleVideoRenderImpl();
/*
* Returns version of the module and its components
*/
virtual WebRtc_Word32 Version(WebRtc_Word8* version,
WebRtc_UWord32& remainingBufferInBytes,
WebRtc_UWord32& position) const;
/*
* Change the unique identifier of this object
*/
virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
virtual WebRtc_Word32 TimeUntilNextProcess();
virtual WebRtc_Word32 Process();
/*
* Returns the render window
*/
virtual void* Window();
/*
* Change render window
*/
virtual WebRtc_Word32 ChangeWindow(void* window);
/*
* Returns module id
*/
WebRtc_Word32 Id();
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
/*
* Add incoming render stream
*/
virtual VideoRenderCallback
* AddIncomingRenderStream(const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
const float left, const float top,
const float right, const float bottom);
/*
* Delete incoming render stream
*/
virtual WebRtc_Word32
DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
/*
* Add incoming render callback, used for external rendering
*/
virtual WebRtc_Word32
AddExternalRenderCallback(const WebRtc_UWord32 streamId,
VideoRenderCallback* renderObject);
/*
* Get the properties for an incoming render stream
*/
virtual WebRtc_Word32
GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
WebRtc_UWord32& zOrder,
float& left, float& top,
float& right, float& bottom) const;
/*
* Incoming frame rate for the specified stream.
*/
virtual WebRtc_UWord32 GetIncomingFrameRate(const WebRtc_UWord32 streamId);
/*
* Returns the number of incoming streams added to this render module
*/
virtual WebRtc_UWord32 GetNumIncomingRenderStreams() const;
/*
* Returns true if this render module has the streamId added, false otherwise.
*/
virtual bool HasIncomingRenderStream(const WebRtc_UWord32 streamId) const;
/*
* Registers a callback that receives the raw (unrendered) frames for the
* specified stream.
*/
virtual WebRtc_Word32
RegisterRawFrameCallback(const WebRtc_UWord32 streamId,
VideoRenderCallback* callbackObj);
virtual WebRtc_Word32 GetLastRenderedFrame(const WebRtc_UWord32 streamId,
VideoFrame &frame) const;
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
/*
* Starts rendering the specified stream
*/
virtual WebRtc_Word32 StartRender(const WebRtc_UWord32 streamId);
/*
* Stops rendering the specified stream
*/
virtual WebRtc_Word32 StopRender(const WebRtc_UWord32 streamId);
/*
* Resets the renderer to its start state; no streams are removed.
*/
virtual WebRtc_Word32 ResetRender();
/**************************************************************************
*
* Properties
*
***************************************************************************/
/*
* Returns the preferred render video type
*/
virtual RawVideoType PreferredVideoType() const;
/*
* Returns true if the renderer is in fullscreen mode, otherwise false.
*/
virtual bool IsFullScreen();
/*
* Gets screen resolution in pixels
*/
virtual WebRtc_Word32
GetScreenResolution(WebRtc_UWord32& screenWidth,
WebRtc_UWord32& screenHeight) const;
/*
* Get the actual render rate for this stream, i.e. the rendered frame rate,
* not the rate at which frames are delivered to the renderer.
*/
virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
/*
* Set cropping of incoming stream
*/
virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
const float left, const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
const unsigned int zOrder,
const float left, const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
virtual WebRtc_Word32 FullScreenRender(void* window, const bool enable);
virtual WebRtc_Word32 SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey, const float left,
const float top, const float right,
const float bottom);
virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
const float left, const float top,
const float right, const float bottom);
virtual WebRtc_Word32 SetStartImage(const WebRtc_UWord32 streamId,
const VideoFrame& videoFrame);
virtual WebRtc_Word32 SetTimeoutImage(const WebRtc_UWord32 streamId,
const VideoFrame& videoFrame,
const WebRtc_UWord32 timeout);
virtual WebRtc_Word32 MirrorRenderStream(const int renderId,
const bool enable,
const bool mirrorXAxis,
const bool mirrorYAxis);
private:
WebRtc_Word32 _id;
CriticalSectionWrapper& _moduleCrit;
void* _ptrWindow;
VideoRenderType _renderType;
bool _fullScreen;
IVideoRender* _ptrRenderer;
MapWrapper& _streamRenderMap;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
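
A usage sketch for the module above. It assumes the factory functions VideoRender::CreateVideoRender/DestroyVideoRender and the kRenderWindows render type from video_render.h; treat those names as assumptions, since that header is outside this excerpt.

// Sketch only: render one incoming stream into an existing native window.
void RenderOneStreamSketch(void* window)
{
    VideoRender* module = VideoRender::CreateVideoRender(
        0 /*id*/, window, false /*fullscreen*/, kRenderWindows); // assumed factory
    const WebRtc_UWord32 streamId = 0;
    VideoRenderCallback* sink = module->AddIncomingRenderStream(
        streamId, 0 /*zOrder*/, 0.0f, 0.0f, 1.0f, 1.0f); // cover the whole window
    module->StartRender(streamId);
    // Decoded frames are delivered through the returned callback:
    // sink->RenderFrame(streamId, decodedFrame);
    module->StopRender(streamId);
    module->DeleteIncomingRenderStream(streamId);
    VideoRender::DestroyVideoRender(module);
}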


@@ -0,0 +1,118 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
#include "video_render.h"
namespace webrtc {
// Class definitions
class IVideoRenderWin
{
public:
/**************************************************************************
*
* Constructor/destructor
*
***************************************************************************/
virtual ~IVideoRenderWin()
{
};
virtual WebRtc_Word32 Init() = 0;
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
virtual VideoRenderCallback
* CreateChannel(const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
const float left,
const float top,
const float right,
const float bottom) = 0;
virtual WebRtc_Word32 DeleteChannel(const WebRtc_UWord32 streamId) = 0;
virtual WebRtc_Word32 GetStreamSettings(const WebRtc_UWord32 channel,
const WebRtc_UWord16 streamId,
WebRtc_UWord32& zOrder,
float& left,
float& top,
float& right,
float& bottom) = 0;
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
virtual WebRtc_Word32 StartRender() = 0;
virtual WebRtc_Word32 StopRender() = 0;
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual bool IsFullScreen() = 0;
virtual WebRtc_Word32 SetCropping(const WebRtc_UWord32 channel,
const WebRtc_UWord16 streamId,
const float left,
const float top,
const float right,
const float bottom) = 0;
virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 channel,
const WebRtc_UWord16 streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom) = 0;
virtual WebRtc_Word32 SetTransparentBackground(const bool enable) = 0;
virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 colorText,
const WebRtc_UWord32 colorBg,
const float left,
const float top,
const float right,
const float bottom) = 0;
virtual WebRtc_Word32 SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey,
const float left,
const float top,
const float right,
const float bottom) = 0;
virtual WebRtc_Word32 ChangeWindow(void* window) = 0;
virtual WebRtc_Word32 GetGraphicsMemory(WebRtc_UWord64& totalMemory,
WebRtc_UWord64& availableMemory) = 0;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
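
The interface above implies a fixed call order. A hedged lifecycle sketch against any concrete implementation (the D3D9 and DirectDraw renderers appear later in this commit); error handling omitted.

// Sketch only: expected lifecycle of an IVideoRenderWin implementation.
void RunRendererLifecycleSketch(IVideoRenderWin* renderer)
{
    renderer->Init(); // must succeed before anything else
    VideoRenderCallback* channel = renderer->CreateChannel(
        0 /*streamId*/, 0 /*zOrder*/, 0.0f, 0.0f, 1.0f, 1.0f);
    renderer->StartRender(); // spins up the platform render thread
    // channel->RenderFrame(0, frame); // called for each decoded frame
    renderer->StopRender();
    renderer->DeleteChannel(0);
    delete renderer; // safe: the interface declares a virtual destructor
}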

File diff suppressed because it is too large


@@ -0,0 +1,267 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
// WebRtc includes
#include "i_video_render_win.h"
#include <d3d9.h>
#include <d3dx9.h>
#include "ddraw.h"
#include <map>
// Added
#include "video_render_defines.h"
#pragma comment(lib, "d3d9.lib") // located in DirectX SDK
namespace webrtc {
class CriticalSectionWrapper;
class EventWrapper;
class Trace;
class ThreadWrapper;
class D3D9Channel: public VideoRenderCallback
{
public:
D3D9Channel(LPDIRECT3DDEVICE9 pd3DDevice,
CriticalSectionWrapper* critSect, Trace* trace);
virtual ~D3D9Channel();
// Inherited from VideoRenderCallback; called from the VideoAPI class.
// Called when the incoming frame size and/or number of streams in the mix changes
virtual int FrameSizeChange(int width, int height, int numberOfStreams);
// A new frame is delivered
virtual int DeliverFrame(unsigned char* buffer,
int bufferSize,
unsigned int timeStamp90kHz);
virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
VideoFrame& videoFrame);
// Called to check if the video frame is updated.
int IsUpdated(bool& isUpdated);
// Called after the video frame has been rendered to the screen
int RenderOffFrame();
// Called to get the texture that contains the video frame
LPDIRECT3DTEXTURE9 GetTexture();
// Called to get the texture (video frame) size
int GetTextureWidth();
int GetTextureHeight();
//
void SetStreamSettings(WebRtc_UWord16 streamId,
WebRtc_UWord32 zOrder,
float startWidth,
float startHeight,
float stopWidth,
float stopHeight);
int GetStreamSettings(WebRtc_UWord16 streamId,
WebRtc_UWord32& zOrder,
float& startWidth,
float& startHeight,
float& stopWidth,
float& stopHeight);
int ReleaseTexture();
int RecreateTexture(LPDIRECT3DDEVICE9 pd3DDevice);
protected:
private:
// Critical section passed from the owner
CriticalSectionWrapper* _critSect;
LPDIRECT3DDEVICE9 _pd3dDevice;
LPDIRECT3DTEXTURE9 _pTexture;
bool _bufferIsUpdated;
// The frame size
int _width;
int _height;
// Stream settings
// TODO: support multiple streams in one channel
WebRtc_UWord16 _streamId;
WebRtc_UWord32 _zOrder;
float _startWidth;
float _startHeight;
float _stopWidth;
float _stopHeight;
};
class VideoRenderDirect3D9: public IVideoRenderWin
{
public:
VideoRenderDirect3D9(Trace* trace, HWND hWnd, bool fullScreen);
~VideoRenderDirect3D9();
public:
//IVideoRenderWin
/**************************************************************************
*
* Init
*
***************************************************************************/
virtual WebRtc_Word32 Init();
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
virtual VideoRenderCallback
* CreateChannel(const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
const float left,
const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 DeleteChannel(const WebRtc_UWord32 streamId);
virtual WebRtc_Word32 GetStreamSettings(const WebRtc_UWord32 channel,
const WebRtc_UWord16 streamId,
WebRtc_UWord32& zOrder,
float& left,
float& top,
float& right,
float& bottom);
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
virtual WebRtc_Word32 StartRender();
virtual WebRtc_Word32 StopRender();
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual bool IsFullScreen();
virtual WebRtc_Word32 SetCropping(const WebRtc_UWord32 channel,
const WebRtc_UWord16 streamId,
const float left,
const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 channel,
const WebRtc_UWord16 streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
virtual WebRtc_Word32 ChangeWindow(void* window);
virtual WebRtc_Word32 GetGraphicsMemory(WebRtc_UWord64& totalMemory,
WebRtc_UWord64& availableMemory);
virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 colorText,
const WebRtc_UWord32 colorBg,
const float left,
const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey,
const float left,
const float top,
const float right,
const float bottom);
public:
// Get a channel by channel id
D3D9Channel* GetD3DChannel(int channel);
int UpdateRenderSurface();
protected:
// The thread rendering the screen
static bool ScreenUpdateThreadProc(void* obj);
bool ScreenUpdateProcess();
private:
// Init/close the d3d device
int InitDevice();
int CloseDevice();
// Transparent related functions
int SetTransparentColor(LPDIRECT3DTEXTURE9 pTexture,
DDCOLORKEY* transparentColorKey,
DWORD width,
DWORD height);
CriticalSectionWrapper& _refD3DCritsect;
Trace* _trace;
ThreadWrapper* _screenUpdateThread;
EventWrapper* _screenUpdateEvent;
HWND _hWnd;
bool _fullScreen;
RECT _originalHwndRect;
// FIXME: we probably don't need this, since all the information can be
// obtained from _d3dChannels.
int _channel;
// Window size
UINT _winWidth;
UINT _winHeight;
// Device
LPDIRECT3D9 _pD3D; // Used to create the D3DDevice
LPDIRECT3DDEVICE9 _pd3dDevice; // Our rendering device
LPDIRECT3DVERTEXBUFFER9 _pVB; // Buffer to hold Vertices
LPDIRECT3DTEXTURE9 _pTextureLogo;
std::map<int, D3D9Channel*> _d3dChannels;
std::multimap<int, unsigned int> _d3dZorder;
// The position where the logo will be placed
float _logoLeft;
float _logoTop;
float _logoRight;
float _logoBottom;
typedef HRESULT (WINAPI *DIRECT3DCREATE9EX)(UINT SDKVersion, IDirect3D9Ex**);
LPDIRECT3DSURFACE9 _pd3dSurface;
DWORD GetVertexProcessingCaps();
int InitializeD3D(HWND hWnd, D3DPRESENT_PARAMETERS* pd3dpp);
D3DPRESENT_PARAMETERS _d3dpp;
int ResetDevice();
int UpdateVerticeBuffer(LPDIRECT3DVERTEXBUFFER9 pVB, int offset,
float startWidth, float startHeight,
float stopWidth, float stopHeight);
// Code for providing graphics settings
DWORD _totalMemory;
DWORD _availableMemory;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
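
The header pairs a static ScreenUpdateThreadProc(void*) with a member ScreenUpdateProcess(). That is the conventional trampoline for ThreadWrapper, whose callback is a plain function pointer; the definition below is a sketch of what the suppressed .cc presumably does, not the committed code.

// Sketch only: static-to-member trampoline used with ThreadWrapper.
bool VideoRenderDirect3D9::ScreenUpdateThreadProc(void* obj)
{
    // ThreadWrapper calls this repeatedly with the 'this' pointer that was
    // registered at thread creation; returning true keeps the thread alive.
    return static_cast<VideoRenderDirect3D9*>(obj)->ScreenUpdateProcess();
}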

File diff suppressed because it is too large


@@ -0,0 +1,399 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECTDRAW_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECTDRAW_H_
#include "typedefs.h"
#include "i_video_render_win.h"
#include "vplib.h"
#include "ddraw.h"
#include <map>
#include <list>
// Added
#include "video_render_defines.h"
#pragma comment(lib, "ddraw.lib") // located in DirectX SDK
namespace webrtc {
class CriticalSectionWrapper;
class EventWrapper;
class ThreadWrapper;
class Trace;
class VideoRenderDirectDraw;
// some typedefs to make it easy to test different versions
typedef IDirectDraw7 DirectDraw;
typedef IDirectDrawSurface7 DirectDrawSurface;
typedef DDSURFACEDESC2 DirectDrawSurfaceDesc;
typedef DDSCAPS2 DirectDrawCaps;
typedef std::pair<int, unsigned int> ZorderPair;
class WindowsThreadCpuUsage
{
public:
WindowsThreadCpuUsage();
int GetCpuUsage(); // In % since the last call
DWORD LastGetCpuTime()
{
return _lastGetCpuUsageTime;
}
enum
{
CPU_CHECK_INTERVAL = 1000
};
private:
__int64 _lastCpuUsageTime;
DWORD _lastGetCpuUsageTime;
int _lastCpuUsage;
HANDLE _hThread;
int _cores;
};
class DirectDrawStreamSettings
{
public:
DirectDrawStreamSettings();
float _startWidth;
float _stopWidth;
float _startHeight;
float _stopHeight;
float _cropStartWidth;
float _cropStopWidth;
float _cropStartHeight;
float _cropStopHeight;
};
class DirectDrawBitmapSettings
{
public:
DirectDrawBitmapSettings();
~DirectDrawBitmapSettings();
int SetBitmap(Trace* trace, DirectDraw* directDraw);
HBITMAP _transparentBitMap;
float _transparentBitmapLeft;
float _transparentBitmapRight;
float _transparentBitmapTop;
float _transparentBitmapBottom;
int _transparentBitmapWidth;
int _transparentBitmapHeight;
DDCOLORKEY* _transparentBitmapColorKey;
DirectDrawSurface* _transparentBitmapSurface; // size of bitmap image
};
class DirectDrawTextSettings
{
public:
DirectDrawTextSettings();
~DirectDrawTextSettings();
int SetText(const char* text, int textLength, COLORREF colorText,
COLORREF colorBg, float left, float top, float right,
float bottom);
char* _ptrText;
WebRtc_UWord32 _textLength;
COLORREF _colorRefText;
COLORREF _colorRefBackground;
float _textLeft;
float _textRight;
float _textTop;
float _textBottom;
bool _transparent;
};
class DirectDrawChannel: public VideoRenderCallback
{
public:
DirectDrawChannel(DirectDraw* directDraw,
VideoType blitVideoType,
VideoType incomingVideoType,
VideoType screenVideoType,
VideoRenderDirectDraw* owner);
int FrameSizeChange(int width, int height, int numberOfStreams);
int DeliverFrame(unsigned char* buffer, int bufferSize,
unsigned int timeStamp90KHz);
virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
VideoFrame& videoFrame);
int ChangeDeliverColorFormat(bool useScreenType);
void AddRef();
void Release();
void SetStreamSettings(VideoRenderDirectDraw* DDObj, short streamId,
float startWidth, float startHeight,
float stopWidth, float stopHeight);
void SetStreamCropSettings(VideoRenderDirectDraw* DDObj,
short streamId, float startWidth,
float startHeight, float stopWidth,
float stopHeight);
int GetStreamSettings(VideoRenderDirectDraw* DDObj, short streamId,
float& startWidth, float& startHeight,
float& stopWidth, float& stopHeight);
void GetLargestSize(RECT* mixingRect);
int
BlitFromOffscreenBufferToMixingBuffer(
VideoRenderDirectDraw* DDObj,
short streamID,
DirectDrawSurface* mixingSurface,
RECT &dstRect, bool demuxing);
bool IsOffScreenSurfaceUpdated(VideoRenderDirectDraw* DDobj);
protected:
virtual ~DirectDrawChannel();
private:
CriticalSectionWrapper* _critSect; // protect members from change while using them
int _refCount;
int _width;
int _height;
int _numberOfStreams;
bool _deliverInScreenType;
bool _doubleBuffer;
DirectDraw* _directDraw;
DirectDrawSurface* _offScreenSurface; // size of incoming stream
DirectDrawSurface* _offScreenSurfaceNext; // size of incoming stream
VideoType _blitVideoType;
VideoType _originalBlitVideoType;
VideoType _incomingVideoType;
VideoType _screenVideoType;
enum
{
MAX_FRAMEDELIVER_TIME = 20
}; // Maximum time (ms) a DeliverFrame call may take before it is considered late.
enum
{
MAX_NO_OF_LATE_FRAMEDELIVER_TIME = 10
}; // Number of times DeliverFrame may exceed MAX_FRAMEDELIVER_TIME before we take action.
VideoFrame _tempRenderBuffer;
std::map<unsigned long long, DirectDrawStreamSettings*>
_streamIdToSettings;
bool _offScreenSurfaceUpdated;
VideoRenderDirectDraw* _owner;
};
class VideoRenderDirectDraw: public IVideoRenderWin
{
public:
VideoRenderDirectDraw(Trace* trace, HWND hWnd, bool fullscreen);
~VideoRenderDirectDraw();
public:
//IVideoRenderWin
/**************************************************************************
*
* Init
*
***************************************************************************/
virtual WebRtc_Word32 Init();
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
virtual VideoRenderCallback
* CreateChannel(const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder, const float left,
const float top, const float right,
const float bottom);
virtual WebRtc_Word32 DeleteChannel(const WebRtc_UWord32 streamId);
virtual WebRtc_Word32 GetStreamSettings(const WebRtc_UWord32 channel,
const WebRtc_UWord16 streamId,
WebRtc_UWord32& zOrder,
float& left, float& top,
float& right, float& bottom);
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
virtual WebRtc_Word32 StartRender();
virtual WebRtc_Word32 StopRender();
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual bool IsFullScreen();
virtual WebRtc_Word32 SetCropping(const WebRtc_UWord32 channel,
const WebRtc_UWord16 streamId,
const float left, const float top,
const float right, const float bottom);
virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
virtual WebRtc_Word32 ChangeWindow(void* window);
virtual WebRtc_Word32 GetGraphicsMemory(WebRtc_UWord64& totalMemory,
WebRtc_UWord64& availableMemory);
virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 colorText,
const WebRtc_UWord32 colorBg,
const float left, const float top,
const float right, const float bottom);
virtual WebRtc_Word32 SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey, const float left,
const float top, const float right,
const float bottom);
virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 channel,
const WebRtc_UWord16 streamId,
const unsigned int zOrder,
const float left, const float top,
const float right,
const float bottom);
public:
// Used for emergency stops...
int Stop();
DirectDrawChannel* ShareDirectDrawChannel(int channel);
DirectDrawChannel* ConfigureDirectDrawChannel(int channel,
unsigned char streamID,
int zOrder, float left,
float top, float right,
float bottom);
int AddDirectDrawChannel(int channel, unsigned char streamID, int zOrder,
DirectDrawChannel*);
VideoType GetPerferedVideoFormat();
bool HasChannels();
bool HasChannel(int channel);
bool DeliverInScreenType();
int GetChannels(std::list<int>& channelList);
// Code for getting graphics settings
int GetScreenResolution(int& screenWidth, int& screenHeight);
int UpdateSystemCPUUsage(int systemCPU);
int SetBitmap(HBITMAP bitMap, unsigned char pictureId,
DDCOLORKEY* colorKey, float left, float top, float right,
float bottom);
bool IsPrimaryOrMixingSurfaceOnSystem();
bool CanBltFourCC()
{
return _bCanBltFourcc;
}
protected:
static bool RemoteRenderingThreadProc(void* obj);
bool RemoteRenderingProcess();
private:
int CheckCapabilities();
int CreateMixingSurface();
int CreatePrimarySurface();
int FillSurface(DirectDrawSurface *pDDSurface, RECT* rect);
int DrawOnSurface(unsigned char* buffer, int bufferSize);
int BlitFromOffscreenBuffersToMixingBuffer();
int BlitFromBitmapBuffersToMixingBuffer();
int BlitFromTextToMixingBuffer();
bool HasHWNDChanged();
void DecideBestRenderingMode(bool hwndChanged, int totalRenderTime);
// in fullscreen flip mode
int WaitAndFlip(int& waitTime);
int BlitFromMixingBufferToBackBuffer();
// in normal window mode
int BlitFromMixingBufferToFrontBuffer(bool hwndChanged, int& waitTime);
// private members
Trace* _trace;
CriticalSectionWrapper* _confCritSect; // protect members from change while using them
bool _fullscreen;
bool _demuxing;
bool _transparentBackground;
bool _supportTransparency;
bool _canStretch;
bool _canMirrorLeftRight;
bool _clearMixingSurface;
bool _deliverInScreenType;
bool _renderModeWaitForCorrectScanLine;
bool _deliverInHalfFrameRate;
bool _deliverInQuarterFrameRate;
bool _bCanBltFourcc;
bool _frameChanged; // True if a frame, bitmap, or text has changed.
int _processCount;
HWND _hWnd;
RECT _screenRect; // whole screen as a rect
RECT _mixingRect;
RECT _originalHwndRect;
RECT _hwndRect;
VideoType _incomingVideoType;
VideoType _blitVideoType;
VideoType _rgbVideoType;
DirectDraw* _directDraw;
DirectDrawSurface* _primarySurface; // size of screen
DirectDrawSurface* _backSurface; // size of screen
DirectDrawSurface* _mixingSurface; // size of screen
std::map<unsigned char, DirectDrawBitmapSettings*> _bitmapSettings;
std::map<unsigned char, DirectDrawTextSettings*> _textSettings;
std::map<int, DirectDrawChannel*> _directDrawChannels;
std::multimap<int, unsigned int> _directDrawZorder;
EventWrapper* _fullScreenWaitEvent;
EventWrapper* _screenEvent;
ThreadWrapper* _screenRenderThread;
WindowsThreadCpuUsage _screenRenderCpuUsage;
int _lastRenderModeCpuUsage;
// Used for emergency stop caused by OnDisplayChange
bool _blit;
// Code for providing graphics settings
DWORD _totalMemory;
DWORD _availableMemory;
int _systemCPUUsage;
// Variables used for checking render time
int _maxAllowedRenderTime;
int _nrOfTooLongRenderTimes;
bool _isPrimaryOrMixingSurfaceOnSystem;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECTDRAW_H_
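
A note on _directDrawZorder: std::multimap iterates its keys in ascending order, so walking it visits channels back-to-front and later blits land on top of earlier ones. A sketch of that traversal; how the multimap value maps to a channel id is an assumption here, since the .cc is suppressed.

// Sketch only: blit channels in ascending z-order (back-to-front).
void BlitInZOrderSketch(
    const std::multimap<int, unsigned int>& zOrder,
    const std::map<int, DirectDrawChannel*>& channels)
{
    typedef std::multimap<int, unsigned int>::const_iterator ZOrderIter;
    for (ZOrderIter it = zOrder.begin(); it != zOrder.end(); ++it)
    {
        // Assumed: the multimap value identifies the owning channel.
        std::map<int, DirectDrawChannel*>::const_iterator ch =
            channels.find(static_cast<int>(it->second));
        if (ch != channels.end())
        {
            // ch->second->BlitFromOffscreenBufferToMixingBuffer(...);
        }
    }
}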

File diff suppressed because it is too large


@@ -0,0 +1,155 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
#include <Winerror.h>
#include <dxdiag.h>
#include "i_video_render.h"
#include "i_video_render_win.h"
namespace webrtc {
class CriticalSectionWrapper;
#define EXPAND(x) x, sizeof(x)/sizeof(TCHAR)
#pragma comment(lib, "dxguid.lib")
enum VideoRenderWinMethod
{
kVideoRenderWinDd = 0, kVideoRenderWinD3D9 = 1
};
// Class definitions
class VideoRenderWindowsImpl: public IVideoRender
{
public:
/*
* Constructor/destructor
*/
VideoRenderWindowsImpl(const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window, const bool fullscreen);
virtual ~VideoRenderWindowsImpl();
virtual WebRtc_Word32 Init();
virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
virtual WebRtc_Word32 ChangeWindow(void* window);
/**************************************************************************
*
* Incoming Streams
*
***************************************************************************/
virtual VideoRenderCallback
* AddIncomingRenderStream(const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
const float left, const float top,
const float right, const float bottom);
virtual WebRtc_Word32
DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
virtual WebRtc_Word32
GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
WebRtc_UWord32& zOrder,
float& left, float& top,
float& right, float& bottom) const;
/**************************************************************************
*
* Start/Stop
*
***************************************************************************/
virtual WebRtc_Word32 StartRender();
virtual WebRtc_Word32 StopRender();
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual VideoRenderType RenderType();
virtual RawVideoType PerferedVideoType();
virtual bool FullScreen();
virtual WebRtc_Word32
GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
WebRtc_UWord64& availableGraphicsMemory) const;
virtual WebRtc_Word32
GetScreenResolution(WebRtc_UWord32& screenWidth,
WebRtc_UWord32& screenHeight) const;
virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
const float left, const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
const unsigned int zOrder,
const float left, const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
const float left, const float top,
const float right, const float bottom);
virtual WebRtc_Word32 SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey, const float left,
const float top, const float right,
const float bottom);
static int CheckHWAcceleration();
static void CheckHWDriver(bool& badDriver, bool& fullAccelerationEnabled);
private:
void LogOSAndHardwareDetails();
HRESULT GetBoolValue(IDxDiagContainer* pObject, WCHAR* wstrName,
BOOL* pbValue);
HRESULT GetStringValue(IDxDiagContainer* pObject, WCHAR* wstrName,
TCHAR* strValue, int nStrLen);
WebRtc_Word32 _id;
CriticalSectionWrapper& _renderWindowsCritsect;
void* _prtWindow;
bool _fullscreen;
VideoRenderWinMethod _renderMethod;
IVideoRenderWin* _ptrRendererWin;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
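
The VideoRenderWinMethod enum and the static probes above suggest the constructor chooses between DirectDraw and Direct3D9 based on the machine's capabilities. A hedged sketch of that decision; the return convention of CheckHWAcceleration (0 meaning acceleration is available) is an assumption, as the .cc is suppressed.

// Sketch only: choose a VideoRenderWinMethod from the hardware probes.
VideoRenderWinMethod ChooseRenderMethodSketch()
{
    VideoRenderWinMethod method = kVideoRenderWinDd; // conservative default
    if (VideoRenderWindowsImpl::CheckHWAcceleration() == 0) // assumed: 0 == OK
    {
        bool badDriver = false;
        bool fullAcceleration = false;
        VideoRenderWindowsImpl::CheckHWDriver(badDriver, fullAcceleration);
        if (!badDriver && fullAcceleration)
        {
            method = kVideoRenderWinD3D9; // prefer D3D9 on healthy drivers
        }
    }
    return method;
}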