Change folder name from Android to android

Review URL: https://webrtc-codereview.appspot.com/447012

git-svn-id: http://webrtc.googlecode.com/svn/trunk@1912 4adac7df-926f-26a2-2b94-8c16560cd09d
leozwang@webrtc.org 2012-03-19 21:09:42 +00:00
parent 4530aa3157
commit ac9fd8af09
28 changed files with 1567 additions and 1464 deletions

View File

@@ -1,4 +1,4 @@
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
@@ -21,15 +21,15 @@ LOCAL_SRC_FILES := \
video_capture_impl.cc \
device_info_impl.cc \
video_capture_factory.cc \
Android/video_capture_android.cc \
Android/device_info_android.cc
android/video_capture_android.cc \
android/device_info_android.cc
# Flags passed to both C and C++ files.
LOCAL_CFLAGS := \
$(MY_WEBRTC_COMMON_DEFS)
LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/Android \
$(LOCAL_PATH)/android \
$(LOCAL_PATH)/../interface \
$(LOCAL_PATH)/../source \
$(LOCAL_PATH)/../../../interface \

View File

@@ -1,362 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "device_info_android.h"
#include <stdio.h>
#include "ref_count.h"
#include "trace.h"
#include "video_capture_android.h"
namespace webrtc
{
namespace videocapturemodule
{
VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo (
const WebRtc_Word32 id)
{
videocapturemodule::DeviceInfoAndroid *deviceInfo =
new videocapturemodule::DeviceInfoAndroid(id);
if (deviceInfo && deviceInfo->Init() != 0) // Failed to init
{
delete deviceInfo;
deviceInfo = NULL;
}
return deviceInfo;
}
DeviceInfoAndroid::DeviceInfoAndroid(const WebRtc_Word32 id) :
DeviceInfoImpl(id)
{
}
WebRtc_Word32 DeviceInfoAndroid::Init()
{
return 0;
}
DeviceInfoAndroid::~DeviceInfoAndroid()
{
}
WebRtc_UWord32 DeviceInfoAndroid::NumberOfDevices()
{
JNIEnv *env;
jclass javaCmDevInfoClass;
jobject javaCmDevInfoObject;
bool attached = false;
if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
env,
javaCmDevInfoClass,
javaCmDevInfoObject,
attached) != 0)
{
return 0;
}
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
"%s GetMethodId", __FUNCTION__);
// get the method ID for the Android Java NumberOfDevices method.
jmethodID cid = env->GetMethodID(javaCmDevInfoClass,
"NumberOfDevices",
"()I");
jint numberOfDevices = 0;
if (cid != NULL)
{
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
"%s Calling Number of devices", __FUNCTION__);
numberOfDevices = env->CallIntMethod(javaCmDevInfoObject, cid);
}
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
if (numberOfDevices > 0)
return numberOfDevices;
return 0;
}
WebRtc_Word32 DeviceInfoAndroid::GetDeviceName(
WebRtc_UWord32 deviceNumber,
char* deviceNameUTF8,
WebRtc_UWord32 deviceNameLength,
char* deviceUniqueIdUTF8,
WebRtc_UWord32 deviceUniqueIdUTF8Length,
char* /*productUniqueIdUTF8*/,
WebRtc_UWord32 /*productUniqueIdUTF8Length*/)
{
JNIEnv *env;
jclass javaCmDevInfoClass;
jobject javaCmDevInfoObject;
WebRtc_Word32 result = 0;
bool attached = false;
if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
env,
javaCmDevInfoClass,
javaCmDevInfoObject,
attached)!= 0)
{
return -1;
}
// get the method ID for the Android Java GetDeviceUniqueName method.
jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetDeviceUniqueName",
"(I)Ljava/lang/String;");
if (cid != NULL)
{
jobject javaDeviceNameObj = env->CallObjectMethod(javaCmDevInfoObject,
cid, deviceNumber);
if (javaDeviceNameObj == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Failed to get device name for device %d.",
__FUNCTION__, (int) deviceNumber);
result = -1;
}
else
{
jboolean isCopy;
const char* javaDeviceNameChar = env->GetStringUTFChars(
(jstring) javaDeviceNameObj
,&isCopy);
const jsize javaDeviceNameCharLength = env->GetStringUTFLength(
(jstring) javaDeviceNameObj);
if ((WebRtc_UWord32) javaDeviceNameCharLength < deviceUniqueIdUTF8Length)
{
memcpy(deviceUniqueIdUTF8,
javaDeviceNameChar,
javaDeviceNameCharLength + 1);
}
else
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
_id, "%s: deviceUniqueIdUTF8 to short.",
__FUNCTION__);
result = -1;
}
if ((WebRtc_UWord32) javaDeviceNameCharLength < deviceNameLength)
{
memcpy(deviceNameUTF8,
javaDeviceNameChar,
javaDeviceNameCharLength + 1);
}
env->ReleaseStringUTFChars((jstring) javaDeviceNameObj,
javaDeviceNameChar);
} // javaDeviceNameObj == NULL
}
else
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Failed to find GetDeviceUniqueName function id",
__FUNCTION__);
result = -1;
}
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: result %d", __FUNCTION__, (int) result);
return result;
}
WebRtc_Word32 DeviceInfoAndroid::CreateCapabilityMap(
const char* deviceUniqueIdUTF8)
{
MapItem* item = NULL;
while ((item = _captureCapabilities.Last()))
{
delete (VideoCaptureCapability*) item->GetItem();
_captureCapabilities.Erase(item);
}
JNIEnv *env;
jclass javaCmDevInfoClass;
jobject javaCmDevInfoObject;
bool attached = false;
if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
env,
javaCmDevInfoClass,
javaCmDevInfoObject,
attached) != 0)
{
return -1;
}
// Find the capability class
jclass javaCapClassLocal = env->FindClass(AndroidJavaCaptureCapabilityClass);
if (javaCapClassLocal == NULL)
{
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Can't find java class VideoCaptureCapabilityAndroid.",
__FUNCTION__);
return -1;
}
// get the method ID for the Android Java GetCapabilityArray method.
char signature[256];
sprintf(signature,
"(Ljava/lang/String;)[L%s;",
AndroidJavaCaptureCapabilityClass);
jmethodID cid = env->GetMethodID(javaCmDevInfoClass,
"GetCapabilityArray",
signature);
if (cid == NULL)
{
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Can't find method GetCapabilityArray.", __FUNCTION__);
return -1;
}
// Create a jstring so we can pass the deviceUniqueName to the java method.
jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
if (capureIdString == NULL)
{
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Can't create string for method GetCapabilityArray.",
__FUNCTION__);
return -1;
}
// Call the java class and get an array with capabilities back.
jobject javaCapabilitiesObj = env->CallObjectMethod(javaCmDevInfoObject,
cid, capureIdString);
if (!javaCapabilitiesObj)
{
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Failed to call java GetCapabilityArray.",
__FUNCTION__);
return -1;
}
jfieldID widthField = env->GetFieldID(javaCapClassLocal, "width", "I");
jfieldID heigtField = env->GetFieldID(javaCapClassLocal, "height", "I");
jfieldID maxFpsField = env->GetFieldID(javaCapClassLocal, "maxFPS", "I");
if (widthField == NULL || heigtField == NULL || maxFpsField == NULL)
{
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Failed to get field Id.", __FUNCTION__);
return -1;
}
const jsize numberOfCapabilities =
env->GetArrayLength((jarray) javaCapabilitiesObj);
for (jsize i = 0; i < numberOfCapabilities; ++i)
{
VideoCaptureCapability *cap = new VideoCaptureCapability();
jobject capabilityElement = env->GetObjectArrayElement(
(jobjectArray) javaCapabilitiesObj,
i);
cap->width = env->GetIntField(capabilityElement, widthField);
cap->height = env->GetIntField(capabilityElement, heigtField);
cap->expectedCaptureDelay = _expectedCaptureDelay;
cap->rawType = kVideoNV21;
cap->maxFPS = env->GetIntField(capabilityElement, maxFpsField);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Cap width %d, height %d, fps %d", __FUNCTION__,
cap->width, cap->height, cap->maxFPS);
_captureCapabilities.Insert(i, cap);
}
_lastUsedDeviceNameLength = strlen((char*) deviceUniqueIdUTF8);
_lastUsedDeviceName = (char*) realloc(_lastUsedDeviceName,
_lastUsedDeviceNameLength + 1);
memcpy(_lastUsedDeviceName,
deviceUniqueIdUTF8,
_lastUsedDeviceNameLength + 1);
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
"CreateCapabilityMap %d", _captureCapabilities.Size());
return _captureCapabilities.Size();
}
WebRtc_Word32 DeviceInfoAndroid::GetOrientation(
const char* deviceUniqueIdUTF8,
VideoCaptureRotation& orientation)
{
JNIEnv *env;
jclass javaCmDevInfoClass;
jobject javaCmDevInfoObject;
bool attached = false;
if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
env,
javaCmDevInfoClass,
javaCmDevInfoObject,
attached) != 0)
{
return -1;
}
// get the method ID for the Android Java GetOrientation method.
jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetOrientation",
"(Ljava/lang/String;)I");
if (cid == NULL)
{
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Can't find method GetOrientation.", __FUNCTION__);
return -1;
}
// Create a jstring so we can pass the deviceUniqueName to the java method.
jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
if (capureIdString == NULL)
{
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Can't create string for method GetCapabilityArray.",
__FUNCTION__);
return -1;
}
// Call the java class and get the orientation.
jint jorientation = env->CallIntMethod(javaCmDevInfoObject, cid,
capureIdString);
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WebRtc_Word32 retValue = 0;
switch (jorientation)
{
case -1: //Error
orientation = kCameraRotate0;
retValue = -1;
break;
case 0:
orientation = kCameraRotate0;
break;
case 90:
orientation = kCameraRotate90;
break;
case 180:
orientation = kCameraRotate180;
break;
case 270:
orientation = kCameraRotate270;
break;
case 360:
orientation = kCameraRotate0;
break;
}
return retValue;
}
} // namespace videocapturemodule
} // namespace webrtc
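
Every method in this file hand-rolls the same AttachAndUseAndroidDeviceInfoObjects / ReleaseAndroidDeviceInfoObjects pairing around its JNI work. As a side note, the same attach-then-detach protocol can be expressed as a RAII guard; the following is a minimal sketch of that idea (ScopedJvmThread is a hypothetical helper, not part of this commit):

#include <jni.h>

// Hypothetical scope guard: attaches the calling thread to the JVM if it
// is not already attached, and detaches it on destruction; this is the
// protocol the Attach.../Release... pair above implements by hand.
class ScopedJvmThread {
 public:
  explicit ScopedJvmThread(JavaVM* jvm)
      : _jvm(jvm), _env(NULL), _attached(false) {
    if (_jvm->GetEnv(reinterpret_cast<void**>(&_env), JNI_VERSION_1_4)
        != JNI_OK) {
      // Not attached yet; attach now and detach again in the destructor.
      if (_jvm->AttachCurrentThread(&_env, NULL) == 0) {
        _attached = true;
      } else {
        _env = NULL;  // Attach failed; env() reports NULL.
      }
    }
  }
  ~ScopedJvmThread() {
    if (_attached) {
      _jvm->DetachCurrentThread();
    }
  }
  JNIEnv* env() const { return _env; }

 private:
  JavaVM* _jvm;
  JNIEnv* _env;
  bool _attached;
};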

View File

@@ -0,0 +1,348 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "device_info_android.h"
#include <stdio.h>
#include "ref_count.h"
#include "trace.h"
#include "video_capture_android.h"
namespace webrtc
{
namespace videocapturemodule
{
VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo (
const WebRtc_Word32 id)
{
videocapturemodule::DeviceInfoAndroid *deviceInfo =
new videocapturemodule::DeviceInfoAndroid(id);
if (deviceInfo && deviceInfo->Init() != 0) // Failed to init
{
delete deviceInfo;
deviceInfo = NULL;
}
return deviceInfo;
}
DeviceInfoAndroid::DeviceInfoAndroid(const WebRtc_Word32 id) :
DeviceInfoImpl(id)
{
}
WebRtc_Word32 DeviceInfoAndroid::Init()
{
return 0;
}
DeviceInfoAndroid::~DeviceInfoAndroid()
{
}
WebRtc_UWord32 DeviceInfoAndroid::NumberOfDevices()
{
JNIEnv *env;
jclass javaCmDevInfoClass;
jobject javaCmDevInfoObject;
bool attached = false;
if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
env,
javaCmDevInfoClass,
javaCmDevInfoObject,
attached) != 0)
{
return 0;
}
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
"%s GetMethodId", __FUNCTION__);
// get the method ID for the Android Java NumberOfDevices method.
jmethodID cid = env->GetMethodID(javaCmDevInfoClass,
"NumberOfDevices",
"()I");
jint numberOfDevices = 0;
if (cid != NULL)
{
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
"%s Calling Number of devices", __FUNCTION__);
numberOfDevices = env->CallIntMethod(javaCmDevInfoObject, cid);
}
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
if (numberOfDevices > 0)
return numberOfDevices;
return 0;
}
WebRtc_Word32 DeviceInfoAndroid::GetDeviceName(
WebRtc_UWord32 deviceNumber,
char* deviceNameUTF8,
WebRtc_UWord32 deviceNameLength,
char* deviceUniqueIdUTF8,
WebRtc_UWord32 deviceUniqueIdUTF8Length,
char* /*productUniqueIdUTF8*/,
WebRtc_UWord32 /*productUniqueIdUTF8Length*/) {
JNIEnv *env;
jclass javaCmDevInfoClass;
jobject javaCmDevInfoObject;
WebRtc_Word32 result = 0;
bool attached = false;
if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
env,
javaCmDevInfoClass,
javaCmDevInfoObject,
attached)!= 0)
{
return -1;
}
// get the method ID for the Android Java GetDeviceUniqueName method.
jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetDeviceUniqueName",
"(I)Ljava/lang/String;");
if (cid != NULL)
{
jobject javaDeviceNameObj = env->CallObjectMethod(javaCmDevInfoObject,
cid, deviceNumber);
if (javaDeviceNameObj == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Failed to get device name for device %d.",
__FUNCTION__, (int) deviceNumber);
result = -1;
}
else
{
jboolean isCopy;
const char* javaDeviceNameChar = env->GetStringUTFChars(
(jstring) javaDeviceNameObj
,&isCopy);
const jsize javaDeviceNameCharLength =
env->GetStringUTFLength((jstring) javaDeviceNameObj);
if ((WebRtc_UWord32) javaDeviceNameCharLength <
deviceUniqueIdUTF8Length) {
memcpy(deviceUniqueIdUTF8,
javaDeviceNameChar,
javaDeviceNameCharLength + 1);
}
else
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
_id, "%s: deviceUniqueIdUTF8 to short.",
__FUNCTION__);
result = -1;
}
if ((WebRtc_UWord32) javaDeviceNameCharLength < deviceNameLength)
{
memcpy(deviceNameUTF8,
javaDeviceNameChar,
javaDeviceNameCharLength + 1);
}
env->ReleaseStringUTFChars((jstring) javaDeviceNameObj,
javaDeviceNameChar);
} // javaDeviceNameObj == NULL
}
else
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: Failed to find GetDeviceUniqueName function id",
__FUNCTION__);
result = -1;
}
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
"%s: result %d", __FUNCTION__, (int) result);
return result;
}
WebRtc_Word32 DeviceInfoAndroid::CreateCapabilityMap(
const char* deviceUniqueIdUTF8)
{
MapItem* item = NULL;
while ((item = _captureCapabilities.Last())) {
delete (VideoCaptureCapability*) item->GetItem();
_captureCapabilities.Erase(item);
}
JNIEnv *env;
jclass javaCmDevInfoClass;
jobject javaCmDevInfoObject;
bool attached = false;
if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
env,
javaCmDevInfoClass,
javaCmDevInfoObject,
attached) != 0) {
return -1;
}
// Find the capability class
jclass javaCapClassLocal = env->FindClass(AndroidJavaCaptureCapabilityClass);
if (javaCapClassLocal == NULL) {
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Can't find java class VideoCaptureCapabilityAndroid.",
__FUNCTION__);
return -1;
}
// get the method ID for the Android Java GetCapabilityArray method.
char signature[256];
sprintf(signature,
"(Ljava/lang/String;)[L%s;",
AndroidJavaCaptureCapabilityClass);
jmethodID cid = env->GetMethodID(javaCmDevInfoClass,
"GetCapabilityArray",
signature);
if (cid == NULL) {
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Can't find method GetCapabilityArray.", __FUNCTION__);
return -1;
}
// Create a jstring so we can pass the deviceUniqueName to the java method.
jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
if (capureIdString == NULL) {
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Can't create string for method GetCapabilityArray.",
__FUNCTION__);
return -1;
}
// Call the java class and get an array with capabilities back.
jobject javaCapabilitiesObj = env->CallObjectMethod(javaCmDevInfoObject,
cid, capureIdString);
if (!javaCapabilitiesObj) {
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Failed to call java GetCapabilityArray.",
__FUNCTION__);
return -1;
}
jfieldID widthField = env->GetFieldID(javaCapClassLocal, "width", "I");
jfieldID heigtField = env->GetFieldID(javaCapClassLocal, "height", "I");
jfieldID maxFpsField = env->GetFieldID(javaCapClassLocal, "maxFPS", "I");
if (widthField == NULL || heigtField == NULL || maxFpsField == NULL) {
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Failed to get field Id.", __FUNCTION__);
return -1;
}
const jsize numberOfCapabilities =
env->GetArrayLength((jarray) javaCapabilitiesObj);
for (jsize i = 0; i < numberOfCapabilities; ++i) {
VideoCaptureCapability *cap = new VideoCaptureCapability();
jobject capabilityElement = env->GetObjectArrayElement(
(jobjectArray) javaCapabilitiesObj,
i);
cap->width = env->GetIntField(capabilityElement, widthField);
cap->height = env->GetIntField(capabilityElement, heigtField);
cap->expectedCaptureDelay = _expectedCaptureDelay;
cap->rawType = kVideoNV21;
cap->maxFPS = env->GetIntField(capabilityElement, maxFpsField);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Cap width %d, height %d, fps %d", __FUNCTION__,
cap->width, cap->height, cap->maxFPS);
_captureCapabilities.Insert(i, cap);
}
_lastUsedDeviceNameLength = strlen((char*) deviceUniqueIdUTF8);
_lastUsedDeviceName = (char*) realloc(_lastUsedDeviceName,
_lastUsedDeviceNameLength + 1);
memcpy(_lastUsedDeviceName,
deviceUniqueIdUTF8,
_lastUsedDeviceNameLength + 1);
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
"CreateCapabilityMap %d", _captureCapabilities.Size());
return _captureCapabilities.Size();
}
WebRtc_Word32 DeviceInfoAndroid::GetOrientation(
const char* deviceUniqueIdUTF8,
VideoCaptureRotation& orientation)
{
JNIEnv *env;
jclass javaCmDevInfoClass;
jobject javaCmDevInfoObject;
bool attached = false;
if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
env,
javaCmDevInfoClass,
javaCmDevInfoObject,
attached) != 0) {
return -1;
}
// get the method ID for the Android Java GetOrientation method.
jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetOrientation",
"(Ljava/lang/String;)I");
if (cid == NULL) {
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Can't find method GetOrientation.", __FUNCTION__);
return -1;
}
// Create a jstring so we can pass the deviceUniqueName to the java method.
jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
if (capureIdString == NULL) {
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Can't create string for method GetCapabilityArray.",
__FUNCTION__);
return -1;
}
// Call the java class and get the orientation.
jint jorientation = env->CallIntMethod(javaCmDevInfoObject, cid,
capureIdString);
VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
WebRtc_Word32 retValue = 0;
switch (jorientation) {
case -1: //Error
orientation = kCameraRotate0;
retValue = -1;
break;
case 0:
orientation = kCameraRotate0;
break;
case 90:
orientation = kCameraRotate90;
break;
case 180:
orientation = kCameraRotate180;
break;
case 270:
orientation = kCameraRotate270;
break;
case 360:
orientation = kCameraRotate0;
break;
}
return retValue;
}
} // namespace videocapturemodule
} // namespace webrtc
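
GetDeviceName above only copies the Java string after comparing its UTF-8 byte length against the destination buffer size. A compact sketch of that bounds-checked copy (CopyJStringUtf8 is a hypothetical helper name; it relies on the JNI guarantee that GetStringUTFChars returns a NUL-terminated buffer):

#include <jni.h>
#include <string.h>

// Hypothetical helper mirroring the copy pattern in GetDeviceName:
// returns 0 on success, -1 if dest cannot hold the string.
static int CopyJStringUtf8(JNIEnv* env, jstring src,
                           char* dest, size_t destSize) {
  const char* chars = env->GetStringUTFChars(src, NULL);
  if (chars == NULL) {
    return -1;  // Out of memory; a Java exception is pending.
  }
  const jsize length = env->GetStringUTFLength(src);  // bytes, excluding NUL
  int result = -1;
  if (static_cast<size_t>(length) < destSize) {
    memcpy(dest, chars, length + 1);  // +1 copies the trailing NUL.
    result = 0;
  }
  env->ReleaseStringUTFChars(src, chars);
  return result;
}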

View File

@@ -23,9 +23,11 @@ namespace webrtc
namespace videocapturemodule
{
// Android logging, uncomment to print trace to logcat instead of trace file/callback
//#include <android/log.h>
//#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
// Android logging, uncomment to print trace to
// logcat instead of trace file/callback
// #include <android/log.h>
// #define WEBRTC_TRACE(a,b,c,...)
// __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
class DeviceInfoAndroid: public DeviceInfoImpl
{
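
For reference, enabling the logcat redirect that the reflowed comment above describes would look roughly like this (a sketch assembled from the commented-out lines; the .cc files in this commit also precede the redefinition with an #undef):

#include <android/log.h>
#undef WEBRTC_TRACE
#define WEBRTC_TRACE(a, b, c, ...) \
  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)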

View File

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source

View File

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source

View File

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source

View File

@@ -35,15 +35,20 @@ VideoCaptureModule* VideoCaptureImpl::Create(
return implementation;
}
// Android logging, uncomment to print trace to logcat instead of trace file/callback
//#include <android/log.h>
//#undef WEBRTC_TRACE
//#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
// Android logging, uncomment to print trace to
// logcat instead of trace file/callback
// #include <android/log.h>
// #undef WEBRTC_TRACE
// #define WEBRTC_TRACE(a,b,c,...)
// __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
JavaVM* VideoCaptureAndroid::g_jvm = NULL;
jclass VideoCaptureAndroid::g_javaCmClass = NULL; //VideoCaptureAndroid.java
jclass VideoCaptureAndroid::g_javaCmDevInfoClass = NULL; //VideoCaptureDeviceInfoAndroid.java
jobject VideoCaptureAndroid::g_javaCmDevInfoObject = NULL; //static instance of VideoCaptureDeviceInfoAndroid.java
//VideoCaptureAndroid.java
jclass VideoCaptureAndroid::g_javaCmClass = NULL;
//VideoCaptureDeviceInfoAndroid.java
jclass VideoCaptureAndroid::g_javaCmDevInfoClass = NULL;
//static instance of VideoCaptureDeviceInfoAndroid.java
jobject VideoCaptureAndroid::g_javaCmDevInfoObject = NULL;
jobject VideoCaptureAndroid::g_javaContext = NULL;
/*
@@ -73,7 +78,8 @@ WebRtc_Word32 VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
"%s: could not find java class", __FUNCTION__);
return -1;
}
// create a global reference to the class (to tell JNI that we are referencing it
// create a global reference to the class
// (to tell JNI that we are referencing it
// after this function has returned)
g_javaCmClass = static_cast<jclass>
(env->NewGlobalRef(javaCmClassLocal));
@@ -112,7 +118,8 @@ WebRtc_Word32 VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
return -1;
}
// create a global reference to the class (to tell JNI that we are referencing it
// create a global reference to the class
// (to tell JNI that we are referencing it
// after this function has returned)
g_javaCmDevInfoClass = static_cast<jclass>
(env->NewGlobalRef(javaCmDevInfoClassLocal));
@@ -132,14 +139,16 @@ WebRtc_Word32 VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
// get the method ID for the Android Java CaptureClass static
//CreateVideoCaptureAndroid factory method.
jmethodID cid = env->GetStaticMethodID(g_javaCmDevInfoClass,
"CreateVideoCaptureDeviceInfoAndroid",
"(ILandroid/content/Context;)"
"Lorg/webrtc/videoengine/VideoCaptureDeviceInfoAndroid;");
jmethodID cid = env->GetStaticMethodID(
g_javaCmDevInfoClass,
"CreateVideoCaptureDeviceInfoAndroid",
"(ILandroid/content/Context;)"
"Lorg/webrtc/videoengine/VideoCaptureDeviceInfoAndroid;");
if (cid == NULL)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
"%s: could not get java VideoCaptureDeviceInfoAndroid constructor ID",
"%s: could not get java"
"VideoCaptureDeviceInfoAndroid constructor ID",
__FUNCTION__);
return -1;
}
@@ -148,10 +157,10 @@ WebRtc_Word32 VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
"%s: construct static java device object", __FUNCTION__);
// construct the object by calling the static constructor object
jobject javaCameraDeviceInfoObjLocal = env->CallStaticObjectMethod(
g_javaCmDevInfoClass,
cid, (int) -1,
g_javaContext);
jobject javaCameraDeviceInfoObjLocal =
env->CallStaticObjectMethod(g_javaCmDevInfoClass,
cid, (int) -1,
g_javaContext);
if (!javaCameraDeviceInfoObjLocal)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
@@ -159,13 +168,16 @@ WebRtc_Word32 VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
__FUNCTION__);
return -1;
}
// create a reference to the object (to tell JNI that we are referencing it
// after this function has returned)
// create a reference to the object (to tell JNI that
// we are referencing it after this function has returned)
g_javaCmDevInfoObject = env->NewGlobalRef(javaCameraDeviceInfoObjLocal);
if (!g_javaCmDevInfoObject)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioDevice, -1,
"%s: could not create Java cameradevinceinfo object reference",
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceAudioDevice,
-1,
"%s: could not create Java"
"cameradevinceinfo object reference",
__FUNCTION__);
return -1;
}
@@ -216,10 +228,10 @@ WebRtc_Word32 VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
}
WebRtc_Word32 VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
JNIEnv*& env,
jclass& javaCmDevInfoClass,
jobject& javaCmDevInfoObject,
bool& attached)
JNIEnv*& env,
jclass& javaCmDevInfoClass,
jobject& javaCmDevInfoObject,
bool& attached)
{
// get the JNI env for this thread
if (!g_jvm)
@@ -250,19 +262,19 @@ WebRtc_Word32 VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
}
WebRtc_Word32 VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(bool attached)
{
if (attached && g_jvm->DetachCurrentThread() < 0)
{
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
"%s: Could not detach thread from JVM", __FUNCTION__);
return -1;
}
return 0;
WebRtc_Word32 VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(
bool attached) {
if (attached && g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
"%s: Could not detach thread from JVM", __FUNCTION__);
return -1;
}
return 0;
}
/*
* JNI callback from Java class. Called when the camera has a new frame to deliver
* JNI callback from Java class. Called
* when the camera has a new frame to deliver
* Class: org_webrtc_capturemodule_VideoCaptureAndroid
* Method: ProvideCameraFrame
* Signature: ([BIJ)V
@@ -273,7 +285,8 @@ void JNICALL VideoCaptureAndroid::ProvideCameraFrame(JNIEnv * env,
jint length,
jlong context)
{
VideoCaptureAndroid* captureModule=reinterpret_cast<VideoCaptureAndroid*>(context);
VideoCaptureAndroid* captureModule =
reinterpret_cast<VideoCaptureAndroid*>(context);
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture,
-1, "%s: IncomingFrame %d", __FUNCTION__,length);
jbyte* cameraFrame= env->GetByteArrayElements(javaCameraFrame,NULL);
@@ -313,8 +326,11 @@ WebRtc_Word32 VideoCaptureAndroid::Init(const WebRtc_Word32 id,
if (_capInfo.Init() != 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
"%s: Failed to initialize CaptureDeviceInfo", __FUNCTION__);
WEBRTC_TRACE(webrtc::kTraceError,
webrtc::kTraceVideoCapture,
_id,
"%s: Failed to initialize CaptureDeviceInfo",
__FUNCTION__);
return -1;
}
@@ -350,7 +366,8 @@ WebRtc_Word32 VideoCaptureAndroid::Init(const WebRtc_Word32 id,
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
"get method id");
// get the method ID for the Android Java CaptureDeviceInfoClass AllocateCamera factory method.
// get the method ID for the Android Java
// CaptureDeviceInfoClass AllocateCamera factory method.
char signature[256];
sprintf(signature, "(IJLjava/lang/String;)L%s;", AndroidJavaCaptureClass);
@@ -438,9 +455,10 @@ VideoCaptureAndroid::~VideoCaptureAndroid()
// get the method ID for the Android Java CaptureClass static
// DeleteVideoCaptureAndroid method. Call this to release the camera so
// another application can use it.
jmethodID cid = env->GetStaticMethodID(g_javaCmClass,
"DeleteVideoCaptureAndroid",
"(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V");
jmethodID cid = env->GetStaticMethodID(
g_javaCmClass,
"DeleteVideoCaptureAndroid",
"(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V");
if (cid != NULL)
{
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
@@ -473,7 +491,7 @@ VideoCaptureAndroid::~VideoCaptureAndroid()
}
WebRtc_Word32 VideoCaptureAndroid::StartCapture(
const VideoCaptureCapability& capability)
const VideoCaptureCapability& capability)
{
CriticalSectionScoped cs(&_apiCs);
WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
@@ -516,7 +534,8 @@ WebRtc_Word32 VideoCaptureAndroid::StartCapture(
"%s: _frameInfo w%d h%d", __FUNCTION__, _frameInfo.width,
_frameInfo.height);
// get the method ID for the Android Java CaptureClass static StartCapture method.
// get the method ID for the Android Java
// CaptureClass static StartCapture method.
jmethodID cid = env->GetMethodID(g_javaCmClass, "StartCapture", "(III)I");
if (cid != NULL)
{
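
ProvideCameraFrame above is a JNI entry point with Java signature ([BIJ)V, so the Java capture class has to bind its native declaration to the C++ function. The binding itself is not part of this diff; the following is an illustrative sketch using RegisterNatives, with NativeProvideCameraFrame as a hypothetical stand-in for the real member function:

#include <jni.h>

// Hypothetical native implementation matching the Java-side declaration
// "native void ProvideCameraFrame(byte[] frame, int length, long context)".
static void JNICALL NativeProvideCameraFrame(JNIEnv* env, jobject,
                                             jbyteArray frame, jint length,
                                             jlong context) {
  // A real implementation would hand the frame bytes to the capture
  // module, as VideoCaptureAndroid::ProvideCameraFrame does above.
  (void) env; (void) frame; (void) length; (void) context;
}

// Bind the native method on the Java capture class; returns 0 on success.
static int RegisterCameraFrameCallback(JNIEnv* env) {
  jclass clazz = env->FindClass("org/webrtc/videoengine/VideoCaptureAndroid");
  if (clazz == NULL) {
    return -1;  // FindClass failed; a Java exception is pending.
  }
  static const JNINativeMethod kMethods[] = {
      { "ProvideCameraFrame", "([BIJ)V",
        reinterpret_cast<void*>(&NativeProvideCameraFrame) }
  };
  return env->RegisterNatives(clazz, kMethods,
                              sizeof(kMethods) / sizeof(kMethods[0]));
}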

View File

@@ -36,7 +36,8 @@ public:
const char* deviceUniqueIdUTF8);
virtual WebRtc_Word32 StartCapture(const VideoCaptureCapability& capability);
virtual WebRtc_Word32 StartCapture(
const VideoCaptureCapability& capability);
virtual WebRtc_Word32 StopCapture();
virtual bool CaptureStarted();
virtual WebRtc_Word32 CaptureSettings(VideoCaptureCapability& settings);
@@ -56,7 +57,8 @@ protected:
static JavaVM* g_jvm;
static jclass g_javaCmClass;
static jclass g_javaCmDevInfoClass;
static jobject g_javaCmDevInfoObject; //Static java object implementing the needed device info functions;
// Static java object implementing the needed device info functions.
static jobject g_javaCmDevInfoObject;
static jobject g_javaContext; // Java Application context
};
} // namespace videocapturemodule

View File

@@ -186,13 +186,13 @@
}], # win
['OS=="android"', {
'include_dirs': [
'Android',
'android',
],
'sources': [
'Android/device_info_android.cc',
'Android/device_info_android.h',
'Android/video_capture_android.cc',
'Android/video_capture_android.h',
'android/device_info_android.cc',
'android/device_info_android.h',
'android/video_capture_android.cc',
'android/video_capture_android.h',
],
}], # android
], # conditions

View File

@@ -1,4 +1,4 @@
# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
@@ -21,10 +21,10 @@ LOCAL_SRC_FILES := \
video_render_frames.cc \
video_render_impl.cc \
external/video_render_external_impl.cc \
Android/video_render_android_impl.cc \
Android/video_render_android_native_opengl2.cc \
Android/video_render_android_surface_view.cc \
Android/video_render_opengles20.cc
android/video_render_android_impl.cc \
android/video_render_android_native_opengl2.cc \
android/video_render_android_surface_view.cc \
android/video_render_opengles20.cc
# Flags passed to both C and C++ files.
LOCAL_CFLAGS := \
@@ -33,7 +33,7 @@ LOCAL_CFLAGS := \
LOCAL_C_INCLUDES := \
$(LOCAL_PATH) \
$(LOCAL_PATH)/Android \
$(LOCAL_PATH)/android \
$(LOCAL_PATH)/../interface \
$(LOCAL_PATH)/../../../.. \
$(LOCAL_PATH)/../../../audio_coding/main/interface \

View File

@@ -1,162 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
#include <jni.h>
#include "i_video_render.h"
#include "map_wrapper.h"
namespace webrtc {
//#define ANDROID_LOG
class CriticalSectionWrapper;
class EventWrapper;
class ThreadWrapper;
// The object a module user uses to send new frames to the java renderer
// Base class for android render streams.
class AndroidStream: public VideoRenderCallback
{
public:
/*
* DeliverFrame is called from a thread connected to the Java VM.
* Used for Delivering frame for rendering.
*/
virtual void DeliverFrame(JNIEnv* jniEnv)=0;
virtual ~AndroidStream()
{
};
};
class VideoRenderAndroid: IVideoRender
{
public:
static WebRtc_Word32 SetAndroidEnvVariables(void* javaVM);
VideoRenderAndroid(const WebRtc_Word32 id,
const VideoRenderType videoRenderType, void* window,
const bool fullscreen);
virtual ~VideoRenderAndroid();
virtual WebRtc_Word32 Init()=0;
virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
virtual WebRtc_Word32 ChangeWindow(void* window);
virtual VideoRenderCallback
* AddIncomingRenderStream(const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
const float left, const float top,
const float right, const float bottom);
virtual WebRtc_Word32
DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
virtual WebRtc_Word32
GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
WebRtc_UWord32& zOrder,
float& left, float& top,
float& right, float& bottom) const;
virtual WebRtc_Word32 StartRender();
virtual WebRtc_Word32 StopRender();
virtual void ReDraw();
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual VideoRenderType RenderType();
virtual RawVideoType PerferedVideoType();
virtual bool FullScreen();
virtual WebRtc_Word32
GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
WebRtc_UWord64& availableGraphicsMemory) const;
virtual WebRtc_Word32
GetScreenResolution(WebRtc_UWord32& screenWidth,
WebRtc_UWord32& screenHeight) const;
virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
const float left, const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
const unsigned int zOrder,
const float left, const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
const float left, const float top,
const float rigth, const float bottom);
virtual WebRtc_Word32 SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey, const float left,
const float top, const float right,
const float bottom);
protected:
virtual AndroidStream
* CreateAndroidRenderChannel(WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
const float left, const float top,
const float right, const float bottom,
VideoRenderAndroid& renderer) = 0;
WebRtc_Word32 _id;
CriticalSectionWrapper& _critSect;
VideoRenderType _renderType;
jobject _ptrWindow;
static JavaVM* g_jvm;
private:
static bool JavaRenderThreadFun(void* obj);
bool JavaRenderThreadProcess();
MapWrapper _streamsMap; // Map with streams to render.
bool _javaShutDownFlag; // True if the _javaRenderThread thread shall be detached from the JVM.
EventWrapper& _javaShutdownEvent;
EventWrapper& _javaRenderEvent;
WebRtc_Word64 _lastJavaRenderEvent;
JNIEnv* _javaRenderJniEnv; // JNIEnv for the java render thread.
ThreadWrapper* _javaRenderThread;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_

View File

@@ -1,93 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
#include <jni.h>
#include "video_render_defines.h"
#include "video_render_android_impl.h"
#include "video_render_opengles20.h"
namespace webrtc {
class CriticalSectionWrapper;
class AndroidNativeOpenGl2Channel: public AndroidStream
{
public:
AndroidNativeOpenGl2Channel(WebRtc_UWord32 streamId,JavaVM* jvm,VideoRenderAndroid& renderer,jobject javaRenderObj);
~AndroidNativeOpenGl2Channel();
WebRtc_Word32 Init(WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom);
//Implement VideoRenderCallback
virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, VideoFrame& videoFrame);
//Implements AndroidStream
virtual void DeliverFrame(JNIEnv* jniEnv);
private:
static jint CreateOpenGLNativeStatic(JNIEnv * env,jobject, jlong context, jint width, jint height);
jint CreateOpenGLNative(int width, int height);
static void DrawNativeStatic(JNIEnv * env,jobject, jlong context);
void DrawNative();
WebRtc_UWord32 _id;
CriticalSectionWrapper& _renderCritSect;
VideoFrame _bufferToRender;
VideoRenderAndroid& _renderer;
JavaVM* _jvm;
jobject _javaRenderObj;
jmethodID _redrawCid;
jmethodID _registerNativeCID;
jmethodID _deRegisterNativeCID;
VideoRenderOpenGles20 _openGLRenderer;
};
class AndroidNativeOpenGl2Renderer: private VideoRenderAndroid
{
public:
AndroidNativeOpenGl2Renderer(const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen);
~AndroidNativeOpenGl2Renderer();
static bool UseOpenGL2(void* window);
WebRtc_Word32 Init();
virtual AndroidStream* CreateAndroidRenderChannel(WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer);
private:
jobject _javaRenderObj;
jclass _javaRenderClass;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_

View File

@@ -1,470 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video_render_android_surface_view.h"
#include "critical_section_wrapper.h"
#include "common_video/libyuv/include/libyuv.h"
#include "tick_util.h"
#ifdef ANDROID_NDK_8_OR_ABOVE
#include <android/bitmap.h>
#endif
#ifdef ANDROID_LOG
#include <stdio.h>
#include <android/log.h>
#undef WEBRTC_TRACE
#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
#else
#include "trace.h"
#endif
namespace webrtc {
AndroidSurfaceViewRenderer::AndroidSurfaceViewRenderer(const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen)
:
VideoRenderAndroid(id,videoRenderType,window,fullscreen),
_javaRenderObj(NULL),
_javaRenderClass(NULL)
{
}
AndroidSurfaceViewRenderer::~AndroidSurfaceViewRenderer()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "AndroidSurfaceViewRenderer dtor");
if(g_jvm)
{
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK)
{
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env);
env=NULL;
}
else
{
isAttached = true;
}
}
env->DeleteGlobalRef(_javaRenderObj);
env->DeleteGlobalRef(_javaRenderClass);
if (isAttached)
{
if (g_jvm->DetachCurrentThread() < 0)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__);
}
}
}
}
WebRtc_Word32
AndroidSurfaceViewRenderer::Init()
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
if (!g_jvm)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "(%s): Not a valid Java VM pointer.", __FUNCTION__);
return -1;
}
if(!_ptrWindow)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "(%s): No window have been provided.", __FUNCTION__);
return -1;
}
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK)
{
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env);
return -1;
}
isAttached = true;
}
// get the ViESurfaceRender class
jclass javaRenderClassLocal = env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
if (!javaRenderClassLocal)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not find ViESurfaceRenderer", __FUNCTION__);
return -1;
}
// create a global reference to the class (to tell JNI that we are referencing it after this function has returned)
_javaRenderClass = reinterpret_cast<jclass>(env->NewGlobalRef(javaRenderClassLocal));
if (!_javaRenderClass)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not create Java ViESurfaceRenderer class reference", __FUNCTION__);
return -1;
}
// Delete local class ref, we only use the global ref
env->DeleteLocalRef(javaRenderClassLocal);
// get the method ID for the constructor
jmethodID cid = env->GetMethodID(_javaRenderClass, "<init>", "(Landroid/view/SurfaceView;)V");
if (cid == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get constructor ID", __FUNCTION__);
return -1; /* exception thrown */
}
// construct the object
jobject javaRenderObjLocal = env->NewObject(_javaRenderClass, cid, _ptrWindow);
if (!javaRenderObjLocal)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not create Java Render", __FUNCTION__);
return -1;
}
// create a reference to the object (to tell JNI that we are referencing it
// after this function has returned)
_javaRenderObj = env->NewGlobalRef(javaRenderObjLocal);
if (!_javaRenderObj)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not create Java SurfaceRender object reference", __FUNCTION__);
return -1;
}
// Detach this thread if it was attached
if (isAttached)
{
if (g_jvm->DetachCurrentThread() < 0)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__);
}
}
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done", __FUNCTION__);
return 0;
}
AndroidStream*
AndroidSurfaceViewRenderer::CreateAndroidRenderChannel(WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d", __FUNCTION__,streamId);
AndroidSurfaceViewChannel* stream=new AndroidSurfaceViewChannel(streamId,g_jvm,renderer,_javaRenderObj);
if(stream && stream->Init(zOrder,left,top,right,bottom)==0)
return stream;
else
delete stream;
return NULL;
}
AndroidSurfaceViewChannel::AndroidSurfaceViewChannel(WebRtc_UWord32 streamId,JavaVM* jvm,VideoRenderAndroid& renderer,jobject javaRenderObj)
:
_id(streamId),
_renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
_renderer(renderer),
_jvm(jvm),
_javaRenderObj(javaRenderObj),
_bitmapWidth(0),
_bitmapHeight(0)
{
}
AndroidSurfaceViewChannel::~AndroidSurfaceViewChannel()
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "AndroidSurfaceViewChannel dtor");
delete &_renderCritSect;
if(_jvm)
{
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK)
{
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = _jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env);
env=NULL;
}
else
{
isAttached = true;
}
}
#ifdef ANDROID_NDK_8_OR_ABOVE
env->DeleteGlobalRef(_javaBitmapObj);
#else
env->DeleteGlobalRef(_javaByteBufferObj);
#endif
if (isAttached)
{
if (_jvm->DetachCurrentThread() < 0)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__);
}
}
}
}
WebRtc_Word32
AndroidSurfaceViewChannel::Init(WebRtc_Word32 /*zOrder*/,
const float left,
const float top,
const float right,
const float bottom)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: AndroidSurfaceViewChannel", __FUNCTION__);
if (!_jvm)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer,_id, "%s: Not a valid Java VM pointer", __FUNCTION__);
return -1;
}
if((top>1 || top<0) || (right>1 || right<0) || (bottom>1 || bottom<0) || (left>1 || left<0))
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Wrong coordinates",
__FUNCTION__);
return -1;
}
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK)
{
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = _jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env);
return -1;
}
isAttached = true;
}
jclass javaRenderClass = env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
if (!javaRenderClass)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not find ViESurfaceRenderer", __FUNCTION__);
return -1;
}
#ifdef ANDROID_NDK_8_OR_ABOVE
// get the method ID for the CreateBitmap
_createBitmapCid = env->GetMethodID(_javaRenderClass, "CreateBitmap", "(II)Landroid/graphics/Bitmap;");
if (_createBitmapCid == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get CreateBitmap ID", __FUNCTION__);
return -1; /* exception thrown */
}
// get the method ID for the DrawBitmap function
_drawBitmapCid = env->GetMethodID(_javaRenderClass, "DrawBitmap", "()V");
if (_drawBitmapCid == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get DrawBitmap ID", __FUNCTION__);
return -1; /* exception thrown */
}
#else
// get the method ID for the CreateIntArray
_createByteBufferCid = env->GetMethodID(javaRenderClass, "CreateByteBuffer", "(II)Ljava/nio/ByteBuffer;");
if (_createByteBufferCid == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get CreateByteBuffer ID", __FUNCTION__);
return -1; /* exception thrown */
}
// get the method ID for the DrawByteBuffer function
_drawByteBufferCid = env->GetMethodID(javaRenderClass, "DrawByteBuffer", "()V");
if (_drawByteBufferCid == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get DrawByteBuffer ID", __FUNCTION__);
return -1; /* exception thrown */
}
#endif
// get the method ID for the SetCoordinates function
_setCoordinatesCid = env->GetMethodID(javaRenderClass, "SetCoordinates", "(FFFF)V");
if (_setCoordinatesCid == NULL)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get SetCoordinates ID", __FUNCTION__);
return -1; /* exception thrown */
}
env->CallVoidMethod(_javaRenderObj,_setCoordinatesCid,left,top,right,bottom);
// Detach this thread if it was attached
if (isAttached)
{
if (_jvm->DetachCurrentThread() < 0)
{
WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__);
}
}
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: AndroidSurfaceViewChannel done", __FUNCTION__);
return 0;
}
WebRtc_Word32 AndroidSurfaceViewChannel::RenderFrame(const WebRtc_UWord32 /*streamId*/, VideoFrame& videoFrame)
{
// WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
_renderCritSect.Enter();
_bufferToRender.SwapFrame(videoFrame);
_renderCritSect.Leave();
_renderer.ReDraw();
return 0;
}
/* Implements AndroidStream.
 * Calls the Java object and renders the buffer in _bufferToRender.
 */
void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) {
_renderCritSect.Enter();
#ifdef ANDROID_NDK_8_OR_ABOVE
if (_bitmapWidth != _bufferToRender.Width() ||
_bitmapHeight != _bufferToRender.Height()) {
// Create the bitmap to write to
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Creating bitmap %u "
"%u", __FUNCTION__, _bufferToRender.Width(),
_bufferToRender.Height());
if (_javaBitmapObj) {
jniEnv->DeleteGlobalRef(_javaBitmapObj);
_javaBitmapObj = NULL;
}
jobject javaBitmap = jniEnv->CallObjectMethod(_javaRenderObj,
_createBitmapCid,
videoFrame.Width(),
videoFrame.Height());
_javaBitmapObj = jniEnv->NewGlobalRef(javaBitmap);
if (!_javaBitmapObj) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not "
"create Java Bitmap object reference", __FUNCTION__);
_renderCritSect.Leave();
return;
} else {
_bitmapWidth=_bufferToRender.Width();
_bitmapHeight=_bufferToRender.Height();
}
}
void* pixels;
if (_javaBitmapObj &&
AndroidBitmap_lockPixels(jniEnv, _javaBitmapObj, &pixels) >= 0) {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Locked bitmap",
__FUNCTION__);
// Convert I420 straight into the Java bitmap.
int ret = ConvertI420ToRGB565((unsigned char* )_bufferToRender.Buffer(),
(unsigned char* ) pixels,
_bitmapWidth, _bitmapHeight);
if (ret < 0) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion "
"failed.", __FUNCTION__);
}
AndroidBitmap_unlockPixels(jniEnv, _javaBitmapObj);
// Draw the Surface.
jniEnv->CallVoidMethod(_javaRenderObj,_drawCid);
} else {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not lock "
"bitmap", __FUNCTION__);
}
_renderCritSect.Leave();
#else
if (_bitmapWidth != _bufferToRender.Width() ||
_bitmapHeight != _bufferToRender.Height()) {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: New render size %d "
"%d",__FUNCTION__,
_bufferToRender.Width(), _bufferToRender.Height());
if (_javaByteBufferObj) {
jniEnv->DeleteGlobalRef(_javaByteBufferObj);
_javaByteBufferObj = NULL;
_directBuffer = NULL;
}
jobject javaByteBufferObj =
jniEnv->CallObjectMethod(_javaRenderObj, _createByteBufferCid,
_bufferToRender.Width(),
_bufferToRender.Height());
_javaByteBufferObj = jniEnv->NewGlobalRef(javaByteBufferObj);
if (!_javaByteBufferObj) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not "
"create Java ByteBuffer object reference", __FUNCTION__);
_renderCritSect.Leave();
return;
} else {
_directBuffer = static_cast<unsigned char*>
(jniEnv->GetDirectBufferAddress(_javaByteBufferObj));
_bitmapWidth = _bufferToRender.Width();
_bitmapHeight = _bufferToRender.Height();
}
}
if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight) {
// Android requires a vertically flipped image compared to std convert.
// This is done by giving a negative height input.
const int conversionResult =
ConvertI420ToRGB565((unsigned char* )_bufferToRender.Buffer(),
_directBuffer, _bitmapWidth, -_bitmapHeight);
if (conversionResult < 0) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion"
" failed.", __FUNCTION__);
_renderCritSect.Leave();
return;
}
}
_renderCritSect.Leave();
// Draw the Surface
jniEnv->CallVoidMethod(_javaRenderObj, _drawByteBufferCid);
#endif
}
} // namespace webrtc
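
The non-NDK-8 path above draws through a direct ByteBuffer: the Java side allocates it via CreateByteBuffer(II)Ljava/nio/ByteBuffer;, and C++ writes RGB565 pixels into the address returned by GetDirectBufferAddress. A condensed sketch of that handshake (AcquireDirectBuffer is a hypothetical helper; the method ID corresponds to _createByteBufferCid in the code above):

#include <jni.h>

// Sketch: ask the Java renderer for a direct ByteBuffer, pin it with a
// global reference, and return the native address C++ can write into.
static unsigned char* AcquireDirectBuffer(JNIEnv* env, jobject javaRenderObj,
                                          jmethodID createByteBufferCid,
                                          int width, int height,
                                          jobject* globalRefOut) {
  jobject localBuffer = env->CallObjectMethod(javaRenderObj,
                                              createByteBufferCid,
                                              width, height);
  if (localBuffer == NULL) {
    return NULL;  // The Java side failed to allocate the buffer.
  }
  // Keep the buffer alive across JNI calls, as DeliverFrame does.
  *globalRefOut = env->NewGlobalRef(localBuffer);
  if (*globalRefOut == NULL) {
    return NULL;
  }
  return static_cast<unsigned char*>(
      env->GetDirectBufferAddress(*globalRefOut));
}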

View File

@@ -1,96 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
#include <jni.h>
#include "video_render_defines.h"
#include "video_render_android_impl.h"
namespace webrtc {
class CriticalSectionWrapper;
class AndroidSurfaceViewChannel: public AndroidStream
{
public:
AndroidSurfaceViewChannel(WebRtc_UWord32 streamId,
JavaVM* jvm,
VideoRenderAndroid& renderer,
jobject javaRenderObj);
~AndroidSurfaceViewChannel();
WebRtc_Word32 Init(WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom);
//Implement VideoRenderCallback
virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
VideoFrame& videoFrame);
//Implements AndroidStream
virtual void DeliverFrame(JNIEnv* jniEnv);
private:
WebRtc_UWord32 _id;
CriticalSectionWrapper& _renderCritSect;
VideoFrame _bufferToRender;
VideoRenderAndroid& _renderer;
JavaVM* _jvm;
jobject _javaRenderObj;
#ifdef ANDROID_NDK_8_OR_ABOVE
jclass _javaBitmapClass;
jmethodID _createBitmapCid;
jobject _javaBitmapObj;
jmethodID _drawBitmapCid;
#else
jobject _javaByteBufferObj;
unsigned char* _directBuffer;
jmethodID _createByteBufferCid;
jmethodID _drawByteBufferCid;
#endif
jmethodID _setCoordinatesCid;
unsigned int _bitmapWidth;
unsigned int _bitmapHeight;
};
class AndroidSurfaceViewRenderer: private VideoRenderAndroid
{
public:
AndroidSurfaceViewRenderer(const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen);
~AndroidSurfaceViewRenderer();
WebRtc_Word32 Init();
virtual AndroidStream* CreateAndroidRenderChannel(WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer);
private:
jobject _javaRenderObj;
jclass _javaRenderClass;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_

View File

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source

View File

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source

View File

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source

View File

@@ -39,10 +39,10 @@ WebRtc_Word32 VideoRenderAndroid::SetAndroidEnvVariables(void* javaVM)
}
VideoRenderAndroid::VideoRenderAndroid(
const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool /*fullscreen*/):
const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool /*fullscreen*/):
_id(id),
_critSect(*CriticalSectionWrapper::CreateCriticalSection()),
_renderType(videoRenderType),
@@ -127,7 +127,7 @@ VideoRenderAndroid::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
}
WebRtc_Word32 VideoRenderAndroid::DeleteIncomingRenderStream(
const WebRtc_UWord32 streamId)
const WebRtc_UWord32 streamId)
{
CriticalSectionScoped cs(&_critSect);
@@ -147,53 +147,47 @@ WebRtc_Word32 VideoRenderAndroid::DeleteIncomingRenderStream(
}
WebRtc_Word32 VideoRenderAndroid::GetIncomingRenderStreamProperties(
const WebRtc_UWord32 streamId,
WebRtc_UWord32& zOrder,
float& left,
float& top,
float& right,
float& bottom) const
{
return -1;
const WebRtc_UWord32 streamId,
WebRtc_UWord32& zOrder,
float& left,
float& top,
float& right,
float& bottom) const {
return -1;
}
WebRtc_Word32 VideoRenderAndroid::StartRender()
{
CriticalSectionScoped cs(&_critSect);
WebRtc_Word32 VideoRenderAndroid::StartRender() {
CriticalSectionScoped cs(&_critSect);
if (_javaRenderThread)
{
// StartRender is called when this stream should start rendering.
// However StopRender is not called when the streams stop rendering. Thus the thread is only deleted when the renderer is removed.
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s, Render thread already exist", __FUNCTION__);
return 0;
}
_javaRenderThread = ThreadWrapper::CreateThread(JavaRenderThreadFun, this,
kRealtimePriority,
"AndroidRenderThread");
if (!_javaRenderThread)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No thread", __FUNCTION__);
return -1;
}
unsigned int tId = 0;
if (_javaRenderThread->Start(tId))
{
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"%s: thread started: %u", __FUNCTION__, tId);
}
else
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not start send thread", __FUNCTION__);
return -1;
}
if (_javaRenderThread) {
// StartRender is called when this stream should start rendering.
// However StopRender is not called when the streams stop rendering.
// Thus the thread is only deleted when the renderer is removed.
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
"%s, Render thread already exist", __FUNCTION__);
return 0;
}
_javaRenderThread = ThreadWrapper::CreateThread(JavaRenderThreadFun, this,
kRealtimePriority,
"AndroidRenderThread");
if (!_javaRenderThread) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: No thread", __FUNCTION__);
return -1;
}
unsigned int tId = 0;
if (_javaRenderThread->Start(tId)) {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"%s: thread started: %u", __FUNCTION__, tId);
}
else {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Could not start send thread", __FUNCTION__);
return -1;
}
return 0;
}
WebRtc_Word32 VideoRenderAndroid::StopRender()
@@ -228,19 +222,17 @@ WebRtc_Word32 VideoRenderAndroid::StopRender()
return 0;
}
void VideoRenderAndroid::ReDraw()
{
CriticalSectionScoped cs(&_critSect);
if (_lastJavaRenderEvent < TickTime::MillisecondTimestamp() - 20) // Allow redraw if it was more than 20ms since last.
{
_lastJavaRenderEvent = TickTime::MillisecondTimestamp();
_javaRenderEvent.Set();
}
void VideoRenderAndroid::ReDraw() {
CriticalSectionScoped cs(&_critSect);
// Allow redraw if it was more than 20ms since last.
if (_lastJavaRenderEvent < TickTime::MillisecondTimestamp() - 20) {
_lastJavaRenderEvent = TickTime::MillisecondTimestamp();
_javaRenderEvent.Set();
}
}
bool VideoRenderAndroid::JavaRenderThreadFun(void* obj)
{
return static_cast<VideoRenderAndroid*> (obj)->JavaRenderThreadProcess();
bool VideoRenderAndroid::JavaRenderThreadFun(void* obj) {
return static_cast<VideoRenderAndroid*> (obj)->JavaRenderThreadProcess();
}
bool VideoRenderAndroid::JavaRenderThreadProcess()
@@ -268,7 +260,7 @@ bool VideoRenderAndroid::JavaRenderThreadProcess()
= _streamsMap.Next(item))
{
static_cast<AndroidStream*> (item->GetItem())->DeliverFrame(
_javaRenderJniEnv);
_javaRenderJniEnv);
}
if (_javaShutDownFlag)
@@ -307,74 +299,68 @@ bool VideoRenderAndroid::FullScreen()
}
WebRtc_Word32 VideoRenderAndroid::GetGraphicsMemory(
WebRtc_UWord64& /*totalGraphicsMemory*/,
WebRtc_UWord64& /*availableGraphicsMemory*/) const
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
WebRtc_UWord64& /*totalGraphicsMemory*/,
WebRtc_UWord64& /*availableGraphicsMemory*/) const {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderAndroid::GetScreenResolution(
WebRtc_UWord32& /*screenWidth*/,
WebRtc_UWord32& /*screenHeight*/) const
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
WebRtc_UWord32& /*screenWidth*/,
WebRtc_UWord32& /*screenHeight*/) const {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
WebRtc_UWord32 VideoRenderAndroid::RenderFrameRate(const WebRtc_UWord32 /*streamId*/)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
WebRtc_UWord32 VideoRenderAndroid::RenderFrameRate(
const WebRtc_UWord32 /*streamId*/) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderAndroid::SetStreamCropping(
const WebRtc_UWord32 /*streamId*/,
const float /*left*/,
const float /*top*/,
const float /*right*/,
const float /*bottom*/)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
const WebRtc_UWord32 /*streamId*/,
const float /*left*/,
const float /*top*/,
const float /*right*/,
const float /*bottom*/) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderAndroid::SetTransparentBackground(const bool enable)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
WebRtc_Word32 VideoRenderAndroid::SetTransparentBackground(const bool enable) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderAndroid::ConfigureRenderer(
const WebRtc_UWord32 streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
const WebRtc_UWord32 streamId,
const unsigned int zOrder,
const float left,
const float top,
const float right,
const float bottom) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderAndroid::SetText(
const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
const float left, const float top,
const float right, const float bottom)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
const float left, const float top,
const float right, const float bottom) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
WebRtc_Word32 VideoRenderAndroid::SetBitmap(const void* bitMap,
@@ -382,11 +368,10 @@ WebRtc_Word32 VideoRenderAndroid::SetBitmap(const void* bitMap,
const void* colorKey,
const float left, const float top,
const float right,
const float bottom)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
const float bottom) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s - not supported on Android", __FUNCTION__);
return -1;
}
} //namespace webrtc
} //namespace webrtc
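A side note on the ReDraw() hunk above: redraw requests are coalesced, since _javaRenderEvent is only set when more than 20 ms have passed since the previous request, capping wakeups of the Java render thread at roughly 50 per second. A minimal sketch of that throttle in isolation, using std::chrono in place of WebRTC's TickTime (the class name RedrawThrottle is illustrative, not part of the module):

#include <chrono>

// Sketch of the 20 ms coalescing used by VideoRenderAndroid::ReDraw();
// the real code guards this with a critical section and sets an event.
class RedrawThrottle {
 public:
  // Returns true when a redraw should actually be signalled.
  bool AllowRedraw() {
    const auto now = std::chrono::steady_clock::now();
    if (now - last_redraw_ > std::chrono::milliseconds(20)) {
      last_redraw_ = now;
      return true;   // Caller would set the render event here.
    }
    return false;    // Recently redrawn; drop this request.
  }

 private:
  std::chrono::steady_clock::time_point last_redraw_;
};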

View File

@@ -0,0 +1,168 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
#include <jni.h>
#include "i_video_render.h"
#include "map_wrapper.h"
namespace webrtc {
//#define ANDROID_LOG
class CriticalSectionWrapper;
class EventWrapper;
class ThreadWrapper;
// The object a module user uses to send new frames to the Java renderer.
// Base class for Android render streams.
class AndroidStream: public VideoRenderCallback
{
public:
/*
* DeliverFrame is called from a thread connected to the Java VM.
* Used for delivering frames for rendering.
*/
virtual void DeliverFrame(JNIEnv* jniEnv)=0;
virtual ~AndroidStream()
{
};
};
class VideoRenderAndroid: IVideoRender
{
public:
static WebRtc_Word32 SetAndroidEnvVariables(void* javaVM);
VideoRenderAndroid(const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen);
virtual ~VideoRenderAndroid();
virtual WebRtc_Word32 Init()=0;
virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
virtual WebRtc_Word32 ChangeWindow(void* window);
virtual VideoRenderCallback* AddIncomingRenderStream(
const WebRtc_UWord32 streamId,
const WebRtc_UWord32 zOrder,
const float left, const float top,
const float right, const float bottom);
virtual WebRtc_Word32 DeleteIncomingRenderStream(
const WebRtc_UWord32 streamId);
virtual WebRtc_Word32 GetIncomingRenderStreamProperties(
const WebRtc_UWord32 streamId,
WebRtc_UWord32& zOrder,
float& left, float& top,
float& right, float& bottom) const;
virtual WebRtc_Word32 StartRender();
virtual WebRtc_Word32 StopRender();
virtual void ReDraw();
/**************************************************************************
*
* Properties
*
***************************************************************************/
virtual VideoRenderType RenderType();
virtual RawVideoType PerferedVideoType();
virtual bool FullScreen();
virtual WebRtc_Word32 GetGraphicsMemory(
WebRtc_UWord64& totalGraphicsMemory,
WebRtc_UWord64& availableGraphicsMemory) const;
virtual WebRtc_Word32 GetScreenResolution(
WebRtc_UWord32& screenWidth,
WebRtc_UWord32& screenHeight) const;
virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
const float left, const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
const unsigned int zOrder,
const float left, const float top,
const float right,
const float bottom);
virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
const WebRtc_UWord8* text,
const WebRtc_Word32 textLength,
const WebRtc_UWord32 textColorRef,
const WebRtc_UWord32 backgroundColorRef,
const float left, const float top,
const float right, const float bottom);
virtual WebRtc_Word32 SetBitmap(const void* bitMap,
const WebRtc_UWord8 pictureId,
const void* colorKey, const float left,
const float top, const float right,
const float bottom);
protected:
virtual AndroidStream* CreateAndroidRenderChannel(
WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer) = 0;
WebRtc_Word32 _id;
CriticalSectionWrapper& _critSect;
VideoRenderType _renderType;
jobject _ptrWindow;
static JavaVM* g_jvm;
private:
static bool JavaRenderThreadFun(void* obj);
bool JavaRenderThreadProcess();
// Map with streams to render.
MapWrapper _streamsMap;
// True if the _javaRenderThread thread shall be detached from the JVM.
bool _javaShutDownFlag;
EventWrapper& _javaShutdownEvent;
EventWrapper& _javaRenderEvent;
WebRtc_Word64 _lastJavaRenderEvent;
JNIEnv* _javaRenderJniEnv; // JNIEnv for the java render thread.
ThreadWrapper* _javaRenderThread;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
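To make the AndroidStream contract above concrete: RenderFrame() arrives on the delivering (capture/decode) thread and should only buffer the frame, while DeliverFrame() runs later on the module's Java render thread, whose JNIEnv is already attached to the VM. A minimal conforming stream might look like the following sketch, which assumes this header and WebRTC's VideoFrame; the class name and body are illustrative, not part of the module:

#include <jni.h>
#include "video_render_android_impl.h"

namespace webrtc {

class SketchStream : public AndroidStream {
 public:
  // Delivering thread: just stash the frame (real code takes a lock).
  virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 /*streamId*/,
                                    VideoFrame& videoFrame) {
    _pending.SwapFrame(videoFrame);
    return 0;
  }

  // Java render thread: jniEnv is already attached, so JNI calls such as
  // CallVoidMethod are legal here without AttachCurrentThread.
  virtual void DeliverFrame(JNIEnv* /*jniEnv*/) {
    // Hand _pending to a Java renderer object here.
  }

 private:
  VideoFrame _pending;
};

}  // namespace webrtc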

View File

@@ -25,10 +25,10 @@
namespace webrtc {
AndroidNativeOpenGl2Renderer::AndroidNativeOpenGl2Renderer(
const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen) :
const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen) :
VideoRenderAndroid(id, videoRenderType, window, fullscreen),
_javaRenderObj(NULL),
_javaRenderClass(NULL)
@@ -54,12 +54,12 @@ bool AndroidNativeOpenGl2Renderer::UseOpenGL2(void* window)
// Get the JNI env for this thread
if ((res < 0) || !env)
{
WEBRTC_TRACE(
kTraceError,
kTraceVideoRenderer,
-1,
"RendererAndroid(): Could not attach thread to JVM (%d, %p)",
res, env);
WEBRTC_TRACE(
kTraceError,
kTraceVideoRenderer,
-1,
"RendererAndroid(): Could not attach thread to JVM (%d, %p)",
res, env);
return false;
}
isAttached = true;
@@ -191,9 +191,10 @@ WebRtc_Word32 AndroidNativeOpenGl2Renderer::Init()
return -1;
}
// create a global reference to the class (to tell JNI that we are referencing it after this function has returned)
_javaRenderClass
= reinterpret_cast<jclass> (env->NewGlobalRef(javaRenderClassLocal));
// create a global reference to the class (to tell JNI that
// we are referencing it after this function has returned)
_javaRenderClass =
reinterpret_cast<jclass> (env->NewGlobalRef(javaRenderClassLocal));
if (!_javaRenderClass)
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
@@ -211,11 +212,11 @@ WebRtc_Word32 AndroidNativeOpenGl2Renderer::Init()
if (!_javaRenderObj)
{
WEBRTC_TRACE(
kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not create Java SurfaceRender object reference",
__FUNCTION__);
kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not create Java SurfaceRender object reference",
__FUNCTION__);
return -1;
}
@@ -236,13 +237,13 @@ WebRtc_Word32 AndroidNativeOpenGl2Renderer::Init()
}
AndroidStream*
AndroidNativeOpenGl2Renderer::CreateAndroidRenderChannel(
WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer)
WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer)
{
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d",
__FUNCTION__, streamId);
@@ -258,9 +259,10 @@ AndroidNativeOpenGl2Renderer::CreateAndroidRenderChannel(
return NULL;
}
AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel(WebRtc_UWord32 streamId,
JavaVM* jvm,
VideoRenderAndroid& renderer,jobject javaRenderObj):
AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel(
WebRtc_UWord32 streamId,
JavaVM* jvm,
VideoRenderAndroid& renderer,jobject javaRenderObj):
_id(streamId),
_renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
_renderer(renderer), _jvm(jvm), _javaRenderObj(javaRenderObj),
@@ -428,8 +430,9 @@ WebRtc_Word32 AndroidNativeOpenGl2Channel::Init(WebRtc_Word32 zOrder,
return 0;
}
WebRtc_Word32 AndroidNativeOpenGl2Channel::RenderFrame(const WebRtc_UWord32 /*streamId*/,
VideoFrame& videoFrame)
WebRtc_Word32 AndroidNativeOpenGl2Channel::RenderFrame(
const WebRtc_UWord32 /*streamId*/,
VideoFrame& videoFrame)
{
// WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
_renderCritSect.Enter();
@@ -449,46 +452,50 @@ void AndroidNativeOpenGl2Channel::DeliverFrame(JNIEnv* jniEnv)
//Draw the Surface
jniEnv->CallVoidMethod(_javaRenderObj, _redrawCid);
//WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s: time to deliver %lld" ,__FUNCTION__,(TickTime::Now()-timeNow).Milliseconds());
// WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id,
// "%s: time to deliver %lld" ,__FUNCTION__,
// (TickTime::Now()-timeNow).Milliseconds());
}
/*
* JNI callback from Java class. Called when the renderer wants to render a frame. Called from the GLRenderThread
* JNI callback from Java class. Called when the renderer
* wants to render a frame. Called from the GLRenderThread
* Method: DrawNative
* Signature: (J)V
*/
void JNICALL AndroidNativeOpenGl2Channel::DrawNativeStatic
(JNIEnv * env, jobject, jlong context)
{
AndroidNativeOpenGl2Channel* renderChannel=reinterpret_cast<AndroidNativeOpenGl2Channel*>(context);
renderChannel->DrawNative();
void JNICALL AndroidNativeOpenGl2Channel::DrawNativeStatic(
JNIEnv * env, jobject, jlong context) {
AndroidNativeOpenGl2Channel* renderChannel =
reinterpret_cast<AndroidNativeOpenGl2Channel*>(context);
renderChannel->DrawNative();
}
void AndroidNativeOpenGl2Channel::DrawNative()
{
_openGLRenderer.Render(_bufferToRender);
_openGLRenderer.Render(_bufferToRender);
}
/*
* JNI callback from Java class. Called when the GLSurfaceview has created a surface. Called from the GLRenderThread
* JNI callback from Java class. Called when the GLSurfaceview
* has created a surface. Called from the GLRenderThread
* Method: CreateOpenGLNativeStatic
* Signature: (JII)I
*/
jint JNICALL AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic(JNIEnv * env,
jobject,
jlong context,
jint width,
jint height)
{
AndroidNativeOpenGl2Channel* renderChannel =
reinterpret_cast<AndroidNativeOpenGl2Channel*> (context);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1, "%s:", __FUNCTION__);
return renderChannel->CreateOpenGLNative(width, height);
jint JNICALL AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic(
JNIEnv * env,
jobject,
jlong context,
jint width,
jint height) {
AndroidNativeOpenGl2Channel* renderChannel =
reinterpret_cast<AndroidNativeOpenGl2Channel*> (context);
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1, "%s:", __FUNCTION__);
return renderChannel->CreateOpenGLNative(width, height);
}
jint AndroidNativeOpenGl2Channel::CreateOpenGLNative(int width, int height)
{
return _openGLRenderer.Setup(width, height);
jint AndroidNativeOpenGl2Channel::CreateOpenGLNative(
int width, int height) {
return _openGLRenderer.Setup(width, height);
}
} //namespace webrtc
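Both renderer files repeat the same JNI bookkeeping: GetEnv() to see whether the calling thread is already known to the VM, AttachCurrentThread() if it is not, and DetachCurrentThread() only when this code performed the attach. A hedged sketch of that pattern as a small RAII guard; ScopedJvmAttach is not a WebRTC class, just an illustration of the logic used above:

#include <jni.h>

class ScopedJvmAttach {
 public:
  explicit ScopedJvmAttach(JavaVM* jvm)
      : jvm_(jvm), env_(NULL), attached_(false) {
    if (jvm_->GetEnv(reinterpret_cast<void**>(&env_),
                     JNI_VERSION_1_4) != JNI_OK) {
      // Not attached yet: attach and remember to detach on destruction.
      if (jvm_->AttachCurrentThread(&env_, NULL) == 0) {
        attached_ = true;
      } else {
        env_ = NULL;  // Attach failed; callers must check env().
      }
    }
  }
  ~ScopedJvmAttach() {
    if (attached_) {
      jvm_->DetachCurrentThread();  // Only detach what we attached.
    }
  }
  JNIEnv* env() const { return env_; }

 private:
  JavaVM* jvm_;
  JNIEnv* env_;
  bool attached_;
};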

View File

@@ -0,0 +1,99 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
#include <jni.h>
#include "video_render_defines.h"
#include "video_render_android_impl.h"
#include "video_render_opengles20.h"
namespace webrtc {
class CriticalSectionWrapper;
class AndroidNativeOpenGl2Channel: public AndroidStream {
public:
AndroidNativeOpenGl2Channel(
WebRtc_UWord32 streamId,
JavaVM* jvm,
VideoRenderAndroid& renderer,jobject javaRenderObj);
~AndroidNativeOpenGl2Channel();
WebRtc_Word32 Init(WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom);
//Implement VideoRenderCallback
virtual WebRtc_Word32 RenderFrame(
const WebRtc_UWord32 streamId,
VideoFrame& videoFrame);
//Implements AndroidStream
virtual void DeliverFrame(JNIEnv* jniEnv);
private:
static jint CreateOpenGLNativeStatic(
JNIEnv * env,
jobject,
jlong context,
jint width,
jint height);
jint CreateOpenGLNative(int width, int height);
static void DrawNativeStatic(JNIEnv * env,jobject, jlong context);
void DrawNative();
WebRtc_UWord32 _id;
CriticalSectionWrapper& _renderCritSect;
VideoFrame _bufferToRender;
VideoRenderAndroid& _renderer;
JavaVM* _jvm;
jobject _javaRenderObj;
jmethodID _redrawCid;
jmethodID _registerNativeCID;
jmethodID _deRegisterNativeCID;
VideoRenderOpenGles20 _openGLRenderer;
};
class AndroidNativeOpenGl2Renderer: private VideoRenderAndroid {
public:
AndroidNativeOpenGl2Renderer(const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen);
~AndroidNativeOpenGl2Renderer();
static bool UseOpenGL2(void* window);
WebRtc_Word32 Init();
virtual AndroidStream* CreateAndroidRenderChannel(
WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer);
private:
jobject _javaRenderObj;
jclass _javaRenderClass;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
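The DrawNativeStatic and CreateOpenGLNativeStatic declarations above use the standard JNI trampoline pattern: Java keeps the C++ this pointer as an opaque jlong and passes it back into a static native method, which casts it and forwards to the member function. A stripped-down sketch of the same round trip (SketchChannel and the method names are hypothetical):

#include <jni.h>

class SketchChannel {
 public:
  void Draw() { /* render the buffered frame here */ }

  // Java side calls DrawNative(nativeHandle) with the jlong it was given.
  static void JNICALL DrawNativeStatic(JNIEnv* /*env*/, jobject /*caller*/,
                                       jlong context) {
    // Recover the C++ object from the opaque Java-held handle.
    SketchChannel* self = reinterpret_cast<SketchChannel*>(context);
    self->Draw();
  }
};

// Registering goes the other way: hand Java the pointer as a jlong, e.g.
//   env->CallVoidMethod(javaRenderObj, registerNativeCid,
//                       reinterpret_cast<jlong>(channel));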

View File

@@ -0,0 +1,562 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video_render_android_surface_view.h"
#include "critical_section_wrapper.h"
#include "common_video/libyuv/include/libyuv.h"
#include "tick_util.h"
#ifdef ANDROID_NDK_8_OR_ABOVE
#include <android/bitmap.h>
#endif
#ifdef ANDROID_LOG
#include <stdio.h>
#include <android/log.h>
#undef WEBRTC_TRACE
#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
#else
#include "trace.h"
#endif
namespace webrtc {
AndroidSurfaceViewRenderer::AndroidSurfaceViewRenderer(const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen)
:
VideoRenderAndroid(id,videoRenderType,window,fullscreen),
_javaRenderObj(NULL),
_javaRenderClass(NULL)
{
}
AndroidSurfaceViewRenderer::~AndroidSurfaceViewRenderer() {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
"AndroidSurfaceViewRenderer dtor");
if(g_jvm) {
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__,
res,
env);
env=NULL;
}
else {
isAttached = true;
}
}
if (env) {
env->DeleteGlobalRef(_javaRenderObj);
env->DeleteGlobalRef(_javaRenderClass);
}
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceWarning,
kTraceVideoRenderer,
_id,
"%s: Could not detach thread from JVM",
__FUNCTION__);
}
}
}
}
WebRtc_Word32 AndroidSurfaceViewRenderer::Init() {
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
if (!g_jvm) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"(%s): Not a valid Java VM pointer.",
__FUNCTION__);
return -1;
}
if(!_ptrWindow) {
WEBRTC_TRACE(kTraceWarning,
kTraceVideoRenderer,
_id,
"(%s): No window have been provided.",
__FUNCTION__);
return -1;
}
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = g_jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__,
res,
env);
return -1;
}
isAttached = true;
}
// get the ViESurfaceRender class
jclass javaRenderClassLocal =
env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
if (!javaRenderClassLocal) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not find ViESurfaceRenderer",
__FUNCTION__);
return -1;
}
// create a global reference to the class (to tell JNI that
// we are referencing it after this function has returned)
_javaRenderClass =
reinterpret_cast<jclass>(env->NewGlobalRef(javaRenderClassLocal));
if (!_javaRenderClass) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not create Java ViESurfaceRenderer class reference",
__FUNCTION__);
return -1;
}
// Delete local class ref, we only use the global ref
env->DeleteLocalRef(javaRenderClassLocal);
// get the method ID for the constructor
jmethodID cid = env->GetMethodID(_javaRenderClass,
"<init>",
"(Landroid/view/SurfaceView;)V");
if (cid == NULL) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not get constructor ID",
__FUNCTION__);
return -1; /* exception thrown */
}
// construct the object
jobject javaRenderObjLocal = env->NewObject(_javaRenderClass,
cid,
_ptrWindow);
if (!javaRenderObjLocal) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not create Java Render",
__FUNCTION__);
return -1;
}
// create a reference to the object (to tell JNI that we are referencing it
// after this function has returned)
_javaRenderObj = env->NewGlobalRef(javaRenderObjLocal);
if (!_javaRenderObj) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not create Java SurfaceRender object reference",
__FUNCTION__);
return -1;
}
// Detach this thread if it was attached
if (isAttached) {
if (g_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceWarning,
kTraceVideoRenderer,
_id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done", __FUNCTION__);
return 0;
}
AndroidStream*
AndroidSurfaceViewRenderer::CreateAndroidRenderChannel(
WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer) {
WEBRTC_TRACE(kTraceDebug,
kTraceVideoRenderer,
_id,
"%s: Id %d",
__FUNCTION__,
streamId);
AndroidSurfaceViewChannel* stream =
new AndroidSurfaceViewChannel(streamId, g_jvm, renderer, _javaRenderObj);
if(stream && stream->Init(zOrder, left, top, right, bottom) == 0)
return stream;
else
delete stream;
return NULL;
}
AndroidSurfaceViewChannel::AndroidSurfaceViewChannel(
WebRtc_UWord32 streamId,
JavaVM* jvm,
VideoRenderAndroid& renderer,
jobject javaRenderObj) :
_id(streamId),
_renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
_renderer(renderer),
_jvm(jvm),
_javaRenderObj(javaRenderObj),
#ifdef ANDROID_NDK_8_OR_ABOVE
_javaBitmapObj(NULL),
#else
_javaByteBufferObj(NULL),
_directBuffer(NULL),
#endif
_bitmapWidth(0),
_bitmapHeight(0) {
}
AndroidSurfaceViewChannel::~AndroidSurfaceViewChannel() {
WEBRTC_TRACE(kTraceInfo,
kTraceVideoRenderer,
_id,
"AndroidSurfaceViewChannel dtor");
delete &_renderCritSect;
if(_jvm) {
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if ( _jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = _jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__,
res,
env);
env=NULL;
}
else {
isAttached = true;
}
}
if (env) {
#ifdef ANDROID_NDK_8_OR_ABOVE
env->DeleteGlobalRef(_javaBitmapObj);
#else
env->DeleteGlobalRef(_javaByteBufferObj);
#endif
}
if (isAttached) {
if (_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceWarning,
kTraceVideoRenderer,
_id,
"%s: Could not detach thread from JVM",
__FUNCTION__);
}
}
}
}
WebRtc_Word32 AndroidSurfaceViewChannel::Init(
WebRtc_Word32 /*zOrder*/,
const float left,
const float top,
const float right,
const float bottom) {
WEBRTC_TRACE(kTraceDebug,
kTraceVideoRenderer,
_id,
"%s: AndroidSurfaceViewChannel",
__FUNCTION__);
if (!_jvm) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: Not a valid Java VM pointer",
__FUNCTION__);
return -1;
}
if( (top > 1 || top < 0) ||
(right > 1 || right < 0) ||
(bottom > 1 || bottom < 0) ||
(left > 1 || left < 0)) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"%s: Wrong coordinates", __FUNCTION__);
return -1;
}
// get the JNI env for this thread
bool isAttached = false;
JNIEnv* env = NULL;
if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
// try to attach the thread and get the env
// Attach this thread to JVM
jint res = _jvm->AttachCurrentThread(&env, NULL);
// Get the JNI env for this thread
if ((res < 0) || !env) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: Could not attach thread to JVM (%d, %p)",
__FUNCTION__,
res,
env);
return -1;
}
isAttached = true;
}
jclass javaRenderClass =
env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
if (!javaRenderClass) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not find ViESurfaceRenderer",
__FUNCTION__);
return -1;
}
#ifdef ANDROID_NDK_8_OR_ABOVE
// get the method ID for the CreateBitmap
_createBitmapCid =
env->GetMethodID(javaRenderClass,
"CreateBitmap",
"(II)Landroid/graphics/Bitmap;");
if (_createBitmapCid == NULL) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not get CreateBitmap ID",
__FUNCTION__);
return -1; /* exception thrown */
}
// get the method ID for the DrawBitmap function
_drawBitmapCid = env->GetMethodID(javaRenderClass, "DrawBitmap", "()V");
if (_drawBitmapCid == NULL) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not get DrawBitmap ID",
__FUNCTION__);
return -1; /* exception thrown */
}
#else
// get the method ID for the CreateByteBuffer function
_createByteBufferCid =
env->GetMethodID(javaRenderClass,
"CreateByteBuffer",
"(II)Ljava/nio/ByteBuffer;");
if (_createByteBufferCid == NULL) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not get CreateByteBuffer ID",
__FUNCTION__);
return -1; /* exception thrown */
}
// get the method ID for the DrawByteBuffer function
_drawByteBufferCid = env->GetMethodID(javaRenderClass,
"DrawByteBuffer",
"()V");
if (_drawByteBufferCid == NULL) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not get DrawByteBuffer ID",
__FUNCTION__);
return -1; /* exception thrown */
}
#endif
// get the method ID for the SetCoordinates function
_setCoordinatesCid = env->GetMethodID(javaRenderClass,
"SetCoordinates",
"(FFFF)V");
if (_setCoordinatesCid == NULL) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: could not get SetCoordinates ID",
__FUNCTION__);
return -1; /* exception thrown */
}
env->CallVoidMethod(_javaRenderObj, _setCoordinatesCid,
left, top, right, bottom);
// Detach this thread if it was attached
if (isAttached) {
if (_jvm->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceWarning,
kTraceVideoRenderer,
_id,
"%s: Could not detach thread from JVM",
__FUNCTION__);
}
}
WEBRTC_TRACE(kTraceDebug,
kTraceVideoRenderer,
_id,
"%s: AndroidSurfaceViewChannel done",
__FUNCTION__);
return 0;
}
WebRtc_Word32 AndroidSurfaceViewChannel::RenderFrame(
const WebRtc_UWord32 /*streamId*/,
VideoFrame& videoFrame) {
// WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
_renderCritSect.Enter();
_bufferToRender.SwapFrame(videoFrame);
_renderCritSect.Leave();
_renderer.ReDraw();
return 0;
}
/* Implements AndroidStream.
 * Calls the Java object and renders the buffer in _bufferToRender.
*/
void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) {
_renderCritSect.Enter();
#ifdef ANDROID_NDK_8_OR_ABOVE
if (_bitmapWidth != _bufferToRender.Width() ||
_bitmapHeight != _bufferToRender.Height()) {
// Create the bitmap to write to
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Creating bitmap %u "
"%u", __FUNCTION__, _bufferToRender.Width(),
_bufferToRender.Height());
if (_javaBitmapObj) {
jniEnv->DeleteGlobalRef(_javaBitmapObj);
_javaBitmapObj = NULL;
}
jobject javaBitmap = jniEnv->CallObjectMethod(_javaRenderObj,
_createBitmapCid,
_bufferToRender.Width(),
_bufferToRender.Height());
_javaBitmapObj = jniEnv->NewGlobalRef(javaBitmap);
if (!_javaBitmapObj) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not "
"create Java Bitmap object reference", __FUNCTION__);
_renderCritSect.Leave();
return;
} else {
_bitmapWidth = _bufferToRender.Width();
_bitmapHeight = _bufferToRender.Height();
}
}
void* pixels;
if (_javaBitmapObj &&
AndroidBitmap_lockPixels(jniEnv, _javaBitmapObj, &pixels) >= 0) {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Locked bitmap",
__FUNCTION__);
// Convert I420 straight into the Java bitmap.
int ret = ConvertI420ToRGB565((unsigned char*)_bufferToRender.Buffer(),
(unsigned char*) pixels,
_bitmapWidth, _bitmapHeight);
if (ret < 0) {
WEBRTC_TRACE(kTraceError,
kTraceVideoRenderer,
_id,
"%s: Color conversion failed.",
__FUNCTION__);
}
AndroidBitmap_unlockPixels(jniEnv, _javaBitmapObj);
// Draw the Surface.
jniEnv->CallVoidMethod(_javaRenderObj, _drawBitmapCid);
} else {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not lock "
"bitmap", __FUNCTION__);
}
_renderCritSect.Leave();
#else
if (_bitmapWidth != _bufferToRender.Width() ||
_bitmapHeight != _bufferToRender.Height()) {
WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: New render size %d "
"%d",__FUNCTION__,
_bufferToRender.Width(), _bufferToRender.Height());
if (_javaByteBufferObj) {
jniEnv->DeleteGlobalRef(_javaByteBufferObj);
_javaByteBufferObj = NULL;
_directBuffer = NULL;
}
jobject javaByteBufferObj =
jniEnv->CallObjectMethod(_javaRenderObj, _createByteBufferCid,
_bufferToRender.Width(),
_bufferToRender.Height());
_javaByteBufferObj = jniEnv->NewGlobalRef(javaByteBufferObj);
if (!_javaByteBufferObj) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not "
"create Java ByteBuffer object reference", __FUNCTION__);
_renderCritSect.Leave();
return;
} else {
_directBuffer = static_cast<unsigned char*>
(jniEnv->GetDirectBufferAddress(_javaByteBufferObj));
_bitmapWidth = _bufferToRender.Width();
_bitmapHeight = _bufferToRender.Height();
}
}
if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight) {
// Android requires a vertically flipped image compared to the standard
// conversion; this is done by giving a negative height as input.
const int conversionResult =
ConvertI420ToRGB565((unsigned char* )_bufferToRender.Buffer(),
_directBuffer, _bitmapWidth, -_bitmapHeight);
if (conversionResult < 0) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion"
" failed.", __FUNCTION__);
_renderCritSect.Leave();
return;
}
}
_renderCritSect.Leave();
// Draw the Surface
jniEnv->CallVoidMethod(_javaRenderObj, _drawByteBufferCid);
#endif
}
} // namespace webrtc
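The ByteBuffer path above leans on the convention, shared by the color-conversion helper, that a negative destination height means "write rows bottom-up", which yields the vertical flip Android expects without a second pass over the pixels. A small self-contained sketch of what a negative height does to row order; CopyRowsWithFlip is illustrative, not the ConvertI420ToRGB565 API:

#include <string.h>

// Copies |height| rows of |row_bytes| bytes each; a negative height writes
// the destination bottom-up, producing a vertically flipped image.
static void CopyRowsWithFlip(const unsigned char* src, unsigned char* dst,
                             int row_bytes, int height) {
  const bool flip = height < 0;
  const int rows = flip ? -height : height;
  for (int r = 0; r < rows; ++r) {
    const int dst_row = flip ? (rows - 1 - r) : r;
    memcpy(dst + dst_row * row_bytes, src + r * row_bytes, row_bytes);
  }
}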

View File

@@ -0,0 +1,94 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
#include <jni.h>
#include "video_render_defines.h"
#include "video_render_android_impl.h"
namespace webrtc {
class CriticalSectionWrapper;
class AndroidSurfaceViewChannel: public AndroidStream
{
public:
AndroidSurfaceViewChannel(WebRtc_UWord32 streamId,
JavaVM* jvm,
VideoRenderAndroid& renderer,
jobject javaRenderObj);
~AndroidSurfaceViewChannel();
WebRtc_Word32 Init(WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom);
//Implement VideoRenderCallback
virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
VideoFrame& videoFrame);
//Implements AndroidStream
virtual void DeliverFrame(JNIEnv* jniEnv);
private:
WebRtc_UWord32 _id;
CriticalSectionWrapper& _renderCritSect;
VideoFrame _bufferToRender;
VideoRenderAndroid& _renderer;
JavaVM* _jvm;
jobject _javaRenderObj;
#ifdef ANDROID_NDK_8_OR_ABOVE
jclass _javaBitmapClass;
jmethodID _createBitmapCid;
jobject _javaBitmapObj;
jmethodID _drawBitmapCid;
#else
jobject _javaByteBufferObj;
unsigned char* _directBuffer;
jmethodID _createByteBufferCid;
jmethodID _drawByteBufferCid;
#endif
jmethodID _setCoordinatesCid;
unsigned int _bitmapWidth;
unsigned int _bitmapHeight;
};
class AndroidSurfaceViewRenderer: private VideoRenderAndroid
{
public:
AndroidSurfaceViewRenderer(const WebRtc_Word32 id,
const VideoRenderType videoRenderType,
void* window,
const bool fullscreen);
~AndroidSurfaceViewRenderer();
WebRtc_Word32 Init();
virtual AndroidStream* CreateAndroidRenderChannel(
WebRtc_Word32 streamId,
WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom,
VideoRenderAndroid& renderer);
private:
jobject _javaRenderObj;
jclass _javaRenderClass;
};
} //namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_

View File

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -139,7 +139,8 @@ WebRtc_Word32 VideoRenderOpenGles20::Setup(WebRtc_Word32 width,
}
// set the vertices array in the shader
// _vertices contains 4 vertices with 5 coordinates. 3 for (xyz) for the vertices and 2 for the texture
// _vertices contains 4 vertices with 5 coordinates.
// 3 for (xyz) for the vertices and 2 for the texture
glVertexAttribPointer(positionHandle, 3, GL_FLOAT, false, 5
* sizeof(GLfloat), _vertices);
checkGlError("glVertexAttribPointer aPosition");
@@ -148,7 +149,8 @@ WebRtc_Word32 VideoRenderOpenGles20::Setup(WebRtc_Word32 width,
checkGlError("glEnableVertexAttribArray positionHandle");
// set the texture coordinate array in the shader
// _vertices contains 4 vertices with 5 coordinates. 3 for (xyz) for the vertices and 2 for the texture
// _vertices contains 4 vertices with 5 coordinates.
// 3 for (xyz) for the vertices and 2 for the texture
glVertexAttribPointer(textureHandle, 2, GL_FLOAT, false, 5
* sizeof(GLfloat), &_vertices[3]);
checkGlError("glVertexAttribPointer maTextureHandle");
@@ -178,13 +180,14 @@ WebRtc_Word32 VideoRenderOpenGles20::Setup(WebRtc_Word32 width,
}
/*
* SetCoordinates
* Sets the coordinates where the stream shall be rendered. Values must be between 0 and 1.
* Sets the coordinates where the stream shall be rendered.
* Values must be between 0 and 1.
*/
WebRtc_Word32 VideoRenderOpenGles20::SetCoordinates(WebRtc_Word32 zOrder,
const float left,
const float top,
const float right,
const float bottom)
const float left,
const float top,
const float right,
const float bottom)
{
if ((top > 1 || top < 0) || (right > 1 || right < 0) || (bottom > 1
|| bottom < 0) || (left > 1 || left < 0))
@@ -344,16 +347,14 @@ void VideoRenderOpenGles20::printGLString(const char *name, GLenum s)
name, v);
}
void VideoRenderOpenGles20::checkGlError(const char* op)
{
void VideoRenderOpenGles20::checkGlError(const char* op) {
#ifdef ANDROID_LOG
for (GLint error = glGetError(); error; error
= glGetError())
{
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "after %s() glError (0x%x)\n", op, error);
}
for (GLint error = glGetError(); error; error = glGetError()) {
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
"after %s() glError (0x%x)\n", op, error);
}
#else
return;
return;
#endif
}
@@ -443,4 +444,3 @@ void VideoRenderOpenGles20::UpdateTextures(const VideoFrame& frameToRender)
}
} //namespace webrtc
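The Setup() hunks above describe an interleaved vertex array: four vertices, each carrying three position floats (xyz) followed by two texture floats (uv), which is why both glVertexAttribPointer calls pass a stride of 5 * sizeof(GLfloat) and the texture attribute starts at &_vertices[3]. A sketch of such an array for a full-viewport quad; the values are illustrative, since the real _vertices are rewritten by SetCoordinates():

#include <GLES2/gl2.h>

// Interleaved X, Y, Z, U, V per vertex, stride 5 * sizeof(GLfloat).
static const GLfloat kQuadVertices[20] = {
  // X      Y     Z     U     V
  -1.0f,  1.0f, 0.0f, 0.0f, 0.0f,  // top left
  -1.0f, -1.0f, 0.0f, 0.0f, 1.0f,  // bottom left
   1.0f, -1.0f, 0.0f, 1.0f, 1.0f,  // bottom right
   1.0f,  1.0f, 0.0f, 1.0f, 0.0f,  // top right
};
// Position attribute: 3 floats starting at &kQuadVertices[0].
// Texture attribute:  2 floats starting at &kQuadVertices[3].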

View File

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -37,7 +37,8 @@ private:
void printGLString(const char *name, GLenum s);
void checkGlError(const char* op);
GLuint loadShader(GLenum shaderType, const char* pSource);
GLuint createProgram(const char* pVertexSource, const char* pFragmentSource);
GLuint createProgram(const char* pVertexSource,
const char* pFragmentSource);
void SetupTextures(const VideoFrame& frameToRender);
void UpdateTextures(const VideoFrame& frameToRender);

View File

@@ -38,10 +38,10 @@
'video_render_impl.h',
'i_video_render.h',
# Android
'Android/video_render_android_impl.h',
'Android/video_render_android_native_opengl2.h',
'Android/video_render_android_surface_view.h',
'Android/video_render_opengles20.h',
'android/video_render_android_impl.h',
'android/video_render_android_native_opengl2.h',
'android/video_render_android_surface_view.h',
'android/video_render_opengles20.h',
# Linux
'linux/video_render_linux_impl.h',
'linux/video_x11_channel.h',
@@ -67,10 +67,10 @@
'video_render_impl.cc',
# PLATFORM SPECIFIC SOURCE FILES - Will be filtered below
# Android
'Android/video_render_android_impl.cc',
'Android/video_render_android_native_opengl2.cc',
'Android/video_render_android_surface_view.cc',
'Android/video_render_opengles20.cc',
'android/video_render_android_impl.cc',
'android/video_render_android_native_opengl2.cc',
'android/video_render_android_surface_view.cc',
'android/video_render_opengles20.cc',
# Linux
'linux/video_render_linux_impl.cc',
'linux/video_x11_channel.cc',
@@ -100,14 +100,14 @@
['OS!="android" or include_internal_video_render==0', {
'sources!': [
# Android
'Android/video_render_android_impl.h',
'Android/video_render_android_native_opengl2.h',
'Android/video_render_android_surface_view.h',
'Android/video_render_opengles20.h',
'Android/video_render_android_impl.cc',
'Android/video_render_android_native_opengl2.cc',
'Android/video_render_android_surface_view.cc',
'Android/video_render_opengles20.cc',
'android/video_render_android_impl.h',
'android/video_render_android_native_opengl2.h',
'android/video_render_android_surface_view.h',
'android/video_render_opengles20.h',
'android/video_render_android_impl.cc',
'android/video_render_android_native_opengl2.cc',
'android/video_render_android_surface_view.cc',
'android/video_render_opengles20.cc',
],
}],
['OS!="linux" or include_internal_video_render==0', {

View File

@@ -40,9 +40,9 @@
#endif
#elif defined(WEBRTC_ANDROID)
#include "Android/video_render_android_impl.h"
#include "Android/video_render_android_surface_view.h"
#include "Android/video_render_android_native_opengl2.h"
#include "android/video_render_android_impl.h"
#include "android/video_render_android_surface_view.h"
#include "android/video_render_android_native_opengl2.h"
#define STANDARD_RENDERING kRenderAndroid
#elif defined(WEBRTC_LINUX)
@@ -978,4 +978,3 @@ WebRtc_Word32 ModuleVideoRenderImpl::MirrorRenderStream(const int renderId,
}
} //namespace webrtc