From ac9fd8af0964158842792daf9e6ab1ba3bc748a7 Mon Sep 17 00:00:00 2001 From: "leozwang@webrtc.org" Date: Mon, 19 Mar 2012 21:09:42 +0000 Subject: [PATCH] Change folder name from Android to android Review URL: https://webrtc-codereview.appspot.com/447012 git-svn-id: http://webrtc.googlecode.com/svn/trunk@1912 4adac7df-926f-26a2-2b94-8c16560cd09d --- .../video_capture/main/source/Android.mk | 8 +- .../source/Android/device_info_android.cc | 362 ----------- .../source/android/device_info_android.cc | 348 +++++++++++ .../device_info_android.h | 8 +- .../videoengine/CaptureCapabilityAndroid.java | 2 +- .../videoengine/VideoCaptureAndroid.java | 2 +- .../VideoCaptureDeviceInfoAndroid.java | 2 +- .../video_capture_android.cc | 109 ++-- .../video_capture_android.h | 6 +- .../main/source/video_capture.gypi | 10 +- .../video_render/main/source/Android.mk | 12 +- .../Android/video_render_android_impl.h | 162 ----- .../video_render_android_native_opengl2.h | 93 --- .../video_render_android_surface_view.cc | 470 --------------- .../video_render_android_surface_view.h | 96 --- .../webrtc/videoengine/ViEAndroidGLES20.java | 2 +- .../org/webrtc/videoengine/ViERenderer.java | 2 +- .../videoengine/ViESurfaceRenderer.java | 2 +- .../video_render_android_impl.cc | 221 ++++--- .../android/video_render_android_impl.h | 168 ++++++ .../video_render_android_native_opengl2.cc | 113 ++-- .../video_render_android_native_opengl2.h | 99 +++ .../video_render_android_surface_view.cc | 562 ++++++++++++++++++ .../video_render_android_surface_view.h | 94 +++ .../video_render_opengles20.cc | 34 +- .../video_render_opengles20.h | 5 +- .../main/source/video_render.gypi | 32 +- .../main/source/video_render_impl.cc | 7 +- 28 files changed, 1567 insertions(+), 1464 deletions(-) delete mode 100644 src/modules/video_capture/main/source/Android/device_info_android.cc create mode 100644 src/modules/video_capture/main/source/android/device_info_android.cc rename src/modules/video_capture/main/source/{Android => android}/device_info_android.h (91%) rename src/modules/video_capture/main/source/{Android => android}/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java (88%) rename src/modules/video_capture/main/source/{Android => android}/java/org/webrtc/videoengine/VideoCaptureAndroid.java (99%) rename src/modules/video_capture/main/source/{Android => android}/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java (99%) rename src/modules/video_capture/main/source/{Android => android}/video_capture_android.cc (88%) rename src/modules/video_capture/main/source/{Android => android}/video_capture_android.h (92%) delete mode 100644 src/modules/video_render/main/source/Android/video_render_android_impl.h delete mode 100644 src/modules/video_render/main/source/Android/video_render_android_native_opengl2.h delete mode 100644 src/modules/video_render/main/source/Android/video_render_android_surface_view.cc delete mode 100644 src/modules/video_render/main/source/Android/video_render_android_surface_view.h rename src/modules/video_render/main/source/{Android => android}/java/org/webrtc/videoengine/ViEAndroidGLES20.java (99%) rename src/modules/video_render/main/source/{Android => android}/java/org/webrtc/videoengine/ViERenderer.java (96%) rename src/modules/video_render/main/source/{Android => android}/java/org/webrtc/videoengine/ViESurfaceRenderer.java (98%) rename src/modules/video_render/main/source/{Android => android}/video_render_android_impl.cc (57%) create mode 100644 
src/modules/video_render/main/source/android/video_render_android_impl.h rename src/modules/video_render/main/source/{Android => android}/video_render_android_native_opengl2.cc (79%) create mode 100644 src/modules/video_render/main/source/android/video_render_android_native_opengl2.h create mode 100644 src/modules/video_render/main/source/android/video_render_android_surface_view.cc create mode 100644 src/modules/video_render/main/source/android/video_render_android_surface_view.h rename src/modules/video_render/main/source/{Android => android}/video_render_opengles20.cc (93%) rename src/modules/video_render/main/source/{Android => android}/video_render_opengles20.h (91%) diff --git a/src/modules/video_capture/main/source/Android.mk b/src/modules/video_capture/main/source/Android.mk index 8976f1925..971f282b2 100644 --- a/src/modules/video_capture/main/source/Android.mk +++ b/src/modules/video_capture/main/source/Android.mk @@ -1,4 +1,4 @@ -# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. +# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. # # Use of this source code is governed by a BSD-style license # that can be found in the LICENSE file in the root of the source @@ -21,15 +21,15 @@ LOCAL_SRC_FILES := \ video_capture_impl.cc \ device_info_impl.cc \ video_capture_factory.cc \ - Android/video_capture_android.cc \ - Android/device_info_android.cc + android/video_capture_android.cc \ + android/device_info_android.cc # Flags passed to both C and C++ files. LOCAL_CFLAGS := \ $(MY_WEBRTC_COMMON_DEFS) LOCAL_C_INCLUDES := \ - $(LOCAL_PATH)/Android \ + $(LOCAL_PATH)/android \ $(LOCAL_PATH)/../interface \ $(LOCAL_PATH)/../source \ $(LOCAL_PATH)/../../../interface \ diff --git a/src/modules/video_capture/main/source/Android/device_info_android.cc b/src/modules/video_capture/main/source/Android/device_info_android.cc deleted file mode 100644 index 3e227161b..000000000 --- a/src/modules/video_capture/main/source/Android/device_info_android.cc +++ /dev/null @@ -1,362 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "device_info_android.h" - -#include - -#include "ref_count.h" -#include "trace.h" -#include "video_capture_android.h" - -namespace webrtc -{ -namespace videocapturemodule -{ -VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo ( - const WebRtc_Word32 id) -{ - videocapturemodule::DeviceInfoAndroid *deviceInfo = - new videocapturemodule::DeviceInfoAndroid(id); - if (deviceInfo && deviceInfo->Init() != 0) // Failed to init - { - delete deviceInfo; - deviceInfo = NULL; - } - return deviceInfo; -} - -DeviceInfoAndroid::DeviceInfoAndroid(const WebRtc_Word32 id) : - DeviceInfoImpl(id) -{ -} - -WebRtc_Word32 DeviceInfoAndroid::Init() -{ - return 0; -} - -DeviceInfoAndroid::~DeviceInfoAndroid() -{ -} - -WebRtc_UWord32 DeviceInfoAndroid::NumberOfDevices() -{ - - JNIEnv *env; - jclass javaCmDevInfoClass; - jobject javaCmDevInfoObject; - bool attached = false; - if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( - env, - javaCmDevInfoClass, - javaCmDevInfoObject, - attached) != 0) - { - return 0; - } - - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id, - "%s GetMethodId", __FUNCTION__); - // get the method ID for the Android Java GetDeviceUniqueName name. - jmethodID cid = env->GetMethodID(javaCmDevInfoClass, - "NumberOfDevices", - "()I"); - - jint numberOfDevices = 0; - if (cid != NULL) - { - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id, - "%s Calling Number of devices", __FUNCTION__); - numberOfDevices = env->CallIntMethod(javaCmDevInfoObject, cid); - } - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); - - if (numberOfDevices > 0) - return numberOfDevices; - return 0; -} - -WebRtc_Word32 DeviceInfoAndroid::GetDeviceName( - WebRtc_UWord32 deviceNumber, - char* deviceNameUTF8, - WebRtc_UWord32 deviceNameLength, - char* deviceUniqueIdUTF8, - WebRtc_UWord32 deviceUniqueIdUTF8Length, - char* /*productUniqueIdUTF8*/, - WebRtc_UWord32 /*productUniqueIdUTF8Length*/) -{ - - JNIEnv *env; - jclass javaCmDevInfoClass; - jobject javaCmDevInfoObject; - WebRtc_Word32 result = 0; - bool attached = false; - if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( - env, - javaCmDevInfoClass, - javaCmDevInfoObject, - attached)!= 0) - { - return -1; - } - - // get the method ID for the Android Java GetDeviceUniqueName name. 
- jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetDeviceUniqueName", - "(I)Ljava/lang/String;"); - if (cid != NULL) - { - - jobject javaDeviceNameObj = env->CallObjectMethod(javaCmDevInfoObject, - cid, deviceNumber); - if (javaDeviceNameObj == NULL) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Failed to get device name for device %d.", - __FUNCTION__, (int) deviceNumber); - result = -1; - } - else - { - jboolean isCopy; - const char* javaDeviceNameChar = env->GetStringUTFChars( - (jstring) javaDeviceNameObj - ,&isCopy); - const jsize javaDeviceNameCharLength = env->GetStringUTFLength( - (jstring) javaDeviceNameObj); - if ((WebRtc_UWord32) javaDeviceNameCharLength < deviceUniqueIdUTF8Length) - { - memcpy(deviceUniqueIdUTF8, - javaDeviceNameChar, - javaDeviceNameCharLength + 1); - } - else - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, - _id, "%s: deviceUniqueIdUTF8 to short.", - __FUNCTION__); - result = -1; - } - if ((WebRtc_UWord32) javaDeviceNameCharLength < deviceNameLength) - { - memcpy(deviceNameUTF8, - javaDeviceNameChar, - javaDeviceNameCharLength + 1); - } - env->ReleaseStringUTFChars((jstring) javaDeviceNameObj, - javaDeviceNameChar); - }//javaDeviceNameObj==NULL - - } - else - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: Failed to find GetDeviceUniqueName function id", - __FUNCTION__); - result = -1; - } - - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); - - WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: result %d", __FUNCTION__, (int) result); - return result; - -} - -WebRtc_Word32 DeviceInfoAndroid::CreateCapabilityMap( - const char* deviceUniqueIdUTF8) -{ - - MapItem* item = NULL; - while ((item = _captureCapabilities.Last())) - { - delete (VideoCaptureCapability*) item->GetItem(); - _captureCapabilities.Erase(item); - } - - JNIEnv *env; - jclass javaCmDevInfoClass; - jobject javaCmDevInfoObject; - bool attached = false; - if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( - env, - javaCmDevInfoClass, - javaCmDevInfoObject, - attached) != 0) - { - return -1; - } - - // Find the capability class - jclass javaCapClassLocal = env->FindClass(AndroidJavaCaptureCapabilityClass); - if (javaCapClassLocal == NULL) - { - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Can't find java class VideoCaptureCapabilityAndroid.", - __FUNCTION__); - return -1; - } - - // get the method ID for the Android Java GetCapabilityArray . - char signature[256]; - sprintf(signature, - "(Ljava/lang/String;)[L%s;", - AndroidJavaCaptureCapabilityClass); - jmethodID cid = env->GetMethodID(javaCmDevInfoClass, - "GetCapabilityArray", - signature); - if (cid == NULL) - { - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Can't find method GetCapabilityArray.", __FUNCTION__); - return -1; - } - // Create a jstring so we can pass the deviceUniquName to the java method. - jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8); - - if (capureIdString == NULL) - { - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Can't create string for method GetCapabilityArray.", - __FUNCTION__); - return -1; - } - // Call the java class and get an array with capabilities back. 
- jobject javaCapabilitiesObj = env->CallObjectMethod(javaCmDevInfoObject, - cid, capureIdString); - if (!javaCapabilitiesObj) - { - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Failed to call java GetCapabilityArray.", - __FUNCTION__); - return -1; - } - - jfieldID widthField = env->GetFieldID(javaCapClassLocal, "width", "I"); - jfieldID heigtField = env->GetFieldID(javaCapClassLocal, "height", "I"); - jfieldID maxFpsField = env->GetFieldID(javaCapClassLocal, "maxFPS", "I"); - if (widthField == NULL || heigtField == NULL || maxFpsField == NULL) - { - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Failed to get field Id.", __FUNCTION__); - return -1; - } - - const jsize numberOfCapabilities = - env->GetArrayLength((jarray) javaCapabilitiesObj); - - for (jsize i = 0; i < numberOfCapabilities; ++i) - { - VideoCaptureCapability *cap = new VideoCaptureCapability(); - jobject capabilityElement = env->GetObjectArrayElement( - (jobjectArray) javaCapabilitiesObj, - i); - - cap->width = env->GetIntField(capabilityElement, widthField); - cap->height = env->GetIntField(capabilityElement, heigtField); - cap->expectedCaptureDelay = _expectedCaptureDelay; - cap->rawType = kVideoNV21; - cap->maxFPS = env->GetIntField(capabilityElement, maxFpsField); - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Cap width %d, height %d, fps %d", __FUNCTION__, - cap->width, cap->height, cap->maxFPS); - _captureCapabilities.Insert(i, cap); - } - - _lastUsedDeviceNameLength = strlen((char*) deviceUniqueIdUTF8); - _lastUsedDeviceName = (char*) realloc(_lastUsedDeviceName, - _lastUsedDeviceNameLength + 1); - memcpy(_lastUsedDeviceName, - deviceUniqueIdUTF8, - _lastUsedDeviceNameLength + 1); - - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); - WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, - "CreateCapabilityMap %d", _captureCapabilities.Size()); - - return _captureCapabilities.Size(); -} - -WebRtc_Word32 DeviceInfoAndroid::GetOrientation( - const char* deviceUniqueIdUTF8, - VideoCaptureRotation& orientation) -{ - - JNIEnv *env; - jclass javaCmDevInfoClass; - jobject javaCmDevInfoObject; - bool attached = false; - if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( - env, - javaCmDevInfoClass, - javaCmDevInfoObject, - attached) != 0) - { - return -1; - } - - // get the method ID for the Android Java GetOrientation . - jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetOrientation", - "(Ljava/lang/String;)I"); - if (cid == NULL) - { - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Can't find method GetOrientation.", __FUNCTION__); - return -1; - } - // Create a jstring so we can pass the deviceUniquName to the java method. - jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8); - if (capureIdString == NULL) - { - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Can't create string for method GetCapabilityArray.", - __FUNCTION__); - return -1; - } - // Call the java class and get the orientation. 
- jint jorientation = env->CallIntMethod(javaCmDevInfoObject, cid, - capureIdString); - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); - - WebRtc_Word32 retValue = 0; - switch (jorientation) - { - case -1: //Error - orientation = kCameraRotate0; - retValue = -1; - break; - case 0: - orientation = kCameraRotate0; - break; - case 90: - orientation = kCameraRotate90; - break; - case 180: - orientation = kCameraRotate180; - break; - case 270: - orientation = kCameraRotate270; - break; - case 360: - orientation = kCameraRotate0; - break; - } - return retValue; -} -} // namespace videocapturemodule -} // namespace webrtc diff --git a/src/modules/video_capture/main/source/android/device_info_android.cc b/src/modules/video_capture/main/source/android/device_info_android.cc new file mode 100644 index 000000000..9d11b85c2 --- /dev/null +++ b/src/modules/video_capture/main/source/android/device_info_android.cc @@ -0,0 +1,348 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "device_info_android.h" + +#include + +#include "ref_count.h" +#include "trace.h" +#include "video_capture_android.h" + +namespace webrtc +{ +namespace videocapturemodule +{ +VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo ( + const WebRtc_Word32 id) +{ + videocapturemodule::DeviceInfoAndroid *deviceInfo = + new videocapturemodule::DeviceInfoAndroid(id); + if (deviceInfo && deviceInfo->Init() != 0) // Failed to init + { + delete deviceInfo; + deviceInfo = NULL; + } + return deviceInfo; +} + +DeviceInfoAndroid::DeviceInfoAndroid(const WebRtc_Word32 id) : + DeviceInfoImpl(id) +{ +} + +WebRtc_Word32 DeviceInfoAndroid::Init() +{ + return 0; +} + +DeviceInfoAndroid::~DeviceInfoAndroid() +{ +} + +WebRtc_UWord32 DeviceInfoAndroid::NumberOfDevices() +{ + + JNIEnv *env; + jclass javaCmDevInfoClass; + jobject javaCmDevInfoObject; + bool attached = false; + if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( + env, + javaCmDevInfoClass, + javaCmDevInfoObject, + attached) != 0) + { + return 0; + } + + WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id, + "%s GetMethodId", __FUNCTION__); + // get the method ID for the Android Java GetDeviceUniqueName name. 
+ jmethodID cid = env->GetMethodID(javaCmDevInfoClass, + "NumberOfDevices", + "()I"); + + jint numberOfDevices = 0; + if (cid != NULL) + { + WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id, + "%s Calling Number of devices", __FUNCTION__); + numberOfDevices = env->CallIntMethod(javaCmDevInfoObject, cid); + } + VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); + + if (numberOfDevices > 0) + return numberOfDevices; + return 0; +} + +WebRtc_Word32 DeviceInfoAndroid::GetDeviceName( + WebRtc_UWord32 deviceNumber, + char* deviceNameUTF8, + WebRtc_UWord32 deviceNameLength, + char* deviceUniqueIdUTF8, + WebRtc_UWord32 deviceUniqueIdUTF8Length, + char* /*productUniqueIdUTF8*/, + WebRtc_UWord32 /*productUniqueIdUTF8Length*/) { + + JNIEnv *env; + jclass javaCmDevInfoClass; + jobject javaCmDevInfoObject; + WebRtc_Word32 result = 0; + bool attached = false; + if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( + env, + javaCmDevInfoClass, + javaCmDevInfoObject, + attached)!= 0) + { + return -1; + } + + // get the method ID for the Android Java GetDeviceUniqueName name. + jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetDeviceUniqueName", + "(I)Ljava/lang/String;"); + if (cid != NULL) + { + + jobject javaDeviceNameObj = env->CallObjectMethod(javaCmDevInfoObject, + cid, deviceNumber); + if (javaDeviceNameObj == NULL) + { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, + "%s: Failed to get device name for device %d.", + __FUNCTION__, (int) deviceNumber); + result = -1; + } + else + { + jboolean isCopy; + const char* javaDeviceNameChar = env->GetStringUTFChars( + (jstring) javaDeviceNameObj + ,&isCopy); + const jsize javaDeviceNameCharLength = + env->GetStringUTFLength((jstring) javaDeviceNameObj); + if ((WebRtc_UWord32) javaDeviceNameCharLength < + deviceUniqueIdUTF8Length) { + memcpy(deviceUniqueIdUTF8, + javaDeviceNameChar, + javaDeviceNameCharLength + 1); + } + else + { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, + _id, "%s: deviceUniqueIdUTF8 to short.", + __FUNCTION__); + result = -1; + } + if ((WebRtc_UWord32) javaDeviceNameCharLength < deviceNameLength) + { + memcpy(deviceNameUTF8, + javaDeviceNameChar, + javaDeviceNameCharLength + 1); + } + env->ReleaseStringUTFChars((jstring) javaDeviceNameObj, + javaDeviceNameChar); + }//javaDeviceNameObj==NULL + + } + else + { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: Failed to find GetDeviceUniqueName function id", + __FUNCTION__); + result = -1; + } + + VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); + + WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, + "%s: result %d", __FUNCTION__, (int) result); + return result; + +} + +WebRtc_Word32 DeviceInfoAndroid::CreateCapabilityMap( + const char* deviceUniqueIdUTF8) +{ + MapItem* item = NULL; + while ((item = _captureCapabilities.Last())) { + delete (VideoCaptureCapability*) item->GetItem(); + _captureCapabilities.Erase(item); + } + + JNIEnv *env; + jclass javaCmDevInfoClass; + jobject javaCmDevInfoObject; + bool attached = false; + if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( + env, + javaCmDevInfoClass, + javaCmDevInfoObject, + attached) != 0) { + return -1; + } + + // Find the capability class + jclass javaCapClassLocal = env->FindClass(AndroidJavaCaptureCapabilityClass); + if (javaCapClassLocal == NULL) { + VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 
_id, + "%s: Can't find java class VideoCaptureCapabilityAndroid.", + __FUNCTION__); + return -1; + } + + // get the method ID for the Android Java GetCapabilityArray . + char signature[256]; + sprintf(signature, + "(Ljava/lang/String;)[L%s;", + AndroidJavaCaptureCapabilityClass); + jmethodID cid = env->GetMethodID(javaCmDevInfoClass, + "GetCapabilityArray", + signature); + if (cid == NULL) { + VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, + "%s: Can't find method GetCapabilityArray.", __FUNCTION__); + return -1; + } + // Create a jstring so we can pass the deviceUniquName to the java method. + jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8); + + if (capureIdString == NULL) { + VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, + "%s: Can't create string for method GetCapabilityArray.", + __FUNCTION__); + return -1; + } + // Call the java class and get an array with capabilities back. + jobject javaCapabilitiesObj = env->CallObjectMethod(javaCmDevInfoObject, + cid, capureIdString); + if (!javaCapabilitiesObj) { + VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, + "%s: Failed to call java GetCapabilityArray.", + __FUNCTION__); + return -1; + } + + jfieldID widthField = env->GetFieldID(javaCapClassLocal, "width", "I"); + jfieldID heigtField = env->GetFieldID(javaCapClassLocal, "height", "I"); + jfieldID maxFpsField = env->GetFieldID(javaCapClassLocal, "maxFPS", "I"); + if (widthField == NULL || heigtField == NULL || maxFpsField == NULL) { + VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, + "%s: Failed to get field Id.", __FUNCTION__); + return -1; + } + + const jsize numberOfCapabilities = + env->GetArrayLength((jarray) javaCapabilitiesObj); + + for (jsize i = 0; i < numberOfCapabilities; ++i) { + VideoCaptureCapability *cap = new VideoCaptureCapability(); + jobject capabilityElement = env->GetObjectArrayElement( + (jobjectArray) javaCapabilitiesObj, + i); + + cap->width = env->GetIntField(capabilityElement, widthField); + cap->height = env->GetIntField(capabilityElement, heigtField); + cap->expectedCaptureDelay = _expectedCaptureDelay; + cap->rawType = kVideoNV21; + cap->maxFPS = env->GetIntField(capabilityElement, maxFpsField); + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, + "%s: Cap width %d, height %d, fps %d", __FUNCTION__, + cap->width, cap->height, cap->maxFPS); + _captureCapabilities.Insert(i, cap); + } + + _lastUsedDeviceNameLength = strlen((char*) deviceUniqueIdUTF8); + _lastUsedDeviceName = (char*) realloc(_lastUsedDeviceName, + _lastUsedDeviceNameLength + 1); + memcpy(_lastUsedDeviceName, + deviceUniqueIdUTF8, + _lastUsedDeviceNameLength + 1); + + VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); + WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, + "CreateCapabilityMap %d", _captureCapabilities.Size()); + + return _captureCapabilities.Size(); +} + +WebRtc_Word32 DeviceInfoAndroid::GetOrientation( + const char* deviceUniqueIdUTF8, + VideoCaptureRotation& orientation) +{ + JNIEnv *env; + jclass javaCmDevInfoClass; + jobject javaCmDevInfoObject; + bool attached = false; + if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( + env, + javaCmDevInfoClass, + 
javaCmDevInfoObject, + attached) != 0) { + return -1; + } + + // get the method ID for the Android Java GetOrientation . + jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetOrientation", + "(Ljava/lang/String;)I"); + if (cid == NULL) { + VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, + "%s: Can't find method GetOrientation.", __FUNCTION__); + return -1; + } + // Create a jstring so we can pass the deviceUniquName to the java method. + jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8); + if (capureIdString == NULL) { + VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, + "%s: Can't create string for method GetCapabilityArray.", + __FUNCTION__); + return -1; + } + // Call the java class and get the orientation. + jint jorientation = env->CallIntMethod(javaCmDevInfoObject, cid, + capureIdString); + VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); + + WebRtc_Word32 retValue = 0; + switch (jorientation) { + case -1: //Error + orientation = kCameraRotate0; + retValue = -1; + break; + case 0: + orientation = kCameraRotate0; + break; + case 90: + orientation = kCameraRotate90; + break; + case 180: + orientation = kCameraRotate180; + break; + case 270: + orientation = kCameraRotate270; + break; + case 360: + orientation = kCameraRotate0; + break; + } + return retValue; +} + +} // namespace videocapturemodule +} // namespace webrtc diff --git a/src/modules/video_capture/main/source/Android/device_info_android.h b/src/modules/video_capture/main/source/android/device_info_android.h similarity index 91% rename from src/modules/video_capture/main/source/Android/device_info_android.h rename to src/modules/video_capture/main/source/android/device_info_android.h index 5125d960a..8e02b754a 100644 --- a/src/modules/video_capture/main/source/Android/device_info_android.h +++ b/src/modules/video_capture/main/source/android/device_info_android.h @@ -23,9 +23,11 @@ namespace webrtc namespace videocapturemodule { -// Android logging, uncomment to print trace to logcat instead of trace file/callback -//#include -//#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__) +// Android logging, uncomment to print trace to +// logcat instead of trace file/callback +// #include +// #define WEBRTC_TRACE(a,b,c,...) +// __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__) class DeviceInfoAndroid: public DeviceInfoImpl { diff --git a/src/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java b/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java similarity index 88% rename from src/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java rename to src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java index 044395367..0cfe45730 100644 --- a/src/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java +++ b/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 
* * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source diff --git a/src/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/VideoCaptureAndroid.java b/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java similarity index 99% rename from src/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/VideoCaptureAndroid.java rename to src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java index a4c39a8c2..102916288 100644 --- a/src/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/VideoCaptureAndroid.java +++ b/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source diff --git a/src/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java b/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java similarity index 99% rename from src/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java rename to src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java index 4ccf06013..e8f63f041 100644 --- a/src/modules/video_capture/main/source/Android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java +++ b/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source diff --git a/src/modules/video_capture/main/source/Android/video_capture_android.cc b/src/modules/video_capture/main/source/android/video_capture_android.cc similarity index 88% rename from src/modules/video_capture/main/source/Android/video_capture_android.cc rename to src/modules/video_capture/main/source/android/video_capture_android.cc index 7694716df..f73837dc9 100644 --- a/src/modules/video_capture/main/source/Android/video_capture_android.cc +++ b/src/modules/video_capture/main/source/android/video_capture_android.cc @@ -35,15 +35,20 @@ VideoCaptureModule* VideoCaptureImpl::Create( return implementation; } -// Android logging, uncomment to print trace to logcat instead of trace file/callback -//#include -//#undef WEBRTC_TRACE -//#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__) +// Android logging, uncomment to print trace to +// logcat instead of trace file/callback +// #include +// #undef WEBRTC_TRACE +// #define WEBRTC_TRACE(a,b,c,...) 
+// __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__) JavaVM* VideoCaptureAndroid::g_jvm = NULL; -jclass VideoCaptureAndroid::g_javaCmClass = NULL; //VideoCaptureAndroid.java -jclass VideoCaptureAndroid::g_javaCmDevInfoClass = NULL; //VideoCaptureDeviceInfoAndroid.java -jobject VideoCaptureAndroid::g_javaCmDevInfoObject = NULL; //static instance of VideoCaptureDeviceInfoAndroid.java +//VideoCaptureAndroid.java +jclass VideoCaptureAndroid::g_javaCmClass = NULL; +//VideoCaptureDeviceInfoAndroid.java +jclass VideoCaptureAndroid::g_javaCmDevInfoClass = NULL; +//static instance of VideoCaptureDeviceInfoAndroid.java +jobject VideoCaptureAndroid::g_javaCmDevInfoObject = NULL; jobject VideoCaptureAndroid::g_javaContext = NULL; /* @@ -73,7 +78,8 @@ WebRtc_Word32 VideoCaptureAndroid::SetAndroidObjects(void* javaVM, "%s: could not find java class", __FUNCTION__); return -1; } - // create a global reference to the class (to tell JNI that we are referencing it + // create a global reference to the class + // (to tell JNI that we are referencing it // after this function has returned) g_javaCmClass = static_cast (env->NewGlobalRef(javaCmClassLocal)); @@ -112,7 +118,8 @@ WebRtc_Word32 VideoCaptureAndroid::SetAndroidObjects(void* javaVM, return -1; } - // create a global reference to the class (to tell JNI that we are referencing it + // create a global reference to the class + // (to tell JNI that we are referencing it // after this function has returned) g_javaCmDevInfoClass = static_cast (env->NewGlobalRef(javaCmDevInfoClassLocal)); @@ -132,14 +139,16 @@ WebRtc_Word32 VideoCaptureAndroid::SetAndroidObjects(void* javaVM, // get the method ID for the Android Java CaptureClass static //CreateVideoCaptureAndroid factory method. - jmethodID cid = env->GetStaticMethodID(g_javaCmDevInfoClass, - "CreateVideoCaptureDeviceInfoAndroid", - "(ILandroid/content/Context;)" - "Lorg/webrtc/videoengine/VideoCaptureDeviceInfoAndroid;"); + jmethodID cid = env->GetStaticMethodID( + g_javaCmDevInfoClass, + "CreateVideoCaptureDeviceInfoAndroid", + "(ILandroid/content/Context;)" + "Lorg/webrtc/videoengine/VideoCaptureDeviceInfoAndroid;"); if (cid == NULL) { WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: could not get java VideoCaptureDeviceInfoAndroid constructor ID", + "%s: could not get java" + "VideoCaptureDeviceInfoAndroid constructor ID", __FUNCTION__); return -1; } @@ -148,10 +157,10 @@ WebRtc_Word32 VideoCaptureAndroid::SetAndroidObjects(void* javaVM, "%s: construct static java device object", __FUNCTION__); // construct the object by calling the static constructor object - jobject javaCameraDeviceInfoObjLocal = env->CallStaticObjectMethod( - g_javaCmDevInfoClass, - cid, (int) -1, - g_javaContext); + jobject javaCameraDeviceInfoObjLocal = + env->CallStaticObjectMethod(g_javaCmDevInfoClass, + cid, (int) -1, + g_javaContext); if (!javaCameraDeviceInfoObjLocal) { WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1, @@ -159,13 +168,16 @@ WebRtc_Word32 VideoCaptureAndroid::SetAndroidObjects(void* javaVM, __FUNCTION__); return -1; } - // create a reference to the object (to tell JNI that we are referencing it - // after this function has returned) + // create a reference to the object (to tell JNI that + // we are referencing it after this function has returned) g_javaCmDevInfoObject = env->NewGlobalRef(javaCameraDeviceInfoObjLocal); if (!g_javaCmDevInfoObject) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioDevice, -1, - "%s: could not create Java 
cameradevinceinfo object reference", + WEBRTC_TRACE(webrtc::kTraceError, + webrtc::kTraceAudioDevice, + -1, + "%s: could not create Java" + "cameradevinceinfo object reference", __FUNCTION__); return -1; } @@ -216,10 +228,10 @@ WebRtc_Word32 VideoCaptureAndroid::SetAndroidObjects(void* javaVM, } WebRtc_Word32 VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( - JNIEnv*& env, - jclass& javaCmDevInfoClass, - jobject& javaCmDevInfoObject, - bool& attached) + JNIEnv*& env, + jclass& javaCmDevInfoClass, + jobject& javaCmDevInfoObject, + bool& attached) { // get the JNI env for this thread if (!g_jvm) @@ -250,19 +262,19 @@ WebRtc_Word32 VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( } -WebRtc_Word32 VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(bool attached) -{ - if (attached && g_jvm->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1, - "%s: Could not detach thread from JVM", __FUNCTION__); - return -1; - } - return 0; +WebRtc_Word32 VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects( + bool attached) { + if (attached && g_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1, + "%s: Could not detach thread from JVM", __FUNCTION__); + return -1; + } + return 0; } /* - * JNI callback from Java class. Called when the camera has a new frame to deliver + * JNI callback from Java class. Called + * when the camera has a new frame to deliver * Class: org_webrtc_capturemodule_VideoCaptureAndroid * Method: ProvideCameraFrame * Signature: ([BIJ)V @@ -273,7 +285,8 @@ void JNICALL VideoCaptureAndroid::ProvideCameraFrame(JNIEnv * env, jint length, jlong context) { - VideoCaptureAndroid* captureModule=reinterpret_cast(context); + VideoCaptureAndroid* captureModule = + reinterpret_cast(context); WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, -1, "%s: IncomingFrame %d", __FUNCTION__,length); jbyte* cameraFrame= env->GetByteArrayElements(javaCameraFrame,NULL); @@ -313,8 +326,11 @@ WebRtc_Word32 VideoCaptureAndroid::Init(const WebRtc_Word32 id, if (_capInfo.Init() != 0) { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Failed to initialize CaptureDeviceInfo", __FUNCTION__); + WEBRTC_TRACE(webrtc::kTraceError, + webrtc::kTraceVideoCapture, + _id, + "%s: Failed to initialize CaptureDeviceInfo", + __FUNCTION__); return -1; } @@ -350,7 +366,8 @@ WebRtc_Word32 VideoCaptureAndroid::Init(const WebRtc_Word32 id, WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id, "get method id"); - // get the method ID for the Android Java CaptureDeviceInfoClass AllocateCamera factory method. + // get the method ID for the Android Java + // CaptureDeviceInfoClass AllocateCamera factory method. char signature[256]; sprintf(signature, "(IJLjava/lang/String;)L%s;", AndroidJavaCaptureClass); @@ -438,9 +455,10 @@ VideoCaptureAndroid::~VideoCaptureAndroid() // get the method ID for the Android Java CaptureClass static // DeleteVideoCaptureAndroid method. Call this to release the camera so // another application can use it. 
- jmethodID cid = env->GetStaticMethodID(g_javaCmClass, - "DeleteVideoCaptureAndroid", - "(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V"); + jmethodID cid = env->GetStaticMethodID( + g_javaCmClass, + "DeleteVideoCaptureAndroid", + "(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V"); if (cid != NULL) { WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, @@ -473,7 +491,7 @@ VideoCaptureAndroid::~VideoCaptureAndroid() } WebRtc_Word32 VideoCaptureAndroid::StartCapture( - const VideoCaptureCapability& capability) + const VideoCaptureCapability& capability) { CriticalSectionScoped cs(&_apiCs); WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, @@ -516,7 +534,8 @@ WebRtc_Word32 VideoCaptureAndroid::StartCapture( "%s: _frameInfo w%d h%d", __FUNCTION__, _frameInfo.width, _frameInfo.height); - // get the method ID for the Android Java CaptureClass static StartCapture method. + // get the method ID for the Android Java + // CaptureClass static StartCapture method. jmethodID cid = env->GetMethodID(g_javaCmClass, "StartCapture", "(III)I"); if (cid != NULL) { diff --git a/src/modules/video_capture/main/source/Android/video_capture_android.h b/src/modules/video_capture/main/source/android/video_capture_android.h similarity index 92% rename from src/modules/video_capture/main/source/Android/video_capture_android.h rename to src/modules/video_capture/main/source/android/video_capture_android.h index 1ea70cfd2..fc72323bc 100644 --- a/src/modules/video_capture/main/source/Android/video_capture_android.h +++ b/src/modules/video_capture/main/source/android/video_capture_android.h @@ -36,7 +36,8 @@ public: const char* deviceUniqueIdUTF8); - virtual WebRtc_Word32 StartCapture(const VideoCaptureCapability& capability); + virtual WebRtc_Word32 StartCapture( + const VideoCaptureCapability& capability); virtual WebRtc_Word32 StopCapture(); virtual bool CaptureStarted(); virtual WebRtc_Word32 CaptureSettings(VideoCaptureCapability& settings); @@ -56,7 +57,8 @@ protected: static JavaVM* g_jvm; static jclass g_javaCmClass; static jclass g_javaCmDevInfoClass; - static jobject g_javaCmDevInfoObject; //Static java object implementing the needed device info functions; + //Static java object implementing the needed device info functions; + static jobject g_javaCmDevInfoObject; static jobject g_javaContext; // Java Application context }; } // namespace videocapturemodule diff --git a/src/modules/video_capture/main/source/video_capture.gypi b/src/modules/video_capture/main/source/video_capture.gypi index de696aeb9..99a4937fc 100644 --- a/src/modules/video_capture/main/source/video_capture.gypi +++ b/src/modules/video_capture/main/source/video_capture.gypi @@ -186,13 +186,13 @@ }], # win ['OS=="android"', { 'include_dirs': [ - 'Android', + 'android', ], 'sources': [ - 'Android/device_info_android.cc', - 'Android/device_info_android.h', - 'Android/video_capture_android.cc', - 'Android/video_capture_android.h', + 'android/device_info_android.cc', + 'android/device_info_android.h', + 'android/video_capture_android.cc', + 'android/video_capture_android.h', ], }], # android ], # conditions diff --git a/src/modules/video_render/main/source/Android.mk b/src/modules/video_render/main/source/Android.mk index 73eec93c3..c11a89c0b 100644 --- a/src/modules/video_render/main/source/Android.mk +++ b/src/modules/video_render/main/source/Android.mk @@ -1,4 +1,4 @@ -# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. +# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 
# # Use of this source code is governed by a BSD-style license # that can be found in the LICENSE file in the root of the source @@ -21,10 +21,10 @@ LOCAL_SRC_FILES := \ video_render_frames.cc \ video_render_impl.cc \ external/video_render_external_impl.cc \ - Android/video_render_android_impl.cc \ - Android/video_render_android_native_opengl2.cc \ - Android/video_render_android_surface_view.cc \ - Android/video_render_opengles20.cc + android/video_render_android_impl.cc \ + android/video_render_android_native_opengl2.cc \ + android/video_render_android_surface_view.cc \ + android/video_render_opengles20.cc # Flags passed to both C and C++ files. LOCAL_CFLAGS := \ @@ -33,7 +33,7 @@ LOCAL_CFLAGS := \ LOCAL_C_INCLUDES := \ $(LOCAL_PATH) \ - $(LOCAL_PATH)/Android \ + $(LOCAL_PATH)/android \ $(LOCAL_PATH)/../interface \ $(LOCAL_PATH)/../../../.. \ $(LOCAL_PATH)/../../../audio_coding/main/interface \ diff --git a/src/modules/video_render/main/source/Android/video_render_android_impl.h b/src/modules/video_render/main/source/Android/video_render_android_impl.h deleted file mode 100644 index 705887126..000000000 --- a/src/modules/video_render/main/source/Android/video_render_android_impl.h +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_ -#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_ - -#include -#include "i_video_render.h" -#include "map_wrapper.h" - - -namespace webrtc { - -//#define ANDROID_LOG - - -class CriticalSectionWrapper; -class EventWrapper; -class ThreadWrapper; - - -// The object a module user uses to send new frames to the java renderer -// Base class for android render streams. - -class AndroidStream: public VideoRenderCallback -{ -public: - /* - * DeliverFrame is called from a thread connected to the Java VM. - * Used for Delivering frame for rendering. 
- */ - virtual void DeliverFrame(JNIEnv* jniEnv)=0; - - virtual ~AndroidStream() - { - }; -}; - -class VideoRenderAndroid: IVideoRender -{ -public: - static WebRtc_Word32 SetAndroidEnvVariables(void* javaVM); - - VideoRenderAndroid(const WebRtc_Word32 id, - const VideoRenderType videoRenderType, void* window, - const bool fullscreen); - - virtual ~VideoRenderAndroid(); - - virtual WebRtc_Word32 Init()=0; - - virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id); - - virtual WebRtc_Word32 ChangeWindow(void* window); - - virtual VideoRenderCallback - * AddIncomingRenderStream(const WebRtc_UWord32 streamId, - const WebRtc_UWord32 zOrder, - const float left, const float top, - const float right, const float bottom); - - virtual WebRtc_Word32 - DeleteIncomingRenderStream(const WebRtc_UWord32 streamId); - - virtual WebRtc_Word32 - GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId, - WebRtc_UWord32& zOrder, - float& left, float& top, - float& right, float& bottom) const; - virtual WebRtc_Word32 StartRender(); - - virtual WebRtc_Word32 StopRender(); - - virtual void ReDraw(); - - /************************************************************************** - * - * Properties - * - ***************************************************************************/ - - virtual VideoRenderType RenderType(); - - virtual RawVideoType PerferedVideoType(); - - virtual bool FullScreen(); - - virtual WebRtc_Word32 - GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory, - WebRtc_UWord64& availableGraphicsMemory) const; - - virtual WebRtc_Word32 - GetScreenResolution(WebRtc_UWord32& screenWidth, - WebRtc_UWord32& screenHeight) const; - - virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId); - - virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId, - const float left, const float top, - const float right, - const float bottom); - - virtual WebRtc_Word32 SetTransparentBackground(const bool enable); - - virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId, - const unsigned int zOrder, - const float left, const float top, - const float right, - const float bottom); - - virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId, - const WebRtc_UWord8* text, - const WebRtc_Word32 textLength, - const WebRtc_UWord32 textColorRef, - const WebRtc_UWord32 backgroundColorRef, - const float left, const float top, - const float rigth, const float bottom); - - virtual WebRtc_Word32 SetBitmap(const void* bitMap, - const WebRtc_UWord8 pictureId, - const void* colorKey, const float left, - const float top, const float right, - const float bottom); - -protected: - virtual AndroidStream - * CreateAndroidRenderChannel(WebRtc_Word32 streamId, - WebRtc_Word32 zOrder, - const float left, const float top, - const float right, const float bottom, - VideoRenderAndroid& renderer) = 0; - - WebRtc_Word32 _id; - CriticalSectionWrapper& _critSect; - VideoRenderType _renderType; - jobject _ptrWindow; - - static JavaVM* g_jvm; - -private: - static bool JavaRenderThreadFun(void* obj); - bool JavaRenderThreadProcess(); - - MapWrapper _streamsMap; // Map with streams to render. - bool _javaShutDownFlag; // True if the _javaRenderThread thread shall be detached from the JVM. - EventWrapper& _javaShutdownEvent; - EventWrapper& _javaRenderEvent; - WebRtc_Word64 _lastJavaRenderEvent; - JNIEnv* _javaRenderJniEnv; // JNIEnv for the java render thread. 
- ThreadWrapper* _javaRenderThread; -}; - -} //namespace webrtc - -#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_ diff --git a/src/modules/video_render/main/source/Android/video_render_android_native_opengl2.h b/src/modules/video_render/main/source/Android/video_render_android_native_opengl2.h deleted file mode 100644 index 54532a6bc..000000000 --- a/src/modules/video_render/main/source/Android/video_render_android_native_opengl2.h +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_ -#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_ - -#include - -#include "video_render_defines.h" - -#include "video_render_android_impl.h" -#include "video_render_opengles20.h" - -namespace webrtc { -class CriticalSectionWrapper; - -class AndroidNativeOpenGl2Channel: public AndroidStream -{ - -public: - AndroidNativeOpenGl2Channel(WebRtc_UWord32 streamId,JavaVM* jvm,VideoRenderAndroid& renderer,jobject javaRenderObj); - ~AndroidNativeOpenGl2Channel(); - - WebRtc_Word32 Init(WebRtc_Word32 zOrder, - const float left, - const float top, - const float right, - const float bottom); - - //Implement VideoRenderCallback - virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, VideoFrame& videoFrame); - - //Implements AndroidStream - virtual void DeliverFrame(JNIEnv* jniEnv); - -private: - static jint CreateOpenGLNativeStatic(JNIEnv * env,jobject, jlong context, jint width, jint height); - jint CreateOpenGLNative(int width, int height); - - static void DrawNativeStatic(JNIEnv * env,jobject, jlong context); - void DrawNative(); - WebRtc_UWord32 _id; - CriticalSectionWrapper& _renderCritSect; - - VideoFrame _bufferToRender; - VideoRenderAndroid& _renderer; - JavaVM* _jvm; - jobject _javaRenderObj; - - jmethodID _redrawCid; - jmethodID _registerNativeCID; - jmethodID _deRegisterNativeCID; - VideoRenderOpenGles20 _openGLRenderer; - -}; - - -class AndroidNativeOpenGl2Renderer: private VideoRenderAndroid -{ -public: - AndroidNativeOpenGl2Renderer(const WebRtc_Word32 id, - const VideoRenderType videoRenderType, - void* window, - const bool fullscreen); - - ~AndroidNativeOpenGl2Renderer(); - static bool UseOpenGL2(void* window); - - WebRtc_Word32 Init(); - virtual AndroidStream* CreateAndroidRenderChannel(WebRtc_Word32 streamId, - WebRtc_Word32 zOrder, - const float left, - const float top, - const float right, - const float bottom, - VideoRenderAndroid& renderer); - -private: - jobject _javaRenderObj; - jclass _javaRenderClass; - -}; - -} //namespace webrtc - -#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_ diff --git a/src/modules/video_render/main/source/Android/video_render_android_surface_view.cc b/src/modules/video_render/main/source/Android/video_render_android_surface_view.cc deleted file mode 100644 index 253d831ee..000000000 --- a/src/modules/video_render/main/source/Android/video_render_android_surface_view.cc +++ /dev/null @@ -1,470 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project 
authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "video_render_android_surface_view.h" -#include "critical_section_wrapper.h" -#include "common_video/libyuv/include/libyuv.h" -#include "tick_util.h" -#ifdef ANDROID_NDK_8_OR_ABOVE - #include -#endif - - -#ifdef ANDROID_LOG -#include -#include - -#undef WEBRTC_TRACE -#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__) -#else -#include "trace.h" -#endif - -namespace webrtc { - -AndroidSurfaceViewRenderer::AndroidSurfaceViewRenderer(const WebRtc_Word32 id, - const VideoRenderType videoRenderType, - void* window, - const bool fullscreen) -: - VideoRenderAndroid(id,videoRenderType,window,fullscreen), - _javaRenderObj(NULL), - _javaRenderClass(NULL) -{ -} - -AndroidSurfaceViewRenderer::~AndroidSurfaceViewRenderer() -{ - WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "AndroidSurfaceViewRenderer dtor"); - if(g_jvm) - { - // get the JNI env for this thread - bool isAttached = false; - JNIEnv* env = NULL; - if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&env, NULL); - - // Get the JNI env for this thread - if ((res < 0) || !env) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env); - env=NULL; - } - else - { - isAttached = true; - } - } - env->DeleteGlobalRef(_javaRenderObj); - env->DeleteGlobalRef(_javaRenderClass); - - if (isAttached) - { - if (g_jvm->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__); - } - } - } -} - - -WebRtc_Word32 -AndroidSurfaceViewRenderer::Init() -{ - WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__); - if (!g_jvm) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "(%s): Not a valid Java VM pointer.", __FUNCTION__); - return -1; - } - if(!_ptrWindow) - { - WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "(%s): No window have been provided.", __FUNCTION__); - return -1; - } - - // get the JNI env for this thread - bool isAttached = false; - JNIEnv* env = NULL; - if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&env, NULL); - - // Get the JNI env for this thread - if ((res < 0) || !env) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env); - return -1; - } - isAttached = true; - } - - // get the ViESurfaceRender class - jclass javaRenderClassLocal = env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer"); - if (!javaRenderClassLocal) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not find ViESurfaceRenderer", __FUNCTION__); - return -1; - } - - // create a global reference to the class (to tell JNI that we are referencing it after this function has returned) - _javaRenderClass = reinterpret_cast(env->NewGlobalRef(javaRenderClassLocal)); - if (!_javaRenderClass) - { - WEBRTC_TRACE(kTraceError, 
kTraceVideoRenderer, _id, "%s: could not create Java ViESurfaceRenderer class reference", __FUNCTION__); - return -1; - } - - // Delete local class ref, we only use the global ref - env->DeleteLocalRef(javaRenderClassLocal); - - // get the method ID for the constructor - jmethodID cid = env->GetMethodID(_javaRenderClass, "", "(Landroid/view/SurfaceView;)V"); - if (cid == NULL) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get constructor ID", __FUNCTION__); - return -1; /* exception thrown */ - } - - // construct the object - jobject javaRenderObjLocal = env->NewObject(_javaRenderClass, cid, _ptrWindow); - if (!javaRenderObjLocal) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not create Java Render", __FUNCTION__); - return -1; - } - - // create a reference to the object (to tell JNI that we are referencing it - // after this function has returned) - _javaRenderObj = env->NewGlobalRef(javaRenderObjLocal); - if (!_javaRenderObj) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not create Java SurfaceRender object reference", __FUNCTION__); - return -1; - } - - // Detach this thread if it was attached - if (isAttached) - { - if (g_jvm->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__); - } - } - - WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done", __FUNCTION__); - return 0; - -} -AndroidStream* -AndroidSurfaceViewRenderer::CreateAndroidRenderChannel(WebRtc_Word32 streamId, - WebRtc_Word32 zOrder, - const float left, - const float top, - const float right, - const float bottom, - VideoRenderAndroid& renderer) -{ - WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d", __FUNCTION__,streamId); - AndroidSurfaceViewChannel* stream=new AndroidSurfaceViewChannel(streamId,g_jvm,renderer,_javaRenderObj); - if(stream && stream->Init(zOrder,left,top,right,bottom)==0) - return stream; - else - delete stream; - return NULL; -} - - - - - - -AndroidSurfaceViewChannel::AndroidSurfaceViewChannel(WebRtc_UWord32 streamId,JavaVM* jvm,VideoRenderAndroid& renderer,jobject javaRenderObj) -: -_id(streamId), -_renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()), -_renderer(renderer), -_jvm(jvm), -_javaRenderObj(javaRenderObj), -_bitmapWidth(0), -_bitmapHeight(0) -{ - -} -AndroidSurfaceViewChannel::~AndroidSurfaceViewChannel() -{ - WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "AndroidSurfaceViewChannel dtor"); - delete &_renderCritSect; - if(_jvm) - { - // get the JNI env for this thread - bool isAttached = false; - JNIEnv* env = NULL; - if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = _jvm->AttachCurrentThread(&env, NULL); - - // Get the JNI env for this thread - if ((res < 0) || !env) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env); - env=NULL; - } - else - { - isAttached = true; - } - } - -#ifdef ANDROID_NDK_8_OR_ABOVE - env->DeleteGlobalRef(_javaBitmapObj); -#else - env->DeleteGlobalRef(_javaByteBufferObj); -#endif - if (isAttached) - { - if (_jvm->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__); - } - } - } -} - -WebRtc_Word32 -AndroidSurfaceViewChannel::Init(WebRtc_Word32 /*zOrder*/, - const float left, - const float top, - const float 
right, - const float bottom) -{ - - WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: AndroidSurfaceViewChannel", __FUNCTION__); - if (!_jvm) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer,_id, "%s: Not a valid Java VM pointer", __FUNCTION__); - return -1; - } - - if((top>1 || top<0) || (right>1 || right<0) || (bottom>1 || bottom<0) || (left>1 || left<0)) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Wrong coordinates", - __FUNCTION__); - return -1; - } - - - // get the JNI env for this thread - bool isAttached = false; - JNIEnv* env = NULL; - if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = _jvm->AttachCurrentThread(&env, NULL); - - // Get the JNI env for this thread - if ((res < 0) || !env) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not attach thread to JVM (%d, %p)", __FUNCTION__, res, env); - return -1; - } - isAttached = true; - } - - jclass javaRenderClass = env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer"); - if (!javaRenderClass) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not find ViESurfaceRenderer", __FUNCTION__); - return -1; - } -#ifdef ANDROID_NDK_8_OR_ABOVE - // get the method ID for the CreateBitmap - _createBitmapCid = env->GetMethodID(_javaRenderClass, "CreateBitmap", "(II)Landroid/graphics/Bitmap;"); - if (_createBitmapCid == NULL) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get CreateBitmap ID", __FUNCTION__); - return -1; /* exception thrown */ - } - // get the method ID for the DrawBitmap function - _drawBitmapCid = env->GetMethodID(_javaRenderClass, "DrawBitmap", "()V"); - if (_drawBitmapCid == NULL) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get DrawBitmap ID", __FUNCTION__); - return -1; /* exception thrown */ - } -#else - // get the method ID for the CreateIntArray - _createByteBufferCid = env->GetMethodID(javaRenderClass, "CreateByteBuffer", "(II)Ljava/nio/ByteBuffer;"); - if (_createByteBufferCid == NULL) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get CreateByteBuffer ID", __FUNCTION__); - return -1; /* exception thrown */ - } - - // get the method ID for the DrawByteBuffer function - _drawByteBufferCid = env->GetMethodID(javaRenderClass, "DrawByteBuffer", "()V"); - if (_drawByteBufferCid == NULL) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get DrawByteBuffer ID", __FUNCTION__); - return -1; /* exception thrown */ - } -#endif - - // get the method ID for the SetCoordinates function - _setCoordinatesCid = env->GetMethodID(javaRenderClass, "SetCoordinates", "(FFFF)V"); - if (_setCoordinatesCid == NULL) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not get SetCoordinates ID", __FUNCTION__); - return -1; /* exception thrown */ - } - - env->CallVoidMethod(_javaRenderObj,_setCoordinatesCid,left,top,right,bottom); - - // Detach this thread if it was attached - if (isAttached) - { - if (_jvm->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s: Could not detach thread from JVM", __FUNCTION__); - } - } - - - WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: AndroidSurfaceViewChannel done", __FUNCTION__); - return 0; -} - - -WebRtc_Word32 AndroidSurfaceViewChannel::RenderFrame(const WebRtc_UWord32 /*streamId*/, VideoFrame& videoFrame) -{ - // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" 
,__FUNCTION__); - _renderCritSect.Enter(); - _bufferToRender.SwapFrame(videoFrame); - _renderCritSect.Leave(); - _renderer.ReDraw(); - return 0; -} - - -/*Implements AndroidStream - * Calls the Java object and render the buffer in _bufferToRender - */ -void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) { - _renderCritSect.Enter(); - -#ifdef ANDROID_NDK_8_OR_ABOVE - if (_bitmapWidth != _bufferToRender.Width() || - _bitmapHeight != _bufferToRender.Height()) { - // Create the bitmap to write to - WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Creating bitmap %u " - "%u", __FUNCTION__, _bufferToRender.Width(), - _bufferToRender.Height()); - if (_javaBitmapObj) { - jniEnv->DeleteGlobalRef(_javaBitmapObj); - _javaBitmapObj = NULL; - } - jobject javaBitmap = jniEnv->CallObjectMethod(_javaRenderObj, - _createBitmapCid, - videoFrame.Width(), - videoFrame.Height()); - _javaBitmapObj = jniEnv->NewGlobalRef(javaBitmap); - if (!_javaBitmapObj) { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not " - "create Java Bitmap object reference", __FUNCTION__); - _renderCritSect.Leave(); - return; - } else { - _bitmapWidth=_bufferToRender.Width(); - _bitmapHeight=_bufferToRender.Height(); - } - } - void* pixels; - if (_javaBitmapObj && - AndroidBitmap_lockPixels(jniEnv, _javaBitmapObj, &pixels) >= 0) { - WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Locked bitmap", - __FUNCTION__); - // Convert I420 straight into the Java bitmap. - int ret = ConvertI420ToRGB565((unsigned char* )_bufferToRender.Buffer(), - (unsigned char* ) pixels, - _bitmapWidth, _bitmapHeight); - if (ret < 0) { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion " - "failed.", __FUNCTION__); - } - - AndroidBitmap_unlockPixels(jniEnv, _javaBitmapObj); - // Draw the Surface. - jniEnv->CallVoidMethod(_javaRenderObj,_drawCid); - - } else { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not lock " - "bitmap", __FUNCTION__); - } - _renderCritSect.Leave(); - -#else - if (_bitmapWidth != _bufferToRender.Width() || - _bitmapHeight != _bufferToRender.Height()) { - WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: New render size %d " - "%d",__FUNCTION__, - _bufferToRender.Width(), _bufferToRender.Height()); - if (_javaByteBufferObj) { - jniEnv->DeleteGlobalRef(_javaByteBufferObj); - _javaByteBufferObj = NULL; - _directBuffer = NULL; - } - jobject javaByteBufferObj = - jniEnv->CallObjectMethod(_javaRenderObj, _createByteBufferCid, - _bufferToRender.Width(), - _bufferToRender.Height()); - _javaByteBufferObj = jniEnv->NewGlobalRef(javaByteBufferObj); - if (!_javaByteBufferObj) { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not " - "create Java ByteBuffer object reference", __FUNCTION__); - _renderCritSect.Leave(); - return; - } else { - _directBuffer = static_cast - (jniEnv->GetDirectBufferAddress(_javaByteBufferObj)); - _bitmapWidth = _bufferToRender.Width(); - _bitmapHeight = _bufferToRender.Height(); - } - } - - if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight) { - // Android requires a vertically flipped image compared to std convert. - // This is done by giving a negative height input. 
- const int conversionResult = - ConvertI420ToRGB565((unsigned char* )_bufferToRender.Buffer(), - _directBuffer, _bitmapWidth, -_bitmapHeight); - if (conversionResult < 0) { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion" - " failed.", __FUNCTION__); - _renderCritSect.Leave(); - return; - } - } - _renderCritSect.Leave(); - // Draw the Surface - jniEnv->CallVoidMethod(_javaRenderObj, _drawByteBufferCid); -#endif -} - -} // namespace webrtc - diff --git a/src/modules/video_render/main/source/Android/video_render_android_surface_view.h b/src/modules/video_render/main/source/Android/video_render_android_surface_view.h deleted file mode 100644 index f55e60bf6..000000000 --- a/src/modules/video_render/main/source/Android/video_render_android_surface_view.h +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_ -#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_ - -#include - -#include "video_render_defines.h" - -#include "video_render_android_impl.h" - -namespace webrtc { -class CriticalSectionWrapper; - - -class AndroidSurfaceViewChannel: public AndroidStream -{ - -public: - AndroidSurfaceViewChannel(WebRtc_UWord32 streamId, - JavaVM* jvm, - VideoRenderAndroid& renderer, - jobject javaRenderObj); - ~AndroidSurfaceViewChannel(); - - WebRtc_Word32 Init(WebRtc_Word32 zOrder, - const float left, - const float top, - const float right, - const float bottom); - - //Implement VideoRenderCallback - virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, - VideoFrame& videoFrame); - - //Implements AndroidStream - virtual void DeliverFrame(JNIEnv* jniEnv); - -private: - WebRtc_UWord32 _id; - CriticalSectionWrapper& _renderCritSect; - - VideoFrame _bufferToRender; - VideoRenderAndroid& _renderer; - JavaVM* _jvm; - jobject _javaRenderObj; - -#ifdef ANDROID_NDK_8_OR_ABOVE - jclass _javaBitmapClass; - jmethodID _createBitmapCid; - jobject _javaBitmapObj; - jmethodID _drawBitmapCid; -#else - jobject _javaByteBufferObj; - unsigned char* _directBuffer; - jmethodID _createByteBufferCid; - jmethodID _drawByteBufferCid; -#endif - jmethodID _setCoordinatesCid; - unsigned int _bitmapWidth; - unsigned int _bitmapHeight; -}; - -class AndroidSurfaceViewRenderer: private VideoRenderAndroid -{ -public: - AndroidSurfaceViewRenderer(const WebRtc_Word32 id, - const VideoRenderType videoRenderType, - void* window, - const bool fullscreen); - ~AndroidSurfaceViewRenderer(); - WebRtc_Word32 Init(); - virtual AndroidStream* CreateAndroidRenderChannel(WebRtc_Word32 streamId, - WebRtc_Word32 zOrder, - const float left, - const float top, - const float right, - const float bottom, - VideoRenderAndroid& renderer); -private: - jobject _javaRenderObj; - jclass _javaRenderClass; - -}; - -} //namespace webrtc - -#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_ diff --git a/src/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViEAndroidGLES20.java 
b/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViEAndroidGLES20.java similarity index 99% rename from src/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViEAndroidGLES20.java rename to src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViEAndroidGLES20.java index fc306071a..73cf251cc 100644 --- a/src/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViEAndroidGLES20.java +++ b/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViEAndroidGLES20.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source diff --git a/src/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViERenderer.java b/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViERenderer.java similarity index 96% rename from src/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViERenderer.java rename to src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViERenderer.java index 56d5261c9..6d8744129 100644 --- a/src/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViERenderer.java +++ b/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViERenderer.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source diff --git a/src/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViESurfaceRenderer.java b/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViESurfaceRenderer.java similarity index 98% rename from src/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViESurfaceRenderer.java rename to src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViESurfaceRenderer.java index 341258298..9ae4b8b9c 100644 --- a/src/modules/video_render/main/source/Android/java/org/webrtc/videoengine/ViESurfaceRenderer.java +++ b/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViESurfaceRenderer.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 
* * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source diff --git a/src/modules/video_render/main/source/Android/video_render_android_impl.cc b/src/modules/video_render/main/source/android/video_render_android_impl.cc similarity index 57% rename from src/modules/video_render/main/source/Android/video_render_android_impl.cc rename to src/modules/video_render/main/source/android/video_render_android_impl.cc index 8df91d298..2c40fa54c 100644 --- a/src/modules/video_render/main/source/Android/video_render_android_impl.cc +++ b/src/modules/video_render/main/source/android/video_render_android_impl.cc @@ -39,10 +39,10 @@ WebRtc_Word32 VideoRenderAndroid::SetAndroidEnvVariables(void* javaVM) } VideoRenderAndroid::VideoRenderAndroid( - const WebRtc_Word32 id, - const VideoRenderType videoRenderType, - void* window, - const bool /*fullscreen*/): + const WebRtc_Word32 id, + const VideoRenderType videoRenderType, + void* window, + const bool /*fullscreen*/): _id(id), _critSect(*CriticalSectionWrapper::CreateCriticalSection()), _renderType(videoRenderType), @@ -127,7 +127,7 @@ VideoRenderAndroid::AddIncomingRenderStream(const WebRtc_UWord32 streamId, } WebRtc_Word32 VideoRenderAndroid::DeleteIncomingRenderStream( - const WebRtc_UWord32 streamId) + const WebRtc_UWord32 streamId) { CriticalSectionScoped cs(&_critSect); @@ -147,53 +147,47 @@ WebRtc_Word32 VideoRenderAndroid::DeleteIncomingRenderStream( } WebRtc_Word32 VideoRenderAndroid::GetIncomingRenderStreamProperties( - const WebRtc_UWord32 streamId, - WebRtc_UWord32& zOrder, - float& left, - float& top, - float& right, - float& bottom) const -{ - - return -1; + const WebRtc_UWord32 streamId, + WebRtc_UWord32& zOrder, + float& left, + float& top, + float& right, + float& bottom) const { + return -1; } -WebRtc_Word32 VideoRenderAndroid::StartRender() -{ - CriticalSectionScoped cs(&_critSect); +WebRtc_Word32 VideoRenderAndroid::StartRender() { + CriticalSectionScoped cs(&_critSect); - if (_javaRenderThread) - { - // StartRender is called when this stream should start render. - // However StopRender is not called when the streams stop rendering. Thus the the thread is only deleted when the renderer is removed. - WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, - "%s, Render thread already exist", __FUNCTION__); - return 0; - } - - _javaRenderThread = ThreadWrapper::CreateThread(JavaRenderThreadFun, this, - kRealtimePriority, - "AndroidRenderThread"); - if (!_javaRenderThread) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s: No thread", __FUNCTION__); - return -1; - } - - unsigned int tId = 0; - if (_javaRenderThread->Start(tId)) - { - WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, - "%s: thread started: %u", __FUNCTION__, tId); - } - else - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s: Could not start send thread", __FUNCTION__); - return -1; - } + if (_javaRenderThread) { + // StartRender is called when this stream should start render. + // However StopRender is not called when the streams stop rendering. + // Thus the the thread is only deleted when the renderer is removed. 
+ WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, + "%s, Render thread already exist", __FUNCTION__); return 0; + } + + _javaRenderThread = ThreadWrapper::CreateThread(JavaRenderThreadFun, this, + kRealtimePriority, + "AndroidRenderThread"); + if (!_javaRenderThread) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: No thread", __FUNCTION__); + return -1; + } + + unsigned int tId = 0; + if (_javaRenderThread->Start(tId)) { + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, + "%s: thread started: %u", __FUNCTION__, tId); + } + else { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Could not start send thread", __FUNCTION__); + return -1; + } + return 0; } WebRtc_Word32 VideoRenderAndroid::StopRender() @@ -228,19 +222,17 @@ WebRtc_Word32 VideoRenderAndroid::StopRender() return 0; } -void VideoRenderAndroid::ReDraw() -{ - CriticalSectionScoped cs(&_critSect); - if (_lastJavaRenderEvent < TickTime::MillisecondTimestamp() - 20) // Allow redraw if it was more than 20ms since last. - { - _lastJavaRenderEvent = TickTime::MillisecondTimestamp(); - _javaRenderEvent.Set(); - } +void VideoRenderAndroid::ReDraw() { + CriticalSectionScoped cs(&_critSect); + // Allow redraw if it was more than 20ms since last. + if (_lastJavaRenderEvent < TickTime::MillisecondTimestamp() - 20) { + _lastJavaRenderEvent = TickTime::MillisecondTimestamp(); + _javaRenderEvent.Set(); + } } -bool VideoRenderAndroid::JavaRenderThreadFun(void* obj) -{ - return static_cast (obj)->JavaRenderThreadProcess(); +bool VideoRenderAndroid::JavaRenderThreadFun(void* obj) { + return static_cast (obj)->JavaRenderThreadProcess(); } bool VideoRenderAndroid::JavaRenderThreadProcess() @@ -268,7 +260,7 @@ bool VideoRenderAndroid::JavaRenderThreadProcess() = _streamsMap.Next(item)) { static_cast (item->GetItem())->DeliverFrame( - _javaRenderJniEnv); + _javaRenderJniEnv); } if (_javaShutDownFlag) @@ -307,74 +299,68 @@ bool VideoRenderAndroid::FullScreen() } WebRtc_Word32 VideoRenderAndroid::GetGraphicsMemory( - WebRtc_UWord64& /*totalGraphicsMemory*/, - WebRtc_UWord64& /*availableGraphicsMemory*/) const -{ - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s - not supported on Android", __FUNCTION__); - return -1; + WebRtc_UWord64& /*totalGraphicsMemory*/, + WebRtc_UWord64& /*availableGraphicsMemory*/) const { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Android", __FUNCTION__); + return -1; } WebRtc_Word32 VideoRenderAndroid::GetScreenResolution( - WebRtc_UWord32& /*screenWidth*/, - WebRtc_UWord32& /*screenHeight*/) const -{ - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s - not supported on Android", __FUNCTION__); - return -1; + WebRtc_UWord32& /*screenWidth*/, + WebRtc_UWord32& /*screenHeight*/) const { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Android", __FUNCTION__); + return -1; } -WebRtc_UWord32 VideoRenderAndroid::RenderFrameRate(const WebRtc_UWord32 /*streamId*/) -{ - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s - not supported on Android", __FUNCTION__); - return -1; +WebRtc_UWord32 VideoRenderAndroid::RenderFrameRate( + const WebRtc_UWord32 /*streamId*/) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Android", __FUNCTION__); + return -1; } WebRtc_Word32 VideoRenderAndroid::SetStreamCropping( - const WebRtc_UWord32 /*streamId*/, - const float /*left*/, - const float /*top*/, - const float /*right*/, - const float /*bottom*/) -{ - WEBRTC_TRACE(kTraceError, 
kTraceVideoRenderer, _id, - "%s - not supported on Android", __FUNCTION__); - return -1; + const WebRtc_UWord32 /*streamId*/, + const float /*left*/, + const float /*top*/, + const float /*right*/, + const float /*bottom*/) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Android", __FUNCTION__); + return -1; } -WebRtc_Word32 VideoRenderAndroid::SetTransparentBackground(const bool enable) -{ - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s - not supported on Android", __FUNCTION__); - return -1; +WebRtc_Word32 VideoRenderAndroid::SetTransparentBackground(const bool enable) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Android", __FUNCTION__); + return -1; } WebRtc_Word32 VideoRenderAndroid::ConfigureRenderer( - const WebRtc_UWord32 streamId, - const unsigned int zOrder, - const float left, - const float top, - const float right, - const float bottom) -{ - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s - not supported on Android", __FUNCTION__); - return -1; + const WebRtc_UWord32 streamId, + const unsigned int zOrder, + const float left, + const float top, + const float right, + const float bottom) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Android", __FUNCTION__); + return -1; } WebRtc_Word32 VideoRenderAndroid::SetText( - const WebRtc_UWord8 textId, - const WebRtc_UWord8* text, - const WebRtc_Word32 textLength, - const WebRtc_UWord32 textColorRef, - const WebRtc_UWord32 backgroundColorRef, - const float left, const float top, - const float rigth, const float bottom) -{ - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s - not supported on Android", __FUNCTION__); - return -1; + const WebRtc_UWord8 textId, + const WebRtc_UWord8* text, + const WebRtc_Word32 textLength, + const WebRtc_UWord32 textColorRef, + const WebRtc_UWord32 backgroundColorRef, + const float left, const float top, + const float rigth, const float bottom) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Android", __FUNCTION__); + return -1; } WebRtc_Word32 VideoRenderAndroid::SetBitmap(const void* bitMap, @@ -382,11 +368,10 @@ WebRtc_Word32 VideoRenderAndroid::SetBitmap(const void* bitMap, const void* colorKey, const float left, const float top, const float right, - const float bottom) -{ - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s - not supported on Android", __FUNCTION__); - return -1; + const float bottom) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s - not supported on Android", __FUNCTION__); + return -1; } -} //namespace webrtc +} //namespace webrtc diff --git a/src/modules/video_render/main/source/android/video_render_android_impl.h b/src/modules/video_render/main/source/android/video_render_android_impl.h new file mode 100644 index 000000000..b3f152559 --- /dev/null +++ b/src/modules/video_render/main/source/android/video_render_android_impl.h @@ -0,0 +1,168 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_ +#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_ + +#include +#include "i_video_render.h" +#include "map_wrapper.h" + + +namespace webrtc { + +//#define ANDROID_LOG + + +class CriticalSectionWrapper; +class EventWrapper; +class ThreadWrapper; + + +// The object a module user uses to send new frames to the java renderer +// Base class for android render streams. + +class AndroidStream: public VideoRenderCallback +{ +public: + /* + * DeliverFrame is called from a thread connected to the Java VM. + * Used for Delivering frame for rendering. + */ + virtual void DeliverFrame(JNIEnv* jniEnv)=0; + + virtual ~AndroidStream() + { + }; +}; + +class VideoRenderAndroid: IVideoRender +{ +public: + static WebRtc_Word32 SetAndroidEnvVariables(void* javaVM); + + VideoRenderAndroid(const WebRtc_Word32 id, + const VideoRenderType videoRenderType, + void* window, + const bool fullscreen); + + virtual ~VideoRenderAndroid(); + + virtual WebRtc_Word32 Init()=0; + + virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id); + + virtual WebRtc_Word32 ChangeWindow(void* window); + + virtual VideoRenderCallback* AddIncomingRenderStream( + const WebRtc_UWord32 streamId, + const WebRtc_UWord32 zOrder, + const float left, const float top, + const float right, const float bottom); + + virtual WebRtc_Word32 DeleteIncomingRenderStream( + const WebRtc_UWord32 streamId); + + virtual WebRtc_Word32 GetIncomingRenderStreamProperties( + const WebRtc_UWord32 streamId, + WebRtc_UWord32& zOrder, + float& left, float& top, + float& right, float& bottom) const; + + virtual WebRtc_Word32 StartRender(); + + virtual WebRtc_Word32 StopRender(); + + virtual void ReDraw(); + + /************************************************************************** + * + * Properties + * + ***************************************************************************/ + + virtual VideoRenderType RenderType(); + + virtual RawVideoType PerferedVideoType(); + + virtual bool FullScreen(); + + virtual WebRtc_Word32 GetGraphicsMemory( + WebRtc_UWord64& totalGraphicsMemory, + WebRtc_UWord64& availableGraphicsMemory) const; + + virtual WebRtc_Word32 GetScreenResolution( + WebRtc_UWord32& screenWidth, + WebRtc_UWord32& screenHeight) const; + + virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId); + + virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId, + const float left, const float top, + const float right, + const float bottom); + + virtual WebRtc_Word32 SetTransparentBackground(const bool enable); + + virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId, + const unsigned int zOrder, + const float left, const float top, + const float right, + const float bottom); + + virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId, + const WebRtc_UWord8* text, + const WebRtc_Word32 textLength, + const WebRtc_UWord32 textColorRef, + const WebRtc_UWord32 backgroundColorRef, + const float left, const float top, + const float rigth, const float bottom); + + virtual WebRtc_Word32 SetBitmap(const void* bitMap, + const WebRtc_UWord8 pictureId, + const void* colorKey, const float left, + const float top, const float right, + const float bottom); + + protected: + virtual AndroidStream* CreateAndroidRenderChannel( + WebRtc_Word32 streamId, + WebRtc_Word32 zOrder, + const float left, + const float top, + const float right, + const float bottom, + VideoRenderAndroid& renderer) = 0; + + 
WebRtc_Word32 _id; + CriticalSectionWrapper& _critSect; + VideoRenderType _renderType; + jobject _ptrWindow; + + static JavaVM* g_jvm; + + private: + static bool JavaRenderThreadFun(void* obj); + bool JavaRenderThreadProcess(); + + // Map with streams to render. + MapWrapper _streamsMap; + // True if the _javaRenderThread thread shall be detached from the JVM. + bool _javaShutDownFlag; + EventWrapper& _javaShutdownEvent; + EventWrapper& _javaRenderEvent; + WebRtc_Word64 _lastJavaRenderEvent; + JNIEnv* _javaRenderJniEnv; // JNIEnv for the java render thread. + ThreadWrapper* _javaRenderThread; +}; + +} //namespace webrtc + +#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_ diff --git a/src/modules/video_render/main/source/Android/video_render_android_native_opengl2.cc b/src/modules/video_render/main/source/android/video_render_android_native_opengl2.cc similarity index 79% rename from src/modules/video_render/main/source/Android/video_render_android_native_opengl2.cc rename to src/modules/video_render/main/source/android/video_render_android_native_opengl2.cc index ad93bf28d..69de8c710 100644 --- a/src/modules/video_render/main/source/Android/video_render_android_native_opengl2.cc +++ b/src/modules/video_render/main/source/android/video_render_android_native_opengl2.cc @@ -25,10 +25,10 @@ namespace webrtc { AndroidNativeOpenGl2Renderer::AndroidNativeOpenGl2Renderer( - const WebRtc_Word32 id, - const VideoRenderType videoRenderType, - void* window, - const bool fullscreen) : + const WebRtc_Word32 id, + const VideoRenderType videoRenderType, + void* window, + const bool fullscreen) : VideoRenderAndroid(id, videoRenderType, window, fullscreen), _javaRenderObj(NULL), _javaRenderClass(NULL) @@ -54,12 +54,12 @@ bool AndroidNativeOpenGl2Renderer::UseOpenGL2(void* window) // Get the JNI env for this thread if ((res < 0) || !env) { - WEBRTC_TRACE( - kTraceError, - kTraceVideoRenderer, - -1, - "RendererAndroid(): Could not attach thread to JVM (%d, %p)", - res, env); + WEBRTC_TRACE( + kTraceError, + kTraceVideoRenderer, + -1, + "RendererAndroid(): Could not attach thread to JVM (%d, %p)", + res, env); return false; } isAttached = true; @@ -191,9 +191,10 @@ WebRtc_Word32 AndroidNativeOpenGl2Renderer::Init() return -1; } - // create a global reference to the class (to tell JNI that we are referencing it after this function has returned) - _javaRenderClass - = reinterpret_cast (env->NewGlobalRef(javaRenderClassLocal)); + // create a global reference to the class (to tell JNI that + // we are referencing it after this function has returned) + _javaRenderClass = + reinterpret_cast (env->NewGlobalRef(javaRenderClassLocal)); if (!_javaRenderClass) { WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, @@ -211,11 +212,11 @@ WebRtc_Word32 AndroidNativeOpenGl2Renderer::Init() if (!_javaRenderObj) { WEBRTC_TRACE( - kTraceError, - kTraceVideoRenderer, - _id, - "%s: could not create Java SurfaceRender object reference", - __FUNCTION__); + kTraceError, + kTraceVideoRenderer, + _id, + "%s: could not create Java SurfaceRender object reference", + __FUNCTION__); return -1; } @@ -236,13 +237,13 @@ WebRtc_Word32 AndroidNativeOpenGl2Renderer::Init() } AndroidStream* AndroidNativeOpenGl2Renderer::CreateAndroidRenderChannel( - WebRtc_Word32 streamId, - WebRtc_Word32 zOrder, - const float left, - const float top, - const float right, - const float bottom, - VideoRenderAndroid& renderer) + WebRtc_Word32 streamId, + WebRtc_Word32 zOrder, + const float left, + const float top, + const float 
right, + const float bottom, + VideoRenderAndroid& renderer) { WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d", __FUNCTION__, streamId); @@ -258,9 +259,10 @@ AndroidNativeOpenGl2Renderer::CreateAndroidRenderChannel( return NULL; } -AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel(WebRtc_UWord32 streamId, - JavaVM* jvm, - VideoRenderAndroid& renderer,jobject javaRenderObj): +AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel( + WebRtc_UWord32 streamId, + JavaVM* jvm, + VideoRenderAndroid& renderer,jobject javaRenderObj): _id(streamId), _renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()), _renderer(renderer), _jvm(jvm), _javaRenderObj(javaRenderObj), @@ -428,8 +430,9 @@ WebRtc_Word32 AndroidNativeOpenGl2Channel::Init(WebRtc_Word32 zOrder, return 0; } -WebRtc_Word32 AndroidNativeOpenGl2Channel::RenderFrame(const WebRtc_UWord32 /*streamId*/, - VideoFrame& videoFrame) +WebRtc_Word32 AndroidNativeOpenGl2Channel::RenderFrame( + const WebRtc_UWord32 /*streamId*/, + VideoFrame& videoFrame) { // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__); _renderCritSect.Enter(); @@ -449,46 +452,50 @@ void AndroidNativeOpenGl2Channel::DeliverFrame(JNIEnv* jniEnv) //Draw the Surface jniEnv->CallVoidMethod(_javaRenderObj, _redrawCid); - //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s: time to deliver %lld" ,__FUNCTION__,(TickTime::Now()-timeNow).Milliseconds()); + // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, + // "%s: time to deliver %lld" ,__FUNCTION__, + // (TickTime::Now()-timeNow).Milliseconds()); } /* - * JNI callback from Java class. Called when the render want to render a frame. Called from the GLRenderThread + * JNI callback from Java class. Called when the render + * want to render a frame. Called from the GLRenderThread * Method: DrawNative * Signature: (J)V */ -void JNICALL AndroidNativeOpenGl2Channel::DrawNativeStatic -(JNIEnv * env, jobject, jlong context) -{ - AndroidNativeOpenGl2Channel* renderChannel=reinterpret_cast(context); - renderChannel->DrawNative(); +void JNICALL AndroidNativeOpenGl2Channel::DrawNativeStatic( + JNIEnv * env, jobject, jlong context) { + AndroidNativeOpenGl2Channel* renderChannel = + reinterpret_cast(context); + renderChannel->DrawNative(); } void AndroidNativeOpenGl2Channel::DrawNative() { - _openGLRenderer.Render(_bufferToRender); + _openGLRenderer.Render(_bufferToRender); } + /* - * JNI callback from Java class. Called when the GLSurfaceview have created a surface. Called from the GLRenderThread + * JNI callback from Java class. Called when the GLSurfaceview + * have created a surface. 
Called from the GLRenderThread * Method: CreateOpenGLNativeStatic * Signature: (JII)I */ -jint JNICALL AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic(JNIEnv * env, - jobject, - jlong context, - jint width, - jint height) -{ - AndroidNativeOpenGl2Channel* renderChannel = - reinterpret_cast (context); - WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1, "%s:", __FUNCTION__); - return renderChannel->CreateOpenGLNative(width, height); +jint JNICALL AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic( + JNIEnv * env, + jobject, + jlong context, + jint width, + jint height) { + AndroidNativeOpenGl2Channel* renderChannel = + reinterpret_cast (context); + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1, "%s:", __FUNCTION__); + return renderChannel->CreateOpenGLNative(width, height); } -jint AndroidNativeOpenGl2Channel::CreateOpenGLNative(int width, int height) -{ - - return _openGLRenderer.Setup(width, height); +jint AndroidNativeOpenGl2Channel::CreateOpenGLNative( + int width, int height) { + return _openGLRenderer.Setup(width, height); } } //namespace webrtc diff --git a/src/modules/video_render/main/source/android/video_render_android_native_opengl2.h b/src/modules/video_render/main/source/android/video_render_android_native_opengl2.h new file mode 100644 index 000000000..c69f17d12 --- /dev/null +++ b/src/modules/video_render/main/source/android/video_render_android_native_opengl2.h @@ -0,0 +1,99 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_ +#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_ + +#include + +#include "video_render_defines.h" +#include "video_render_android_impl.h" +#include "video_render_opengles20.h" + +namespace webrtc { + +class CriticalSectionWrapper; + +class AndroidNativeOpenGl2Channel: public AndroidStream { + public: + AndroidNativeOpenGl2Channel( + WebRtc_UWord32 streamId, + JavaVM* jvm, + VideoRenderAndroid& renderer,jobject javaRenderObj); + ~AndroidNativeOpenGl2Channel(); + + WebRtc_Word32 Init(WebRtc_Word32 zOrder, + const float left, + const float top, + const float right, + const float bottom); + + //Implement VideoRenderCallback + virtual WebRtc_Word32 RenderFrame( + const WebRtc_UWord32 streamId, + VideoFrame& videoFrame); + + //Implements AndroidStream + virtual void DeliverFrame(JNIEnv* jniEnv); + + private: + static jint CreateOpenGLNativeStatic( + JNIEnv * env, + jobject, + jlong context, + jint width, + jint height); + jint CreateOpenGLNative(int width, int height); + + static void DrawNativeStatic(JNIEnv * env,jobject, jlong context); + void DrawNative(); + WebRtc_UWord32 _id; + CriticalSectionWrapper& _renderCritSect; + + VideoFrame _bufferToRender; + VideoRenderAndroid& _renderer; + JavaVM* _jvm; + jobject _javaRenderObj; + + jmethodID _redrawCid; + jmethodID _registerNativeCID; + jmethodID _deRegisterNativeCID; + VideoRenderOpenGles20 _openGLRenderer; +}; + + +class AndroidNativeOpenGl2Renderer: private VideoRenderAndroid { + public: + AndroidNativeOpenGl2Renderer(const WebRtc_Word32 id, + const VideoRenderType videoRenderType, + void* window, + const bool fullscreen); + + ~AndroidNativeOpenGl2Renderer(); + static bool UseOpenGL2(void* window); + + WebRtc_Word32 Init(); + virtual AndroidStream* CreateAndroidRenderChannel( + WebRtc_Word32 streamId, + WebRtc_Word32 zOrder, + const float left, + const float top, + const float right, + const float bottom, + VideoRenderAndroid& renderer); + + private: + jobject _javaRenderObj; + jclass _javaRenderClass; +}; + +} //namespace webrtc + +#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_ diff --git a/src/modules/video_render/main/source/android/video_render_android_surface_view.cc b/src/modules/video_render/main/source/android/video_render_android_surface_view.cc new file mode 100644 index 000000000..20555b105 --- /dev/null +++ b/src/modules/video_render/main/source/android/video_render_android_surface_view.cc @@ -0,0 +1,562 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "video_render_android_surface_view.h" +#include "critical_section_wrapper.h" +#include "common_video/libyuv/include/libyuv.h" +#include "tick_util.h" +#ifdef ANDROID_NDK_8_OR_ABOVE + #include +#endif + + +#ifdef ANDROID_LOG +#include +#include + +#undef WEBRTC_TRACE +#define WEBRTC_TRACE(a,b,c,...) 
__android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__) +#else +#include "trace.h" +#endif + +namespace webrtc { + +AndroidSurfaceViewRenderer::AndroidSurfaceViewRenderer(const WebRtc_Word32 id, + const VideoRenderType videoRenderType, + void* window, + const bool fullscreen) +: + VideoRenderAndroid(id,videoRenderType,window,fullscreen), + _javaRenderObj(NULL), + _javaRenderClass(NULL) +{ +} + +AndroidSurfaceViewRenderer::~AndroidSurfaceViewRenderer() { + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, + "AndroidSurfaceViewRenderer dtor"); + if(g_jvm) { + // get the JNI env for this thread + bool isAttached = false; + JNIEnv* env = NULL; + if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = g_jvm->AttachCurrentThread(&env, NULL); + + // Get the JNI env for this thread + if ((res < 0) || !env) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, + res, + env); + env=NULL; + } + else { + isAttached = true; + } + } + env->DeleteGlobalRef(_javaRenderObj); + env->DeleteGlobalRef(_javaRenderClass); + + if (isAttached) { + if (g_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(kTraceWarning, + kTraceVideoRenderer, + _id, + "%s: Could not detach thread from JVM", + __FUNCTION__); + } + } + } +} + +WebRtc_Word32 AndroidSurfaceViewRenderer::Init() { + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__); + if (!g_jvm) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "(%s): Not a valid Java VM pointer.", + __FUNCTION__); + return -1; + } + if(!_ptrWindow) { + WEBRTC_TRACE(kTraceWarning, + kTraceVideoRenderer, + _id, + "(%s): No window have been provided.", + __FUNCTION__); + return -1; + } + + // get the JNI env for this thread + bool isAttached = false; + JNIEnv* env = NULL; + if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = g_jvm->AttachCurrentThread(&env, NULL); + + // Get the JNI env for this thread + if ((res < 0) || !env) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, + res, + env); + return -1; + } + isAttached = true; + } + + // get the ViESurfaceRender class + jclass javaRenderClassLocal = + env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer"); + if (!javaRenderClassLocal) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: could not find ViESurfaceRenderer", + __FUNCTION__); + return -1; + } + + // create a global reference to the class (to tell JNI that + // we are referencing it after this function has returned) + _javaRenderClass = + reinterpret_cast(env->NewGlobalRef(javaRenderClassLocal)); + if (!_javaRenderClass) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: could not create Java ViESurfaceRenderer class reference", + __FUNCTION__); + return -1; + } + + // Delete local class ref, we only use the global ref + env->DeleteLocalRef(javaRenderClassLocal); + + // get the method ID for the constructor + jmethodID cid = env->GetMethodID(_javaRenderClass, + "", + "(Landroid/view/SurfaceView;)V"); + if (cid == NULL) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: could not get constructor ID", + __FUNCTION__); + return -1; /* exception thrown */ + } + + // construct the object + jobject javaRenderObjLocal = env->NewObject(_javaRenderClass, + 
cid, + _ptrWindow); + if (!javaRenderObjLocal) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: could not create Java Render", + __FUNCTION__); + return -1; + } + + // create a reference to the object (to tell JNI that we are referencing it + // after this function has returned) + _javaRenderObj = env->NewGlobalRef(javaRenderObjLocal); + if (!_javaRenderObj) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: could not create Java SurfaceRender object reference", + __FUNCTION__); + return -1; + } + + // Detach this thread if it was attached + if (isAttached) { + if (g_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(kTraceWarning, + kTraceVideoRenderer, + _id, + "%s: Could not detach thread from JVM", __FUNCTION__); + } + } + + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done", __FUNCTION__); + return 0; +} + +AndroidStream* +AndroidSurfaceViewRenderer::CreateAndroidRenderChannel( + WebRtc_Word32 streamId, + WebRtc_Word32 zOrder, + const float left, + const float top, + const float right, + const float bottom, + VideoRenderAndroid& renderer) { + WEBRTC_TRACE(kTraceDebug, + kTraceVideoRenderer, + _id, + "%s: Id %d", + __FUNCTION__, + streamId); + AndroidSurfaceViewChannel* stream = + new AndroidSurfaceViewChannel(streamId, g_jvm, renderer, _javaRenderObj); + if(stream && stream->Init(zOrder, left, top, right, bottom) == 0) + return stream; + else + delete stream; + return NULL; +} + +AndroidSurfaceViewChannel::AndroidSurfaceViewChannel( + WebRtc_UWord32 streamId, + JavaVM* jvm, + VideoRenderAndroid& renderer, + jobject javaRenderObj) : + _id(streamId), + _renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()), + _renderer(renderer), + _jvm(jvm), + _javaRenderObj(javaRenderObj), + _bitmapWidth(0), + _bitmapHeight(0) { +} + +AndroidSurfaceViewChannel::~AndroidSurfaceViewChannel() { + WEBRTC_TRACE(kTraceInfo, + kTraceVideoRenderer, + _id, + "AndroidSurfaceViewChannel dtor"); + delete &_renderCritSect; + if(_jvm) { + // get the JNI env for this thread + bool isAttached = false; + JNIEnv* env = NULL; + if ( _jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _jvm->AttachCurrentThread(&env, NULL); + + // Get the JNI env for this thread + if ((res < 0) || !env) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, + res, + env); + env=NULL; + } + else { + isAttached = true; + } + } + +#ifdef ANDROID_NDK_8_OR_ABOVE + env->DeleteGlobalRef(_javaBitmapObj); +#else + env->DeleteGlobalRef(_javaByteBufferObj); +#endif + if (isAttached) { + if (_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(kTraceWarning, + kTraceVideoRenderer, + _id, + "%s: Could not detach thread from JVM", + __FUNCTION__); + } + } + } +} + +WebRtc_Word32 AndroidSurfaceViewChannel::Init( + WebRtc_Word32 /*zOrder*/, + const float left, + const float top, + const float right, + const float bottom) { + + WEBRTC_TRACE(kTraceDebug, + kTraceVideoRenderer, + _id, + "%s: AndroidSurfaceViewChannel", + __FUNCTION__); + if (!_jvm) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: Not a valid Java VM pointer", + __FUNCTION__); + return -1; + } + + if( (top > 1 || top < 0) || + (right > 1 || right < 0) || + (bottom > 1 || bottom < 0) || + (left > 1 || left < 0)) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Wrong coordinates", __FUNCTION__); + return -1; + } + + // get the JNI env for this thread 
+ bool isAttached = false; + JNIEnv* env = NULL; + if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _jvm->AttachCurrentThread(&env, NULL); + + // Get the JNI env for this thread + if ((res < 0) || !env) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, + res, + env); + return -1; + } + isAttached = true; + } + + jclass javaRenderClass = + env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer"); + if (!javaRenderClass) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: could not find ViESurfaceRenderer", + __FUNCTION__); + return -1; + } +#ifdef ANDROID_NDK_8_OR_ABOVE + // get the method ID for the CreateBitmap + _createBitmapCid = + env->GetMethodID(_javaRenderClass, + "CreateBitmap", + "(II)Landroid/graphics/Bitmap;"); + if (_createBitmapCid == NULL) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: could not get CreateBitmap ID", + __FUNCTION__); + return -1; /* exception thrown */ + } + // get the method ID for the DrawBitmap function + _drawBitmapCid = env->GetMethodID(_javaRenderClass, "DrawBitmap", "()V"); + if (_drawBitmapCid == NULL) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: could not get DrawBitmap ID", + __FUNCTION__); + return -1; /* exception thrown */ + } +#else + // get the method ID for the CreateIntArray + _createByteBufferCid = + env->GetMethodID(javaRenderClass, + "CreateByteBuffer", + "(II)Ljava/nio/ByteBuffer;"); + if (_createByteBufferCid == NULL) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: could not get CreateByteBuffer ID", + __FUNCTION__); + return -1; /* exception thrown */ + } + + // get the method ID for the DrawByteBuffer function + _drawByteBufferCid = env->GetMethodID(javaRenderClass, + "DrawByteBuffer", + "()V"); + if (_drawByteBufferCid == NULL) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: could not get DrawByteBuffer ID", + __FUNCTION__); + return -1; /* exception thrown */ + } +#endif + + // get the method ID for the SetCoordinates function + _setCoordinatesCid = env->GetMethodID(javaRenderClass, + "SetCoordinates", + "(FFFF)V"); + if (_setCoordinatesCid == NULL) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: could not get SetCoordinates ID", + __FUNCTION__); + return -1; /* exception thrown */ + } + + env->CallVoidMethod(_javaRenderObj, _setCoordinatesCid, + left, top, right, bottom); + + // Detach this thread if it was attached + if (isAttached) { + if (_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(kTraceWarning, + kTraceVideoRenderer, + _id, + "%s: Could not detach thread from JVM", + __FUNCTION__); + } + } + + WEBRTC_TRACE(kTraceDebug, + kTraceVideoRenderer, + _id, + "%s: AndroidSurfaceViewChannel done", + __FUNCTION__); + return 0; +} + + +WebRtc_Word32 AndroidSurfaceViewChannel::RenderFrame( + const WebRtc_UWord32 /*streamId*/, + VideoFrame& videoFrame) { + // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__); + _renderCritSect.Enter(); + _bufferToRender.SwapFrame(videoFrame); + _renderCritSect.Leave(); + _renderer.ReDraw(); + return 0; +} + + +/*Implements AndroidStream + * Calls the Java object and render the buffer in _bufferToRender + */ +void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) { + _renderCritSect.Enter(); + +#ifdef ANDROID_NDK_8_OR_ABOVE + if (_bitmapWidth != _bufferToRender.Width() || + _bitmapHeight 
!= _bufferToRender.Height()) { + // Create the bitmap to write to + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Creating bitmap %u " + "%u", __FUNCTION__, _bufferToRender.Width(), + _bufferToRender.Height()); + if (_javaBitmapObj) { + jniEnv->DeleteGlobalRef(_javaBitmapObj); + _javaBitmapObj = NULL; + } + jobject javaBitmap = jniEnv->CallObjectMethod(_javaRenderObj, + _createBitmapCid, + videoFrame.Width(), + videoFrame.Height()); + _javaBitmapObj = jniEnv->NewGlobalRef(javaBitmap); + if (!_javaBitmapObj) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not " + "create Java Bitmap object reference", __FUNCTION__); + _renderCritSect.Leave(); + return; + } else { + _bitmapWidth = _bufferToRender.Width(); + _bitmapHeight = _bufferToRender.Height(); + } + } + void* pixels; + if (_javaBitmapObj && + AndroidBitmap_lockPixels(jniEnv, _javaBitmapObj, &pixels) >= 0) { + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Locked bitmap", + __FUNCTION__); + // Convert I420 straight into the Java bitmap. + int ret = ConvertI420ToRGB565((unsigned char*)_bufferToRender.Buffer(), + (unsigned char*) pixels, + _bitmapWidth, _bitmapHeight); + if (ret < 0) { + WEBRTC_TRACE(kTraceError, + kTraceVideoRenderer, + _id, + "%s: Color conversion failed.", + __FUNCTION__); + } + + AndroidBitmap_unlockPixels(jniEnv, _javaBitmapObj); + // Draw the Surface. + jniEnv->CallVoidMethod(_javaRenderObj,_drawCid); + + } else { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not lock " + "bitmap", __FUNCTION__); + } + _renderCritSect.Leave(); + +#else + if (_bitmapWidth != _bufferToRender.Width() || + _bitmapHeight != _bufferToRender.Height()) { + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: New render size %d " + "%d",__FUNCTION__, + _bufferToRender.Width(), _bufferToRender.Height()); + if (_javaByteBufferObj) { + jniEnv->DeleteGlobalRef(_javaByteBufferObj); + _javaByteBufferObj = NULL; + _directBuffer = NULL; + } + jobject javaByteBufferObj = + jniEnv->CallObjectMethod(_javaRenderObj, _createByteBufferCid, + _bufferToRender.Width(), + _bufferToRender.Height()); + _javaByteBufferObj = jniEnv->NewGlobalRef(javaByteBufferObj); + if (!_javaByteBufferObj) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not " + "create Java ByteBuffer object reference", __FUNCTION__); + _renderCritSect.Leave(); + return; + } else { + _directBuffer = static_cast + (jniEnv->GetDirectBufferAddress(_javaByteBufferObj)); + _bitmapWidth = _bufferToRender.Width(); + _bitmapHeight = _bufferToRender.Height(); + } + } + + if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight) { + // Android requires a vertically flipped image compared to std convert. + // This is done by giving a negative height input. 
+ const int conversionResult = + ConvertI420ToRGB565((unsigned char* )_bufferToRender.Buffer(), + _directBuffer, _bitmapWidth, -_bitmapHeight); + if (conversionResult < 0) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion" + " failed.", __FUNCTION__); + _renderCritSect.Leave(); + return; + } + } + _renderCritSect.Leave(); + // Draw the Surface + jniEnv->CallVoidMethod(_javaRenderObj, _drawByteBufferCid); +#endif +} + +} // namespace webrtc diff --git a/src/modules/video_render/main/source/android/video_render_android_surface_view.h b/src/modules/video_render/main/source/android/video_render_android_surface_view.h new file mode 100644 index 000000000..1355e8323 --- /dev/null +++ b/src/modules/video_render/main/source/android/video_render_android_surface_view.h @@ -0,0 +1,94 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_ +#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_ + +#include + +#include "video_render_defines.h" +#include "video_render_android_impl.h" + +namespace webrtc { + +class CriticalSectionWrapper; + +class AndroidSurfaceViewChannel: public AndroidStream +{ +public: + AndroidSurfaceViewChannel(WebRtc_UWord32 streamId, + JavaVM* jvm, + VideoRenderAndroid& renderer, + jobject javaRenderObj); + ~AndroidSurfaceViewChannel(); + + WebRtc_Word32 Init(WebRtc_Word32 zOrder, + const float left, + const float top, + const float right, + const float bottom); + + //Implement VideoRenderCallback + virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, + VideoFrame& videoFrame); + + //Implements AndroidStream + virtual void DeliverFrame(JNIEnv* jniEnv); + + private: + WebRtc_UWord32 _id; + CriticalSectionWrapper& _renderCritSect; + + VideoFrame _bufferToRender; + VideoRenderAndroid& _renderer; + JavaVM* _jvm; + jobject _javaRenderObj; + +#ifdef ANDROID_NDK_8_OR_ABOVE + jclass _javaBitmapClass; + jmethodID _createBitmapCid; + jobject _javaBitmapObj; + jmethodID _drawBitmapCid; +#else + jobject _javaByteBufferObj; + unsigned char* _directBuffer; + jmethodID _createByteBufferCid; + jmethodID _drawByteBufferCid; +#endif + jmethodID _setCoordinatesCid; + unsigned int _bitmapWidth; + unsigned int _bitmapHeight; +}; + +class AndroidSurfaceViewRenderer: private VideoRenderAndroid +{ +public: + AndroidSurfaceViewRenderer(const WebRtc_Word32 id, + const VideoRenderType videoRenderType, + void* window, + const bool fullscreen); + ~AndroidSurfaceViewRenderer(); + WebRtc_Word32 Init(); + virtual AndroidStream* CreateAndroidRenderChannel( + WebRtc_Word32 streamId, + WebRtc_Word32 zOrder, + const float left, + const float top, + const float right, + const float bottom, + VideoRenderAndroid& renderer); + private: + jobject _javaRenderObj; + jclass _javaRenderClass; +}; + +} //namespace webrtc + +#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_ diff --git a/src/modules/video_render/main/source/Android/video_render_opengles20.cc b/src/modules/video_render/main/source/android/video_render_opengles20.cc similarity index 93% 
rename from src/modules/video_render/main/source/Android/video_render_opengles20.cc
rename to src/modules/video_render/main/source/android/video_render_opengles20.cc
index 8f4e5c5a6..f207a16d7 100644
--- a/src/modules/video_render/main/source/Android/video_render_opengles20.cc
+++ b/src/modules/video_render/main/source/android/video_render_opengles20.cc
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
@@ -139,7 +139,8 @@ WebRtc_Word32 VideoRenderOpenGles20::Setup(WebRtc_Word32 width,
     }
 
     // set the vertices array in the shader
-    // _vertices contains 4 vertices with 5 coordinates. 3 for (xyz) for the vertices and 2 for the texture
+    // _vertices contains 4 vertices with 5 coordinates.
+    // 3 for (xyz) for the vertices and 2 for the texture
     glVertexAttribPointer(positionHandle, 3, GL_FLOAT, false,
                           5 * sizeof(GLfloat), _vertices);
     checkGlError("glVertexAttribPointer aPosition");
@@ -148,7 +149,8 @@ WebRtc_Word32 VideoRenderOpenGles20::Setup(WebRtc_Word32 width,
     checkGlError("glEnableVertexAttribArray positionHandle");
 
     // set the texture coordinate array in the shader
-    // _vertices contains 4 vertices with 5 coordinates. 3 for (xyz) for the vertices and 2 for the texture
+    // _vertices contains 4 vertices with 5 coordinates.
+    // 3 for (xyz) for the vertices and 2 for the texture
     glVertexAttribPointer(textureHandle, 2, GL_FLOAT, false,
                           5 * sizeof(GLfloat), &_vertices[3]);
     checkGlError("glVertexAttribPointer maTextureHandle");
@@ -178,13 +180,14 @@ WebRtc_Word32 VideoRenderOpenGles20::Setup(WebRtc_Word32 width,
 }
 /*
  * SetCoordinates
- * Sets the coordinates where the stream shall be rendered. Values must be between 0 and 1.
+ * Sets the coordinates where the stream shall be rendered.
+ * Values must be between 0 and 1.
  */
 WebRtc_Word32 VideoRenderOpenGles20::SetCoordinates(WebRtc_Word32 zOrder,
-                                        const float left,
-                                        const float top,
-                                        const float right,
-                                        const float bottom)
+                                                     const float left,
+                                                     const float top,
+                                                     const float right,
+                                                     const float bottom)
 {
     if ((top > 1 || top < 0) || (right > 1 || right < 0) ||
         (bottom > 1 || bottom < 0) || (left > 1 || left < 0))
@@ -344,16 +347,14 @@ void VideoRenderOpenGles20::printGLString(const char *name, GLenum s)
              name, v);
 }
 
-void VideoRenderOpenGles20::checkGlError(const char* op)
-{
+void VideoRenderOpenGles20::checkGlError(const char* op) {
 #ifdef ANDROID_LOG
-    for (GLint error = glGetError(); error; error
-            = glGetError())
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "after %s() glError (0x%x)\n", op, error);
-    }
+  for (GLint error = glGetError(); error; error = glGetError()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "after %s() glError (0x%x)\n", op, error);
+  }
 #else
-    return;
+  return;
 #endif
 }
 
@@ -443,4 +444,3 @@ void VideoRenderOpenGles20::UpdateTextures(const VideoFrame& frameToRender)
 }
 
 } //namespace webrtc
-
diff --git a/src/modules/video_render/main/source/Android/video_render_opengles20.h b/src/modules/video_render/main/source/android/video_render_opengles20.h
similarity index 91%
rename from src/modules/video_render/main/source/Android/video_render_opengles20.h
rename to src/modules/video_render/main/source/android/video_render_opengles20.h
index 379b1e752..eee40892e 100644
--- a/src/modules/video_render/main/source/Android/video_render_opengles20.h
+++ b/src/modules/video_render/main/source/android/video_render_opengles20.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
@@ -37,7 +37,8 @@
 private:
     void printGLString(const char *name, GLenum s);
     void checkGlError(const char* op);
    GLuint loadShader(GLenum shaderType, const char* pSource);
-    GLuint createProgram(const char* pVertexSource, const char* pFragmentSource);
+    GLuint createProgram(const char* pVertexSource,
+                         const char* pFragmentSource);
     void SetupTextures(const VideoFrame& frameToRender);
     void UpdateTextures(const VideoFrame& frameToRender);
diff --git a/src/modules/video_render/main/source/video_render.gypi b/src/modules/video_render/main/source/video_render.gypi
index 414f99a15..6fe7c881e 100644
--- a/src/modules/video_render/main/source/video_render.gypi
+++ b/src/modules/video_render/main/source/video_render.gypi
@@ -38,10 +38,10 @@
         'video_render_impl.h',
         'i_video_render.h',
         # Android
-        'Android/video_render_android_impl.h',
-        'Android/video_render_android_native_opengl2.h',
-        'Android/video_render_android_surface_view.h',
-        'Android/video_render_opengles20.h',
+        'android/video_render_android_impl.h',
+        'android/video_render_android_native_opengl2.h',
+        'android/video_render_android_surface_view.h',
+        'android/video_render_opengles20.h',
         # Linux
         'linux/video_render_linux_impl.h',
         'linux/video_x11_channel.h',
@@ -67,10 +67,10 @@
         'video_render_impl.cc',
         # PLATFORM SPECIFIC SOURCE FILES - Will be filtered below
         # Android
-        'Android/video_render_android_impl.cc',
-        'Android/video_render_android_native_opengl2.cc',
-        'Android/video_render_android_surface_view.cc',
-        'Android/video_render_opengles20.cc',
+        'android/video_render_android_impl.cc',
+        'android/video_render_android_native_opengl2.cc',
+        'android/video_render_android_surface_view.cc',
+        'android/video_render_opengles20.cc',
         # Linux
         'linux/video_render_linux_impl.cc',
         'linux/video_x11_channel.cc',
@@ -100,14 +100,14 @@
       ['OS!="android" or include_internal_video_render==0', {
         'sources!': [
           # Android
-          'Android/video_render_android_impl.h',
-          'Android/video_render_android_native_opengl2.h',
-          'Android/video_render_android_surface_view.h',
-          'Android/video_render_opengles20.h',
-          'Android/video_render_android_impl.cc',
-          'Android/video_render_android_native_opengl2.cc',
-          'Android/video_render_android_surface_view.cc',
-          'Android/video_render_opengles20.cc',
+          'android/video_render_android_impl.h',
+          'android/video_render_android_native_opengl2.h',
+          'android/video_render_android_surface_view.h',
+          'android/video_render_opengles20.h',
+          'android/video_render_android_impl.cc',
+          'android/video_render_android_native_opengl2.cc',
+          'android/video_render_android_surface_view.cc',
+          'android/video_render_opengles20.cc',
         ],
       }],
      ['OS!="linux" or include_internal_video_render==0', {
diff --git a/src/modules/video_render/main/source/video_render_impl.cc b/src/modules/video_render/main/source/video_render_impl.cc
index 10122442d..2ce0b915c 100644
--- a/src/modules/video_render/main/source/video_render_impl.cc
+++ b/src/modules/video_render/main/source/video_render_impl.cc
@@ -40,9 +40,9 @@
 #endif
 
 #elif defined(WEBRTC_ANDROID)
-#include "Android/video_render_android_impl.h"
-#include "Android/video_render_android_surface_view.h"
-#include "Android/video_render_android_native_opengl2.h"
+#include "android/video_render_android_impl.h"
+#include "android/video_render_android_surface_view.h"
+#include "android/video_render_android_native_opengl2.h"
 #define STANDARD_RENDERING kRenderAndroid
 
 #elif defined(WEBRTC_LINUX)
@@ -978,4 +978,3 @@ WebRtc_Word32 ModuleVideoRenderImpl::MirrorRenderStream(const int renderId,
 }
 
 } //namespace webrtc
-
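The comment reflowed in VideoRenderOpenGles20::Setup() describes an interleaved vertex array: each of the four vertices carries five floats, an xyz position followed by a uv texture coordinate, so both attributes read from the same array with a stride of 5 * sizeof(GLfloat) and the texture pointer starting at &_vertices[3]. A minimal standalone sketch of that attribute setup, assuming a current GLES2 context and already-queried attribute locations (the handle and function names here are illustrative, not part of the module):

    #include <GLES2/gl2.h>

    // Binds a 4-vertex quad stored as [x y z u v] per vertex to the given
    // position and texture-coordinate attribute locations.
    static void BindInterleavedQuad(GLuint positionHandle,
                                    GLuint textureHandle,
                                    const GLfloat* vertices) {
      const GLsizei stride = 5 * sizeof(GLfloat);  // 5 floats per vertex
      // Position: 3 floats at offset 0 of each vertex.
      glVertexAttribPointer(positionHandle, 3, GL_FLOAT, GL_FALSE, stride,
                            vertices);
      glEnableVertexAttribArray(positionHandle);
      // Texture coordinates: 2 floats starting right after the xyz triple.
      glVertexAttribPointer(textureHandle, 2, GL_FLOAT, GL_FALSE, stride,
                            vertices + 3);
      glEnableVertexAttribArray(textureHandle);
    }

Setup() in the hunk above performs the same two calls inline against the member array _vertices.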
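The checkGlError() rewrite in the same file keeps the usual GLES error-drain loop: glGetError() returns one queued error per call and GL_NO_ERROR (0) once the queue is empty, so looping until it reports nothing logs every error pending after the operation just issued. A standalone sketch of the pattern, assuming a current GL context and substituting fprintf for WEBRTC_TRACE (the function name is illustrative):

    #include <GLES2/gl2.h>
    #include <cstdio>

    // Logs and clears every GL error queued since the last check;
    // 'op' names the GL call that was just made.
    static void CheckGlErrorSketch(const char* op) {
      for (GLenum error = glGetError(); error != GL_NO_ERROR;
           error = glGetError()) {
        std::fprintf(stderr, "after %s() glError (0x%x)\n", op, error);
      }
    }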