diff --git a/src/modules/video_capture/main/source/android/device_info_android.cc b/src/modules/video_capture/main/source/android/device_info_android.cc index 9d11b85c2..d119531b5 100644 --- a/src/modules/video_capture/main/source/android/device_info_android.cc +++ b/src/modules/video_capture/main/source/android/device_info_android.cc @@ -18,70 +18,62 @@ namespace webrtc { + namespace videocapturemodule { -VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo ( - const WebRtc_Word32 id) -{ - videocapturemodule::DeviceInfoAndroid *deviceInfo = - new videocapturemodule::DeviceInfoAndroid(id); - if (deviceInfo && deviceInfo->Init() != 0) // Failed to init - { - delete deviceInfo; - deviceInfo = NULL; - } - return deviceInfo; + +VideoCaptureModule::DeviceInfo* +VideoCaptureImpl::CreateDeviceInfo (const WebRtc_Word32 id) { + videocapturemodule::DeviceInfoAndroid *deviceInfo = + new videocapturemodule::DeviceInfoAndroid(id); + if (deviceInfo && deviceInfo->Init() != 0) { + delete deviceInfo; + deviceInfo = NULL; + } + return deviceInfo; } DeviceInfoAndroid::DeviceInfoAndroid(const WebRtc_Word32 id) : - DeviceInfoImpl(id) -{ + DeviceInfoImpl(id) { } -WebRtc_Word32 DeviceInfoAndroid::Init() -{ +WebRtc_Word32 DeviceInfoAndroid::Init() { + return 0; +} + +DeviceInfoAndroid::~DeviceInfoAndroid() { +} + +WebRtc_UWord32 DeviceInfoAndroid::NumberOfDevices() { + JNIEnv *env; + jclass javaCmDevInfoClass; + jobject javaCmDevInfoObject; + bool attached = false; + if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( + env, + javaCmDevInfoClass, + javaCmDevInfoObject, + attached) != 0) return 0; -} -DeviceInfoAndroid::~DeviceInfoAndroid() -{ -} - -WebRtc_UWord32 DeviceInfoAndroid::NumberOfDevices() -{ - - JNIEnv *env; - jclass javaCmDevInfoClass; - jobject javaCmDevInfoObject; - bool attached = false; - if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( - env, - javaCmDevInfoClass, - javaCmDevInfoObject, - attached) != 0) - { - return 0; - } + WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id, + "%s GetMethodId", __FUNCTION__); + // get the method ID for the Android Java GetDeviceUniqueName name. + jmethodID cid = env->GetMethodID(javaCmDevInfoClass, + "NumberOfDevices", + "()I"); + jint numberOfDevices = 0; + if (cid != NULL) { WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id, - "%s GetMethodId", __FUNCTION__); - // get the method ID for the Android Java GetDeviceUniqueName name. 
- jmethodID cid = env->GetMethodID(javaCmDevInfoClass, - "NumberOfDevices", - "()I"); + "%s Calling Number of devices", __FUNCTION__); + numberOfDevices = env->CallIntMethod(javaCmDevInfoObject, cid); + } + VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); - jint numberOfDevices = 0; - if (cid != NULL) - { - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id, - "%s Calling Number of devices", __FUNCTION__); - numberOfDevices = env->CallIntMethod(javaCmDevInfoObject, cid); - } - VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached); - - if (numberOfDevices > 0) - return numberOfDevices; - return 0; + if (numberOfDevices > 0) + return numberOfDevices; + return 0; } WebRtc_Word32 DeviceInfoAndroid::GetDeviceName( @@ -93,86 +85,75 @@ WebRtc_Word32 DeviceInfoAndroid::GetDeviceName( char* /*productUniqueIdUTF8*/, WebRtc_UWord32 /*productUniqueIdUTF8Length*/) { - JNIEnv *env; - jclass javaCmDevInfoClass; - jobject javaCmDevInfoObject; - WebRtc_Word32 result = 0; - bool attached = false; - if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( - env, - javaCmDevInfoClass, - javaCmDevInfoObject, - attached)!= 0) - { - return -1; - } + JNIEnv *env; + jclass javaCmDevInfoClass; + jobject javaCmDevInfoObject; + WebRtc_Word32 result = 0; + bool attached = false; + if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( + env, + javaCmDevInfoClass, + javaCmDevInfoObject, + attached)!= 0) + return -1; - // get the method ID for the Android Java GetDeviceUniqueName name. - jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetDeviceUniqueName", - "(I)Ljava/lang/String;"); - if (cid != NULL) - { - - jobject javaDeviceNameObj = env->CallObjectMethod(javaCmDevInfoObject, - cid, deviceNumber); - if (javaDeviceNameObj == NULL) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Failed to get device name for device %d.", - __FUNCTION__, (int) deviceNumber); - result = -1; - } - else - { - jboolean isCopy; - const char* javaDeviceNameChar = env->GetStringUTFChars( - (jstring) javaDeviceNameObj - ,&isCopy); - const jsize javaDeviceNameCharLength = - env->GetStringUTFLength((jstring) javaDeviceNameObj); - if ((WebRtc_UWord32) javaDeviceNameCharLength < - deviceUniqueIdUTF8Length) { - memcpy(deviceUniqueIdUTF8, - javaDeviceNameChar, - javaDeviceNameCharLength + 1); - } - else - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, - _id, "%s: deviceUniqueIdUTF8 to short.", - __FUNCTION__); - result = -1; - } - if ((WebRtc_UWord32) javaDeviceNameCharLength < deviceNameLength) - { - memcpy(deviceNameUTF8, - javaDeviceNameChar, - javaDeviceNameCharLength + 1); - } - env->ReleaseStringUTFChars((jstring) javaDeviceNameObj, - javaDeviceNameChar); - }//javaDeviceNameObj==NULL - - } - else - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: Failed to find GetDeviceUniqueName function id", + // get the method ID for the Android Java GetDeviceUniqueName name. 
+ jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetDeviceUniqueName",
+ "(I)Ljava/lang/String;");
+ if (cid != NULL) {
+ jobject javaDeviceNameObj = env->CallObjectMethod(javaCmDevInfoObject,
+ cid, deviceNumber);
+ if (javaDeviceNameObj == NULL) {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+ "%s: Failed to get device name for device %d.",
+ __FUNCTION__, (int) deviceNumber);
+ result = -1;
+ } else {
+ jboolean isCopy;
+ const char* javaDeviceNameChar = env->GetStringUTFChars(
+ (jstring) javaDeviceNameObj
+ ,&isCopy);
+ const jsize javaDeviceNameCharLength =
+ env->GetStringUTFLength((jstring) javaDeviceNameObj);
+ if ((WebRtc_UWord32) javaDeviceNameCharLength <
+ deviceUniqueIdUTF8Length) {
+ memcpy(deviceUniqueIdUTF8,
+ javaDeviceNameChar,
+ javaDeviceNameCharLength + 1);
+ }
+ else {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+ _id, "%s: deviceUniqueIdUTF8 too short.",
__FUNCTION__);
result = -1;
+ }
+ if ((WebRtc_UWord32) javaDeviceNameCharLength < deviceNameLength) {
+ memcpy(deviceNameUTF8,
+ javaDeviceNameChar,
+ javaDeviceNameCharLength + 1);
+ }
+ env->ReleaseStringUTFChars((jstring) javaDeviceNameObj,
+ javaDeviceNameChar);
+ } // javaDeviceNameObj == NULL

- VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+ }
+ else {
+ WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+ "%s: Failed to find GetDeviceUniqueName function id",
+ __FUNCTION__);
+ result = -1;
+ }

- WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
- "%s: result %d", __FUNCTION__, (int) result);
- return result;
+ VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+
+ WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+ "%s: result %d", __FUNCTION__, (int) result);
+ return result;
}

WebRtc_Word32 DeviceInfoAndroid::CreateCapabilityMap(
- const char* deviceUniqueIdUTF8)
-{
+ const char* deviceUniqueIdUTF8) {
MapItem* item = NULL;
while ((item = _captureCapabilities.Last())) {
delete (VideoCaptureCapability*) item->GetItem();
@@ -187,9 +168,8 @@ WebRtc_Word32 DeviceInfoAndroid::CreateCapabilityMap(
env,
javaCmDevInfoClass,
javaCmDevInfoObject,
- attached) != 0) {
+ attached) != 0)
return -1;
- }

// Find the capability class
jclass javaCapClassLocal = env->FindClass(AndroidJavaCaptureCapabilityClass);
@@ -282,8 +262,7 @@ WebRtc_Word32 DeviceInfoAndroid::CreateCapabilityMap(

WebRtc_Word32 DeviceInfoAndroid::GetOrientation(
const char* deviceUniqueIdUTF8,
- VideoCaptureRotation& orientation)
-{
+ VideoCaptureRotation& orientation) {
JNIEnv *env;
jclass javaCmDevInfoClass;
jobject javaCmDevInfoObject;
@@ -292,9 +271,8 @@ WebRtc_Word32 DeviceInfoAndroid::GetOrientation(
env,
javaCmDevInfoClass,
javaCmDevInfoObject,
- attached) != 0) {
+ attached) != 0)
return -1;
- }

// get the method ID for the Android Java GetOrientation.
jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetOrientation", @@ -321,7 +299,7 @@ WebRtc_Word32 DeviceInfoAndroid::GetOrientation( WebRtc_Word32 retValue = 0; switch (jorientation) { - case -1: //Error + case -1: // Error orientation = kCameraRotate0; retValue = -1; break; @@ -344,5 +322,5 @@ WebRtc_Word32 DeviceInfoAndroid::GetOrientation( return retValue; } -} // namespace videocapturemodule -} // namespace webrtc +} // namespace videocapturemodule +} // namespace webrtc diff --git a/src/modules/video_capture/main/source/android/device_info_android.h b/src/modules/video_capture/main/source/android/device_info_android.h index 8e02b754a..855a29195 100644 --- a/src/modules/video_capture/main/source/android/device_info_android.h +++ b/src/modules/video_capture/main/source/android/device_info_android.h @@ -29,35 +29,37 @@ namespace videocapturemodule // #define WEBRTC_TRACE(a,b,c,...) // __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__) -class DeviceInfoAndroid: public DeviceInfoImpl -{ -public: +class DeviceInfoAndroid : public DeviceInfoImpl { - DeviceInfoAndroid(const WebRtc_Word32 id); - WebRtc_Word32 Init(); - virtual ~DeviceInfoAndroid(); - virtual WebRtc_UWord32 NumberOfDevices(); - virtual WebRtc_Word32 GetDeviceName(WebRtc_UWord32 deviceNumber, - char* deviceNameUTF8, - WebRtc_UWord32 deviceNameLength, - char* deviceUniqueIdUTF8, - WebRtc_UWord32 deviceUniqueIdUTF8Length, - char* productUniqueIdUTF8 = 0, - WebRtc_UWord32 productUniqueIdUTF8Length = 0); - virtual WebRtc_Word32 CreateCapabilityMap(const char* deviceUniqueIdUTF8); + public: + DeviceInfoAndroid(const WebRtc_Word32 id); + WebRtc_Word32 Init(); + virtual ~DeviceInfoAndroid(); + virtual WebRtc_UWord32 NumberOfDevices(); + virtual WebRtc_Word32 GetDeviceName( + WebRtc_UWord32 deviceNumber, + char* deviceNameUTF8, + WebRtc_UWord32 deviceNameLength, + char* deviceUniqueIdUTF8, + WebRtc_UWord32 deviceUniqueIdUTF8Length, + char* productUniqueIdUTF8 = 0, + WebRtc_UWord32 productUniqueIdUTF8Length = 0); + virtual WebRtc_Word32 CreateCapabilityMap(const char* deviceUniqueIdUTF8); - virtual WebRtc_Word32 DisplayCaptureSettingsDialogBox( - const char* /*deviceUniqueIdUTF8*/, - const char* /*dialogTitleUTF8*/, - void* /*parentWindow*/, - WebRtc_UWord32 /*positionX*/, - WebRtc_UWord32 /*positionY*/){return -1;} - virtual WebRtc_Word32 GetOrientation(const char* deviceUniqueIdUTF8, - VideoCaptureRotation& orientation); -private: - bool IsDeviceNameMatches(const char* name, const char* deviceUniqueIdUTF8); - enum {_expectedCaptureDelay = 190}; + virtual WebRtc_Word32 DisplayCaptureSettingsDialogBox( + const char* /*deviceUniqueIdUTF8*/, + const char* /*dialogTitleUTF8*/, + void* /*parentWindow*/, + WebRtc_UWord32 /*positionX*/, + WebRtc_UWord32 /*positionY*/) { return -1; } + virtual WebRtc_Word32 GetOrientation(const char* deviceUniqueIdUTF8, + VideoCaptureRotation& orientation); + private: + bool IsDeviceNameMatches(const char* name, const char* deviceUniqueIdUTF8); + enum {_expectedCaptureDelay = 190}; }; -} // namespace videocapturemodule -} // namespace webrtc + +} // namespace videocapturemodule +} // namespace webrtc + #endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_DEVICE_INFO_ANDROID_H_ diff --git a/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java b/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java index 0cfe45730..33c9927d5 100644 --- 
a/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java +++ b/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java @@ -11,7 +11,7 @@ package org.webrtc.videoengine; public class CaptureCapabilityAndroid { - public int width = 0; - public int height = 0; - public int maxFPS = 0; + public int width = 0; + public int height = 0; + public int maxFPS = 0; } diff --git a/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java b/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java index 102916288..38fe75dd7 100644 --- a/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java +++ b/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java @@ -27,236 +27,236 @@ import android.view.SurfaceHolder.Callback; public class VideoCaptureAndroid implements PreviewCallback, Callback { - private Camera camera; - private AndroidVideoCaptureDevice currentDevice = null; - public ReentrantLock previewBufferLock = new ReentrantLock(); - private int PIXEL_FORMAT = ImageFormat.NV21; - PixelFormat pixelFormat = new PixelFormat(); - // True when the C++ layer has ordered the camera to be started. - private boolean isRunning=false; + private Camera camera; + private AndroidVideoCaptureDevice currentDevice = null; + public ReentrantLock previewBufferLock = new ReentrantLock(); + private int PIXEL_FORMAT = ImageFormat.NV21; + PixelFormat pixelFormat = new PixelFormat(); + // True when the C++ layer has ordered the camera to be started. + private boolean isRunning=false; - private final int numCaptureBuffers = 3; - private int expectedFrameSize = 0; - private int orientation = 0; - private int id = 0; - // C++ callback context variable. - private long context = 0; - private SurfaceHolder localPreview = null; - // True if this class owns the preview video buffers. - private boolean ownsBuffers = false; + private final int numCaptureBuffers = 3; + private int expectedFrameSize = 0; + private int orientation = 0; + private int id = 0; + // C++ callback context variable. + private long context = 0; + private SurfaceHolder localPreview = null; + // True if this class owns the preview video buffers. + private boolean ownsBuffers = false; - // Set this to 2 for VERBOSE logging. 1 for DEBUG - private static int LOGLEVEL = 0; - private static boolean VERBOSE = LOGLEVEL > 2; - private static boolean DEBUG = LOGLEVEL > 1; + // Set this to 2 for VERBOSE logging. 
1 for DEBUG + private static int LOGLEVEL = 0; + private static boolean VERBOSE = LOGLEVEL > 2; + private static boolean DEBUG = LOGLEVEL > 1; - CaptureCapabilityAndroid currentCapability = null; + CaptureCapabilityAndroid currentCapability = null; - public static - void DeleteVideoCaptureAndroid(VideoCaptureAndroid captureAndroid) { - if(DEBUG) Log.d("*WEBRTC*", "DeleteVideoCaptureAndroid"); + public static + void DeleteVideoCaptureAndroid(VideoCaptureAndroid captureAndroid) { + if(DEBUG) Log.d("*WEBRTC*", "DeleteVideoCaptureAndroid"); - captureAndroid.StopCapture(); - captureAndroid.camera.release(); - captureAndroid.camera = null; - captureAndroid.context = 0; + captureAndroid.StopCapture(); + captureAndroid.camera.release(); + captureAndroid.camera = null; + captureAndroid.context = 0; - if(DEBUG) Log.v("*WEBRTC*", "DeleteVideoCaptureAndroid ended"); + if(DEBUG) Log.v("*WEBRTC*", "DeleteVideoCaptureAndroid ended"); - } + } - public VideoCaptureAndroid(int in_id, - long in_context, - Camera in_camera, - AndroidVideoCaptureDevice in_device) { - id = in_id; - context = in_context; - camera = in_camera; - currentDevice = in_device; - } + public VideoCaptureAndroid(int in_id, + long in_context, + Camera in_camera, + AndroidVideoCaptureDevice in_device) { + id = in_id; + context = in_context; + camera = in_camera; + currentDevice = in_device; + } - public int StartCapture(int width, int height, int frameRate) { - if(DEBUG) Log.d("*WEBRTC*", "StartCapture width" + width + - " height " + height +" frame rate " + frameRate); - try { - if (camera == null) { - Log.e("*WEBRTC*", - String.format(Locale.US,"Camera not initialized %d",id)); - return -1; - } - currentCapability = new CaptureCapabilityAndroid(); - currentCapability.width = width; - currentCapability.height = height; - currentCapability.maxFPS = frameRate; - PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat); + public int StartCapture(int width, int height, int frameRate) { + if(DEBUG) Log.d("*WEBRTC*", "StartCapture width" + width + + " height " + height +" frame rate " + frameRate); + try { + if (camera == null) { + Log.e("*WEBRTC*", + String.format(Locale.US,"Camera not initialized %d",id)); + return -1; + } + currentCapability = new CaptureCapabilityAndroid(); + currentCapability.width = width; + currentCapability.height = height; + currentCapability.maxFPS = frameRate; + PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat); - Camera.Parameters parameters = camera.getParameters(); - parameters.setPreviewSize(currentCapability.width, - currentCapability.height); - parameters.setPreviewFormat(PIXEL_FORMAT ); - parameters.setPreviewFrameRate(currentCapability.maxFPS); - camera.setParameters(parameters); + Camera.Parameters parameters = camera.getParameters(); + parameters.setPreviewSize(currentCapability.width, + currentCapability.height); + parameters.setPreviewFormat(PIXEL_FORMAT ); + parameters.setPreviewFrameRate(currentCapability.maxFPS); + camera.setParameters(parameters); - // Get the local preview SurfaceHolder from the static render class - localPreview = ViERenderer.GetLocalRenderer(); - if(localPreview != null) { - localPreview.addCallback(this); - } + // Get the local preview SurfaceHolder from the static render class + localPreview = ViERenderer.GetLocalRenderer(); + if(localPreview != null) { + localPreview.addCallback(this); + } - int bufSize = width * height * pixelFormat.bitsPerPixel / 8; - if(android.os.Build.VERSION.SDK_INT >= 7) { - // According to Doc addCallbackBuffer belongs to API level 8. 
- // But it seems like it works on Android 2.1 as well. - // At least SE X10 and Milestone - byte[] buffer = null; - for (int i = 0; i < numCaptureBuffers; i++) { - buffer = new byte[bufSize]; - camera.addCallbackBuffer(buffer); + int bufSize = width * height * pixelFormat.bitsPerPixel / 8; + if(android.os.Build.VERSION.SDK_INT >= 7) { + // According to Doc addCallbackBuffer belongs to API level 8. + // But it seems like it works on Android 2.1 as well. + // At least SE X10 and Milestone + byte[] buffer = null; + for (int i = 0; i < numCaptureBuffers; i++) { + buffer = new byte[bufSize]; + camera.addCallbackBuffer(buffer); + } + + camera.setPreviewCallbackWithBuffer(this); + ownsBuffers = true; + } + else { + camera.setPreviewCallback(this); + } + + camera.startPreview(); + previewBufferLock.lock(); + expectedFrameSize = bufSize; + isRunning = true; + previewBufferLock.unlock(); + } + catch (Exception ex) { + Log.e("*WEBRTC*", "Failed to start camera"); + return -1; + } + return 0; + } + + public int StopCapture() { + if(DEBUG) Log.d("*WEBRTC*", "StopCapture"); + try { + previewBufferLock.lock(); + isRunning = false; + previewBufferLock.unlock(); + + camera.stopPreview(); + + if(android.os.Build.VERSION.SDK_INT > 7) { + camera.setPreviewCallbackWithBuffer(null); + } + else { + camera.setPreviewCallback(null); + } + } + catch (Exception ex) { + Log.e("*WEBRTC*", "Failed to stop camera"); + return -1; } - camera.setPreviewCallbackWithBuffer(this); - ownsBuffers = true; - } - else { - camera.setPreviewCallback(this); - } - - camera.startPreview(); - previewBufferLock.lock(); - expectedFrameSize = bufSize; - isRunning = true; - previewBufferLock.unlock(); - } - catch (Exception ex) { - Log.e("*WEBRTC*", "Failed to start camera"); - return -1; - } - return 0; - } - - public int StopCapture() { - if(DEBUG) Log.d("*WEBRTC*", "StopCapture"); - try { - previewBufferLock.lock(); - isRunning = false; - previewBufferLock.unlock(); - - camera.stopPreview(); - - if(android.os.Build.VERSION.SDK_INT > 7) { - camera.setPreviewCallbackWithBuffer(null); - } - else { - camera.setPreviewCallback(null); - } - } - catch (Exception ex) { - Log.e("*WEBRTC*", "Failed to stop camera"); - return -1; - } - - if(DEBUG) { - Log.d("*WEBRTC*", "StopCapture ended"); - } - return 0; - } - - native void ProvideCameraFrame(byte[] data,int length, long captureObject); - - public void onPreviewFrame(byte[] data, Camera camera) { - previewBufferLock.lock(); - - if(VERBOSE) { - Log.v("*WEBRTC*", - String.format(Locale.US, "preview frame length %d context %x", - data.length, context)); - } - if(isRunning) { - // If StartCapture has been called but not StopCapture - // Call the C++ layer with the captured frame - if (data.length == expectedFrameSize) { - ProvideCameraFrame(data, expectedFrameSize, context); - if (VERBOSE) { - Log.v("*WEBRTC*", String.format(Locale.US, "frame delivered")); + if(DEBUG) { + Log.d("*WEBRTC*", "StopCapture ended"); } - if(ownsBuffers) { - // Give the video buffer to the camera service again. 
- camera.addCallbackBuffer(data); + return 0; + } + + native void ProvideCameraFrame(byte[] data,int length, long captureObject); + + public void onPreviewFrame(byte[] data, Camera camera) { + previewBufferLock.lock(); + + if(VERBOSE) { + Log.v("*WEBRTC*", + String.format(Locale.US, "preview frame length %d context %x", + data.length, context)); } - } + if(isRunning) { + // If StartCapture has been called but not StopCapture + // Call the C++ layer with the captured frame + if (data.length == expectedFrameSize) { + ProvideCameraFrame(data, expectedFrameSize, context); + if (VERBOSE) { + Log.v("*WEBRTC*", String.format(Locale.US, "frame delivered")); + } + if(ownsBuffers) { + // Give the video buffer to the camera service again. + camera.addCallbackBuffer(data); + } + } + } + previewBufferLock.unlock(); } - previewBufferLock.unlock(); - } - public void surfaceChanged(SurfaceHolder holder, - int format, int width, int height) { + public void surfaceChanged(SurfaceHolder holder, + int format, int width, int height) { - try { - if(camera != null) { - camera.setPreviewDisplay(localPreview); - } - } catch (IOException e) { - Log.e("*WEBRTC*", - String.format(Locale.US, - "Failed to set Local preview. " + e.getMessage())); + try { + if(camera != null) { + camera.setPreviewDisplay(localPreview); + } + } catch (IOException e) { + Log.e("*WEBRTC*", + String.format(Locale.US, + "Failed to set Local preview. " + e.getMessage())); + } } - } - // Sets the rotation of the preview render window. - // Does not affect the captured video image. - public void SetPreviewRotation(int rotation) { - if(camera != null) { - previewBufferLock.lock(); - final boolean running = isRunning; - int width = 0; - int height = 0; - int framerate = 0; + // Sets the rotation of the preview render window. + // Does not affect the captured video image. + public void SetPreviewRotation(int rotation) { + if(camera != null) { + previewBufferLock.lock(); + final boolean running = isRunning; + int width = 0; + int height = 0; + int framerate = 0; - if(running) { - width = currentCapability.width; - height = currentCapability.height; - framerate = currentCapability.maxFPS; + if(running) { + width = currentCapability.width; + height = currentCapability.height; + framerate = currentCapability.maxFPS; - StopCapture(); - } + StopCapture(); + } - int resultRotation = 0; - if(currentDevice.frontCameraType == - VideoCaptureDeviceInfoAndroid.FrontFacingCameraType.Android23) { - // this is a 2.3 or later front facing camera. - // SetDisplayOrientation will flip the image horizontally - // before doing the rotation. - resultRotation=(360-rotation) % 360; // compensate the mirror - } - else { - // Back facing or 2.2 or previous front camera - resultRotation=rotation; - } - if(android.os.Build.VERSION.SDK_INT>7) { - camera.setDisplayOrientation(resultRotation); - } - else { - // Android 2.1 and previous - // This rotation unfortunately does not seems to work. - // http://code.google.com/p/android/issues/detail?id=1193 - Camera.Parameters parameters = camera.getParameters(); - parameters.setRotation(resultRotation); - camera.setParameters(parameters); - } + int resultRotation = 0; + if(currentDevice.frontCameraType == + VideoCaptureDeviceInfoAndroid.FrontFacingCameraType.Android23) { + // this is a 2.3 or later front facing camera. + // SetDisplayOrientation will flip the image horizontally + // before doing the rotation. 
+ resultRotation=(360-rotation) % 360; // compensate the mirror + } + else { + // Back facing or 2.2 or previous front camera + resultRotation=rotation; + } + if(android.os.Build.VERSION.SDK_INT>7) { + camera.setDisplayOrientation(resultRotation); + } + else { + // Android 2.1 and previous + // This rotation unfortunately does not seems to work. + // http://code.google.com/p/android/issues/detail?id=1193 + Camera.Parameters parameters = camera.getParameters(); + parameters.setRotation(resultRotation); + camera.setParameters(parameters); + } - if(running) { - StartCapture(width, height, framerate); - } - previewBufferLock.unlock(); + if(running) { + StartCapture(width, height, framerate); + } + previewBufferLock.unlock(); + } } - } - public void surfaceCreated(SurfaceHolder holder) { - } + public void surfaceCreated(SurfaceHolder holder) { + } - public void surfaceDestroyed(SurfaceHolder holder) { - } + public void surfaceDestroyed(SurfaceHolder holder) { + } } diff --git a/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java b/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java index e8f63f041..2fc71b4f6 100644 --- a/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java +++ b/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java @@ -26,407 +26,406 @@ import android.util.Log; public class VideoCaptureDeviceInfoAndroid { - //Context - Context context; + //Context + Context context; - // Set this to 2 for VERBOSE logging. 1 for DEBUG - private static int LOGLEVEL = 0; - private static boolean VERBOSE = LOGLEVEL > 2; - private static boolean DEBUG = LOGLEVEL > 1; + // Set this to 2 for VERBOSE logging. 1 for DEBUG + private static int LOGLEVEL = 0; + private static boolean VERBOSE = LOGLEVEL > 2; + private static boolean DEBUG = LOGLEVEL > 1; - // Private class with info about all available cameras and the capabilities - public class AndroidVideoCaptureDevice { - AndroidVideoCaptureDevice() { - frontCameraType = FrontFacingCameraType.None; - index = 0; - } - - public String deviceUniqueName; - public CaptureCapabilityAndroid captureCapabilies[]; - public FrontFacingCameraType frontCameraType; - - // Orientation of camera as described in - // android.hardware.Camera.CameraInfo.Orientation - public int orientation; - // Camera index used in Camera.Open on Android 2.3 and onwards - public int index; - } - - public enum FrontFacingCameraType { - None, // This is not a front facing camera - GalaxyS, // Galaxy S front facing camera. - HTCEvo, // HTC Evo front facing camera - Android23, // Android 2.3 front facing camera. 
- } - - String currentDeviceUniqueId; - int id; - List deviceList; - - public static VideoCaptureDeviceInfoAndroid - CreateVideoCaptureDeviceInfoAndroid(int in_id, Context in_context) { - if(DEBUG) { - Log.d("*WEBRTC*", - String.format(Locale.US, "VideoCaptureDeviceInfoAndroid")); - } - - VideoCaptureDeviceInfoAndroid self = - new VideoCaptureDeviceInfoAndroid(in_id, in_context); - if(self != null && self.Init() == 0) { - return self; - } - else { - if(DEBUG) { - Log.d("*WEBRTC*", "Failed to create VideoCaptureDeviceInfoAndroid."); - } - } - return null; - } - - private VideoCaptureDeviceInfoAndroid(int in_id, - Context in_context) { - id = in_id; - context = in_context; - deviceList = new ArrayList(); - } - - private int Init() { - // Populate the deviceList with available cameras and their capabilities. - Camera camera = null; - try{ - if(android.os.Build.VERSION.SDK_INT > 8) { - // From Android 2.3 and onwards - for(int i = 0; i < Camera.getNumberOfCameras(); ++i) { - AndroidVideoCaptureDevice newDevice = new AndroidVideoCaptureDevice(); - - Camera.CameraInfo info = new Camera.CameraInfo(); - Camera.getCameraInfo(i, info); - newDevice.index = i; - newDevice.orientation=info.orientation; - if(info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) { - newDevice.deviceUniqueName = - "Camera " + i +", Facing back, Orientation "+ info.orientation; - } - else { - newDevice.deviceUniqueName = - "Camera " + i +", Facing front, Orientation "+ info.orientation; - newDevice.frontCameraType = FrontFacingCameraType.Android23; - } - - camera = Camera.open(i); - Camera.Parameters parameters = camera.getParameters(); - AddDeviceInfo(newDevice, parameters); - camera.release(); - camera = null; - deviceList.add(newDevice); + // Private class with info about all available cameras and the capabilities + public class AndroidVideoCaptureDevice { + AndroidVideoCaptureDevice() { + frontCameraType = FrontFacingCameraType.None; + index = 0; } - } - else { - // Prior to Android 2.3 - AndroidVideoCaptureDevice newDevice; - Camera.Parameters parameters; - newDevice = new AndroidVideoCaptureDevice(); - camera = Camera.open(); - parameters = camera.getParameters(); - newDevice.deviceUniqueName = "Camera 1, Facing back"; - newDevice.orientation = 90; - AddDeviceInfo(newDevice, parameters); + public String deviceUniqueName; + public CaptureCapabilityAndroid captureCapabilies[]; + public FrontFacingCameraType frontCameraType; - deviceList.add(newDevice); - camera.release(); - camera=null; + // Orientation of camera as described in + // android.hardware.Camera.CameraInfo.Orientation + public int orientation; + // Camera index used in Camera.Open on Android 2.3 and onwards + public int index; + } - newDevice = new AndroidVideoCaptureDevice(); - newDevice.deviceUniqueName = "Camera 2, Facing front"; - parameters = SearchOldFrontFacingCameras(newDevice); - if(parameters != null) { - AddDeviceInfo(newDevice, parameters); - deviceList.add(newDevice); + public enum FrontFacingCameraType { + None, // This is not a front facing camera + GalaxyS, // Galaxy S front facing camera. + HTCEvo, // HTC Evo front facing camera + Android23, // Android 2.3 front facing camera. 
+ } + + String currentDeviceUniqueId; + int id; + List deviceList; + + public static VideoCaptureDeviceInfoAndroid + CreateVideoCaptureDeviceInfoAndroid(int in_id, Context in_context) { + if(DEBUG) { + Log.d("*WEBRTC*", + String.format(Locale.US, "VideoCaptureDeviceInfoAndroid")); } - } - } - catch (Exception ex) { - Log.e("*WEBRTC*", "Failed to init VideoCaptureDeviceInfo ex" + - ex.getLocalizedMessage()); - return -1; - } - VerifyCapabilities(); - return 0; - } - // Adds the capture capabilities of the currently opened device - private void AddDeviceInfo(AndroidVideoCaptureDevice newDevice, - Camera.Parameters parameters) { - - List sizes = parameters.getSupportedPreviewSizes(); - List frameRates = parameters.getSupportedPreviewFrameRates(); - int maxFPS=0; - for(Integer frameRate:frameRates) { - if(VERBOSE) { - Log.v("*WEBRTC*", - "VideoCaptureDeviceInfoAndroid:frameRate " + frameRate); - } - if(frameRate > maxFPS) { - maxFPS = frameRate; - } - } - - newDevice.captureCapabilies = new CaptureCapabilityAndroid[sizes.size()]; - for(int i = 0; i < sizes.size(); ++i) { - Size s = sizes.get(i); - newDevice.captureCapabilies[i] = new CaptureCapabilityAndroid(); - newDevice.captureCapabilies[i].height = s.height; - newDevice.captureCapabilies[i].width = s.width; - newDevice.captureCapabilies[i].maxFPS = maxFPS; - } - } - - // Function that make sure device specific capabilities are - // in the capability list. - // Ie Galaxy S supports CIF but does not list CIF as a supported capability. - // Motorola Droid Camera does not work with frame rate above 15fps. - // http://code.google.com/p/android/issues/detail?id=5514#c0 - private void VerifyCapabilities() { - // Nexus S or Galaxy S - if(android.os.Build.DEVICE.equals("GT-I9000") || - android.os.Build.DEVICE.equals("crespo")) { - CaptureCapabilityAndroid specificCapability = - new CaptureCapabilityAndroid(); - specificCapability.width = 352; - specificCapability.height = 288; - specificCapability.maxFPS = 15; - AddDeviceSpecificCapability(specificCapability); - - specificCapability = new CaptureCapabilityAndroid(); - specificCapability.width = 176; - specificCapability.height = 144; - specificCapability.maxFPS = 15; - AddDeviceSpecificCapability(specificCapability); - - specificCapability = new CaptureCapabilityAndroid(); - specificCapability.width = 320; - specificCapability.height = 240; - specificCapability.maxFPS = 15; - AddDeviceSpecificCapability(specificCapability); - } - // Motorola Milestone Camera server does not work at 30fps - // even though it reports that it can - if(android.os.Build.MANUFACTURER.equals("motorola") && - android.os.Build.DEVICE.equals("umts_sholes")) { - for(AndroidVideoCaptureDevice device:deviceList) { - for(CaptureCapabilityAndroid capability:device.captureCapabilies) { - capability.maxFPS=15; + VideoCaptureDeviceInfoAndroid self = + new VideoCaptureDeviceInfoAndroid(in_id, in_context); + if(self != null && self.Init() == 0) { + return self; } - } - } - } - - private void AddDeviceSpecificCapability( - CaptureCapabilityAndroid specificCapability) { - for(AndroidVideoCaptureDevice device:deviceList) { - boolean foundCapability = false; - for(CaptureCapabilityAndroid capability:device.captureCapabilies) { - if(capability.width == specificCapability.width && - capability.height == specificCapability.height) { - foundCapability = true; - break; + else { + if(DEBUG) { + Log.d("*WEBRTC*", "Failed to create VideoCaptureDeviceInfoAndroid."); + } } - } - if(foundCapability==false) { - CaptureCapabilityAndroid 
newCaptureCapabilies[]= - new CaptureCapabilityAndroid[device.captureCapabilies.length+1]; - for(int i = 0; i < device.captureCapabilies.length; ++i) { - newCaptureCapabilies[i+1] = device.captureCapabilies[i]; - } - newCaptureCapabilies[0] = specificCapability; - device.captureCapabilies = newCaptureCapabilies; - } - } - } - - // Returns the number of Capture devices that is supported - public int NumberOfDevices() { - return deviceList.size(); - } - - public String GetDeviceUniqueName(int deviceNumber) { - if(deviceNumber < 0 || deviceNumber >= deviceList.size()) { - return null; - } - return deviceList.get(deviceNumber).deviceUniqueName; - } - - public CaptureCapabilityAndroid[] GetCapabilityArray (String deviceUniqueId) - { - for (AndroidVideoCaptureDevice device: deviceList) { - if(device.deviceUniqueName.equals(deviceUniqueId)) { - return (CaptureCapabilityAndroid[]) device.captureCapabilies; - } - } - return null; - } - - // Returns the camera orientation as described by - // android.hardware.Camera.CameraInfo.orientation - public int GetOrientation(String deviceUniqueId) { - for (AndroidVideoCaptureDevice device: deviceList) { - if(device.deviceUniqueName.equals(deviceUniqueId)) { - return device.orientation; - } - } - return -1; - } - - // Returns an instance of VideoCaptureAndroid. - public VideoCaptureAndroid AllocateCamera(int id, long context, - String deviceUniqueId) { - try { - if(DEBUG) Log.d("*WEBRTC*", "AllocateCamera " + deviceUniqueId); - - Camera camera = null; - AndroidVideoCaptureDevice deviceToUse = null; - for (AndroidVideoCaptureDevice device: deviceList) { - if(device.deviceUniqueName.equals(deviceUniqueId)) { - // Found the wanted camera - deviceToUse = device; - switch(device.frontCameraType) { - case GalaxyS: - camera = AllocateGalaxySFrontCamera(); - break; - case HTCEvo: - camera = AllocateEVOFrontFacingCamera(); - break; - default: - // From Android 2.3 and onwards) - if(android.os.Build.VERSION.SDK_INT>8) - camera=Camera.open(device.index); - else - camera=Camera.open(); // Default camera - } - } - } - - if(camera == null) { return null; - } - if(VERBOSE) { - Log.v("*WEBRTC*", "AllocateCamera - creating VideoCaptureAndroid"); - } - - return new VideoCaptureAndroid(id,context,camera,deviceToUse); - - }catch (Exception ex) { - Log.e("*WEBRTC*", "AllocateCamera Failed to open camera- ex " + - ex.getLocalizedMessage()); - } - return null; - } - - // Searches for a front facing camera device. This is device specific code. - private Camera.Parameters - SearchOldFrontFacingCameras(AndroidVideoCaptureDevice newDevice) - throws SecurityException, IllegalArgumentException, - NoSuchMethodException, ClassNotFoundException, - IllegalAccessException, InvocationTargetException { - // Check the id of the opened camera device - // Returns null on X10 and 1 on Samsung Galaxy S. - Camera camera = Camera.open(); - Camera.Parameters parameters = camera.getParameters(); - String cameraId = parameters.get("camera-id"); - if(cameraId != null && cameraId.equals("1")) { - // This might be a Samsung Galaxy S with a front facing camera. - try { - parameters.set("camera-id", 2); - camera.setParameters(parameters); - parameters = camera.getParameters(); - newDevice.frontCameraType = FrontFacingCameraType.GalaxyS; - newDevice.orientation = 0; - camera.release(); - return parameters; - } - catch (Exception ex) { - //Nope - it did not work. 
- Log.e("*WEBRTC*", "Init Failed to open front camera camera - ex " + - ex.getLocalizedMessage()); - } - } - camera.release(); - - //Check for Evo front facing camera - File file = - new File("/system/framework/com.htc.hardware.twinCamDevice.jar"); - boolean exists = file.exists(); - if (!exists){ - file = - new File("/system/framework/com.sprint.hardware.twinCamDevice.jar"); - exists = file.exists(); - } - if(exists) { - newDevice.frontCameraType = FrontFacingCameraType.HTCEvo; - newDevice.orientation = 0; - Camera evCamera = AllocateEVOFrontFacingCamera(); - parameters = evCamera.getParameters(); - evCamera.release(); - return parameters; - } - return null; - } - - // Returns a handle to HTC front facing camera. - // The caller is responsible to release it on completion. - private Camera AllocateEVOFrontFacingCamera() - throws SecurityException, NoSuchMethodException, - ClassNotFoundException, IllegalArgumentException, - IllegalAccessException, InvocationTargetException { - String classPath = null; - File file = - new File("/system/framework/com.htc.hardware.twinCamDevice.jar"); - classPath = "com.htc.hardware.twinCamDevice.FrontFacingCamera"; - boolean exists = file.exists(); - if (!exists){ - file = - new File("/system/framework/com.sprint.hardware.twinCamDevice.jar"); - classPath = "com.sprint.hardware.twinCamDevice.FrontFacingCamera"; - exists = file.exists(); - } - if(!exists) { - return null; } - String dexOutputDir = ""; - if(context != null) { - dexOutputDir = context.getFilesDir().getAbsolutePath(); - File mFilesDir = new File(dexOutputDir, "dexfiles"); - if(!mFilesDir.exists()){ - //Log.e("*WEBRTCN*", "Directory doesn't exists"); - if(!mFilesDir.mkdirs()) { - //Log.e("*WEBRTCN*", "Unable to create files directory"); + private VideoCaptureDeviceInfoAndroid(int in_id, + Context in_context) { + id = in_id; + context = in_context; + deviceList = new ArrayList(); + } + + private int Init() { + // Populate the deviceList with available cameras and their capabilities. 
+ Camera camera = null;
+ try{
+ if(android.os.Build.VERSION.SDK_INT > 8) {
+ // From Android 2.3 and onwards
+ for(int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+ AndroidVideoCaptureDevice newDevice = new AndroidVideoCaptureDevice();
+
+ Camera.CameraInfo info = new Camera.CameraInfo();
+ Camera.getCameraInfo(i, info);
+ newDevice.index = i;
+ newDevice.orientation=info.orientation;
+ if(info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
+ newDevice.deviceUniqueName =
+ "Camera " + i +", Facing back, Orientation "+ info.orientation;
+ }
+ else {
+ newDevice.deviceUniqueName =
+ "Camera " + i +", Facing front, Orientation "+ info.orientation;
+ newDevice.frontCameraType = FrontFacingCameraType.Android23;
+ }
+
+ camera = Camera.open(i);
+ Camera.Parameters parameters = camera.getParameters();
+ AddDeviceInfo(newDevice, parameters);
+ camera.release();
+ camera = null;
+ deviceList.add(newDevice);
+ }
+ }
+ else {
+ // Prior to Android 2.3
+ AndroidVideoCaptureDevice newDevice;
+ Camera.Parameters parameters;
+
+ newDevice = new AndroidVideoCaptureDevice();
+ camera = Camera.open();
+ parameters = camera.getParameters();
+ newDevice.deviceUniqueName = "Camera 1, Facing back";
+ newDevice.orientation = 90;
+ AddDeviceInfo(newDevice, parameters);
+
+ deviceList.add(newDevice);
+ camera.release();
+ camera=null;
+
+ newDevice = new AndroidVideoCaptureDevice();
+ newDevice.deviceUniqueName = "Camera 2, Facing front";
+ parameters = SearchOldFrontFacingCameras(newDevice);
+ if(parameters != null) {
+ AddDeviceInfo(newDevice, parameters);
+ deviceList.add(newDevice);
+ }
+ }
+ }
+ catch (Exception ex) {
+ Log.e("*WEBRTC*", "Failed to init VideoCaptureDeviceInfo ex" +
+ ex.getLocalizedMessage());
+ return -1;
+ }
+ VerifyCapabilities();
+ return 0;
+ }
+
+ // Adds the capture capabilities of the currently opened device
+ private void AddDeviceInfo(AndroidVideoCaptureDevice newDevice,
+ Camera.Parameters parameters) {
+
+ List<Size> sizes = parameters.getSupportedPreviewSizes();
+ List<Integer> frameRates = parameters.getSupportedPreviewFrameRates();
+ int maxFPS=0;
+ for(Integer frameRate:frameRates) {
+ if(VERBOSE) {
+ Log.v("*WEBRTC*",
+ "VideoCaptureDeviceInfoAndroid:frameRate " + frameRate);
+ }
+ if(frameRate > maxFPS) {
+ maxFPS = frameRate;
+ }
+ }
+
+ newDevice.captureCapabilies = new CaptureCapabilityAndroid[sizes.size()];
+ for(int i = 0; i < sizes.size(); ++i) {
+ Size s = sizes.get(i);
+ newDevice.captureCapabilies[i] = new CaptureCapabilityAndroid();
+ newDevice.captureCapabilies[i].height = s.height;
+ newDevice.captureCapabilies[i].width = s.width;
+ newDevice.captureCapabilies[i].maxFPS = maxFPS;
+ }
+ }
+
+ // Function that makes sure device-specific capabilities are
+ // in the capability list.
+ // E.g. Galaxy S supports CIF but does not list CIF as a supported capability.
+ // Motorola Droid Camera does not work with frame rate above 15fps.
+ // http://code.google.com/p/android/issues/detail?id=5514#c0
+ private void VerifyCapabilities() {
+ // Nexus S or Galaxy S
+ if(android.os.Build.DEVICE.equals("GT-I9000") ||
+ android.os.Build.DEVICE.equals("crespo")) {
+ CaptureCapabilityAndroid specificCapability =
+ new CaptureCapabilityAndroid();
+ specificCapability.width = 352;
+ specificCapability.height = 288;
+ specificCapability.maxFPS = 15;
+ AddDeviceSpecificCapability(specificCapability);
+
+ specificCapability = new CaptureCapabilityAndroid();
+ specificCapability.width = 176;
+ specificCapability.height = 144;
+ specificCapability.maxFPS = 15;
+ AddDeviceSpecificCapability(specificCapability);
+
+ specificCapability = new CaptureCapabilityAndroid();
+ specificCapability.width = 320;
+ specificCapability.height = 240;
+ specificCapability.maxFPS = 15;
+ AddDeviceSpecificCapability(specificCapability);
+ }
+ // Motorola Milestone Camera server does not work at 30fps
+ // even though it reports that it can
+ if(android.os.Build.MANUFACTURER.equals("motorola") &&
+ android.os.Build.DEVICE.equals("umts_sholes")) {
+ for(AndroidVideoCaptureDevice device:deviceList) {
+ for(CaptureCapabilityAndroid capability:device.captureCapabilies) {
+ capability.maxFPS=15;
+ }
+ }
+ }
+ }
+
+ private void AddDeviceSpecificCapability(
+ CaptureCapabilityAndroid specificCapability) {
+ for(AndroidVideoCaptureDevice device:deviceList) {
+ boolean foundCapability = false;
+ for(CaptureCapabilityAndroid capability:device.captureCapabilies) {
+ if(capability.width == specificCapability.width &&
+ capability.height == specificCapability.height) {
+ foundCapability = true;
+ break;
+ }
+ }
+ if(foundCapability==false) {
+ CaptureCapabilityAndroid newCaptureCapabilies[]=
+ new CaptureCapabilityAndroid[device.captureCapabilies.length+1];
+ for(int i = 0; i < device.captureCapabilies.length; ++i) {
+ newCaptureCapabilies[i+1] = device.captureCapabilies[i];
+ }
+ newCaptureCapabilies[0] = specificCapability;
+ device.captureCapabilies = newCaptureCapabilies;
+ }
+ }
+ }
+
+ // Returns the number of Capture devices that are supported
+ public int NumberOfDevices() {
+ return deviceList.size();
+ }
+
+ public String GetDeviceUniqueName(int deviceNumber) {
+ if(deviceNumber < 0 || deviceNumber >= deviceList.size()) {
+ return null;
+ }
+ return deviceList.get(deviceNumber).deviceUniqueName;
+ }
+
+ public CaptureCapabilityAndroid[] GetCapabilityArray (String deviceUniqueId)
+ {
+ for (AndroidVideoCaptureDevice device: deviceList) {
+ if(device.deviceUniqueName.equals(deviceUniqueId)) {
+ return (CaptureCapabilityAndroid[]) device.captureCapabilies;
+ }
+ }
+ return null;
+ }
+
+ // Returns the camera orientation as described by
+ // android.hardware.Camera.CameraInfo.orientation
+ public int GetOrientation(String deviceUniqueId) {
+ for (AndroidVideoCaptureDevice device: deviceList) {
+ if(device.deviceUniqueName.equals(deviceUniqueId)) {
+ return device.orientation;
+ }
+ }
+ return -1;
+ }
+
+ // Returns an instance of VideoCaptureAndroid.
+ public VideoCaptureAndroid AllocateCamera(int id, long context,
+ String deviceUniqueId) {
+ try {
+ if(DEBUG) Log.d("*WEBRTC*", "AllocateCamera " + deviceUniqueId);
+
+ Camera camera = null;
+ AndroidVideoCaptureDevice deviceToUse = null;
+ for (AndroidVideoCaptureDevice device: deviceList) {
+ if(device.deviceUniqueName.equals(deviceUniqueId)) {
+ // Found the wanted camera
+ deviceToUse = device;
+ switch(device.frontCameraType) {
+ case GalaxyS:
+ camera = AllocateGalaxySFrontCamera();
+ break;
+ case HTCEvo:
+ camera = AllocateEVOFrontFacingCamera();
+ break;
+ default:
+ // From Android 2.3 and onwards
+ if(android.os.Build.VERSION.SDK_INT>8)
+ camera=Camera.open(device.index);
+ else
+ camera=Camera.open(); // Default camera
+ }
+ }
+ }
+
+ if(camera == null) {
+ return null;
+ }
+ if(VERBOSE) {
+ Log.v("*WEBRTC*", "AllocateCamera - creating VideoCaptureAndroid");
+ }
+
+ return new VideoCaptureAndroid(id, context, camera, deviceToUse);
+
+ } catch (Exception ex) {
+ Log.e("*WEBRTC*", "AllocateCamera Failed to open camera - ex " +
+ ex.getLocalizedMessage());
+ }
+ return null;
+ }
+
+ // Searches for a front facing camera device. This is device specific code.
+ private Camera.Parameters
+ SearchOldFrontFacingCameras(AndroidVideoCaptureDevice newDevice)
+ throws SecurityException, IllegalArgumentException,
+ NoSuchMethodException, ClassNotFoundException,
+ IllegalAccessException, InvocationTargetException {
+ // Check the id of the opened camera device
+ // Returns null on X10 and 1 on Samsung Galaxy S.
+ Camera camera = Camera.open();
+ Camera.Parameters parameters = camera.getParameters();
+ String cameraId = parameters.get("camera-id");
+ if(cameraId != null && cameraId.equals("1")) {
+ // This might be a Samsung Galaxy S with a front facing camera.
+ try {
+ parameters.set("camera-id", 2);
+ camera.setParameters(parameters);
+ parameters = camera.getParameters();
+ newDevice.frontCameraType = FrontFacingCameraType.GalaxyS;
+ newDevice.orientation = 0;
+ camera.release();
+ return parameters;
+ }
+ catch (Exception ex) {
+ // Nope - it did not work.
+ Log.e("*WEBRTC*", "Init Failed to open front camera - ex " +
+ ex.getLocalizedMessage());
+ }
+ }
+ camera.release();
+
+ // Check for Evo front facing camera
+ File file =
+ new File("/system/framework/com.htc.hardware.twinCamDevice.jar");
+ boolean exists = file.exists();
+ if (!exists) {
+ file =
+ new File("/system/framework/com.sprint.hardware.twinCamDevice.jar");
+ exists = file.exists();
+ }
+ if(exists) {
+ newDevice.frontCameraType = FrontFacingCameraType.HTCEvo;
+ newDevice.orientation = 0;
+ Camera evCamera = AllocateEVOFrontFacingCamera();
+ parameters = evCamera.getParameters();
+ evCamera.release();
+ return parameters;
+ }
+ return null;
+ }
+
+ // Returns a handle to HTC front facing camera.
+ // The caller is responsible for releasing it on completion.
+ private Camera AllocateEVOFrontFacingCamera()
+ throws SecurityException, NoSuchMethodException,
+ ClassNotFoundException, IllegalArgumentException,
+ IllegalAccessException, InvocationTargetException {
+ String classPath = null;
+ File file =
+ new File("/system/framework/com.htc.hardware.twinCamDevice.jar");
+ classPath = "com.htc.hardware.twinCamDevice.FrontFacingCamera";
+ boolean exists = file.exists();
+ if (!exists) {
+ file =
+ new File("/system/framework/com.sprint.hardware.twinCamDevice.jar");
+ classPath = "com.sprint.hardware.twinCamDevice.FrontFacingCamera";
+ exists = file.exists();
+ }
+ if(!exists) {
+ return null;
+ }
+
+ String dexOutputDir = "";
+ if(context != null) {
+ dexOutputDir = context.getFilesDir().getAbsolutePath();
+ File mFilesDir = new File(dexOutputDir, "dexfiles");
+ if(!mFilesDir.exists()){
+ //Log.e("*WEBRTCN*", "Directory doesn't exist");
+ if(!mFilesDir.mkdirs()) {
+ //Log.e("*WEBRTCN*", "Unable to create files directory");
+ }
+ }
+ }
+
+ dexOutputDir += "/dexfiles";
+
+ DexClassLoader loader =
+ new DexClassLoader(file.getAbsolutePath(), dexOutputDir,
+ null, ClassLoader.getSystemClassLoader());
+
+ Method method = loader.loadClass(classPath).getDeclaredMethod(
+ "getFrontFacingCamera", (Class[]) null);
+ Camera camera = (Camera) method.invoke((Object[]) null, (Object[]) null);
+ return camera;
+ }
+
+ // Returns a handle to Galaxy S front camera.
+ // The caller is responsible for releasing it on completion.
+ private Camera AllocateGalaxySFrontCamera() {
+ Camera camera = Camera.open();
+ Camera.Parameters parameters = camera.getParameters();
+ parameters.set("camera-id", 2);
+ camera.setParameters(parameters);
+ return camera;
+ }
}
diff --git a/src/modules/video_capture/main/source/android/video_capture_android.cc b/src/modules/video_capture/main/source/android/video_capture_android.cc
index f73837dc9..3e427ddff 100644
--- a/src/modules/video_capture/main/source/android/video_capture_android.cc
+++ b/src/modules/video_capture/main/source/android/video_capture_android.cc
@@ -15,24 +15,24 @@
#include "critical_section_wrapper.h"
#include "ref_count.h"
#include "trace.h"
+
namespace webrtc {
namespace videocapturemodule {
+
VideoCaptureModule* VideoCaptureImpl::Create(
const WebRtc_Word32 id,
- const char* deviceUniqueIdUTF8)
-{
+ const char* deviceUniqueIdUTF8) {
- RefCountImpl<VideoCaptureAndroid>* implementation =
- new RefCountImpl<VideoCaptureAndroid>(id);
+ RefCountImpl<VideoCaptureAndroid>* implementation =
+ new RefCountImpl<VideoCaptureAndroid>(id);
- if (!implementation || implementation->Init(id, deviceUniqueIdUTF8) != 0)
- {
- delete implementation;
- implementation = NULL;
- }
- return implementation;
+ if (!implementation || implementation->Init(id, deviceUniqueIdUTF8) != 0) {
+ delete implementation;
+ implementation = NULL;
+ }
+ return implementation;
}

// Android logging, uncomment to print trace to
@@ -55,210 +55,190 @@ jobject VideoCaptureAndroid::g_javaContext = NULL;
* Register references to Java Capture class.
*/ WebRtc_Word32 VideoCaptureAndroid::SetAndroidObjects(void* javaVM, - void* javaContext) -{ + void* javaContext) { - g_jvm = static_cast (javaVM); - g_javaContext = static_cast (javaContext); + g_jvm = static_cast (javaVM); + g_javaContext = static_cast (javaContext); - if (javaVM) - { - JNIEnv* env = NULL; - if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: could not get Java environment", __FUNCTION__); - return -1; - } - // get java capture class type (note path to class packet) - jclass javaCmClassLocal = env->FindClass(AndroidJavaCaptureClass); - if (!javaCmClassLocal) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: could not find java class", __FUNCTION__); - return -1; - } - // create a global reference to the class - // (to tell JNI that we are referencing it - // after this function has returned) - g_javaCmClass = static_cast - (env->NewGlobalRef(javaCmClassLocal)); - if (!g_javaCmClass) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: InitVideoEngineJava(): could not create" - " Java Camera class reference", - __FUNCTION__); - return -1; - } - // Delete local class ref, we only use the global ref - env->DeleteLocalRef(javaCmClassLocal); - JNINativeMethod nativeFunctions = { "ProvideCameraFrame", "([BIJ)V", - (void*) &VideoCaptureAndroid::ProvideCameraFrame }; - if (env->RegisterNatives(g_javaCmClass, &nativeFunctions, 1) == 0) - { - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, - "%s: Registered native functions", __FUNCTION__); - } - else - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: Failed to register native functions", - __FUNCTION__); - return -1; - } - - // get java capture class type (note path to class packet) - jclass javaCmDevInfoClassLocal = env->FindClass( - AndroidJavaCaptureDeviceInfoClass); - if (!javaCmDevInfoClassLocal) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: could not find java class", __FUNCTION__); - return -1; - } - - // create a global reference to the class - // (to tell JNI that we are referencing it - // after this function has returned) - g_javaCmDevInfoClass = static_cast - (env->NewGlobalRef(javaCmDevInfoClassLocal)); - if (!g_javaCmDevInfoClass) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: InitVideoEngineJava(): could not create Java " - "Camera Device info class reference", - __FUNCTION__); - return -1; - } - // Delete local class ref, we only use the global ref - env->DeleteLocalRef(javaCmDevInfoClassLocal); - - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, - "VideoCaptureDeviceInfoAndroid get method id"); - - // get the method ID for the Android Java CaptureClass static - //CreateVideoCaptureAndroid factory method. 
- jmethodID cid = env->GetStaticMethodID( - g_javaCmDevInfoClass, - "CreateVideoCaptureDeviceInfoAndroid", - "(ILandroid/content/Context;)" - "Lorg/webrtc/videoengine/VideoCaptureDeviceInfoAndroid;"); - if (cid == NULL) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: could not get java" - "VideoCaptureDeviceInfoAndroid constructor ID", - __FUNCTION__); - return -1; - } - - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, - "%s: construct static java device object", __FUNCTION__); - - // construct the object by calling the static constructor object - jobject javaCameraDeviceInfoObjLocal = - env->CallStaticObjectMethod(g_javaCmDevInfoClass, - cid, (int) -1, - g_javaContext); - if (!javaCameraDeviceInfoObjLocal) - { - WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1, - "%s: could not create Java Capture Device info object", - __FUNCTION__); - return -1; - } - // create a reference to the object (to tell JNI that - // we are referencing it after this function has returned) - g_javaCmDevInfoObject = env->NewGlobalRef(javaCameraDeviceInfoObjLocal); - if (!g_javaCmDevInfoObject) - { - WEBRTC_TRACE(webrtc::kTraceError, - webrtc::kTraceAudioDevice, - -1, - "%s: could not create Java" - "cameradevinceinfo object reference", - __FUNCTION__); - return -1; - } - // Delete local object ref, we only use the global ref - env->DeleteLocalRef(javaCameraDeviceInfoObjLocal); - return 0; + if (javaVM) { + JNIEnv* env = NULL; + if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: could not get Java environment", __FUNCTION__); + return -1; } - else - { - WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: JVM is NULL, assuming deinit", __FUNCTION__); - if (!g_jvm) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: SetAndroidObjects not called with a valid JVM.", - __FUNCTION__); - return -1; - } - JNIEnv* env = NULL; - bool attached = false; - if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, - -1, "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - return -1; - } - attached = true; - } - env->DeleteGlobalRef(g_javaCmDevInfoObject); - env->DeleteGlobalRef(g_javaCmDevInfoClass); - env->DeleteGlobalRef(g_javaCmClass); - if (attached && g_jvm->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1, - "%s: Could not detach thread from JVM", __FUNCTION__); - return -1; - } - return 0; - env = (JNIEnv *) NULL; + // get java capture class type (note path to class packet) + jclass javaCmClassLocal = env->FindClass(AndroidJavaCaptureClass); + if (!javaCmClassLocal) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: could not find java class", __FUNCTION__); + return -1; + } + // create a global reference to the class + // (to tell JNI that we are referencing it + // after this function has returned) + g_javaCmClass = static_cast + (env->NewGlobalRef(javaCmClassLocal)); + if (!g_javaCmClass) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: InitVideoEngineJava(): could not create" + " Java Camera class reference", + __FUNCTION__); + return -1; + } + // Delete local 
class ref, we only use the global ref + env->DeleteLocalRef(javaCmClassLocal); + JNINativeMethod nativeFunctions = + { "ProvideCameraFrame", "([BIJ)V", + (void*) &VideoCaptureAndroid::ProvideCameraFrame }; + if (env->RegisterNatives(g_javaCmClass, &nativeFunctions, 1) == 0) { + WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, + "%s: Registered native functions", __FUNCTION__); + } + else { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: Failed to register native functions", + __FUNCTION__); + return -1; + } + + // get java capture class type (note path to class package) + jclass javaCmDevInfoClassLocal = env->FindClass( + AndroidJavaCaptureDeviceInfoClass); + if (!javaCmDevInfoClassLocal) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: could not find java class", __FUNCTION__); + return -1; + } + + // create a global reference to the class + // (to tell JNI that we are referencing it + // after this function has returned) + g_javaCmDevInfoClass = static_cast<jclass> + (env->NewGlobalRef(javaCmDevInfoClassLocal)); + if (!g_javaCmDevInfoClass) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: InitVideoEngineJava(): could not create Java " + "Camera Device info class reference", + __FUNCTION__); + return -1; + } + // Delete local class ref, we only use the global ref + env->DeleteLocalRef(javaCmDevInfoClassLocal); + + WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, + "VideoCaptureDeviceInfoAndroid get method id"); + + // get the method ID for the Android Java CaptureClass static + // CreateVideoCaptureAndroid factory method. + jmethodID cid = env->GetStaticMethodID( + g_javaCmDevInfoClass, + "CreateVideoCaptureDeviceInfoAndroid", + "(ILandroid/content/Context;)" + "Lorg/webrtc/videoengine/VideoCaptureDeviceInfoAndroid;"); + if (cid == NULL) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: could not get java " + "VideoCaptureDeviceInfoAndroid constructor ID", + __FUNCTION__); + return -1; + } + + WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, + "%s: construct static java device object", __FUNCTION__); + + // construct the object by calling the static factory method + jobject javaCameraDeviceInfoObjLocal = + env->CallStaticObjectMethod(g_javaCmDevInfoClass, + cid, (int) -1, + g_javaContext); + if (!javaCameraDeviceInfoObjLocal) { + WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1, + "%s: could not create Java Capture Device info object", + __FUNCTION__); + return -1; + } + // create a reference to the object (to tell JNI that + // we are referencing it after this function has returned) + g_javaCmDevInfoObject = env->NewGlobalRef(javaCameraDeviceInfoObjLocal); + if (!g_javaCmDevInfoObject) { + WEBRTC_TRACE(webrtc::kTraceError, + webrtc::kTraceAudioDevice, + -1, + "%s: could not create Java " + "camera device info object reference", + __FUNCTION__); + return -1; + } + // Delete local object ref, we only use the global ref + env->DeleteLocalRef(javaCameraDeviceInfoObjLocal); + return 0; + } + else { + WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, + "%s: JVM is NULL, assuming deinit", __FUNCTION__); + if (!g_jvm) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: SetAndroidObjects not called with a valid JVM.", + __FUNCTION__); + return -1; + } + JNIEnv* env = NULL; + bool attached = false; + if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the 
thread and get the env + // Attach this thread to JVM + jint res = g_jvm->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, + -1, "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); + return -1; + } + attached = true; + } + env->DeleteGlobalRef(g_javaCmDevInfoObject); + env->DeleteGlobalRef(g_javaCmDevInfoClass); + env->DeleteGlobalRef(g_javaCmClass); + if (attached && g_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1, + "%s: Could not detach thread from JVM", __FUNCTION__); + return -1; } return 0; + } + return 0; } WebRtc_Word32 VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects( JNIEnv*& env, jclass& javaCmDevInfoClass, jobject& javaCmDevInfoObject, - bool& attached) -{ - // get the JNI env for this thread - if (!g_jvm) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: SetAndroidObjects not called with a valid JVM.", - __FUNCTION__); - return -1; + bool& attached) { + // get the JNI env for this thread + if (!g_jvm) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: SetAndroidObjects not called with a valid JVM.", + __FUNCTION__); + return -1; + } + attached = false; + if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = g_jvm->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); + return -1; } - attached = false; - if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - return -1; - } - attached = true; - } - javaCmDevInfoClass = g_javaCmDevInfoClass; - javaCmDevInfoObject = g_javaCmDevInfoObject; - return 0; + attached = true; + } + javaCmDevInfoClass = g_javaCmDevInfoClass; + javaCmDevInfoObject = g_javaCmDevInfoObject; + return 0; } @@ -280,31 +260,29 @@ WebRtc_Word32 VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects( * Signature: ([BIJ)V */ void JNICALL VideoCaptureAndroid::ProvideCameraFrame(JNIEnv * env, - jobject, - jbyteArray javaCameraFrame, - jint length, - jlong context) -{ - VideoCaptureAndroid* captureModule = - reinterpret_cast<VideoCaptureAndroid*>(context); - WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, - -1, "%s: IncomingFrame %d", __FUNCTION__,length); - jbyte* cameraFrame= env->GetByteArrayElements(javaCameraFrame,NULL); - captureModule->IncomingFrame((WebRtc_UWord8*) cameraFrame, - length,captureModule->_frameInfo,0); - env->ReleaseByteArrayElements(javaCameraFrame,cameraFrame,JNI_ABORT); + jobject, + jbyteArray javaCameraFrame, + jint length, + jlong context) { + VideoCaptureAndroid* captureModule = + reinterpret_cast<VideoCaptureAndroid*>(context); + WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, + -1, "%s: IncomingFrame %d", __FUNCTION__, length); + jbyte* cameraFrame = env->GetByteArrayElements(javaCameraFrame, NULL); + captureModule->IncomingFrame((WebRtc_UWord8*) cameraFrame, + length, captureModule->_frameInfo, 0); + env->ReleaseByteArrayElements(javaCameraFrame, cameraFrame, JNI_ABORT); }
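A note on the hunks above: SetAndroidObjects, AttachAndUseAndroidDeviceInfoObjects, and every capture method below repeat the same JNI boilerplate: call GetEnv, attach the thread with AttachCurrentThread when it is not yet attached, and detach on exit only if the attach happened here. A minimal sketch of how that sequence could be factored into an RAII guard follows; it is illustrative only, assumes nothing beyond the JNI calls already used in this file, and the ScopedJvmAttachment name is hypothetical, not something this patch introduces:

// Sketch only (not part of this change): RAII guard for the recurring
// GetEnv / AttachCurrentThread / DetachCurrentThread sequence.
class ScopedJvmAttachment {
 public:
  explicit ScopedJvmAttachment(JavaVM* jvm)
      : _jvm(jvm), _env(NULL), _attached(false) {
    if (!_jvm)
      return;
    // Reuse the JNIEnv if this thread is already attached to the VM.
    if (_jvm->GetEnv((void**) &_env, JNI_VERSION_1_4) != JNI_OK) {
      // Not attached yet: attach now and remember to detach in the dtor.
      if (_jvm->AttachCurrentThread(&_env, NULL) < 0 || !_env) {
        _env = NULL;
        return;
      }
      _attached = true;
    }
  }
  ~ScopedJvmAttachment() {
    // Detach only if this guard performed the attach.
    if (_attached)
      _jvm->DetachCurrentThread();
  }
  JNIEnv* env() const { return _env; }  // NULL when attachment failed.
 private:
  JavaVM* _jvm;
  JNIEnv* _env;
  bool _attached;
};

With such a guard, each method body would reduce to constructing the guard, bailing out when env() is NULL, and making its Java calls; early returns could then no longer skip the detach.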
VideoCaptureAndroid::VideoCaptureAndroid(const WebRtc_Word32 id) : VideoCaptureImpl(id), _capInfo(id), _javaCaptureObj(NULL), - _captureStarted(false) - -{ - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, - "%s: context %x", __FUNCTION__, (int) this); + _captureStarted(false) { + WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, + "%s: context %x", __FUNCTION__, (int) this); } + // ---------------------------------------------------------------------------- // Init // @@ -312,411 +290,365 @@ VideoCaptureAndroid::VideoCaptureAndroid(const WebRtc_Word32 id) // VideoCaptureAndroid.java // ---------------------------------------------------------------------------- WebRtc_Word32 VideoCaptureAndroid::Init(const WebRtc_Word32 id, - const char* deviceUniqueIdUTF8) -{ - const int nameLength = strlen(deviceUniqueIdUTF8); - if (nameLength >= kVideoCaptureUniqueNameLength) - { - return -1; - } + const char* deviceUniqueIdUTF8) { + const int nameLength = strlen(deviceUniqueIdUTF8); + if (nameLength >= kVideoCaptureUniqueNameLength) { + return -1; + } - // Store the device name - _deviceUniqueId = new char[nameLength + 1]; - memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1); + // Store the device name + _deviceUniqueId = new char[nameLength + 1]; + memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1); - if (_capInfo.Init() != 0) - { - WEBRTC_TRACE(webrtc::kTraceError, - webrtc::kTraceVideoCapture, - _id, - "%s: Failed to initialize CaptureDeviceInfo", - __FUNCTION__); - return -1; - } - - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s:", + if (_capInfo.Init() != 0) { + WEBRTC_TRACE(webrtc::kTraceError, + webrtc::kTraceVideoCapture, + _id, + "%s: Failed to initialize CaptureDeviceInfo", __FUNCTION__); - // use the jvm that has been set - if (!g_jvm) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Not a valid Java VM pointer", __FUNCTION__); - return -1; + return -1; + } + + WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s:", + __FUNCTION__); + // use the jvm that has been set + if (!g_jvm) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, + "%s: Not a valid Java VM pointer", __FUNCTION__); + return -1; + } + // get the JNI env for this thread + JNIEnv *env; + bool isAttached = false; + + // get the JNI env for this thread + if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = g_jvm->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); + return -1; } - // get the JNI env for this thread - JNIEnv *env; - bool isAttached = false; + isAttached = true; + } - // get the JNI env for this thread - if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - return -1; - } - isAttached = true; + WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id, + "get method id"); + + // get the method ID for the Android Java + // CaptureDeviceInfoClass AllocateCamera factory method. 
+ char signature[256]; + // snprintf guards against overflowing the fixed-size buffer. + snprintf(signature, sizeof(signature), "(IJLjava/lang/String;)L%s;", AndroidJavaCaptureClass); + + jmethodID cid = env->GetMethodID(g_javaCmDevInfoClass, "AllocateCamera", + signature); + if (cid == NULL) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, + "%s: could not get AllocateCamera method ID", __FUNCTION__); + return -1; /* exception thrown */ + } + + jstring captureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8); + // construct the capture object by calling the AllocateCamera method + jobject javaCameraObjLocal = env->CallObjectMethod(g_javaCmDevInfoObject, + cid, (jint) id, + (jlong) this, + captureIdString); + // The id string was only needed for the call; drop the local ref. + env->DeleteLocalRef(captureIdString); + if (!javaCameraObjLocal) { + WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id, + "%s: could not create Java Capture object", __FUNCTION__); + return -1; + } + + // create a reference to the object (to tell JNI that we are referencing it + // after this function has returned) + _javaCaptureObj = env->NewGlobalRef(javaCameraObjLocal); + if (!_javaCaptureObj) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioDevice, _id, + "%s: could not create Java camera object reference", + __FUNCTION__); + return -1; + } + + // Delete local object ref, we only use the global ref + env->DeleteLocalRef(javaCameraObjLocal); + + // Detach this thread if it was attached + if (isAttached) { + if (g_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id, + "%s: Could not detach thread from JVM", __FUNCTION__); } + } - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id, - "get method id"); - - // get the method ID for the Android Java - // CaptureDeviceInfoClass AllocateCamera factory method. - char signature[256]; - sprintf(signature, "(IJLjava/lang/String;)L%s;", AndroidJavaCaptureClass); - - jmethodID cid = env->GetMethodID(g_javaCmDevInfoClass, "AllocateCamera", - signature); - if (cid == NULL) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: could not get constructor ID", __FUNCTION__); - return -1; /* exception thrown */ - } - - jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8); - // construct the object by calling the static constructor object - jobject javaCameraObjLocal = env->CallObjectMethod(g_javaCmDevInfoObject, - cid, (jint) id, - (jlong) this, - capureIdString); - if (!javaCameraObjLocal) - { - WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id, - "%s: could not create Java Capture object", __FUNCTION__); - return -1; - } - - // create a reference to the object (to tell JNI that we are referencing it - // after this function has returned) - _javaCaptureObj = env->NewGlobalRef(javaCameraObjLocal); - if (!_javaCaptureObj) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioDevice, _id, - "%s: could not create Java camera object reference", - __FUNCTION__); - return -1; - } - - // Delete local object ref, we only use the global ref - env->DeleteLocalRef(javaCameraObjLocal); - - // Detach this thread if it was attached - if (isAttached) - { - if (g_jvm->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id, - "%s: Could not detach thread from JVM", __FUNCTION__); - } - } - - return 0; + return 0; } -VideoCaptureAndroid::~VideoCaptureAndroid() -{ - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s:", - __FUNCTION__); - if (_javaCaptureObj == NULL || g_jvm == NULL) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: 
Nothing to clean", __FUNCTION__); +VideoCaptureAndroid::~VideoCaptureAndroid() { + WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s:", + __FUNCTION__); + if (_javaCaptureObj == NULL || g_jvm == NULL) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: Nothing to clean", __FUNCTION__); + } + else { + bool isAttached = false; + // get the JNI env for this thread + JNIEnv *env; + if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = g_jvm->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, + _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); + } + else { + isAttached = true; + } } - else - { - bool isAttached = false; - // get the JNI env for this thread - JNIEnv *env; - if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, - _id, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - } - else - { - isAttached = true; - } - } - // get the method ID for the Android Java CaptureClass static - // DeleteVideoCaptureAndroid method. Call this to release the camera so - // another application can use it. - jmethodID cid = env->GetStaticMethodID( - g_javaCmClass, - "DeleteVideoCaptureAndroid", - "(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V"); - if (cid != NULL) - { - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, - "%s: Call DeleteVideoCaptureAndroid", __FUNCTION__); - // Close the camera by calling the static destruct function. - env->CallStaticVoidMethod(g_javaCmClass, cid, _javaCaptureObj); + // get the method ID for the Android Java CaptureClass static + // DeleteVideoCaptureAndroid method. Call this to release the camera so + // another application can use it. + jmethodID cid = env->GetStaticMethodID( + g_javaCmClass, + "DeleteVideoCaptureAndroid", + "(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V"); + if (cid != NULL) { + WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, + "%s: Call DeleteVideoCaptureAndroid", __FUNCTION__); + // Close the camera by calling the static destruct function. + env->CallStaticVoidMethod(g_javaCmClass, cid, _javaCaptureObj); - // Delete global object ref to the camera. - env->DeleteGlobalRef(_javaCaptureObj); - _javaCaptureObj = NULL; - } - else - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: Failed to find DeleteVideoCaptureAndroid id", - __FUNCTION__); - } - - // Detach this thread if it was attached - if (isAttached) - { - if (g_jvm->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, - _id, "%s: Could not detach thread from JVM", - __FUNCTION__); - } - } + // Delete global object ref to the camera. 
+ env->DeleteGlobalRef(_javaCaptureObj); + _javaCaptureObj = NULL; } + else { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: Failed to find DeleteVideoCaptureAndroid id", + __FUNCTION__); + } + + // Detach this thread if it was attached + if (isAttached) { + if (g_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, + _id, "%s: Could not detach thread from JVM", + __FUNCTION__); + } + } + } } WebRtc_Word32 VideoCaptureAndroid::StartCapture( - const VideoCaptureCapability& capability) -{ - CriticalSectionScoped cs(&_apiCs); - WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: ", __FUNCTION__); + const VideoCaptureCapability& capability) { + CriticalSectionScoped cs(&_apiCs); + WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, + "%s: ", __FUNCTION__); - bool isAttached = false; - WebRtc_Word32 result = 0; - // get the JNI env for this thread - JNIEnv *env; - if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - } - else - { - isAttached = true; - } + bool isAttached = false; + WebRtc_Word32 result = 0; + // get the JNI env for this thread + JNIEnv *env; + if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = g_jvm->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); } - - if (_capInfo.GetBestMatchedCapability(_deviceUniqueId, capability, - _frameInfo) < 0) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: GetBestMatchedCapability failed. Req cap w%d h%d", - __FUNCTION__, capability.width, capability.height); - return -1; + else { + isAttached = true; } + } - // Store the new expected capture delay - _captureDelay = _frameInfo.expectedCaptureDelay; + if (_capInfo.GetBestMatchedCapability(_deviceUniqueId, capability, + _frameInfo) < 0) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: GetBestMatchedCapability failed. Req cap w%d h%d", + __FUNCTION__, capability.width, capability.height); + return -1; + } + // Store the new expected capture delay + _captureDelay = _frameInfo.expectedCaptureDelay; + + WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, + "%s: _frameInfo w%d h%d", __FUNCTION__, _frameInfo.width, + _frameInfo.height); + + // get the method ID for the Android Java + // CaptureClass StartCapture method. + jmethodID cid = env->GetMethodID(g_javaCmClass, "StartCapture", "(III)I"); + if (cid != NULL) { WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, + "%s: Call StartCapture", __FUNCTION__); + // Start the camera by calling the Java StartCapture method. 
+ result = env->CallIntMethod(_javaCaptureObj, cid, _frameInfo.width, + _frameInfo.height, _frameInfo.maxFPS); + } + else { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: Failed to find StartCapture id", __FUNCTION__); + } - // get the method ID for the Android Java - // CaptureClass static StartCapture method. - jmethodID cid = env->GetMethodID(g_javaCmClass, "StartCapture", "(III)I"); - if (cid != NULL) - { - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, - "%s: Call StartCapture", __FUNCTION__); - // Close the camera by calling the static destruct function. - result = env->CallIntMethod(_javaCaptureObj, cid, _frameInfo.width, - _frameInfo.height, _frameInfo.maxFPS); + // Detach this thread if it was attached + if (isAttached) { + if (g_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id, + "%s: Could not detach thread from JVM", __FUNCTION__); } - else - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: Failed to find StartCapture id", __FUNCTION__); - } - - // Detach this thread if it was attached - if (isAttached) - { - if (g_jvm->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id, - "%s: Could not detach thread from JVM", __FUNCTION__); - } - } - if (result == 0) - { - _requestedCapability = capability; - _captureStarted = true; - } - WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: result %d", __FUNCTION__, result); - return result; -} -WebRtc_Word32 VideoCaptureAndroid::StopCapture() -{ - CriticalSectionScoped cs(&_apiCs); - WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: ", __FUNCTION__); - - bool isAttached = false; - WebRtc_Word32 result = 0; - // get the JNI env for this thread - JNIEnv *env = NULL; - if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - } - else - { - isAttached = true; - } - } - - memset(&_requestedCapability, 0, sizeof(_requestedCapability)); - memset(&_frameInfo, 0, sizeof(_frameInfo)); - - // get the method ID for the Android Java CaptureClass StopCapture method. - jmethodID cid = env->GetMethodID(g_javaCmClass, "StopCapture", "()I"); - if (cid != NULL) - { - WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, - "%s: Call StopCapture", __FUNCTION__); - // Close the camera by calling the static destruct function. 
- result = env->CallIntMethod(_javaCaptureObj, cid); - } - else - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: Failed to find StopCapture id", __FUNCTION__); - } - - // Detach this thread if it was attached - if (isAttached) - { - if (g_jvm->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id, - "%s: Could not detach thread from JVM", __FUNCTION__); - } - } - _captureStarted = false; - - WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: result %d", __FUNCTION__, result); - return result; + } + if (result == 0) { + _requestedCapability = capability; + _captureStarted = true; + } + WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, + "%s: result %d", __FUNCTION__, result); + return result; } -bool VideoCaptureAndroid::CaptureStarted() -{ - CriticalSectionScoped cs(&_apiCs); - WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: ", __FUNCTION__); - return _captureStarted; +WebRtc_Word32 VideoCaptureAndroid::StopCapture() { + CriticalSectionScoped cs(&_apiCs); + WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, + "%s: ", __FUNCTION__); + + bool isAttached = false; + WebRtc_Word32 result = 0; + // get the JNI env for this thread + JNIEnv *env = NULL; + if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = g_jvm->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); + } + else { + isAttached = true; + } + } + + memset(&_requestedCapability, 0, sizeof(_requestedCapability)); + memset(&_frameInfo, 0, sizeof(_frameInfo)); + + // get the method ID for the Android Java CaptureClass StopCapture method. + jmethodID cid = env->GetMethodID(g_javaCmClass, "StopCapture", "()I"); + if (cid != NULL) { + WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, + "%s: Call StopCapture", __FUNCTION__); + // Stop the camera by calling the Java StopCapture method. 
+ result = env->CallIntMethod(_javaCaptureObj, cid); + } + else { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: Failed to find StopCapture id", __FUNCTION__); + } + + // Detach this thread if it was attached + if (isAttached) { + if (g_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id, + "%s: Could not detach thread from JVM", __FUNCTION__); + } + } + _captureStarted = false; + + WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, + "%s: result %d", __FUNCTION__, result); + return result; } + +bool VideoCaptureAndroid::CaptureStarted() { + CriticalSectionScoped cs(&_apiCs); + WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, + "%s: ", __FUNCTION__); + return _captureStarted; +} + WebRtc_Word32 VideoCaptureAndroid::CaptureSettings( - VideoCaptureCapability& settings) -{ - CriticalSectionScoped cs(&_apiCs); - WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, - "%s: ", __FUNCTION__); - settings = _requestedCapability; - return 0; + VideoCaptureCapability& settings) { + CriticalSectionScoped cs(&_apiCs); + WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1, + "%s: ", __FUNCTION__); + settings = _requestedCapability; + return 0; } WebRtc_Word32 VideoCaptureAndroid::SetCaptureRotation( - VideoCaptureRotation rotation) -{ - CriticalSectionScoped cs(&_apiCs); - if (VideoCaptureImpl::SetCaptureRotation(rotation) == 0) - { - if (!g_jvm) - return -1; + VideoCaptureRotation rotation) { + CriticalSectionScoped cs(&_apiCs); + if (VideoCaptureImpl::SetCaptureRotation(rotation) == 0) { + if (!g_jvm) + return -1; - // get the JNI env for this thread - JNIEnv *env; - bool isAttached = false; - - // get the JNI env for this thread - if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&env, NULL); - if ((res < 0) || !env) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, - _id, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - return -1; - } - isAttached = true; - } - - jmethodID cid = env->GetMethodID(g_javaCmClass, "SetPreviewRotation", - "(I)V"); - if (cid == NULL) - { - WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, - "%s: could not get java SetPreviewRotation ID", - __FUNCTION__); - return -1; - } - jint rotateFrame = 0; - switch (rotation) - { - case kCameraRotate0: - rotateFrame = 0; - break; - case kCameraRotate90: - rotateFrame = 90; - break; - case kCameraRotate180: - rotateFrame = 180; - break; - case kCameraRotate270: - rotateFrame = 270; - break; - } - env->CallVoidMethod(_javaCaptureObj, cid, rotateFrame); - - // Detach this thread if it was attached - if (isAttached) - { - if (g_jvm->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, - _id, "%s: Could not detach thread from JVM", - __FUNCTION__); - } - } + // get the JNI env for this thread + JNIEnv *env; + bool isAttached = false; + // get the JNI env for this thread + if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = g_jvm->AttachCurrentThread(&env, NULL); + if ((res < 0) || !env) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, + _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); + return -1; + } + 
isAttached = true; } - return 0; + + jmethodID cid = env->GetMethodID(g_javaCmClass, "SetPreviewRotation", + "(I)V"); + if (cid == NULL) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: could not get java SetPreviewRotation ID", + __FUNCTION__); + return -1; + } + jint rotateFrame = 0; + switch (rotation) { + case kCameraRotate0: + rotateFrame = 0; + break; + case kCameraRotate90: + rotateFrame = 90; + break; + case kCameraRotate180: + rotateFrame = 180; + break; + case kCameraRotate270: + rotateFrame = 270; + break; + } + env->CallVoidMethod(_javaCaptureObj, cid, rotateFrame); + + // Detach this thread if it was attached + if (isAttached) { + if (g_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, + _id, "%s: Could not detach thread from JVM", + __FUNCTION__); + } + } + + } + return 0; } -} //namespace videocapturemodule -} //namespace webrtc + +} // namespace videocapturemodule +} // namespace webrtc diff --git a/src/modules/video_capture/main/source/android/video_capture_android.h b/src/modules/video_capture/main/source/android/video_capture_android.h index fc72323bc..3fd7e64b1 100644 --- a/src/modules/video_capture/main/source/android/video_capture_android.h +++ b/src/modules/video_capture/main/source/android/video_capture_android.h @@ -17,50 +17,50 @@ #define AndroidJavaCaptureClass "org/webrtc/videoengine/VideoCaptureAndroid" -namespace webrtc -{ -namespace videocapturemodule -{ -class VideoCaptureAndroid: public VideoCaptureImpl -{ -public: - static WebRtc_Word32 SetAndroidObjects(void* javaVM, void* javaContext); - static WebRtc_Word32 AttachAndUseAndroidDeviceInfoObjects(JNIEnv*& env, - jclass& javaCmDevInfoClass, - jobject& javaCmDevInfoObject, - bool& attached); - static WebRtc_Word32 ReleaseAndroidDeviceInfoObjects(bool attached); +namespace webrtc { +namespace videocapturemodule { - VideoCaptureAndroid(const WebRtc_Word32 id); - virtual WebRtc_Word32 Init(const WebRtc_Word32 id, - const char* deviceUniqueIdUTF8); +class VideoCaptureAndroid : public VideoCaptureImpl { + public: + static WebRtc_Word32 SetAndroidObjects(void* javaVM, void* javaContext); + static WebRtc_Word32 AttachAndUseAndroidDeviceInfoObjects( + JNIEnv*& env, + jclass& javaCmDevInfoClass, + jobject& javaCmDevInfoObject, + bool& attached); + static WebRtc_Word32 ReleaseAndroidDeviceInfoObjects(bool attached); + + VideoCaptureAndroid(const WebRtc_Word32 id); + virtual WebRtc_Word32 Init(const WebRtc_Word32 id, + const char* deviceUniqueIdUTF8); - virtual WebRtc_Word32 StartCapture( - const VideoCaptureCapability& capability); - virtual WebRtc_Word32 StopCapture(); - virtual bool CaptureStarted(); - virtual WebRtc_Word32 CaptureSettings(VideoCaptureCapability& settings); - virtual WebRtc_Word32 SetCaptureRotation(VideoCaptureRotation rotation); + virtual WebRtc_Word32 StartCapture( + const VideoCaptureCapability& capability); + virtual WebRtc_Word32 StopCapture(); + virtual bool CaptureStarted(); + virtual WebRtc_Word32 CaptureSettings(VideoCaptureCapability& settings); + virtual WebRtc_Word32 SetCaptureRotation(VideoCaptureRotation rotation); -protected: - virtual ~VideoCaptureAndroid(); - static void JNICALL ProvideCameraFrame (JNIEnv * env, - jobject, - jbyteArray javaCameraFrame, - jint length, jlong context); - DeviceInfoAndroid _capInfo; - jobject _javaCaptureObj; // Java Camera object. 
- VideoCaptureCapability _frameInfo; - bool _captureStarted; + protected: + virtual ~VideoCaptureAndroid(); + static void JNICALL ProvideCameraFrame (JNIEnv * env, + jobject, + jbyteArray javaCameraFrame, + jint length, jlong context); + DeviceInfoAndroid _capInfo; + jobject _javaCaptureObj; // Java Camera object. + VideoCaptureCapability _frameInfo; + bool _captureStarted; - static JavaVM* g_jvm; - static jclass g_javaCmClass; - static jclass g_javaCmDevInfoClass; + static JavaVM* g_jvm; + static jclass g_javaCmClass; + static jclass g_javaCmDevInfoClass; //Static java object implementing the needed device info functions; - static jobject g_javaCmDevInfoObject; - static jobject g_javaContext; // Java Application context + static jobject g_javaCmDevInfoObject; + static jobject g_javaContext; // Java Application context }; -} // namespace videocapturemodule -} // namespace webrtc + +} // namespace videocapturemodule +} // namespace webrtc #endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_ diff --git a/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViEAndroidGLES20.java b/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViEAndroidGLES20.java index 73cf251cc..2bcb4d482 100644 --- a/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViEAndroidGLES20.java +++ b/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViEAndroidGLES20.java @@ -25,239 +25,239 @@ import android.opengl.GLSurfaceView; import android.util.Log; public class ViEAndroidGLES20 extends GLSurfaceView - implements GLSurfaceView.Renderer { - // True if onSurfaceCreated has been called. - private boolean surfaceCreated = false; - private boolean openGLCreated = false; - // True if NativeFunctionsRegistered has been called. - private boolean nativeFunctionsRegisted = false; - private ReentrantLock nativeFunctionLock = new ReentrantLock(); - // Address of Native object that will do the drawing. - private long nativeObject = 0; - private int viewWidth = 0; - private int viewHeight = 0; + implements GLSurfaceView.Renderer { + // True if onSurfaceCreated has been called. + private boolean surfaceCreated = false; + private boolean openGLCreated = false; + // True if NativeFunctionsRegistered has been called. + private boolean nativeFunctionsRegisted = false; + private ReentrantLock nativeFunctionLock = new ReentrantLock(); + // Address of Native object that will do the drawing. + private long nativeObject = 0; + private int viewWidth = 0; + private int viewHeight = 0; - public static boolean UseOpenGL2(Object renderWindow) { - return ViEAndroidGLES20.class.isInstance(renderWindow); - } - - public ViEAndroidGLES20(Context context) { - super(context); - - // Setup the context factory for 2.0 rendering. - // See ContextFactory class definition below - setEGLContextFactory(new ContextFactory()); - - // We need to choose an EGLConfig that matches the format of - // our surface exactly. This is going to be done in our - // custom config chooser. See ConfigChooser class definition below - // Use RGB 565 without an alpha channel. - setEGLConfigChooser( new ConfigChooser(5, 6, 5, 0, 0, 0) ); - - this.setRenderer(this); - this.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); - } - - // IsSupported - // Return true if this device support Open GL ES 2.0 rendering. 
- public static boolean IsSupported(Context context) { - ActivityManager am = - (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE); - ConfigurationInfo info = am.getDeviceConfigurationInfo(); - if(info.reqGlEsVersion >= 0x20000) { - // Open GL ES 2.0 is supported. - return true; - } - return false; - } - - public void onDrawFrame(GL10 gl) { - nativeFunctionLock.lock(); - if(!nativeFunctionsRegisted || !surfaceCreated) { - nativeFunctionLock.unlock(); - return; + public static boolean UseOpenGL2(Object renderWindow) { + return ViEAndroidGLES20.class.isInstance(renderWindow); } - if(!openGLCreated) { - if(0 != CreateOpenGLNative(nativeObject, viewWidth, viewHeight)) { - return; // Failed to create OpenGL - } - openGLCreated = true; // Created OpenGL successfully - } - DrawNative(nativeObject); // Draw the new frame - nativeFunctionLock.unlock(); - } + public ViEAndroidGLES20(Context context) { + super(context); - public void onSurfaceChanged(GL10 gl, int width, int height) { - surfaceCreated = true; - viewWidth = width; - viewHeight = height; + // Setup the context factory for 2.0 rendering. + // See ContextFactory class definition below + setEGLContextFactory(new ContextFactory()); - nativeFunctionLock.lock(); - if(nativeFunctionsRegisted) { - if(CreateOpenGLNative(nativeObject,width,height) == 0) - openGLCreated = true; - } - nativeFunctionLock.unlock(); - } + // We need to choose an EGLConfig that matches the format of + // our surface exactly. This is going to be done in our + // custom config chooser. See ConfigChooser class definition below + // Use RGB 565 without an alpha channel. + setEGLConfigChooser( new ConfigChooser(5, 6, 5, 0, 0, 0) ); - public void onSurfaceCreated(GL10 gl, EGLConfig config) { - } - - public void RegisterNativeObject(long nativeObject) { - nativeFunctionLock.lock(); - nativeObject = nativeObject; - nativeFunctionsRegisted = true; - nativeFunctionLock.unlock(); - } - - public void DeRegisterNativeObject() { - nativeFunctionLock.lock(); - nativeFunctionsRegisted = false; - openGLCreated = false; - nativeObject = 0; - nativeFunctionLock.unlock(); - } - - public void ReDraw() { - if(surfaceCreated) { - // Request the renderer to redraw using the render thread context. - this.requestRender(); - } - } - - // EGL Context factory used for creating EGL 2.0 context - // on Android 2.1(and later, - // though there are simpler ways in 2.2) - // Code is from the NDK samples\hello-gl2\src\com\android\gl2jni. 
- private static class ContextFactory - implements GLSurfaceView.EGLContextFactory { - private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098; - public EGLContext createContext(EGL10 egl, - EGLDisplay display, - EGLConfig eglConfig) { - //checkEglError("Before eglCreateContext", egl); - int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE }; - // Create an Open GL ES 2.0 context - EGLContext context = egl.eglCreateContext(display, - eglConfig, - EGL10.EGL_NO_CONTEXT, - attrib_list); - checkEglError("ContextFactory eglCreateContext", egl); - return context; + this.setRenderer(this); + this.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); } - public void destroyContext(EGL10 egl, EGLDisplay display, - EGLContext context) { - egl.eglDestroyContext(display, context); - } - } - - private static void checkEglError(String prompt, EGL10 egl) { - int error; - while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) { - Log.e("*WEBRTC*", String.format("%s: EGL error: 0x%x", prompt, error)); - } - } - - // Code is from the NDK samples\hello-gl2\src\com\android\gl2jni - private static class ConfigChooser - implements GLSurfaceView.EGLConfigChooser { - - public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) { - mRedSize = r; - mGreenSize = g; - mBlueSize = b; - mAlphaSize = a; - mDepthSize = depth; - mStencilSize = stencil; + // IsSupported + // Return true if this device support Open GL ES 2.0 rendering. + public static boolean IsSupported(Context context) { + ActivityManager am = + (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE); + ConfigurationInfo info = am.getDeviceConfigurationInfo(); + if(info.reqGlEsVersion >= 0x20000) { + // Open GL ES 2.0 is supported. + return true; + } + return false; } - // This EGL config specification is used to specify 2.0 rendering. - // We use a minimum size of 4 bits for red/green/blue, but will - // perform actual matching in chooseConfig() below. 
- private static int EGL_OPENGL_ES2_BIT = 4; - private static int[] s_configAttribs2 = - { - EGL10.EGL_RED_SIZE, 4, - EGL10.EGL_GREEN_SIZE, 4, - EGL10.EGL_BLUE_SIZE, 4, - EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, - EGL10.EGL_NONE - }; + public void onDrawFrame(GL10 gl) { + nativeFunctionLock.lock(); + if(!nativeFunctionsRegisted || !surfaceCreated) { + nativeFunctionLock.unlock(); + return; + } - public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) { - - // Get the number of minimally matching EGL configurations - int[] num_config = new int[1]; - egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config); - - int numConfigs = num_config[0]; - - if (numConfigs <= 0) { - throw new IllegalArgumentException("No configs match configSpec"); - } - - // Allocate then read the array of minimally matching EGL configs - EGLConfig[] configs = new EGLConfig[numConfigs]; - egl.eglChooseConfig(display, s_configAttribs2, configs, - numConfigs, num_config); - - // Now return the "best" one - return chooseConfig(egl, display, configs); + if(!openGLCreated) { + if(0 != CreateOpenGLNative(nativeObject, viewWidth, viewHeight)) { + nativeFunctionLock.unlock(); // Release the lock on the failure path too. + return; // Failed to create OpenGL + } + openGLCreated = true; // Created OpenGL successfully + } + DrawNative(nativeObject); // Draw the new frame + nativeFunctionLock.unlock(); } - public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display, - EGLConfig[] configs) { - for(EGLConfig config : configs) { - int d = findConfigAttrib(egl, display, config, - EGL10.EGL_DEPTH_SIZE, 0); - int s = findConfigAttrib(egl, display, config, - EGL10.EGL_STENCIL_SIZE, 0); + public void onSurfaceChanged(GL10 gl, int width, int height) { + surfaceCreated = true; + viewWidth = width; + viewHeight = height; - // We need at least mDepthSize and mStencilSize bits - if (d < mDepthSize || s < mStencilSize) - continue; - - // We want an *exact* match for red/green/blue/alpha - int r = findConfigAttrib(egl, display, config, - EGL10.EGL_RED_SIZE, 0); - int g = findConfigAttrib(egl, display, config, - EGL10.EGL_GREEN_SIZE, 0); - int b = findConfigAttrib(egl, display, config, - EGL10.EGL_BLUE_SIZE, 0); - int a = findConfigAttrib(egl, display, config, - EGL10.EGL_ALPHA_SIZE, 0); - - if (r == mRedSize && g == mGreenSize && - b == mBlueSize && a == mAlphaSize) - return config; - } - return null; + nativeFunctionLock.lock(); + if(nativeFunctionsRegisted) { + if(CreateOpenGLNative(nativeObject,width,height) == 0) + openGLCreated = true; + } + nativeFunctionLock.unlock(); } - private int findConfigAttrib(EGL10 egl, EGLDisplay display, - EGLConfig config, int attribute, - int defaultValue) { - - if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) { - return mValue[0]; - } - return defaultValue; + public void onSurfaceCreated(GL10 gl, EGLConfig config) { } - // Subclasses can adjust these values: - protected int mRedSize; - protected int mGreenSize; - protected int mBlueSize; - protected int mAlphaSize; - protected int mDepthSize; - protected int mStencilSize; - private int[] mValue = new int[1]; - } + public void RegisterNativeObject(long nativeObject) { + nativeFunctionLock.lock(); + this.nativeObject = nativeObject; // "this." is required: the parameter shadows the field. + nativeFunctionsRegisted = true; + nativeFunctionLock.unlock(); + } - private native int CreateOpenGLNative(long nativeObject, - int width, int height); - private native void DrawNative(long nativeObject); + public void DeRegisterNativeObject() { + nativeFunctionLock.lock(); + nativeFunctionsRegisted = false; + openGLCreated = false; + nativeObject = 0; + 
nativeFunctionLock.unlock(); + } + + public void ReDraw() { + if(surfaceCreated) { + // Request the renderer to redraw using the render thread context. + this.requestRender(); + } + } + + // EGL Context factory used for creating EGL 2.0 context + // on Android 2.1(and later, + // though there are simpler ways in 2.2) + // Code is from the NDK samples\hello-gl2\src\com\android\gl2jni. + private static class ContextFactory + implements GLSurfaceView.EGLContextFactory { + private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098; + public EGLContext createContext(EGL10 egl, + EGLDisplay display, + EGLConfig eglConfig) { + //checkEglError("Before eglCreateContext", egl); + int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE }; + // Create an Open GL ES 2.0 context + EGLContext context = egl.eglCreateContext(display, + eglConfig, + EGL10.EGL_NO_CONTEXT, + attrib_list); + checkEglError("ContextFactory eglCreateContext", egl); + return context; + } + + public void destroyContext(EGL10 egl, EGLDisplay display, + EGLContext context) { + egl.eglDestroyContext(display, context); + } + } + + private static void checkEglError(String prompt, EGL10 egl) { + int error; + while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) { + Log.e("*WEBRTC*", String.format("%s: EGL error: 0x%x", prompt, error)); + } + } + + // Code is from the NDK samples\hello-gl2\src\com\android\gl2jni + private static class ConfigChooser + implements GLSurfaceView.EGLConfigChooser { + + public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) { + mRedSize = r; + mGreenSize = g; + mBlueSize = b; + mAlphaSize = a; + mDepthSize = depth; + mStencilSize = stencil; + } + + // This EGL config specification is used to specify 2.0 rendering. + // We use a minimum size of 4 bits for red/green/blue, but will + // perform actual matching in chooseConfig() below. 
+ private static int EGL_OPENGL_ES2_BIT = 4; + private static int[] s_configAttribs2 = + { + EGL10.EGL_RED_SIZE, 4, + EGL10.EGL_GREEN_SIZE, 4, + EGL10.EGL_BLUE_SIZE, 4, + EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, + EGL10.EGL_NONE + }; + + public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) { + + // Get the number of minimally matching EGL configurations + int[] num_config = new int[1]; + egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config); + + int numConfigs = num_config[0]; + + if (numConfigs <= 0) { + throw new IllegalArgumentException("No configs match configSpec"); + } + + // Allocate then read the array of minimally matching EGL configs + EGLConfig[] configs = new EGLConfig[numConfigs]; + egl.eglChooseConfig(display, s_configAttribs2, configs, + numConfigs, num_config); + + // Now return the "best" one + return chooseConfig(egl, display, configs); + } + + public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display, + EGLConfig[] configs) { + for(EGLConfig config : configs) { + int d = findConfigAttrib(egl, display, config, + EGL10.EGL_DEPTH_SIZE, 0); + int s = findConfigAttrib(egl, display, config, + EGL10.EGL_STENCIL_SIZE, 0); + + // We need at least mDepthSize and mStencilSize bits + if (d < mDepthSize || s < mStencilSize) + continue; + + // We want an *exact* match for red/green/blue/alpha + int r = findConfigAttrib(egl, display, config, + EGL10.EGL_RED_SIZE, 0); + int g = findConfigAttrib(egl, display, config, + EGL10.EGL_GREEN_SIZE, 0); + int b = findConfigAttrib(egl, display, config, + EGL10.EGL_BLUE_SIZE, 0); + int a = findConfigAttrib(egl, display, config, + EGL10.EGL_ALPHA_SIZE, 0); + + if (r == mRedSize && g == mGreenSize && + b == mBlueSize && a == mAlphaSize) + return config; + } + return null; + } + + private int findConfigAttrib(EGL10 egl, EGLDisplay display, + EGLConfig config, int attribute, + int defaultValue) { + + if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) { + return mValue[0]; + } + return defaultValue; + } + + // Subclasses can adjust these values: + protected int mRedSize; + protected int mGreenSize; + protected int mBlueSize; + protected int mAlphaSize; + protected int mDepthSize; + protected int mStencilSize; + private int[] mValue = new int[1]; + } + + private native int CreateOpenGLNative(long nativeObject, + int width, int height); + private native void DrawNative(long nativeObject); } diff --git a/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViERenderer.java b/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViERenderer.java index 6d8744129..d45fb810a 100644 --- a/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViERenderer.java +++ b/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViERenderer.java @@ -16,43 +16,43 @@ import android.view.SurfaceView; public class ViERenderer { - // View used for local rendering that Cameras can use for Video Overlay. - private static SurfaceHolder g_localRenderer; + // View used for local rendering that Cameras can use for Video Overlay. 
+ private static SurfaceHolder g_localRenderer; - public static SurfaceView CreateRenderer(Context context) { - return CreateRenderer(context,false); - } + public static SurfaceView CreateRenderer(Context context) { + return CreateRenderer(context,false); + } - public static SurfaceView CreateRenderer(Context context, - boolean useOpenGLES2) { - if(useOpenGLES2 == true && ViEAndroidGLES20.IsSupported(context)) - return new ViEAndroidGLES20(context); - else - return new SurfaceView(context); - } + public static SurfaceView CreateRenderer(Context context, + boolean useOpenGLES2) { + if(useOpenGLES2 == true && ViEAndroidGLES20.IsSupported(context)) + return new ViEAndroidGLES20(context); + else + return new SurfaceView(context); + } - // Creates a SurfaceView to be used by Android Camera - // service to display a local preview. - // This needs to be used on Android prior to version 2.1 - // in order to run the camera. - // Call this function before ViECapture::StartCapture. - // The created view needs to be added to a visible layout - // after a camera has been allocated - // (with the call ViECapture::AllocateCaptureDevice). - // IE. - // CreateLocalRenderer - // ViECapture::AllocateCaptureDevice - // LinearLayout.addview - // ViECapture::StartCapture - public static SurfaceView CreateLocalRenderer(Context context) { - SurfaceView localRender = new SurfaceView(context); - g_localRenderer = localRender.getHolder(); - g_localRenderer.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); - return localRender; - } + // Creates a SurfaceView to be used by Android Camera + // service to display a local preview. + // This needs to be used on Android prior to version 2.1 + // in order to run the camera. + // Call this function before ViECapture::StartCapture. + // The created view needs to be added to a visible layout + // after a camera has been allocated + // (with the call ViECapture::AllocateCaptureDevice). + // IE. + // CreateLocalRenderer + // ViECapture::AllocateCaptureDevice + // LinearLayout.addview + // ViECapture::StartCapture + public static SurfaceView CreateLocalRenderer(Context context) { + SurfaceView localRender = new SurfaceView(context); + g_localRenderer = localRender.getHolder(); + g_localRenderer.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); + return localRender; + } - public static SurfaceHolder GetLocalRenderer() { - return g_localRenderer; - } + public static SurfaceHolder GetLocalRenderer() { + return g_localRenderer; + } } diff --git a/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViESurfaceRenderer.java b/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViESurfaceRenderer.java index 9ae4b8b9c..d9d416bda 100644 --- a/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViESurfaceRenderer.java +++ b/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViESurfaceRenderer.java @@ -22,134 +22,134 @@ import android.view.SurfaceHolder.Callback; public class ViESurfaceRenderer implements Callback { - // the bitmap used for drawing. 
- private Bitmap bitmap = null; - private ByteBuffer byteBuffer; - private SurfaceHolder surfaceHolder; - // Rect of the source bitmap to draw - private Rect srcRect = new Rect(); - // Rect of the destination canvas to draw to - private Rect dstRect = new Rect(); - private int dstHeight = 0; - private int dstWidth = 0; - private float dstTopScale = 0; - private float dstBottomScale = 1; - private float dstLeftScale = 0; - private float dstRightScale = 1; + // the bitmap used for drawing. + private Bitmap bitmap = null; + private ByteBuffer byteBuffer; + private SurfaceHolder surfaceHolder; + // Rect of the source bitmap to draw + private Rect srcRect = new Rect(); + // Rect of the destination canvas to draw to + private Rect dstRect = new Rect(); + private int dstHeight = 0; + private int dstWidth = 0; + private float dstTopScale = 0; + private float dstBottomScale = 1; + private float dstLeftScale = 0; + private float dstRightScale = 1; - public ViESurfaceRenderer(SurfaceView view) { - surfaceHolder = view.getHolder(); - if(surfaceHolder == null) - return; + public ViESurfaceRenderer(SurfaceView view) { + surfaceHolder = view.getHolder(); + if(surfaceHolder == null) + return; - Canvas canvas = surfaceHolder.lockCanvas(); - if(canvas != null) { - Rect dst =surfaceHolder.getSurfaceFrame(); - if(dst != null) { - dstRect = dst; - dstHeight =dstRect.bottom-dstRect.top; - dstWidth = dstRect.right-dstRect.left; - } - surfaceHolder.unlockCanvasAndPost(canvas); + Canvas canvas = surfaceHolder.lockCanvas(); + if(canvas != null) { + Rect dst =surfaceHolder.getSurfaceFrame(); + if(dst != null) { + dstRect = dst; + dstHeight =dstRect.bottom-dstRect.top; + dstWidth = dstRect.right-dstRect.left; + } + surfaceHolder.unlockCanvasAndPost(canvas); + } + + surfaceHolder.addCallback(this); } - surfaceHolder.addCallback(this); - } + public void surfaceChanged(SurfaceHolder holder, int format, + int in_width, int in_height) { - public void surfaceChanged(SurfaceHolder holder, int format, - int in_width, int in_height) { + dstHeight = in_height; + dstWidth = in_width; + dstRect.left = (int)(dstLeftScale*dstWidth); + dstRect.top = (int)(dstTopScale*dstHeight); + dstRect.bottom = (int)(dstBottomScale*dstHeight); + dstRect.right = (int) (dstRightScale*dstWidth); + } - dstHeight = in_height; - dstWidth = in_width; - dstRect.left = (int)(dstLeftScale*dstWidth); - dstRect.top = (int)(dstTopScale*dstHeight); - dstRect.bottom = (int)(dstBottomScale*dstHeight); - dstRect.right = (int) (dstRightScale*dstWidth); - } + public void surfaceCreated(SurfaceHolder holder) { + // TODO(leozwang) Auto-generated method stub + } - public void surfaceCreated(SurfaceHolder holder) { - // TODO(leozwang) Auto-generated method stub - } + public void surfaceDestroyed(SurfaceHolder holder) { + // TODO(leozwang) Auto-generated method stub + } - public void surfaceDestroyed(SurfaceHolder holder) { - // TODO(leozwang) Auto-generated method stub - } + public Bitmap CreateBitmap(int width, int height) { + if (bitmap == null) { + try { + android.os.Process.setThreadPriority( + android.os.Process.THREAD_PRIORITY_DISPLAY); + } + catch (Exception e) { + } + } + bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565); + srcRect.left = 0; + srcRect.top = 0; + srcRect.bottom = height; + srcRect.right = width; - public Bitmap CreateBitmap(int width, int height) { - if (bitmap == null) { - try { - android.os.Process.setThreadPriority( - android.os.Process.THREAD_PRIORITY_DISPLAY); - } - catch (Exception e) { + return bitmap; + } + + public 
ByteBuffer CreateByteBuffer(int width, int height) { + if (bitmap == null) { + try { + android.os.Process + .setThreadPriority(android.os.Process.THREAD_PRIORITY_DISPLAY); + } + catch (Exception e) { + } + } + + try { + bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565); + byteBuffer = ByteBuffer.allocateDirect(width*height*2); + srcRect.left = 0; + srcRect.top = 0; + srcRect.bottom = height; + srcRect.right = width; + } + catch (Exception ex) { + Log.e("*WEBRTC*", "Failed to CreateByteBuffer"); + bitmap = null; + byteBuffer = null; + } + + return byteBuffer; + } + + public void SetCoordinates(float left, float top, + float right, float bottom) { + dstLeftScale = left; + dstTopScale = top; + dstRightScale = right; + dstBottomScale = bottom; + + dstRect.left = (int)(dstLeftScale*dstWidth); + dstRect.top = (int)(dstTopScale*dstHeight); + dstRect.bottom = (int)(dstBottomScale*dstHeight); + dstRect.right = (int) (dstRightScale*dstWidth); + } + + public void DrawByteBuffer() { + if(byteBuffer == null) + return; + byteBuffer.rewind(); + bitmap.copyPixelsFromBuffer(byteBuffer); + DrawBitmap(); + } + + public void DrawBitmap() { + if(bitmap == null) + return; + + Canvas canvas = surfaceHolder.lockCanvas(); + if(canvas != null) { + canvas.drawBitmap(bitmap, srcRect, dstRect, null); + surfaceHolder.unlockCanvasAndPost(canvas); } } - bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565); - srcRect.left = 0; - srcRect.top = 0; - srcRect.bottom = height; - srcRect.right = width; - - return bitmap; - } - - public ByteBuffer CreateByteBuffer(int width, int height) { - if (bitmap == null) { - try { - android.os.Process - .setThreadPriority(android.os.Process.THREAD_PRIORITY_DISPLAY); - } - catch (Exception e) { - } - } - - try { - bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565); - byteBuffer = ByteBuffer.allocateDirect(width*height*2); - srcRect.left = 0; - srcRect.top = 0; - srcRect.bottom = height; - srcRect.right = width; - } - catch (Exception ex) { - Log.e("*WEBRTC*", "Failed to CreateByteBuffer"); - bitmap = null; - byteBuffer = null; - } - - return byteBuffer; - } - - public void SetCoordinates(float left, float top, - float right, float bottom) { - dstLeftScale = left; - dstTopScale = top; - dstRightScale = right; - dstBottomScale = bottom; - - dstRect.left = (int)(dstLeftScale*dstWidth); - dstRect.top = (int)(dstTopScale*dstHeight); - dstRect.bottom = (int)(dstBottomScale*dstHeight); - dstRect.right = (int) (dstRightScale*dstWidth); - } - - public void DrawByteBuffer() { - if(byteBuffer == null) - return; - byteBuffer.rewind(); - bitmap.copyPixelsFromBuffer(byteBuffer); - DrawBitmap(); - } - - public void DrawBitmap() { - if(bitmap == null) - return; - - Canvas canvas = surfaceHolder.lockCanvas(); - if(canvas != null) { - canvas.drawBitmap(bitmap, srcRect, dstRect, null); - surfaceHolder.unlockCanvasAndPost(canvas); - } - } } diff --git a/src/modules/video_render/main/source/android/video_render_android_impl.cc b/src/modules/video_render/main/source/android/video_render_android_impl.cc index 2c40fa54c..63d0d6ed4 100644 --- a/src/modules/video_render/main/source/android/video_render_android_impl.cc +++ b/src/modules/video_render/main/source/android/video_render_android_impl.cc @@ -28,14 +28,10 @@ namespace webrtc { JavaVM* VideoRenderAndroid::g_jvm = NULL; -WebRtc_Word32 VideoRenderAndroid::SetAndroidEnvVariables(void* javaVM) -{ - WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1, "%s", __FUNCTION__); - - g_jvm = (JavaVM*) javaVM; - - return 0; - 
+WebRtc_Word32 VideoRenderAndroid::SetAndroidEnvVariables(void* javaVM) {
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1, "%s", __FUNCTION__);
+  g_jvm = (JavaVM*) javaVM;
+  return 0;
 }
 
 VideoRenderAndroid::VideoRenderAndroid(
@@ -53,40 +49,34 @@ VideoRenderAndroid::VideoRenderAndroid(
     _javaRenderEvent(*EventWrapper::Create()),
     _lastJavaRenderEvent(0),
     _javaRenderJniEnv(NULL),
-    _javaRenderThread(NULL)
-{
+    _javaRenderThread(NULL) {
 }
 
-VideoRenderAndroid::~VideoRenderAndroid()
-{
+VideoRenderAndroid::~VideoRenderAndroid() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+               "VideoRenderAndroid dtor");
 
-    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
-                 "VideoRenderAndroid dtor");
+  if (_javaRenderThread)
+    StopRender();
 
-    if (_javaRenderThread)
-        StopRender();
-
-    for (MapItem* item = _streamsMap.First(); item != NULL; item
-        = _streamsMap.Next(item))
-    { // Delete streams
-        delete static_cast<AndroidStream*> (item->GetItem());
-    }
-    delete &_javaShutdownEvent;
-    delete &_javaRenderEvent;
-    delete &_critSect;
+  for (MapItem* item = _streamsMap.First(); item != NULL; item
+           = _streamsMap.Next(item)) { // Delete streams
+    delete static_cast<AndroidStream*> (item->GetItem());
+  }
+  delete &_javaShutdownEvent;
+  delete &_javaRenderEvent;
+  delete &_critSect;
 }
 
-WebRtc_Word32 VideoRenderAndroid::ChangeUniqueId(const WebRtc_Word32 id)
-{
-    CriticalSectionScoped cs(&_critSect);
-    _id = id;
+WebRtc_Word32 VideoRenderAndroid::ChangeUniqueId(const WebRtc_Word32 id) {
+  CriticalSectionScoped cs(&_critSect);
+  _id = id;
 
-    return 0;
+  return 0;
 }
 
-WebRtc_Word32 VideoRenderAndroid::ChangeWindow(void* /*window*/)
-{
-    return -1;
+WebRtc_Word32 VideoRenderAndroid::ChangeWindow(void* /*window*/) {
+  return -1;
 }
 
 VideoRenderCallback*
@@ -94,56 +84,48 @@ VideoRenderAndroid::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
                                             const WebRtc_UWord32 zOrder,
                                             const float left, const float top,
                                             const float right,
-                                            const float bottom)
-{
-    CriticalSectionScoped cs(&_critSect);
+                                            const float bottom) {
+  CriticalSectionScoped cs(&_critSect);
 
-    AndroidStream* renderStream = NULL;
-    MapItem* item = _streamsMap.Find(streamId);
-    if (item)
-    {
-        renderStream = (AndroidStream*) (item->GetItem());
-        if (NULL != renderStream)
-        {
-            WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1,
-                         "%s: Render stream already exists", __FUNCTION__);
-            return renderStream;
-        }
+  AndroidStream* renderStream = NULL;
+  MapItem* item = _streamsMap.Find(streamId);
+  if (item) {
+    renderStream = (AndroidStream*) (item->GetItem());
+    if (NULL != renderStream) {
+      WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1,
+                   "%s: Render stream already exists", __FUNCTION__);
+      return renderStream;
    }
+  }
 
-    renderStream = CreateAndroidRenderChannel(streamId, zOrder, left, top,
-                                              right, bottom, *this);
-    if (renderStream)
-    {
-        _streamsMap.Insert(streamId, renderStream);
-    }
-    else
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
-                     "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
-        return NULL;
-    }
-    return renderStream;
+  renderStream = CreateAndroidRenderChannel(streamId, zOrder, left, top,
+                                            right, bottom, *this);
+  if (renderStream) {
+    _streamsMap.Insert(streamId, renderStream);
+  }
+  else {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
+    return NULL;
+  }
+  return renderStream;
 }
 
 WebRtc_Word32 VideoRenderAndroid::DeleteIncomingRenderStream(
-    const WebRtc_UWord32 streamId)
-{
-    CriticalSectionScoped cs(&_critSect);
+    const WebRtc_UWord32 streamId) {
+  CriticalSectionScoped cs(&_critSect);
 
-    
MapItem* item = _streamsMap.Find(streamId); - if (item) - { - delete (AndroidStream*) item->GetItem(); - _streamsMap.Erase(streamId); - } - else - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__); - return -1; - } - return 0; + MapItem* item = _streamsMap.Find(streamId); + if (item) { + delete (AndroidStream*) item->GetItem(); + _streamsMap.Erase(streamId); + } + else { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__); + return -1; + } + return 0; } WebRtc_Word32 VideoRenderAndroid::GetIncomingRenderStreamProperties( @@ -178,10 +160,9 @@ WebRtc_Word32 VideoRenderAndroid::StartRender() { } unsigned int tId = 0; - if (_javaRenderThread->Start(tId)) { + if (_javaRenderThread->Start(tId)) WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: thread started: %u", __FUNCTION__, tId); - } else { WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Could not start send thread", __FUNCTION__); @@ -190,36 +171,32 @@ WebRtc_Word32 VideoRenderAndroid::StartRender() { return 0; } -WebRtc_Word32 VideoRenderAndroid::StopRender() -{ - - WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:", __FUNCTION__); - { - CriticalSectionScoped cs(&_critSect); - if (!_javaRenderThread) - { - return -1; - } - _javaShutDownFlag = true; - _javaRenderEvent.Set(); - } - - _javaShutdownEvent.Wait(3000); +WebRtc_Word32 VideoRenderAndroid::StopRender() { + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:", __FUNCTION__); + { CriticalSectionScoped cs(&_critSect); - _javaRenderThread->SetNotAlive(); - if (_javaRenderThread->Stop()) + if (!_javaRenderThread) { - delete _javaRenderThread; - _javaRenderThread = NULL; + return -1; } - else - { - assert(false); - WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, - "%s: Not able to stop thread, leaking", __FUNCTION__); - _javaRenderThread = NULL; - } - return 0; + _javaShutDownFlag = true; + _javaRenderEvent.Set(); + } + + _javaShutdownEvent.Wait(3000); + CriticalSectionScoped cs(&_critSect); + _javaRenderThread->SetNotAlive(); + if (_javaRenderThread->Stop()) { + delete _javaRenderThread; + _javaRenderThread = NULL; + } + else { + assert(false); + WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, + "%s: Not able to stop thread, leaking", __FUNCTION__); + _javaRenderThread = NULL; + } + return 0; } void VideoRenderAndroid::ReDraw() { @@ -237,65 +214,55 @@ bool VideoRenderAndroid::JavaRenderThreadFun(void* obj) { bool VideoRenderAndroid::JavaRenderThreadProcess() { - _javaRenderEvent.Wait(1000); + _javaRenderEvent.Wait(1000); - CriticalSectionScoped cs(&_critSect); - if (!_javaRenderJniEnv) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&_javaRenderJniEnv, NULL); + CriticalSectionScoped cs(&_critSect); + if (!_javaRenderJniEnv) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = g_jvm->AttachCurrentThread(&_javaRenderJniEnv, NULL); - // Get the JNI env for this thread - if ((res < 0) || !_javaRenderJniEnv) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, _javaRenderJniEnv); - return false; - } + // Get the JNI env for this thread + if ((res < 0) || !_javaRenderJniEnv) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, _javaRenderJniEnv); + return false; } + } - for 
(MapItem* item = _streamsMap.First(); item != NULL; item
-        = _streamsMap.Next(item))
-    {
-        static_cast<AndroidStream*> (item->GetItem())->DeliverFrame(
-            _javaRenderJniEnv);
-    }
+  for (MapItem* item = _streamsMap.First(); item != NULL;
+       item = _streamsMap.Next(item)) {
+    static_cast<AndroidStream*> (item->GetItem())->DeliverFrame(
+        _javaRenderJniEnv);
+  }
 
-    if (_javaShutDownFlag)
-    {
-        if (g_jvm->DetachCurrentThread() < 0)
-        {
-            WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
-                         "%s: Could not detach thread from JVM", __FUNCTION__);
-        }
-        else
-        {
-            WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
-                         "%s: Java thread detached", __FUNCTION__);
-        }
-        _javaRenderJniEnv = false;
-        _javaShutDownFlag = false;
-        _javaShutdownEvent.Set();
-        return false; // Do not run this thread again.
+  if (_javaShutDownFlag) {
+    if (g_jvm->DetachCurrentThread() < 0)
+      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                   "%s: Could not detach thread from JVM", __FUNCTION__);
+    else {
+      WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+                   "%s: Java thread detached", __FUNCTION__);
    }
-    return true;
+    // NULL rather than false: _javaRenderJniEnv is a pointer.
+    _javaRenderJniEnv = NULL;
+    _javaShutDownFlag = false;
+    _javaShutdownEvent.Set();
+    return false; // Do not run this thread again.
+  }
+  return true;
 }
 
-VideoRenderType VideoRenderAndroid::RenderType()
-{
-    return _renderType;
+VideoRenderType VideoRenderAndroid::RenderType() {
+  return _renderType;
 }
 
-RawVideoType VideoRenderAndroid::PerferedVideoType()
-{
-    return kVideoI420;
+RawVideoType VideoRenderAndroid::PerferedVideoType() {
+  return kVideoI420;
 }
 
-bool VideoRenderAndroid::FullScreen()
-{
-    return false;
+bool VideoRenderAndroid::FullScreen() {
+  return false;
 }
 
 WebRtc_Word32 VideoRenderAndroid::GetGraphicsMemory(
@@ -374,4 +341,4 @@ WebRtc_Word32 VideoRenderAndroid::SetBitmap(const void* bitMap,
     return -1;
 }
 
-} //namespace webrtc
+} // namespace webrtc
diff --git a/src/modules/video_render/main/source/android/video_render_android_impl.h b/src/modules/video_render/main/source/android/video_render_android_impl.h
index b3f152559..d43d2d444 100644
--- a/src/modules/video_render/main/source/android/video_render_android_impl.h
+++ b/src/modules/video_render/main/source/android/video_render_android_impl.h
@@ -20,32 +20,24 @@ namespace webrtc {
 
 //#define ANDROID_LOG
 
-
 class CriticalSectionWrapper;
 class EventWrapper;
 class ThreadWrapper;
-
 // The object a module user uses to send new frames to the java renderer
 // Base class for android render streams.
-class AndroidStream: public VideoRenderCallback
-{
-public:
-    /*
-     * DeliverFrame is called from a thread connected to the Java VM.
-     * Used for Delivering frame for rendering.
-     */
-    virtual void DeliverFrame(JNIEnv* jniEnv)=0;
+class AndroidStream : public VideoRenderCallback {
+ public:
+  // DeliverFrame is called from a thread connected to the Java VM.
+  // Used for delivering a frame for rendering.
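+  //
+  // The render thread (VideoRenderAndroid::JavaRenderThreadProcess) is
+  // already attached to the VM when it calls in, so implementations may
+  // use jniEnv directly. A minimal sketch, with assumed member names
+  // mirroring AndroidNativeOpenGl2Channel below:
+  //
+  //   void MyChannel::DeliverFrame(JNIEnv* jniEnv) {
+  //     jniEnv->CallVoidMethod(_javaRenderObj, _redrawCid);
+  //   }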
+ virtual void DeliverFrame(JNIEnv* jniEnv)=0; - virtual ~AndroidStream() - { - }; + virtual ~AndroidStream() {}; }; -class VideoRenderAndroid: IVideoRender -{ -public: +class VideoRenderAndroid: IVideoRender { + public: static WebRtc_Word32 SetAndroidEnvVariables(void* javaVM); VideoRenderAndroid(const WebRtc_Word32 id, @@ -82,11 +74,7 @@ public: virtual void ReDraw(); - /************************************************************************** - * - * Properties - * - ***************************************************************************/ + // Properties virtual VideoRenderType RenderType(); @@ -165,4 +153,4 @@ public: } //namespace webrtc -#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_ +#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_ diff --git a/src/modules/video_render/main/source/android/video_render_android_native_opengl2.cc b/src/modules/video_render/main/source/android/video_render_android_native_opengl2.cc index 69de8c710..0bcf676a0 100644 --- a/src/modules/video_render/main/source/android/video_render_android_native_opengl2.cc +++ b/src/modules/video_render/main/source/android/video_render_android_native_opengl2.cc @@ -31,208 +31,182 @@ AndroidNativeOpenGl2Renderer::AndroidNativeOpenGl2Renderer( const bool fullscreen) : VideoRenderAndroid(id, videoRenderType, window, fullscreen), _javaRenderObj(NULL), - _javaRenderClass(NULL) -{ + _javaRenderClass(NULL) { } -bool AndroidNativeOpenGl2Renderer::UseOpenGL2(void* window) -{ - if (!g_jvm) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1, - "RendererAndroid():UseOpenGL No JVM set."); - return false; - } - bool isAttached = false; - JNIEnv* env = NULL; - if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&env, NULL); +bool AndroidNativeOpenGl2Renderer::UseOpenGL2(void* window) { + if (!g_jvm) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1, + "RendererAndroid():UseOpenGL No JVM set."); + return false; + } + bool isAttached = false; + JNIEnv* env = NULL; + if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = g_jvm->AttachCurrentThread(&env, NULL); - // Get the JNI env for this thread - if ((res < 0) || !env) - { - WEBRTC_TRACE( - kTraceError, - kTraceVideoRenderer, - -1, - "RendererAndroid(): Could not attach thread to JVM (%d, %p)", - res, env); - return false; - } - isAttached = true; + // Get the JNI env for this thread + if ((res < 0) || !env) { + WEBRTC_TRACE( + kTraceError, + kTraceVideoRenderer, + -1, + "RendererAndroid(): Could not attach thread to JVM (%d, %p)", + res, env); + return false; } + isAttached = true; + } - // get the renderer class - jclass javaRenderClassLocal = - env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20"); - if (!javaRenderClassLocal) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1, - "%s: could not find ViEAndroidRenderer class", - __FUNCTION__); - return false; - } + // get the renderer class + jclass javaRenderClassLocal = + env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20"); + if (!javaRenderClassLocal) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1, + "%s: could not find ViEAndroidRenderer class", + __FUNCTION__); + return false; + } - // get the method ID for UseOpenGL - jmethodID cidUseOpenGL = env->GetStaticMethodID(javaRenderClassLocal, - 
"UseOpenGL2", - "(Ljava/lang/Object;)Z"); - if (cidUseOpenGL == NULL) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1, - "%s: could not get UseOpenGL ID", __FUNCTION__); - return false; - } - jboolean res = env->CallStaticBooleanMethod(javaRenderClassLocal, - cidUseOpenGL, (jobject) window); + // get the method ID for UseOpenGL + jmethodID cidUseOpenGL = env->GetStaticMethodID(javaRenderClassLocal, + "UseOpenGL2", + "(Ljava/lang/Object;)Z"); + if (cidUseOpenGL == NULL) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1, + "%s: could not get UseOpenGL ID", __FUNCTION__); + return false; + } + jboolean res = env->CallStaticBooleanMethod(javaRenderClassLocal, + cidUseOpenGL, (jobject) window); - // Detach this thread if it was attached - if (isAttached) - { - if (g_jvm->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1, - "%s: Could not detach thread from JVM", __FUNCTION__); - } + // Detach this thread if it was attached + if (isAttached) { + if (g_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1, + "%s: Could not detach thread from JVM", __FUNCTION__); } - return res; + } + return res; } -AndroidNativeOpenGl2Renderer::~AndroidNativeOpenGl2Renderer() -{ - WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, - "AndroidNativeOpenGl2Renderer dtor"); - if (g_jvm) - { - // get the JNI env for this thread - bool isAttached = false; - JNIEnv* env = NULL; - if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&env, NULL); - - // Get the JNI env for this thread - if ((res < 0) || !env) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - env = NULL; - } - else - { - isAttached = true; - } - } - - env->DeleteGlobalRef(_javaRenderObj); - env->DeleteGlobalRef(_javaRenderClass); - - if (isAttached) - { - if (g_jvm->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, - "%s: Could not detach thread from JVM", - __FUNCTION__); - } - } - } -} - -WebRtc_Word32 AndroidNativeOpenGl2Renderer::Init() -{ - WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__); - if (!g_jvm) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "(%s): Not a valid Java VM pointer.", __FUNCTION__); - return -1; - } - if (!_ptrWindow) - { - WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, - "(%s): No window have been provided.", __FUNCTION__); - return -1; - } - +AndroidNativeOpenGl2Renderer::~AndroidNativeOpenGl2Renderer() { + WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, + "AndroidNativeOpenGl2Renderer dtor"); + if (g_jvm) { // get the JNI env for this thread bool isAttached = false; JNIEnv* env = NULL; - if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = g_jvm->AttachCurrentThread(&env, NULL); + if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = g_jvm->AttachCurrentThread(&env, NULL); - // Get the JNI env for this thread - if ((res < 0) || !env) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - return -1; - } + // Get the JNI env for this thread + if ((res < 0) || !env) { + 
WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                   "%s: Could not attach thread to JVM (%d, %p)",
+                   __FUNCTION__, res, env);
+      env = NULL;
+    }
+    else {
       isAttached = true;
+    }
   }
 
-    // get the ViEAndroidGLES20 class
-    jclass javaRenderClassLocal =
-        env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
-    if (!javaRenderClassLocal)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
-                     "%s: could not find ViEAndroidGLES20", __FUNCTION__);
-        return -1;
-    }
+    env->DeleteGlobalRef(_javaRenderObj);
+    env->DeleteGlobalRef(_javaRenderClass);
 
-    // create a global reference to the class (to tell JNI that
-    // we are referencing it after this function has returned)
-    _javaRenderClass =
-        reinterpret_cast<jclass> (env->NewGlobalRef(javaRenderClassLocal));
-    if (!_javaRenderClass)
-    {
-        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
-                     "%s: could not create Java SurfaceHolder class reference",
+    if (isAttached) {
+      if (g_jvm->DetachCurrentThread() < 0) {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                     "%s: Could not detach thread from JVM",
                      __FUNCTION__);
-        return -1;
+      }
     }
+  }
+}
 
-    // Delete local class ref, we only use the global ref
-    env->DeleteLocalRef(javaRenderClassLocal);
+WebRtc_Word32 AndroidNativeOpenGl2Renderer::Init() {
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+  if (!g_jvm) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "(%s): Not a valid Java VM pointer.", __FUNCTION__);
+    return -1;
+  }
+  if (!_ptrWindow) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                 "(%s): No window has been provided.", __FUNCTION__);
+    return -1;
+  }
 
-    // create a reference to the object (to tell JNI that we are referencing it
-    // after this function has returned)
-    _javaRenderObj = env->NewGlobalRef(_ptrWindow);
-    if (!_javaRenderObj)
-    {
-        WEBRTC_TRACE(
-            kTraceError,
-            kTraceVideoRenderer,
-            _id,
-            "%s: could not create Java SurfaceRender object reference",
-            __FUNCTION__);
-        return -1;
+  // get the JNI env for this thread
+  bool isAttached = false;
+  JNIEnv* env = NULL;
+  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+    // try to attach the thread and get the env
+    // Attach this thread to JVM
+    jint res = g_jvm->AttachCurrentThread(&env, NULL);
+
+    // Get the JNI env for this thread
+    if ((res < 0) || !env) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                   "%s: Could not attach thread to JVM (%d, %p)",
+                   __FUNCTION__, res, env);
+      return -1;
     }
+    isAttached = true;
+  }
 
-    // Detach this thread if it was attached
-    if (isAttached)
-    {
-        if (g_jvm->DetachCurrentThread() < 0)
-        {
-            WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
-                         "%s: Could not detach thread from JVM", __FUNCTION__);
-        }
-    }
+  // get the ViEAndroidGLES20 class
+  jclass javaRenderClassLocal =
+      env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
+  if (!javaRenderClassLocal) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: could not find ViEAndroidGLES20", __FUNCTION__);
+    return -1;
+  }
 
-    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done",
+  // create a global reference to the class (to tell JNI that
+  // we are referencing it after this function has returned)
+  _javaRenderClass =
+      reinterpret_cast<jclass> (env->NewGlobalRef(javaRenderClassLocal));
+  if (!_javaRenderClass) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: could not create Java SurfaceHolder class reference",
                  __FUNCTION__);
-    return 0;
+    return -1;
+  }
+
+  // Delete local class ref, we only use the global ref
+  
env->DeleteLocalRef(javaRenderClassLocal); + + // create a reference to the object (to tell JNI that we are referencing it + // after this function has returned) + _javaRenderObj = env->NewGlobalRef(_ptrWindow); + if (!_javaRenderObj) { + WEBRTC_TRACE( + kTraceError, + kTraceVideoRenderer, + _id, + "%s: could not create Java SurfaceRender object reference", + __FUNCTION__); + return -1; + } + + // Detach this thread if it was attached + if (isAttached) { + if (g_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, + "%s: Could not detach thread from JVM", __FUNCTION__); + } + } + + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done", + __FUNCTION__); + return 0; } AndroidStream* @@ -243,20 +217,18 @@ AndroidNativeOpenGl2Renderer::CreateAndroidRenderChannel( const float top, const float right, const float bottom, - VideoRenderAndroid& renderer) -{ - WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d", - __FUNCTION__, streamId); - AndroidNativeOpenGl2Channel* stream = - new AndroidNativeOpenGl2Channel(streamId, g_jvm, renderer, - _javaRenderObj); - if (stream && stream->Init(zOrder, left, top, right, bottom) == 0) - return stream; - else - { - delete stream; - } - return NULL; + VideoRenderAndroid& renderer) { + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d", + __FUNCTION__, streamId); + AndroidNativeOpenGl2Channel* stream = + new AndroidNativeOpenGl2Channel(streamId, g_jvm, renderer, + _javaRenderObj); + if (stream && stream->Init(zOrder, left, top, right, bottom) == 0) + return stream; + else { + delete stream; + } + return NULL; } AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel( @@ -267,194 +239,170 @@ AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel( _renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()), _renderer(renderer), _jvm(jvm), _javaRenderObj(javaRenderObj), _registerNativeCID(NULL), _deRegisterNativeCID(NULL), - _openGLRenderer(streamId) -{ + _openGLRenderer(streamId) { } -AndroidNativeOpenGl2Channel::~AndroidNativeOpenGl2Channel() -{ - WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, - "AndroidNativeOpenGl2Channel dtor"); - delete &_renderCritSect; - if (_jvm) - { - // get the JNI env for this thread - bool isAttached = false; - JNIEnv* env = NULL; - if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = _jvm->AttachCurrentThread(&env, NULL); - - // Get the JNI env for this thread - if ((res < 0) || !env) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - env = NULL; - } - else - { - isAttached = true; - } - } - if (env && _deRegisterNativeCID) - { - env->CallVoidMethod(_javaRenderObj, _deRegisterNativeCID); - } - - if (isAttached) - { - if (_jvm->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, - "%s: Could not detach thread from JVM", - __FUNCTION__); - } - } - } -} - -WebRtc_Word32 AndroidNativeOpenGl2Channel::Init(WebRtc_Word32 zOrder, - const float left, - const float top, - const float right, - const float bottom) -{ - WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, - "%s: AndroidNativeOpenGl2Channel", __FUNCTION__); - if (!_jvm) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s: Not a valid Java VM pointer", __FUNCTION__); - return -1; - } - +AndroidNativeOpenGl2Channel::~AndroidNativeOpenGl2Channel() { + WEBRTC_TRACE(kTraceInfo, 
kTraceVideoRenderer, _id, + "AndroidNativeOpenGl2Channel dtor"); + delete &_renderCritSect; + if (_jvm) { // get the JNI env for this thread bool isAttached = false; JNIEnv* env = NULL; - if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) - { - // try to attach the thread and get the env - // Attach this thread to JVM - jint res = _jvm->AttachCurrentThread(&env, NULL); + if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach the thread and get the env + // Attach this thread to JVM + jint res = _jvm->AttachCurrentThread(&env, NULL); - // Get the JNI env for this thread - if ((res < 0) || !env) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s: Could not attach thread to JVM (%d, %p)", - __FUNCTION__, res, env); - return -1; - } + // Get the JNI env for this thread + if ((res < 0) || !env) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); + env = NULL; + } else { isAttached = true; + } + } + if (env && _deRegisterNativeCID) { + env->CallVoidMethod(_javaRenderObj, _deRegisterNativeCID); } - jclass javaRenderClass = - env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20"); - if (!javaRenderClass) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s: could not find ViESurfaceRenderer", __FUNCTION__); - return -1; - } - - // get the method ID for the ReDraw function - _redrawCid = env->GetMethodID(javaRenderClass, "ReDraw", "()V"); - if (_redrawCid == NULL) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s: could not get ReDraw ID", __FUNCTION__); - return -1; - } - - _registerNativeCID = env->GetMethodID(javaRenderClass, - "RegisterNativeObject", "(J)V"); - if (_registerNativeCID == NULL) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s: could not get RegisterNativeObject ID", __FUNCTION__); - return -1; - } - - _deRegisterNativeCID = env->GetMethodID(javaRenderClass, - "DeRegisterNativeObject", "()V"); - if (_deRegisterNativeCID == NULL) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s: could not get DeRegisterNativeObject ID", + if (isAttached) { + if (_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, + "%s: Could not detach thread from JVM", __FUNCTION__); - return -1; + } } + } +} - JNINativeMethod nativeFunctions[2] = { - { "DrawNative", - "(J)V", - (void*) &AndroidNativeOpenGl2Channel::DrawNativeStatic, }, - { "CreateOpenGLNative", - "(JII)I", - (void*) &AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic }, - }; - if (env->RegisterNatives(javaRenderClass, nativeFunctions, 2) == 0) - { - WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1, - "%s: Registered native functions", __FUNCTION__); - } - else - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1, - "%s: Failed to register native functions", __FUNCTION__); - return -1; - } +WebRtc_Word32 AndroidNativeOpenGl2Channel::Init(WebRtc_Word32 zOrder, + const float left, + const float top, + const float right, + const float bottom) +{ + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, + "%s: AndroidNativeOpenGl2Channel", __FUNCTION__); + if (!_jvm) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Not a valid Java VM pointer", __FUNCTION__); + return -1; + } - env->CallVoidMethod(_javaRenderObj, _registerNativeCID, (jlong) this); + // get the JNI env for this thread + bool isAttached = false; + JNIEnv* env = NULL; + if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) { + // try to attach 
the thread and get the env + // Attach this thread to JVM + jint res = _jvm->AttachCurrentThread(&env, NULL); - // Detach this thread if it was attached - if (isAttached) - { - if (_jvm->DetachCurrentThread() < 0) - { - WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, - "%s: Could not detach thread from JVM", __FUNCTION__); - } + // Get the JNI env for this thread + if ((res < 0) || !env) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Could not attach thread to JVM (%d, %p)", + __FUNCTION__, res, env); + return -1; } + isAttached = true; + } - if (_openGLRenderer.SetCoordinates(zOrder, left, top, right, bottom) != 0) - { - return -1; + jclass javaRenderClass = + env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20"); + if (!javaRenderClass) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: could not find ViESurfaceRenderer", __FUNCTION__); + return -1; + } + + // get the method ID for the ReDraw function + _redrawCid = env->GetMethodID(javaRenderClass, "ReDraw", "()V"); + if (_redrawCid == NULL) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: could not get ReDraw ID", __FUNCTION__); + return -1; + } + + _registerNativeCID = env->GetMethodID(javaRenderClass, + "RegisterNativeObject", "(J)V"); + if (_registerNativeCID == NULL) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: could not get RegisterNativeObject ID", __FUNCTION__); + return -1; + } + + _deRegisterNativeCID = env->GetMethodID(javaRenderClass, + "DeRegisterNativeObject", "()V"); + if (_deRegisterNativeCID == NULL) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: could not get DeRegisterNativeObject ID", + __FUNCTION__); + return -1; + } + + JNINativeMethod nativeFunctions[2] = { + { "DrawNative", + "(J)V", + (void*) &AndroidNativeOpenGl2Channel::DrawNativeStatic, }, + { "CreateOpenGLNative", + "(JII)I", + (void*) &AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic }, + }; + if (env->RegisterNatives(javaRenderClass, nativeFunctions, 2) == 0) { + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1, + "%s: Registered native functions", __FUNCTION__); + } + else { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1, + "%s: Failed to register native functions", __FUNCTION__); + return -1; + } + + env->CallVoidMethod(_javaRenderObj, _registerNativeCID, (jlong) this); + + // Detach this thread if it was attached + if (isAttached) { + if (_jvm->DetachCurrentThread() < 0) { + WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, + "%s: Could not detach thread from JVM", __FUNCTION__); } - WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, - "%s: AndroidNativeOpenGl2Channel done", __FUNCTION__); - return 0; + } + + if (_openGLRenderer.SetCoordinates(zOrder, left, top, right, bottom) != 0) { + return -1; + } + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, + "%s: AndroidNativeOpenGl2Channel done", __FUNCTION__); + return 0; } WebRtc_Word32 AndroidNativeOpenGl2Channel::RenderFrame( const WebRtc_UWord32 /*streamId*/, - VideoFrame& videoFrame) -{ - // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__); - _renderCritSect.Enter(); - _bufferToRender.SwapFrame(videoFrame); - _renderCritSect.Leave(); - _renderer.ReDraw(); - return 0; + VideoFrame& videoFrame) { + // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__); + _renderCritSect.Enter(); + _bufferToRender.SwapFrame(videoFrame); + _renderCritSect.Leave(); + _renderer.ReDraw(); + return 0; } /*Implements AndroidStream * Calls the Java object and render the buffer in 
_bufferToRender */ -void AndroidNativeOpenGl2Channel::DeliverFrame(JNIEnv* jniEnv) -{ - //TickTime timeNow=TickTime::Now(); +void AndroidNativeOpenGl2Channel::DeliverFrame(JNIEnv* jniEnv) { + //TickTime timeNow=TickTime::Now(); - //Draw the Surface - jniEnv->CallVoidMethod(_javaRenderObj, _redrawCid); + //Draw the Surface + jniEnv->CallVoidMethod(_javaRenderObj, _redrawCid); - // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, - // "%s: time to deliver %lld" ,__FUNCTION__, - // (TickTime::Now()-timeNow).Milliseconds()); + // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, + // "%s: time to deliver %lld" ,__FUNCTION__, + // (TickTime::Now()-timeNow).Milliseconds()); } /* @@ -470,8 +418,7 @@ void JNICALL AndroidNativeOpenGl2Channel::DrawNativeStatic( renderChannel->DrawNative(); } -void AndroidNativeOpenGl2Channel::DrawNative() -{ +void AndroidNativeOpenGl2Channel::DrawNative() { _openGLRenderer.Render(_bufferToRender); } @@ -498,4 +445,4 @@ jint AndroidNativeOpenGl2Channel::CreateOpenGLNative( return _openGLRenderer.Setup(width, height); } -} //namespace webrtc +} //namespace webrtc diff --git a/src/modules/video_render/main/source/android/video_render_android_surface_view.cc b/src/modules/video_render/main/source/android/video_render_android_surface_view.cc index 20555b105..c50ecd9da 100644 --- a/src/modules/video_render/main/source/android/video_render_android_surface_view.cc +++ b/src/modules/video_render/main/source/android/video_render_android_surface_view.cc @@ -16,7 +16,6 @@ #include #endif - #ifdef ANDROID_LOG #include #include @@ -29,15 +28,14 @@ namespace webrtc { -AndroidSurfaceViewRenderer::AndroidSurfaceViewRenderer(const WebRtc_Word32 id, - const VideoRenderType videoRenderType, - void* window, - const bool fullscreen) -: +AndroidSurfaceViewRenderer::AndroidSurfaceViewRenderer( + const WebRtc_Word32 id, + const VideoRenderType videoRenderType, + void* window, + const bool fullscreen) : VideoRenderAndroid(id,videoRenderType,window,fullscreen), _javaRenderObj(NULL), - _javaRenderClass(NULL) -{ + _javaRenderClass(NULL) { } AndroidSurfaceViewRenderer::~AndroidSurfaceViewRenderer() { @@ -460,25 +458,25 @@ void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) { #ifdef ANDROID_NDK_8_OR_ABOVE if (_bitmapWidth != _bufferToRender.Width() || - _bitmapHeight != _bufferToRender.Height()) { + _bitmapHeight != _bufferToRender.Height()) { // Create the bitmap to write to WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: Creating bitmap %u " "%u", __FUNCTION__, _bufferToRender.Width(), _bufferToRender.Height()); if (_javaBitmapObj) { jniEnv->DeleteGlobalRef(_javaBitmapObj); - _javaBitmapObj = NULL; + _javaBitmapObj = NULL; } jobject javaBitmap = jniEnv->CallObjectMethod(_javaRenderObj, _createBitmapCid, videoFrame.Width(), videoFrame.Height()); _javaBitmapObj = jniEnv->NewGlobalRef(javaBitmap); - if (!_javaBitmapObj) { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not " - "create Java Bitmap object reference", __FUNCTION__); - _renderCritSect.Leave(); - return; + if (!_javaBitmapObj) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not " + "create Java Bitmap object reference", __FUNCTION__); + _renderCritSect.Leave(); + return; } else { _bitmapWidth = _bufferToRender.Width(); _bitmapHeight = _bufferToRender.Height(); @@ -518,14 +516,14 @@ void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) { "%d",__FUNCTION__, _bufferToRender.Width(), _bufferToRender.Height()); if (_javaByteBufferObj) { - 
jniEnv->DeleteGlobalRef(_javaByteBufferObj); - _javaByteBufferObj = NULL; - _directBuffer = NULL; + jniEnv->DeleteGlobalRef(_javaByteBufferObj); + _javaByteBufferObj = NULL; + _directBuffer = NULL; } jobject javaByteBufferObj = - jniEnv->CallObjectMethod(_javaRenderObj, _createByteBufferCid, - _bufferToRender.Width(), - _bufferToRender.Height()); + jniEnv->CallObjectMethod(_javaRenderObj, _createByteBufferCid, + _bufferToRender.Width(), + _bufferToRender.Height()); _javaByteBufferObj = jniEnv->NewGlobalRef(javaByteBufferObj); if (!_javaByteBufferObj) { WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: could not " @@ -544,8 +542,8 @@ void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) { // Android requires a vertically flipped image compared to std convert. // This is done by giving a negative height input. const int conversionResult = - ConvertI420ToRGB565((unsigned char* )_bufferToRender.Buffer(), - _directBuffer, _bitmapWidth, -_bitmapHeight); + ConvertI420ToRGB565((unsigned char* )_bufferToRender.Buffer(), + _directBuffer, _bitmapWidth, -_bitmapHeight); if (conversionResult < 0) { WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion" " failed.", __FUNCTION__); diff --git a/src/modules/video_render/main/source/android/video_render_android_surface_view.h b/src/modules/video_render/main/source/android/video_render_android_surface_view.h index 1355e8323..34394c219 100644 --- a/src/modules/video_render/main/source/android/video_render_android_surface_view.h +++ b/src/modules/video_render/main/source/android/video_render_android_surface_view.h @@ -20,9 +20,8 @@ namespace webrtc { class CriticalSectionWrapper; -class AndroidSurfaceViewChannel: public AndroidStream -{ -public: +class AndroidSurfaceViewChannel : public AndroidStream { + public: AndroidSurfaceViewChannel(WebRtc_UWord32 streamId, JavaVM* jvm, VideoRenderAndroid& renderer, @@ -67,9 +66,8 @@ public: unsigned int _bitmapHeight; }; -class AndroidSurfaceViewRenderer: private VideoRenderAndroid -{ -public: +class AndroidSurfaceViewRenderer : private VideoRenderAndroid { + public: AndroidSurfaceViewRenderer(const WebRtc_Word32 id, const VideoRenderType videoRenderType, void* window, @@ -89,6 +87,6 @@ public: jclass _javaRenderClass; }; -} //namespace webrtc +} // namespace webrtc -#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_ +#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_ diff --git a/src/modules/video_render/main/source/android/video_render_opengles20.cc b/src/modules/video_render/main/source/android/video_render_opengles20.cc index f207a16d7..28bf9ae20 100644 --- a/src/modules/video_render/main/source/android/video_render_opengles20.cc +++ b/src/modules/video_render/main/source/android/video_render_opengles20.cc @@ -33,318 +33,287 @@ namespace webrtc { const char VideoRenderOpenGles20::g_indices[] = { 0, 3, 2, 0, 2, 1 }; const char VideoRenderOpenGles20::g_vertextShader[] = { - "attribute vec4 aPosition;\n" - "attribute vec2 aTextureCoord;\n" - "varying vec2 vTextureCoord;\n" - "void main() {\n" - " gl_Position = aPosition;\n" - " vTextureCoord = aTextureCoord;\n" - "}\n" }; + "attribute vec4 aPosition;\n" + "attribute vec2 aTextureCoord;\n" + "varying vec2 vTextureCoord;\n" + "void main() {\n" + " gl_Position = aPosition;\n" + " vTextureCoord = aTextureCoord;\n" + "}\n" }; // The fragment shader. // Do YUV to RGB565 conversion. 
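The shader below applies a BT.601-style limited-range YCbCr-to-RGB transform per texel. A CPU-side sketch of the same arithmetic, using the identical constants, can serve as a reference when checking the GL output; it is an illustration under those assumptions, not code from this change:

// Reference for the transform in g_fragmentShader: expand studio-swing
// luma, center the chroma, then mix. Inputs are 8-bit Y, U, V samples,
// normalized the way texture2D() presents LUMINANCE textures.
struct Rgb { float r, g, b; };

static Rgb YuvToRgb(unsigned char y8, unsigned char u8, unsigned char v8) {
  float y = y8 / 255.0f;
  float u = u8 / 255.0f - 0.5f;
  float v = v8 / 255.0f - 0.5f;
  y = 1.1643f * (y - 0.0625f);
  Rgb rgb;
  rgb.r = y + 1.5958f * v;
  rgb.g = y - 0.39173f * u - 0.81290f * v;
  rgb.b = y + 2.017f * u;
  return rgb;  // clamp each channel to [0, 1] before packing to RGB565
}
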
const char VideoRenderOpenGles20::g_fragmentShader[] = { - "precision mediump float;\n" - "uniform sampler2D Ytex;\n" - "uniform sampler2D Utex,Vtex;\n" - "varying vec2 vTextureCoord;\n" - "void main(void) {\n" - " float nx,ny,r,g,b,y,u,v;\n" - " mediump vec4 txl,ux,vx;" - " nx=vTextureCoord[0];\n" - " ny=vTextureCoord[1];\n" - " y=texture2D(Ytex,vec2(nx,ny)).r;\n" - " u=texture2D(Utex,vec2(nx,ny)).r;\n" - " v=texture2D(Vtex,vec2(nx,ny)).r;\n" + "precision mediump float;\n" + "uniform sampler2D Ytex;\n" + "uniform sampler2D Utex,Vtex;\n" + "varying vec2 vTextureCoord;\n" + "void main(void) {\n" + " float nx,ny,r,g,b,y,u,v;\n" + " mediump vec4 txl,ux,vx;" + " nx=vTextureCoord[0];\n" + " ny=vTextureCoord[1];\n" + " y=texture2D(Ytex,vec2(nx,ny)).r;\n" + " u=texture2D(Utex,vec2(nx,ny)).r;\n" + " v=texture2D(Vtex,vec2(nx,ny)).r;\n" - //" y = v;\n"+ - " y=1.1643*(y-0.0625);\n" - " u=u-0.5;\n" - " v=v-0.5;\n" + //" y = v;\n"+ + " y=1.1643*(y-0.0625);\n" + " u=u-0.5;\n" + " v=v-0.5;\n" - " r=y+1.5958*v;\n" - " g=y-0.39173*u-0.81290*v;\n" - " b=y+2.017*u;\n" - " gl_FragColor=vec4(r,g,b,1.0);\n" - "}\n" }; + " r=y+1.5958*v;\n" + " g=y-0.39173*u-0.81290*v;\n" + " b=y+2.017*u;\n" + " gl_FragColor=vec4(r,g,b,1.0);\n" + "}\n" }; VideoRenderOpenGles20::VideoRenderOpenGles20(WebRtc_Word32 id) : _id(id), _textureWidth(-1), - _textureHeight(-1) + _textureHeight(-1) { + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d", + __FUNCTION__, (int) _id); -{ - WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d", - __FUNCTION__, (int) _id); - - const GLfloat vertices[20] = { + const GLfloat vertices[20] = { // X, Y, Z, U, V - -1, -1, 0, 0, 1, // Bottom Left - 1, -1, 0, 1, 1, //Bottom Right - 1, 1, 0, 1, 0, //Top Right - -1, 1, 0, 0, 0 }; //Top Left + -1, -1, 0, 0, 1, // Bottom Left + 1, -1, 0, 1, 1, //Bottom Right + 1, 1, 0, 1, 0, //Top Right + -1, 1, 0, 0, 0 }; //Top Left - memcpy(_vertices, vertices, sizeof(_vertices)); + memcpy(_vertices, vertices, sizeof(_vertices)); } -VideoRenderOpenGles20::~VideoRenderOpenGles20() -{ - +VideoRenderOpenGles20::~VideoRenderOpenGles20() { } WebRtc_Word32 VideoRenderOpenGles20::Setup(WebRtc_Word32 width, - WebRtc_Word32 height) -{ - WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, - "%s: width %d, height %d", __FUNCTION__, (int) width, - (int) height); + WebRtc_Word32 height) { + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, + "%s: width %d, height %d", __FUNCTION__, (int) width, + (int) height); - printGLString("Version", GL_VERSION); - printGLString("Vendor", GL_VENDOR); - printGLString("Renderer", GL_RENDERER); - printGLString("Extensions", GL_EXTENSIONS); + printGLString("Version", GL_VERSION); + printGLString("Vendor", GL_VENDOR); + printGLString("Renderer", GL_RENDERER); + printGLString("Extensions", GL_EXTENSIONS); - int maxTextureImageUnits[2]; - int maxTextureSize[2]; - glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, maxTextureImageUnits); - glGetIntegerv(GL_MAX_TEXTURE_SIZE, maxTextureSize); + int maxTextureImageUnits[2]; + int maxTextureSize[2]; + glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, maxTextureImageUnits); + glGetIntegerv(GL_MAX_TEXTURE_SIZE, maxTextureSize); - WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, - "%s: number of textures %d, size %d", __FUNCTION__, - (int) maxTextureImageUnits[0], (int) maxTextureSize[0]); + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, + "%s: number of textures %d, size %d", __FUNCTION__, + (int) maxTextureImageUnits[0], (int) maxTextureSize[0]); - _program = createProgram(g_vertextShader, 
g_fragmentShader); - if (!_program) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s: Could not create program", __FUNCTION__); - return -1; - } + _program = createProgram(g_vertextShader, g_fragmentShader); + if (!_program) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Could not create program", __FUNCTION__); + return -1; + } - int positionHandle = glGetAttribLocation(_program, "aPosition"); - checkGlError("glGetAttribLocation aPosition"); - if (positionHandle == -1) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s: Could not get aPosition handle", __FUNCTION__); - return -1; - } - int textureHandle = glGetAttribLocation(_program, "aTextureCoord"); - checkGlError("glGetAttribLocation aTextureCoord"); - if (textureHandle == -1) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s: Could not get aTextureCoord handle", __FUNCTION__); - return -1; - } + int positionHandle = glGetAttribLocation(_program, "aPosition"); + checkGlError("glGetAttribLocation aPosition"); + if (positionHandle == -1) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Could not get aPosition handle", __FUNCTION__); + return -1; + } - // set the vertices array in the shader - // _vertices contains 4 vertices with 5 coordinates. - // 3 for (xyz) for the vertices and 2 for the texture - glVertexAttribPointer(positionHandle, 3, GL_FLOAT, false, 5 - * sizeof(GLfloat), _vertices); - checkGlError("glVertexAttribPointer aPosition"); + int textureHandle = glGetAttribLocation(_program, "aTextureCoord"); + checkGlError("glGetAttribLocation aTextureCoord"); + if (textureHandle == -1) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Could not get aTextureCoord handle", __FUNCTION__); + return -1; + } - glEnableVertexAttribArray(positionHandle); - checkGlError("glEnableVertexAttribArray positionHandle"); + // set the vertices array in the shader + // _vertices contains 4 vertices with 5 coordinates. + // 3 for (xyz) for the vertices and 2 for the texture + glVertexAttribPointer(positionHandle, 3, GL_FLOAT, false, + 5 * sizeof(GLfloat), _vertices); + checkGlError("glVertexAttribPointer aPosition"); - // set the texture coordinate array in the shader - // _vertices contains 4 vertices with 5 coordinates. - // 3 for (xyz) for the vertices and 2 for the texture - glVertexAttribPointer(textureHandle, 2, GL_FLOAT, false, 5 - * sizeof(GLfloat), &_vertices[3]); - checkGlError("glVertexAttribPointer maTextureHandle"); - glEnableVertexAttribArray(textureHandle); - checkGlError("glEnableVertexAttribArray textureHandle"); + glEnableVertexAttribArray(positionHandle); + checkGlError("glEnableVertexAttribArray positionHandle"); - glUseProgram(_program); - int i = glGetUniformLocation(_program, "Ytex"); - checkGlError("glGetUniformLocation"); - glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */ - checkGlError("glUniform1i Ytex"); + // set the texture coordinate array in the shader + // _vertices contains 4 vertices with 5 coordinates. 
+ // 3 for (xyz) for the vertices and 2 for the texture + glVertexAttribPointer(textureHandle, 2, GL_FLOAT, false, 5 + * sizeof(GLfloat), &_vertices[3]); + checkGlError("glVertexAttribPointer maTextureHandle"); + glEnableVertexAttribArray(textureHandle); + checkGlError("glEnableVertexAttribArray textureHandle"); - i = glGetUniformLocation(_program, "Utex"); - checkGlError("glGetUniformLocation Utex"); - glUniform1i(i, 1); /* Bind Utex to texture unit 1 */ - checkGlError("glUniform1i Utex"); + glUseProgram(_program); + int i = glGetUniformLocation(_program, "Ytex"); + checkGlError("glGetUniformLocation"); + glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */ + checkGlError("glUniform1i Ytex"); - i = glGetUniformLocation(_program, "Vtex"); - checkGlError("glGetUniformLocation"); - glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */ - checkGlError("glUniform1i"); + i = glGetUniformLocation(_program, "Utex"); + checkGlError("glGetUniformLocation Utex"); + glUniform1i(i, 1); /* Bind Utex to texture unit 1 */ + checkGlError("glUniform1i Utex"); - glViewport(0, 0, width, height); - checkGlError("glViewport"); - return 0; + i = glGetUniformLocation(_program, "Vtex"); + checkGlError("glGetUniformLocation"); + glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */ + checkGlError("glUniform1i"); + glViewport(0, 0, width, height); + checkGlError("glViewport"); + return 0; } -/* - * SetCoordinates - * Sets the coordinates where the stream shall be rendered. - * Values must be between 0 and 1. - */ + +// SetCoordinates +// Sets the coordinates where the stream shall be rendered. +// Values must be between 0 and 1. WebRtc_Word32 VideoRenderOpenGles20::SetCoordinates(WebRtc_Word32 zOrder, const float left, const float top, const float right, - const float bottom) -{ - if ((top > 1 || top < 0) || (right > 1 || right < 0) || (bottom > 1 - || bottom < 0) || (left > 1 || left < 0)) - { - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s: Wrong coordinates", __FUNCTION__); - return -1; - } - /* - // X, Y, Z, U, V - -1, -1, 0, 0, 1, // Bottom Left - 1, -1, 0, 1, 1, //Bottom Right - 1, 1, 0, 1, 0, //Top Right - -1, 1, 0, 0, 0 }; //Top Left - */ - // Bottom Left - _vertices[0] = (left * 2) - 1; - _vertices[1] = -1 * (2 * bottom) + 1; - _vertices[2] = zOrder; + const float bottom) { + if ((top > 1 || top < 0) || (right > 1 || right < 0) || + (bottom > 1 || bottom < 0) || (left > 1 || left < 0)) { + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Wrong coordinates", __FUNCTION__); + return -1; + } - //Bottom Right - _vertices[5] = (right * 2) - 1; - _vertices[6] = -1 * (2 * bottom) + 1; - _vertices[7] = zOrder; + // X, Y, Z, U, V + // -1, -1, 0, 0, 1, // Bottom Left + // 1, -1, 0, 1, 1, //Bottom Right + // 1, 1, 0, 1, 0, //Top Right + // -1, 1, 0, 0, 0 //Top Left - //Top Right - _vertices[10] = (right * 2) - 1; - _vertices[11] = -1 * (2 * top) + 1; - _vertices[12] = zOrder; + // Bottom Left + _vertices[0] = (left * 2) - 1; + _vertices[1] = -1 * (2 * bottom) + 1; + _vertices[2] = zOrder; - //Top Left - _vertices[15] = (left * 2) - 1; - _vertices[16] = -1 * (2 * top) + 1; - _vertices[17] = zOrder; + //Bottom Right + _vertices[5] = (right * 2) - 1; + _vertices[6] = -1 * (2 * bottom) + 1; + _vertices[7] = zOrder; - return 0; + //Top Right + _vertices[10] = (right * 2) - 1; + _vertices[11] = -1 * (2 * top) + 1; + _vertices[12] = zOrder; + //Top Left + _vertices[15] = (left * 2) - 1; + _vertices[16] = -1 * (2 * top) + 1; + _vertices[17] = zOrder; + + return 0; } -WebRtc_Word32 
VideoRenderOpenGles20::Render(const VideoFrame& frameToRender) -{ - if (frameToRender.Length() == 0) - { - return -1; - } +WebRtc_Word32 VideoRenderOpenGles20::Render(const VideoFrame& frameToRender) { - WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d", - __FUNCTION__, (int) _id); + if (frameToRender.Length() == 0) { + return -1; + } - //glClearColor(0.0f, 0.0f, 1.0f, 1.0f); - //glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT); + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d", + __FUNCTION__, (int) _id); - glUseProgram(_program); - checkGlError("glUseProgram"); + glUseProgram(_program); + checkGlError("glUseProgram"); - if (_textureWidth != (GLsizei) frameToRender.Width() || _textureHeight - != (GLsizei) frameToRender.Height()) - { - SetupTextures(frameToRender); - } - else - { - UpdateTextures(frameToRender); - } + if (_textureWidth != (GLsizei) frameToRender.Width() || + _textureHeight != (GLsizei) frameToRender.Height()) { + SetupTextures(frameToRender); + } + else { + UpdateTextures(frameToRender); + } - glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, g_indices); - checkGlError("glDrawArrays"); + glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, g_indices); + checkGlError("glDrawArrays"); - return 0; + return 0; } GLuint VideoRenderOpenGles20::loadShader(GLenum shaderType, - const char* pSource) -{ - GLuint shader = glCreateShader(shaderType); - if (shader) - { - glShaderSource(shader, 1, &pSource, NULL); - glCompileShader(shader); - GLint compiled = 0; - glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled); - if (!compiled) - { - GLint infoLen = 0; - glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen); - if (infoLen) - { - char* buf = (char*) malloc(infoLen); - if (buf) - { - glGetShaderInfoLog(shader, infoLen, NULL, buf); - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s: Could not compile shader %d: %s", - __FUNCTION__, shaderType, buf); - free(buf); - } - glDeleteShader(shader); - shader = 0; - } + const char* pSource) { + GLuint shader = glCreateShader(shaderType); + if (shader) { + glShaderSource(shader, 1, &pSource, NULL); + glCompileShader(shader); + GLint compiled = 0; + glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled); + if (!compiled) { + GLint infoLen = 0; + glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen); + if (infoLen) { + char* buf = (char*) malloc(infoLen); + if (buf) { + glGetShaderInfoLog(shader, infoLen, NULL, buf); + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Could not compile shader %d: %s", + __FUNCTION__, shaderType, buf); + free(buf); } + glDeleteShader(shader); + shader = 0; + } } - return shader; + } + return shader; } GLuint VideoRenderOpenGles20::createProgram(const char* pVertexSource, - const char* pFragmentSource) -{ - GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource); - if (!vertexShader) - { - return 0; - } + const char* pFragmentSource) { + GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource); + if (!vertexShader) { + return 0; + } - GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource); - if (!pixelShader) - { - return 0; - } + GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource); + if (!pixelShader) { + return 0; + } - GLuint program = glCreateProgram(); - if (program) - { - glAttachShader(program, vertexShader); - checkGlError("glAttachShader"); - glAttachShader(program, pixelShader); - checkGlError("glAttachShader"); - glLinkProgram(program); - GLint linkStatus = GL_FALSE; - glGetProgramiv(program, GL_LINK_STATUS, 
&linkStatus); - if (linkStatus != GL_TRUE) - { - GLint bufLength = 0; - glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength); - if (bufLength) - { - char* buf = (char*) malloc(bufLength); - if (buf) - { - glGetProgramInfoLog(program, bufLength, NULL, buf); - WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, - "%s: Could not link program: %s", - __FUNCTION__, buf); - free(buf); - } - } - glDeleteProgram(program); - program = 0; + GLuint program = glCreateProgram(); + if (program) { + glAttachShader(program, vertexShader); + checkGlError("glAttachShader"); + glAttachShader(program, pixelShader); + checkGlError("glAttachShader"); + glLinkProgram(program); + GLint linkStatus = GL_FALSE; + glGetProgramiv(program, GL_LINK_STATUS, &linkStatus); + if (linkStatus != GL_TRUE) { + GLint bufLength = 0; + glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength); + if (bufLength) { + char* buf = (char*) malloc(bufLength); + if (buf) { + glGetProgramInfoLog(program, bufLength, NULL, buf); + WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, + "%s: Could not link program: %s", + __FUNCTION__, buf); + free(buf); } + } + glDeleteProgram(program); + program = 0; } - return program; + } + return program; } -void VideoRenderOpenGles20::printGLString(const char *name, GLenum s) -{ - const char *v = (const char *) glGetString(s); - WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "GL %s = %s\n", - name, v); +void VideoRenderOpenGles20::printGLString(const char *name, GLenum s) { + const char *v = (const char *) glGetString(s); + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "GL %s = %s\n", + name, v); } void VideoRenderOpenGles20::checkGlError(const char* op) { @@ -358,89 +327,87 @@ void VideoRenderOpenGles20::checkGlError(const char* op) { #endif } -void VideoRenderOpenGles20::SetupTextures(const VideoFrame& frameToRender) -{ - WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, - "%s: width %d, height %d length %u", __FUNCTION__, - frameToRender.Width(), frameToRender.Height(), - frameToRender.Length()); +void VideoRenderOpenGles20::SetupTextures(const VideoFrame& frameToRender) { + WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, + "%s: width %d, height %d length %u", __FUNCTION__, + frameToRender.Width(), frameToRender.Height(), + frameToRender.Length()); - const GLsizei width = frameToRender.Width(); - const GLsizei height = frameToRender.Height(); + const GLsizei width = frameToRender.Width(); + const GLsizei height = frameToRender.Height(); - glGenTextures(3, _textureIds); //Generate the Y, U and V texture - GLuint currentTextureId = _textureIds[0]; // Y - glActiveTexture( GL_TEXTURE0); - glBindTexture(GL_TEXTURE_2D, currentTextureId); + glGenTextures(3, _textureIds); //Generate the Y, U and V texture + GLuint currentTextureId = _textureIds[0]; // Y + glActiveTexture( GL_TEXTURE0); + glBindTexture(GL_TEXTURE_2D, currentTextureId); - glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); - glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); - glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0, - GL_LUMINANCE, GL_UNSIGNED_BYTE, - (const 
GLvoid*) frameToRender.Buffer()); + glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0, + GL_LUMINANCE, GL_UNSIGNED_BYTE, + (const GLvoid*) frameToRender.Buffer()); - currentTextureId = _textureIds[1]; // U - glActiveTexture( GL_TEXTURE1); - glBindTexture(GL_TEXTURE_2D, currentTextureId); + currentTextureId = _textureIds[1]; // U + glActiveTexture( GL_TEXTURE1); + glBindTexture(GL_TEXTURE_2D, currentTextureId); - glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); - glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); - glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - const WebRtc_UWord8* uComponent = frameToRender.Buffer() + width * height; - glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0, - GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) uComponent); + const WebRtc_UWord8* uComponent = frameToRender.Buffer() + width * height; + glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0, + GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) uComponent); - currentTextureId = _textureIds[2]; // V - glActiveTexture( GL_TEXTURE2); - glBindTexture(GL_TEXTURE_2D, currentTextureId); + currentTextureId = _textureIds[2]; // V + glActiveTexture( GL_TEXTURE2); + glBindTexture(GL_TEXTURE_2D, currentTextureId); - glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); - glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); - glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); + glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - const WebRtc_UWord8* vComponent = uComponent + (width * height) / 4; - glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0, - GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) vComponent); - checkGlError("SetupTextures"); + const WebRtc_UWord8* vComponent = uComponent + (width * height) / 4; + glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0, + GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) vComponent); + checkGlError("SetupTextures"); - _textureWidth = width; - _textureHeight = height; + _textureWidth = width; + _textureHeight = height; } -void VideoRenderOpenGles20::UpdateTextures(const VideoFrame& frameToRender) -{ - const GLsizei width = frameToRender.Width(); - const GLsizei height = frameToRender.Height(); +void VideoRenderOpenGles20::UpdateTextures(const VideoFrame& frameToRender) { + const GLsizei width = frameToRender.Width(); + const GLsizei height = frameToRender.Height(); - GLuint currentTextureId = _textureIds[0]; // Y - glActiveTexture( GL_TEXTURE0); - glBindTexture(GL_TEXTURE_2D, currentTextureId); - glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE, - GL_UNSIGNED_BYTE, (const GLvoid*) frameToRender.Buffer()); + GLuint currentTextureId = _textureIds[0]; // Y + glActiveTexture( GL_TEXTURE0); + glBindTexture(GL_TEXTURE_2D, currentTextureId); + 
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE,
+ GL_UNSIGNED_BYTE, (const GLvoid*) frameToRender.Buffer());
- currentTextureId = _textureIds[1]; // U
- glActiveTexture( GL_TEXTURE1);
- glBindTexture(GL_TEXTURE_2D, currentTextureId);
- const WebRtc_UWord8* uComponent = frameToRender.Buffer() + width * height;
- glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2,
- GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) uComponent);
+ currentTextureId = _textureIds[1]; // U
+ glActiveTexture( GL_TEXTURE1);
+ glBindTexture(GL_TEXTURE_2D, currentTextureId);
+ const WebRtc_UWord8* uComponent = frameToRender.Buffer() + width * height;
+ glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2,
+ GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) uComponent);
- currentTextureId = _textureIds[2]; // V
- glActiveTexture( GL_TEXTURE2);
- glBindTexture(GL_TEXTURE_2D, currentTextureId);
- const WebRtc_UWord8* vComponent = uComponent + (width * height) / 4;
- glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2,
- GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) vComponent);
- checkGlError("UpdateTextures");
+ currentTextureId = _textureIds[2]; // V
+ glActiveTexture( GL_TEXTURE2);
+ glBindTexture(GL_TEXTURE_2D, currentTextureId);
+ const WebRtc_UWord8* vComponent = uComponent + (width * height) / 4;
+ glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2,
+ GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) vComponent);
+ checkGlError("UpdateTextures");
}
-} //namespace webrtc
+} // namespace webrtc
diff --git a/src/modules/video_render/main/source/android/video_render_opengles20.h b/src/modules/video_render/main/source/android/video_render_opengles20.h
index eee40892e..8f1743eca 100644
--- a/src/modules/video_render/main/source/android/video_render_opengles20.h
+++ b/src/modules/video_render/main/source/android/video_render_opengles20.h
@@ -19,44 +19,43 @@ namespace webrtc {
-class VideoRenderOpenGles20
-{
-public:
- VideoRenderOpenGles20(WebRtc_Word32 id);
- ~VideoRenderOpenGles20();
+class VideoRenderOpenGles20 {
+ public:
+ VideoRenderOpenGles20(WebRtc_Word32 id);
+ ~VideoRenderOpenGles20();
- WebRtc_Word32 Setup(WebRtc_Word32 widht, WebRtc_Word32 height);
- WebRtc_Word32 Render(const VideoFrame& frameToRender);
- WebRtc_Word32 SetCoordinates(WebRtc_Word32 zOrder,
- const float left,
- const float top,
- const float right,
- const float bottom);
+ WebRtc_Word32 Setup(WebRtc_Word32 width, WebRtc_Word32 height);
+ WebRtc_Word32 Render(const VideoFrame& frameToRender);
+ WebRtc_Word32 SetCoordinates(WebRtc_Word32 zOrder,
+ const float left,
+ const float top,
+ const float right,
+ const float bottom);
-private:
- void printGLString(const char *name, GLenum s);
- void checkGlError(const char* op);
- GLuint loadShader(GLenum shaderType, const char* pSource);
- GLuint createProgram(const char* pVertexSource,
- const char* pFragmentSource);
- void SetupTextures(const VideoFrame& frameToRender);
- void UpdateTextures(const VideoFrame& frameToRender);
+ private:
+ void printGLString(const char *name, GLenum s);
+ void checkGlError(const char* op);
+ GLuint loadShader(GLenum shaderType, const char* pSource);
+ GLuint createProgram(const char* pVertexSource,
+ const char* pFragmentSource);
+ void SetupTextures(const VideoFrame& frameToRender);
+ void UpdateTextures(const VideoFrame& frameToRender);
- WebRtc_Word32 _id;
- GLuint _textureIds[3]; // Texture id of Y,U and V texture.
- GLuint _program; - GLuint _vPositionHandle; - GLsizei _textureWidth; - GLsizei _textureHeight; + WebRtc_Word32 _id; + GLuint _textureIds[3]; // Texture id of Y,U and V texture. + GLuint _program; + GLuint _vPositionHandle; + GLsizei _textureWidth; + GLsizei _textureHeight; - GLfloat _vertices[20]; - static const char g_indices[]; + GLfloat _vertices[20]; + static const char g_indices[]; - static const char g_vertextShader[]; - static const char g_fragmentShader[]; + static const char g_vertextShader[]; + static const char g_fragmentShader[]; }; -} //namespace webrtc +} // namespace webrtc -#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_ +#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_ diff --git a/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/IViEAndroidCallback.java b/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/IViEAndroidCallback.java index 7a00d1420..5a26190f6 100644 --- a/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/IViEAndroidCallback.java +++ b/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/IViEAndroidCallback.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -11,7 +11,7 @@ package org.webrtc.videoengineapp; public interface IViEAndroidCallback { - public int UpdateStats(int frameRateI, int bitRateI, - int packetLoss, int frameRateO, - int bitRateO); + public int UpdateStats(int frameRateI, int bitRateI, + int packetLoss, int frameRateO, + int bitRateO); } diff --git a/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/ViEAndroidDemo.java b/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/ViEAndroidDemo.java index 9301948ea..1213f28e3 100644 --- a/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/ViEAndroidDemo.java +++ b/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/ViEAndroidDemo.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 
* * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -53,751 +53,751 @@ import android.view.OrientationEventListener; public class ViEAndroidDemo extends TabActivity implements IViEAndroidCallback, View.OnClickListener, OnItemSelectedListener { - private ViEAndroidJavaAPI ViEAndroidAPI = null; + private ViEAndroidJavaAPI ViEAndroidAPI = null; - // remote renderer - private SurfaceView remoteSurfaceView = null; + // remote renderer + private SurfaceView remoteSurfaceView = null; - // local renderer and camera - private SurfaceView svLocal = null; + // local renderer and camera + private SurfaceView svLocal = null; - // channel number - private int channel; - private int cameraId; - private int voiceChannel = -1; + // channel number + private int channel; + private int cameraId; + private int voiceChannel = -1; - // flags - private boolean viERunning = false; - private boolean voERunning = false; + // flags + private boolean viERunning = false; + private boolean voERunning = false; - // debug - private boolean enableTrace = false; + // debug + private boolean enableTrace = false; - // Constant - private static final String LOG_TAG = "*WEBRTCJ*"; - private static final int RECEIVE_CODEC_FRAMERATE = 30; - private static final int SEND_CODEC_FRAMERATE = 15; - private static final int INIT_BITRATE = 400; + // Constant + private static final String LOG_TAG = "*WEBRTCJ*"; + private static final int RECEIVE_CODEC_FRAMERATE = 30; + private static final int SEND_CODEC_FRAMERATE = 15; + private static final int INIT_BITRATE = 400; - private static final int EXPIRARY_YEAR = 2010; - private static final int EXPIRARY_MONTH = 10; - private static final int EXPIRARY_DAY = 22; + private static final int EXPIRARY_YEAR = 2010; + private static final int EXPIRARY_MONTH = 10; + private static final int EXPIRARY_DAY = 22; - private int volumeLevel = 204; + private int volumeLevel = 204; - private TabHost mTabHost = null; + private TabHost mTabHost = null; - private TabSpec mTabSpecConfig; - private TabSpec mTabSpecVideo; + private TabSpec mTabSpecConfig; + private TabSpec mTabSpecVideo; - private LinearLayout mLlRemoteSurface = null; - private LinearLayout mLlLocalSurface = null; + private LinearLayout mLlRemoteSurface = null; + private LinearLayout mLlLocalSurface = null; - private Button btStartStopCall; - private Button btSwitchCamera; + private Button btStartStopCall; + private Button btSwitchCamera; - //Global Settings - private CheckBox cbVideoSend; - private boolean enableVideoSend = true; - private CheckBox cbVideoReceive; - private boolean enableVideoReceive = true; - private boolean enableVideo = true; - private CheckBox cbVoice; - private boolean enableVoice = false; - private EditText etRemoteIp; - private String remoteIp = "10.1.100.68"; - private CheckBox cbLoopback; - private boolean loopbackMode = true; + //Global Settings + private CheckBox cbVideoSend; + private boolean enableVideoSend = true; + private CheckBox cbVideoReceive; + private boolean enableVideoReceive = true; + private boolean enableVideo = true; + private CheckBox cbVoice; + private boolean enableVoice = false; + private EditText etRemoteIp; + private String remoteIp = "10.1.100.68"; + private CheckBox cbLoopback; + private boolean loopbackMode = true; - //Video settings - private Spinner spCodecType; - private int codecType = 0; - private Spinner spCodecSize; - private int codecSizeWidth = 352; - private int codecSizeHeight = 288; - private TextView 
etVRxPort; - private int receivePortVideo = 11111; - private TextView etVTxPort; - private int destinationPortVideo = 11111; - private CheckBox cbEnableNack; - private boolean enableNack = false; + //Video settings + private Spinner spCodecType; + private int codecType = 0; + private Spinner spCodecSize; + private int codecSizeWidth = 352; + private int codecSizeHeight = 288; + private TextView etVRxPort; + private int receivePortVideo = 11111; + private TextView etVTxPort; + private int destinationPortVideo = 11111; + private CheckBox cbEnableNack; + private boolean enableNack = false; - //Audio settings - private Spinner spVoiceCodecType; - private int voiceCodecType = 5; //PCMU = 5 - private TextView etARxPort; - private int receivePortVoice = 11113; - private TextView etATxPort; - private int destinationPortVoice = 11113; - private CheckBox cbEnableSpeaker; - private boolean enableSpeaker = false; - private CheckBox cbEnableAGC; - private boolean enableAGC = false; - private CheckBox cbEnableAECM; - private boolean enableAECM = false; - private CheckBox cbEnableNS; - private boolean enableNS = false; + //Audio settings + private Spinner spVoiceCodecType; + private int voiceCodecType = 5; //PCMU = 5 + private TextView etARxPort; + private int receivePortVoice = 11113; + private TextView etATxPort; + private int destinationPortVoice = 11113; + private CheckBox cbEnableSpeaker; + private boolean enableSpeaker = false; + private CheckBox cbEnableAGC; + private boolean enableAGC = false; + private CheckBox cbEnableAECM; + private boolean enableAECM = false; + private CheckBox cbEnableNS; + private boolean enableNS = false; - //Stats - private TextView tvFrameRateI; - private TextView tvBitRateI; - private TextView tvPacketLoss; - private TextView tvFrameRateO; - private TextView tvBitRateO; - private int frameRateI; - private int bitRateI; - private int packetLoss; - private int frameRateO; - private int bitRateO; + //Stats + private TextView tvFrameRateI; + private TextView tvBitRateI; + private TextView tvPacketLoss; + private TextView tvFrameRateO; + private TextView tvBitRateO; + private int frameRateI; + private int bitRateI; + private int packetLoss; + private int frameRateO; + private int bitRateO; - private WakeLock wakeLock; + private WakeLock wakeLock; - private boolean usingFrontCamera = false; + private boolean usingFrontCamera = false; - private OrientationEventListener orientationListener; - int currentOrientation = OrientationEventListener.ORIENTATION_UNKNOWN; - int currentCameraOrientation = 0; + private OrientationEventListener orientationListener; + int currentOrientation = OrientationEventListener.ORIENTATION_UNKNOWN; + int currentCameraOrientation = 0; - //Convert current display orientation to how much the camera should be rotated. 
- public int GetCameraOrientation(int cameraOrientation) { - Display display = this.getWindowManager().getDefaultDisplay(); - int displatyRotation = display.getRotation(); - int degrees = 0; - switch (displatyRotation) { - case Surface.ROTATION_0: degrees = 0; break; - case Surface.ROTATION_90: degrees = 90; break; - case Surface.ROTATION_180: degrees = 180; break; - case Surface.ROTATION_270: degrees = 270; break; - } - int result=0; - if(cameraOrientation>180) { - result=(cameraOrientation + degrees) % 360; - } - else { - result=(cameraOrientation - degrees+360) % 360; - } - - return result; - } - - public void onConfigurationChanged(Configuration newConfig) { - super.onConfigurationChanged(newConfig); - int newRotation = GetCameraOrientation(currentCameraOrientation); - if (viERunning){ - ViEAndroidAPI.SetRotation(cameraId,newRotation); - } - } - - // Called when the activity is first created. - @Override - public void onCreate(Bundle savedInstanceState) { - super.onCreate(savedInstanceState); - requestWindowFeature(Window.FEATURE_NO_TITLE); - getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, - WindowManager.LayoutParams.FLAG_FULLSCREEN); - - PowerManager pm = (PowerManager)this.getSystemService( - Context.POWER_SERVICE); - wakeLock = pm.newWakeLock( - PowerManager.SCREEN_DIM_WAKE_LOCK, LOG_TAG); - - setContentView(R.layout.tabhost); - mTabHost = getTabHost(); - - //Video tab - mTabSpecVideo = mTabHost.newTabSpec("tab_video"); - mTabSpecVideo.setIndicator("Video"); - mTabSpecVideo.setContent(R.id.tab_video); - mTabHost.addTab(mTabSpecVideo); - - //Shared config tab - mTabHost = getTabHost(); - mTabSpecConfig = mTabHost.newTabSpec("tab_config"); - mTabSpecConfig.setIndicator("Config"); - mTabSpecConfig.setContent(R.id.tab_config); - mTabHost.addTab(mTabSpecConfig); - - TabSpec mTabv; - mTabv = mTabHost.newTabSpec("tab_vconfig"); - mTabv.setIndicator("V. Config"); - mTabv.setContent(R.id.tab_vconfig); - mTabHost.addTab(mTabv); - TabSpec mTaba; - mTaba = mTabHost.newTabSpec("tab_aconfig"); - mTaba.setIndicator("A. 
Config"); - mTaba.setContent(R.id.tab_aconfig); - mTabHost.addTab(mTaba); - TabSpec mTabs; - mTabs = mTabHost.newTabSpec("tab_stats"); - mTabs.setIndicator("Stats"); - mTabs.setContent(R.id.tab_stats); - mTabHost.addTab(mTabs); - - int childCount = mTabHost.getTabWidget().getChildCount(); - for (int i=0; i en = NetworkInterface - .getNetworkInterfaces(); en.hasMoreElements();) { - NetworkInterface intf = en.nextElement(); - for (Enumeration enumIpAddr = intf - .getInetAddresses(); enumIpAddr.hasMoreElements();) { - InetAddress inetAddress = enumIpAddr.nextElement(); - if (!inetAddress.isLoopbackAddress()) { - localIPs += inetAddress.getHostAddress().toString() + " "; - //set the remote ip address the same as - // the local ip address of the last netif - remoteIp = inetAddress.getHostAddress().toString(); - } + int result=0; + if(cameraOrientation>180) { + result=(cameraOrientation + degrees) % 360; + } + else { + result=(cameraOrientation - degrees+360) % 360; } - } - } catch (SocketException ex) { - Log.e(LOG_TAG, ex.toString()); - } - return localIPs; - } - @Override - public boolean onKeyDown(int keyCode, KeyEvent event) { - if (keyCode == KeyEvent.KEYCODE_BACK) { - if (viERunning) { - StopAll(); + return result; + } + + public void onConfigurationChanged(Configuration newConfig) { + super.onConfigurationChanged(newConfig); + int newRotation = GetCameraOrientation(currentCameraOrientation); + if (viERunning){ + ViEAndroidAPI.SetRotation(cameraId,newRotation); + } + } + + // Called when the activity is first created. + @Override + public void onCreate(Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + requestWindowFeature(Window.FEATURE_NO_TITLE); + getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, + WindowManager.LayoutParams.FLAG_FULLSCREEN); + + PowerManager pm = (PowerManager)this.getSystemService( + Context.POWER_SERVICE); + wakeLock = pm.newWakeLock( + PowerManager.SCREEN_DIM_WAKE_LOCK, LOG_TAG); + + setContentView(R.layout.tabhost); + mTabHost = getTabHost(); + + //Video tab + mTabSpecVideo = mTabHost.newTabSpec("tab_video"); + mTabSpecVideo.setIndicator("Video"); + mTabSpecVideo.setContent(R.id.tab_video); + mTabHost.addTab(mTabSpecVideo); + + //Shared config tab + mTabHost = getTabHost(); + mTabSpecConfig = mTabHost.newTabSpec("tab_config"); + mTabSpecConfig.setIndicator("Config"); + mTabSpecConfig.setContent(R.id.tab_config); + mTabHost.addTab(mTabSpecConfig); + + TabSpec mTabv; + mTabv = mTabHost.newTabSpec("tab_vconfig"); + mTabv.setIndicator("V. Config"); + mTabv.setContent(R.id.tab_vconfig); + mTabHost.addTab(mTabv); + TabSpec mTaba; + mTaba = mTabHost.newTabSpec("tab_aconfig"); + mTaba.setIndicator("A. 
Config"); + mTaba.setContent(R.id.tab_aconfig); + mTabHost.addTab(mTaba); + TabSpec mTabs; + mTabs = mTabHost.newTabSpec("tab_stats"); + mTabs.setIndicator("Stats"); + mTabs.setContent(R.id.tab_stats); + mTabHost.addTab(mTabs); + + int childCount = mTabHost.getTabWidget().getChildCount(); + for (int i=0; i en = NetworkInterface + .getNetworkInterfaces(); en.hasMoreElements();) { + NetworkInterface intf = en.nextElement(); + for (Enumeration enumIpAddr = intf + .getInetAddresses(); enumIpAddr.hasMoreElements();) { + InetAddress inetAddress = enumIpAddr.nextElement(); + if (!inetAddress.isLoopbackAddress()) { + localIPs += inetAddress.getHostAddress().toString() + " "; + //set the remote ip address the same as + // the local ip address of the last netif + remoteIp = inetAddress.getHostAddress().toString(); + } + } + } + } catch (SocketException ex) { + Log.e(LOG_TAG, ex.toString()); + } + return localIPs; + } - if (viERunning) { - viERunning = false; - ViEAndroidAPI.StopRender(channel); - ViEAndroidAPI.StopReceive(channel); - ViEAndroidAPI.StopSend(channel); - ViEAndroidAPI.RemoveRemoteRenderer(channel); - // stop the camera - ViEAndroidAPI.StopCamera(cameraId); - ViEAndroidAPI.Terminate(); - mLlRemoteSurface.removeView(remoteSurfaceView); - mLlLocalSurface.removeView(svLocal); + @Override + public boolean onKeyDown(int keyCode, KeyEvent event) { + if (keyCode == KeyEvent.KEYCODE_BACK) { + if (viERunning) { + StopAll(); + StartMain(); + } + finish(); + return true; + } + return super.onKeyDown(keyCode, event); + } + + private void StopAll() { + if (ViEAndroidAPI != null) { + if (voERunning) { + voERunning = false; + StopVoiceEngine(); + } + + if (viERunning) { + viERunning = false; + ViEAndroidAPI.StopRender(channel); + ViEAndroidAPI.StopReceive(channel); + ViEAndroidAPI.StopSend(channel); + ViEAndroidAPI.RemoveRemoteRenderer(channel); + // stop the camera + ViEAndroidAPI.StopCamera(cameraId); + ViEAndroidAPI.Terminate(); + mLlRemoteSurface.removeView(remoteSurfaceView); + mLlLocalSurface.removeView(svLocal); + remoteSurfaceView = null; + + svLocal = null; + } + } + } + + private void StartMain() { + mTabHost.setCurrentTab(0); + + mLlRemoteSurface = (LinearLayout) findViewById(R.id.llRemoteView); + mLlLocalSurface = (LinearLayout) findViewById(R.id.llLocalView); + + if (null == ViEAndroidAPI) + ViEAndroidAPI = new ViEAndroidJavaAPI(this); + + //setContentView(R.layout.main); + btSwitchCamera = (Button)findViewById(R.id.btSwitchCamera); + btSwitchCamera.setOnClickListener(this); + btStartStopCall = (Button)findViewById(R.id.btStartStopCall); + btStartStopCall.setOnClickListener(this); + findViewById(R.id.btExit).setOnClickListener(this); + + // cleaning remoteSurfaceView = null; - svLocal = null; - } - } - } - private void StartMain() { - mTabHost.setCurrentTab(0); + // init UI + ArrayAdapter adapter; - mLlRemoteSurface = (LinearLayout) findViewById(R.id.llRemoteView); - mLlLocalSurface = (LinearLayout) findViewById(R.id.llLocalView); + int resource = android.R.layout.simple_spinner_item; + int dropdownRes = android.R.layout.simple_spinner_dropdown_item; - if (null == ViEAndroidAPI) - ViEAndroidAPI = new ViEAndroidJavaAPI(this); + // video codec + spCodecType = (Spinner) findViewById(R.id.spCodecType); + adapter = ArrayAdapter.createFromResource(this, + R.array.codectype, + resource); + adapter.setDropDownViewResource(dropdownRes); + spCodecType.setAdapter(adapter); + spCodecType.setSelection(codecType); + spCodecType.setOnItemSelectedListener(this); - //setContentView(R.layout.main); - 
btSwitchCamera = (Button)findViewById(R.id.btSwitchCamera); - btSwitchCamera.setOnClickListener(this); - btStartStopCall = (Button)findViewById(R.id.btStartStopCall); - btStartStopCall.setOnClickListener(this); - findViewById(R.id.btExit).setOnClickListener(this); + // voice codec + spVoiceCodecType = (Spinner) findViewById(R.id.spVoiceCodecType); + adapter = ArrayAdapter.createFromResource(this, R.array.voiceCodecType, + resource); + adapter.setDropDownViewResource(dropdownRes); + spVoiceCodecType.setAdapter(adapter); + spVoiceCodecType.setSelection(voiceCodecType); + spVoiceCodecType.setOnItemSelectedListener(this); - // cleaning - remoteSurfaceView = null; - svLocal = null; + spCodecSize = (Spinner) findViewById(R.id.spCodecSize); + adapter = ArrayAdapter.createFromResource(this, R.array.codecSize, + resource); + adapter.setDropDownViewResource(dropdownRes); + spCodecSize.setAdapter(adapter); + spCodecSize.setOnItemSelectedListener(this); - // init UI - ArrayAdapter adapter; + String ip = GetLocalIpAddress(); + TextView tvLocalIp = (TextView) findViewById(R.id.tvLocalIp); + tvLocalIp.setText("Local IP address - " + ip); - int resource = android.R.layout.simple_spinner_item; - int dropdownRes = android.R.layout.simple_spinner_dropdown_item; + etRemoteIp = (EditText) findViewById(R.id.etRemoteIp); + etRemoteIp.setText(remoteIp); - // video codec - spCodecType = (Spinner) findViewById(R.id.spCodecType); - adapter = ArrayAdapter.createFromResource(this, - R.array.codectype, - resource); - adapter.setDropDownViewResource(dropdownRes); - spCodecType.setAdapter(adapter); - spCodecType.setSelection(codecType); - spCodecType.setOnItemSelectedListener(this); + cbLoopback = (CheckBox) findViewById(R.id.cbLoopback); + cbLoopback.setChecked(loopbackMode); - // voice codec - spVoiceCodecType = (Spinner) findViewById(R.id.spVoiceCodecType); - adapter = ArrayAdapter.createFromResource(this, R.array.voiceCodecType, - resource); - adapter.setDropDownViewResource(dropdownRes); - spVoiceCodecType.setAdapter(adapter); - spVoiceCodecType.setSelection(voiceCodecType); - spVoiceCodecType.setOnItemSelectedListener(this); + cbVoice = (CheckBox) findViewById(R.id.cbVoice); + cbVoice.setChecked(enableVoice); - spCodecSize = (Spinner) findViewById(R.id.spCodecSize); - adapter = ArrayAdapter.createFromResource(this, R.array.codecSize, - resource); - adapter.setDropDownViewResource(dropdownRes); - spCodecSize.setAdapter(adapter); - spCodecSize.setOnItemSelectedListener(this); + cbVideoSend = (CheckBox) findViewById(R.id.cbVideoSend); + cbVideoSend.setChecked(enableVideoSend); + cbVideoReceive = (CheckBox) findViewById(R.id.cbVideoReceive); + cbVideoReceive.setChecked(enableVideoReceive); - String ip = GetLocalIpAddress(); - TextView tvLocalIp = (TextView) findViewById(R.id.tvLocalIp); - tvLocalIp.setText("Local IP address - " + ip); + etVTxPort = (EditText) findViewById(R.id.etVTxPort); + etVTxPort.setText(Integer.toString(destinationPortVideo)); - etRemoteIp = (EditText) findViewById(R.id.etRemoteIp); - etRemoteIp.setText(remoteIp); + etVRxPort = (EditText) findViewById(R.id.etVRxPort); + etVRxPort.setText(Integer.toString(receivePortVideo)); - cbLoopback = (CheckBox) findViewById(R.id.cbLoopback); - cbLoopback.setChecked(loopbackMode); + etATxPort = (EditText) findViewById(R.id.etATxPort); + etATxPort.setText(Integer.toString(destinationPortVoice)); - cbVoice = (CheckBox) findViewById(R.id.cbVoice); - cbVoice.setChecked(enableVoice); + etARxPort = (EditText) findViewById(R.id.etARxPort); + 
etARxPort.setText(Integer.toString(receivePortVoice)); - cbVideoSend = (CheckBox) findViewById(R.id.cbVideoSend); - cbVideoSend.setChecked(enableVideoSend); - cbVideoReceive = (CheckBox) findViewById(R.id.cbVideoReceive); - cbVideoReceive.setChecked(enableVideoReceive); + cbEnableNack = (CheckBox) findViewById(R.id.cbNack); + cbEnableNack.setChecked(enableNack); - etVTxPort = (EditText) findViewById(R.id.etVTxPort); - etVTxPort.setText(Integer.toString(destinationPortVideo)); + cbEnableSpeaker = (CheckBox) findViewById(R.id.cbSpeaker); + cbEnableSpeaker.setChecked(enableSpeaker); + cbEnableAGC = (CheckBox) findViewById(R.id.cbAutoGainControl); + cbEnableAGC.setChecked(enableAGC); + cbEnableAECM = (CheckBox) findViewById(R.id.cbAECM); + cbEnableAECM.setChecked(enableAECM); + cbEnableNS = (CheckBox) findViewById(R.id.cbNoiseSuppression); + cbEnableNS.setChecked(enableNS); - etVRxPort = (EditText) findViewById(R.id.etVRxPort); - etVRxPort.setText(Integer.toString(receivePortVideo)); + cbEnableNack.setOnClickListener(this); + cbEnableSpeaker.setOnClickListener(this); + cbEnableAECM.setOnClickListener(this); - etATxPort = (EditText) findViewById(R.id.etATxPort); - etATxPort.setText(Integer.toString(destinationPortVoice)); + cbEnableAGC.setOnClickListener(this); + cbEnableNS.setOnClickListener(this); - etARxPort = (EditText) findViewById(R.id.etARxPort); - etARxPort.setText(Integer.toString(receivePortVoice)); + tvFrameRateI = (TextView) findViewById(R.id.tvFrameRateI); + tvBitRateI = (TextView) findViewById(R.id.tvBitRateI); + tvPacketLoss = (TextView) findViewById(R.id.tvPacketLoss); + tvFrameRateO = (TextView) findViewById(R.id.tvFrameRateO); + tvBitRateO = (TextView) findViewById(R.id.tvBitRateO); - cbEnableNack = (CheckBox) findViewById(R.id.cbNack); - cbEnableNack.setChecked(enableNack); - - cbEnableSpeaker = (CheckBox) findViewById(R.id.cbSpeaker); - cbEnableSpeaker.setChecked(enableSpeaker); - cbEnableAGC = (CheckBox) findViewById(R.id.cbAutoGainControl); - cbEnableAGC.setChecked(enableAGC); - cbEnableAECM = (CheckBox) findViewById(R.id.cbAECM); - cbEnableAECM.setChecked(enableAECM); - cbEnableNS = (CheckBox) findViewById(R.id.cbNoiseSuppression); - cbEnableNS.setChecked(enableNS); - - cbEnableNack.setOnClickListener(this); - cbEnableSpeaker.setOnClickListener(this); - cbEnableAECM.setOnClickListener(this); - - cbEnableAGC.setOnClickListener(this); - cbEnableNS.setOnClickListener(this); - - tvFrameRateI = (TextView) findViewById(R.id.tvFrameRateI); - tvBitRateI = (TextView) findViewById(R.id.tvBitRateI); - tvPacketLoss = (TextView) findViewById(R.id.tvPacketLoss); - tvFrameRateO = (TextView) findViewById(R.id.tvFrameRateO); - tvBitRateO = (TextView) findViewById(R.id.tvBitRateO); - - } - - @Override - protected void onPause() { - super.onPause(); - // if (remoteSurfaceView != null) - // glSurfaceView.onPause(); - } - - @Override - protected void onResume() { - super.onResume(); - // if (glSurfaceView != null) - // glSurfaceView.onResume(); - } - - private void StartCall() { - int ret = 0; - - if (enableVoice) { - SetupVoE(); - StartVoiceEngine(); } - if (enableVideo) { - if (enableVideoSend) { - // camera and preview surface - svLocal = ViERenderer.CreateLocalRenderer(this); - } + @Override + protected void onPause() { + super.onPause(); + // if (remoteSurfaceView != null) + // glSurfaceView.onPause(); + } - ret = ViEAndroidAPI.GetVideoEngine(); - ret = ViEAndroidAPI.Init(enableTrace); - channel = ViEAndroidAPI.CreateChannel(voiceChannel); - ret = ViEAndroidAPI.SetLocalReceiver(channel, 
- receivePortVideo);
- ret = ViEAndroidAPI.SetSendDestination(channel,
- destinationPortVideo,
- remoteIp.getBytes());
+ @Override
+ protected void onResume() {
+ super.onResume();
+ // if (glSurfaceView != null)
+ // glSurfaceView.onResume();
+ }
- if (enableVideoReceive) {
- if(android.os.Build.MANUFACTURER.equals("samsung")) {
- // Create an Open GL renderer
- remoteSurfaceView = ViERenderer.CreateRenderer(this, true);
- ret = ViEAndroidAPI.AddRemoteRenderer(channel, remoteSurfaceView);
+ private void StartCall() {
+ int ret = 0;
+
+ if (enableVoice) {
+ SetupVoE();
+ StartVoiceEngine();
+ }
+
+ if (enableVideo) {
+ if (enableVideoSend) {
+ // camera and preview surface
+ svLocal = ViERenderer.CreateLocalRenderer(this);
+ }
+
+ ret = ViEAndroidAPI.GetVideoEngine();
+ ret = ViEAndroidAPI.Init(enableTrace);
+ channel = ViEAndroidAPI.CreateChannel(voiceChannel);
+ ret = ViEAndroidAPI.SetLocalReceiver(channel,
+ receivePortVideo);
+ ret = ViEAndroidAPI.SetSendDestination(channel,
+ destinationPortVideo,
+ remoteIp.getBytes());
+
+ if (enableVideoReceive) {
+ if(android.os.Build.MANUFACTURER.equals("samsung")) {
+ // Create an Open GL renderer
+ remoteSurfaceView = ViERenderer.CreateRenderer(this, true);
+ ret = ViEAndroidAPI.AddRemoteRenderer(channel, remoteSurfaceView);
+ }
+ else {
+ remoteSurfaceView = ViERenderer.CreateRenderer(this, false);
+ ret = ViEAndroidAPI.AddRemoteRenderer(channel, remoteSurfaceView);
+ }
+
+ ret = ViEAndroidAPI.SetReceiveCodec(channel,
+ codecType,
+ INIT_BITRATE,
+ codecSizeWidth,
+ codecSizeHeight,
+ RECEIVE_CODEC_FRAMERATE);
+ ret = ViEAndroidAPI.StartRender(channel);
+ ret = ViEAndroidAPI.StartReceive(channel);
+ }
+
+ if (enableVideoSend) {
+ currentCameraOrientation =
+ ViEAndroidAPI.GetCameraOrientation(usingFrontCamera?1:0);
+ ret = ViEAndroidAPI.SetSendCodec(channel,
+ codecType,
+ INIT_BITRATE,
+ codecSizeWidth,
+ codecSizeHeight,
+ SEND_CODEC_FRAMERATE);
+ int cameraId = ViEAndroidAPI.StartCamera(channel, usingFrontCamera?1:0);
+
+ if (cameraId > 0) {
+ // Store the started camera's id in the member field; the former
+ // self-assignment (cameraId = cameraId) never updated the field.
+ this.cameraId = cameraId;
+ int neededRotation = GetCameraOrientation(currentCameraOrientation);
+ ViEAndroidAPI.SetRotation(cameraId,neededRotation);
+ }
+ else {
+ ret = cameraId;
+ }
+ ret = ViEAndroidAPI.StartSend(channel);
+ }
+
+ ret = ViEAndroidAPI.SetCallback(channel, this);
+
+ if (enableVideoSend) {
+ if (mLlLocalSurface != null)
+ mLlLocalSurface.addView(svLocal);
+ }
+
+ if (enableVideoReceive) {
+ if (mLlRemoteSurface != null)
+ mLlRemoteSurface.addView(remoteSurfaceView);
+ }
+
+ viERunning = true;
+ }
+
+ }
+
+ private void DemoLog(String msg) {
+ Log.d("*WEBRTC*", msg);
+ }
+
+ private void StopVoiceEngine() {
+ // Stop send
+ if (0 != ViEAndroidAPI.VoE_StopSend(voiceChannel)) {
+ DemoLog("VoE stop send failed");
+ }
+
+ // Stop listen
+ if (0 != ViEAndroidAPI.VoE_StopListen(voiceChannel)) {
+ DemoLog("VoE stop listen failed");
+ }
+
+ // Stop playout
+ if (0 != ViEAndroidAPI.VoE_StopPlayout(voiceChannel)) {
+ DemoLog("VoE stop playout failed");
+ }
+
+ if (0 != ViEAndroidAPI.VoE_DeleteChannel(voiceChannel)) {
+ DemoLog("VoE delete channel failed");
+ }
+ voiceChannel = -1;
+
+ // Terminate
+ if (0 != ViEAndroidAPI.VoE_Terminate()) {
+ DemoLog("VoE terminate failed");
+ }
+ }
+
+ private void SetupVoE() {
+ // Create VoiceEngine
+ // Error logging is done in native API wrapper
+ ViEAndroidAPI.VoE_Create(this);
+
+ // Initialize
+ if (0 != ViEAndroidAPI.VoE_Init(enableTrace)) {
+ DemoLog("VoE init failed");
+ }
+
+ // Create channel
+ // VoE_CreateChannel returns the new channel id (>= 0), or a negative
+ // value on failure, so only a negative result is an error.
+ voiceChannel = ViEAndroidAPI.VoE_CreateChannel();
+ if (voiceChannel < 0) {
+ DemoLog("VoE create channel failed");
+ }
+
+ // Use the voice call audio stream for hardware volume controls
+ setVolumeControlStream(AudioManager.STREAM_VOICE_CALL);
+ }
+
+ private int StartVoiceEngine() {
+ // Set local receiver
+ if (0 != ViEAndroidAPI.VoE_SetLocalReceiver(voiceChannel,
+ receivePortVoice)) {
+ DemoLog("VoE set local receiver failed");
+ }
+
+ if (0 != ViEAndroidAPI.VoE_StartListen(voiceChannel)) {
+ DemoLog("VoE start listen failed");
+ }
+
+ // Route audio
+ RouteAudio(enableSpeaker);
+
+ // set volume to default value
+ if (0 != ViEAndroidAPI.VoE_SetSpeakerVolume(volumeLevel)) {
+ DemoLog("VoE set speaker volume failed");
+ }
+
+ // Start playout
+ if (0 != ViEAndroidAPI.VoE_StartPlayout(voiceChannel)) {
+ DemoLog("VoE start playout failed");
+ }
+
+ if (0 != ViEAndroidAPI.VoE_SetSendDestination(voiceChannel,
+ destinationPortVoice,
+ remoteIp)) {
+ DemoLog("VoE set send destination failed");
+ }
+
+ // 0 = iPCM-wb, 5 = PCMU
+ if (0 != ViEAndroidAPI.VoE_SetSendCodec(voiceChannel, voiceCodecType)) {
+ DemoLog("VoE set send codec failed");
+ }
+
+ if (0 != ViEAndroidAPI.VoE_SetECStatus(enableAECM, 5, 0, 28)) {
+ DemoLog("VoE set EC Status failed");
+ }
+
+ if (0 != ViEAndroidAPI.VoE_StartSend(voiceChannel)) {
+ DemoLog("VoE start send failed");
+ }
+
+ voERunning = true;
+ return 0;
+ }
+
+ private void RouteAudio(boolean enableSpeaker) {
+ int sdkVersion = Integer.parseInt(android.os.Build.VERSION.SDK);
+ if (sdkVersion >= 5) {
+ AudioManager am =
+ (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
+ am.setSpeakerphoneOn(enableSpeaker);
+ }
+ else {
- remoteSurfaceView = ViERenderer.CreateRenderer(this, false);
- ret = ViEAndroidAPI.AddRemoteRenderer(channel, remoteSurfaceView);
+ if (0 != ViEAndroidAPI.VoE_SetLoudspeakerStatus(enableSpeaker)) {
+ DemoLog("VoE set loudspeaker status failed");
+ }
+ }
+ }
- ret = ViEAndroidAPI.SetReceiveCodec(channel,
- codecType,
- INIT_BITRATE,
- codecSizeWidth,
- codecSizeHeight,
- RECEIVE_CODEC_FRAMERATE);
- ret = ViEAndroidAPI.StartRender(channel);
- ret = ViEAndroidAPI.StartReceive(channel);
- }
+ public void onClick(View arg0) {
+ switch (arg0.getId()) {
+ case R.id.btSwitchCamera:
+ if (usingFrontCamera) {
+ btSwitchCamera.setText(R.string.frontCamera);
+ }
+ else {
+ btSwitchCamera.setText(R.string.backCamera);
+ }
+ usingFrontCamera = !usingFrontCamera;
- if (enableVideoSend) {
- currentCameraOrientation =
- ViEAndroidAPI.GetCameraOrientation(usingFrontCamera?1:0);
- ret = ViEAndroidAPI.SetSendCodec(channel,
- codecType,
- INIT_BITRATE,
- codecSizeWidth,
- codecSizeHeight,
- SEND_CODEC_FRAMERATE);
- int cameraId = ViEAndroidAPI.StartCamera(channel, usingFrontCamera?1:0);
+ if (viERunning) {
+ currentCameraOrientation =
+ ViEAndroidAPI.GetCameraOrientation(usingFrontCamera?1:0);
+ ViEAndroidAPI.StopCamera(cameraId);
+ mLlLocalSurface.removeView(svLocal);
- if(cameraId>0) {
- cameraId = cameraId;
- int neededRotation = GetCameraOrientation(currentCameraOrientation);
- ViEAndroidAPI.SetRotation(cameraId,neededRotation);
+ ViEAndroidAPI.StartCamera(channel,usingFrontCamera?1:0);
+ mLlLocalSurface.addView(svLocal);
+ int neededRotation = GetCameraOrientation(currentCameraOrientation);
+ ViEAndroidAPI.SetRotation(cameraId, neededRotation);
+ }
+ break;
- }
- else {
- ret=cameraId;
- }
- ret = ViEAndroidAPI.StartSend(channel);
- }
+ case R.id.btStartStopCall:
+ ReadSettings();
+ if (viERunning || voERunning) {
+ StopAll();
+ wakeLock.release(); // release the wake lock
+ btStartStopCall.setText(R.string.startCall);
+ }
+ else if (enableVoice || enableVideo) {
+ StartCall();
+ 
wakeLock.acquire();//screen stay on during the call + btStartStopCall.setText(R.string.stopCall); + } + break; + case R.id.btExit: + StopAll(); + finish(); + break; + case R.id.cbNack: + enableNack = cbEnableNack.isChecked(); + if (viERunning) { + ViEAndroidAPI.EnableNACK(channel, enableNack); + } + break; + case R.id.cbSpeaker: + enableSpeaker = cbEnableSpeaker.isChecked(); + if (voERunning){ + RouteAudio(enableSpeaker); + } + break; + case R.id.cbAutoGainControl: + enableAGC=cbEnableAGC.isChecked(); + if(voERunning) { + //Enable AGC default mode. + ViEAndroidAPI.VoE_SetAGCStatus(enableAGC,1); + } + break; + case R.id.cbNoiseSuppression: + enableNS=cbEnableNS.isChecked(); + if(voERunning) { + //Enable NS default mode. + ViEAndroidAPI.VoE_SetNSStatus(enableNS, 1); + } + break; + case R.id.cbAECM: + enableAECM = cbEnableAECM.isChecked(); + if (voERunning) { + //EC_AECM=5 + //AECM_DEFAULT=0 + ViEAndroidAPI.VoE_SetECStatus(enableAECM, 5, 0, 28); + } + break; } - else { - ret=cameraId; - } - ret = ViEAndroidAPI.StartSend(channel); - } - - ret = ViEAndroidAPI.SetCallback(channel, this); - - if (enableVideoSend) { - if (mLlLocalSurface != null) - mLlLocalSurface.addView(svLocal); - } - - if (enableVideoReceive) { - if (mLlRemoteSurface != null) - mLlRemoteSurface.addView(remoteSurfaceView); - } - - viERunning = true; } - } + private void ReadSettings() { + codecType = spCodecType.getSelectedItemPosition(); + voiceCodecType = spVoiceCodecType.getSelectedItemPosition(); - private void DemoLog(String msg) { - Log.d("*WEBRTC*", msg); - } + String sCodecSize = spCodecSize.getSelectedItem().toString(); + String[] aCodecSize = sCodecSize.split("x"); + codecSizeWidth = Integer.parseInt(aCodecSize[0]); + codecSizeHeight = Integer.parseInt(aCodecSize[1]); - private void StopVoiceEngine() { - // Stop send - if (0 != ViEAndroidAPI.VoE_StopSend(voiceChannel)) { - DemoLog("VoE stop send failed"); - } + loopbackMode = cbLoopback.isChecked(); + enableVoice = cbVoice.isChecked(); + enableVideoSend = cbVideoSend.isChecked(); + enableVideoReceive = cbVideoReceive.isChecked(); + enableVideo = enableVideoSend || enableVideoReceive; - // Stop listen - if (0 != ViEAndroidAPI.VoE_StopListen(voiceChannel)) { - DemoLog("VoE stop listen failed"); - } + destinationPortVideo = + Integer.parseInt(etVTxPort.getText().toString()); + receivePortVideo = + Integer.parseInt(etVRxPort.getText().toString()); + destinationPortVoice = + Integer.parseInt(etATxPort.getText().toString()); + receivePortVoice = + Integer.parseInt(etARxPort.getText().toString()); - // Stop playout - if (0 != ViEAndroidAPI.VoE_StopPlayout(voiceChannel)) { - DemoLog("VoE stop playout failed"); - } - - if (0 != ViEAndroidAPI.VoE_DeleteChannel(voiceChannel)) { - DemoLog("VoE delete channel failed"); - } - voiceChannel=-1; - - // Terminate - if (0 != ViEAndroidAPI.VoE_Terminate()) { - DemoLog("VoE terminate failed"); - } - } - - private void SetupVoE() { - // Create VoiceEngine - // Error logging is done in native API wrapper - ViEAndroidAPI.VoE_Create(this); - - // Initialize - if (0 != ViEAndroidAPI.VoE_Init(enableTrace)) { - DemoLog("VoE init failed"); - } - - // Create channel - voiceChannel = ViEAndroidAPI.VoE_CreateChannel(); - if (0 != voiceChannel) { - DemoLog("VoE create channel failed"); - } - - // Suggest to use the voice call audio stream for hardware volume controls - setVolumeControlStream(AudioManager.STREAM_VOICE_CALL); - } - - private int StartVoiceEngine() { - // Set local receiver - if (0 != ViEAndroidAPI.VoE_SetLocalReceiver(voiceChannel, - 
receivePortVoice)) { - DemoLog("VoE set local receiver failed"); - } - - if (0 != ViEAndroidAPI.VoE_StartListen(voiceChannel)) { - DemoLog("VoE start listen failed"); - } - - // Route audio - RouteAudio(enableSpeaker); - - // set volume to default value - if (0 != ViEAndroidAPI.VoE_SetSpeakerVolume(volumeLevel)) { - DemoLog("VoE set speaker volume failed"); - } - - // Start playout - if (0 != ViEAndroidAPI.VoE_StartPlayout(voiceChannel)) { - DemoLog("VoE start playout failed"); - } - - if (0 != ViEAndroidAPI.VoE_SetSendDestination(voiceChannel, - destinationPortVoice, - remoteIp)) { - DemoLog("VoE set send destination failed"); - } - - // 0 = iPCM-wb, 5 = PCMU - if (0 != ViEAndroidAPI.VoE_SetSendCodec(voiceChannel, voiceCodecType)) { - DemoLog("VoE set send codec failed"); - } - - if (0 != ViEAndroidAPI.VoE_SetECStatus(enableAECM, 5, 0, 28)){ - DemoLog("VoE set EC Status failed"); - } - - if (0 != ViEAndroidAPI.VoE_StartSend(voiceChannel)) { - DemoLog("VoE start send failed"); - } - - voERunning = true; - return 0; - } - - private void RouteAudio(boolean enableSpeaker) { - int sdkVersion = Integer.parseInt(android.os.Build.VERSION.SDK); - if (sdkVersion >= 5) { - AudioManager am = - (AudioManager) this.getSystemService(Context.AUDIO_SERVICE); - am.setSpeakerphoneOn(enableSpeaker); - } - else { - if (0 != ViEAndroidAPI.VoE_SetLoudspeakerStatus(enableSpeaker)) { - DemoLog("VoE set louspeaker status failed"); - } - } - } - - public void onClick(View arg0) { - switch (arg0.getId()) { - case R.id.btSwitchCamera: - if (usingFrontCamera ){ - btSwitchCamera.setText(R.string.frontCamera); - } - else { - btSwitchCamera.setText(R.string.backCamera); - } - usingFrontCamera = !usingFrontCamera; - - if (viERunning) { - currentCameraOrientation = - ViEAndroidAPI.GetCameraOrientation(usingFrontCamera?1:0); - ViEAndroidAPI.StopCamera(cameraId); - mLlLocalSurface.removeView(svLocal); - - ViEAndroidAPI.StartCamera(channel,usingFrontCamera?1:0); - mLlLocalSurface.addView(svLocal); - int neededRotation = GetCameraOrientation(currentCameraOrientation); - ViEAndroidAPI.SetRotation(cameraId, neededRotation); - } - break; - case R.id.btStartStopCall: - ReadSettings(); - if (viERunning || voERunning) { - StopAll(); - wakeLock.release();//release the wake lock - btStartStopCall.setText(R.string.startCall); - } - else if (enableVoice || enableVideo){ - StartCall(); - wakeLock.acquire();//screen stay on during the call - btStartStopCall.setText(R.string.stopCall); - } - break; - case R.id.btExit: - StopAll(); - finish(); - break; - case R.id.cbNack: enableNack = cbEnableNack.isChecked(); - if (viERunning) { - ViEAndroidAPI.EnableNACK(channel, enableNack); - } - break; - case R.id.cbSpeaker: - enableSpeaker = cbEnableSpeaker.isChecked(); - if (voERunning){ - RouteAudio(enableSpeaker); - } - break; - case R.id.cbAutoGainControl: - enableAGC=cbEnableAGC.isChecked(); - if(voERunning) { - //Enable AGC default mode. - ViEAndroidAPI.VoE_SetAGCStatus(enableAGC,1); - } - break; - case R.id.cbNoiseSuppression: - enableNS=cbEnableNS.isChecked(); - if(voERunning) { - //Enable NS default mode. 
- ViEAndroidAPI.VoE_SetNSStatus(enableNS, 1); - } - break; - case R.id.cbAECM: - enableAECM = cbEnableAECM.isChecked(); - if (voERunning) { - //EC_AECM=5 - //AECM_DEFAULT=0 - ViEAndroidAPI.VoE_SetECStatus(enableAECM, 5, 0, 28); - } - break; + enableSpeaker = cbEnableSpeaker.isChecked(); + enableAGC = cbEnableAGC.isChecked(); + enableAECM = cbEnableAECM.isChecked(); + enableNS = cbEnableNS.isChecked(); + + if (loopbackMode) + remoteIp = "127.0.0.1"; + else + remoteIp = etRemoteIp.getText().toString(); } - } - private void ReadSettings() { - codecType = spCodecType.getSelectedItemPosition(); - voiceCodecType = spVoiceCodecType.getSelectedItemPosition(); - - String sCodecSize = spCodecSize.getSelectedItem().toString(); - String[] aCodecSize = sCodecSize.split("x"); - codecSizeWidth = Integer.parseInt(aCodecSize[0]); - codecSizeHeight = Integer.parseInt(aCodecSize[1]); - - loopbackMode = cbLoopback.isChecked(); - enableVoice = cbVoice.isChecked(); - enableVideoSend = cbVideoSend.isChecked(); - enableVideoReceive = cbVideoReceive.isChecked(); - enableVideo = enableVideoSend || enableVideoReceive; - - destinationPortVideo = - Integer.parseInt(etVTxPort.getText().toString()); - receivePortVideo = - Integer.parseInt(etVRxPort.getText().toString()); - destinationPortVoice = - Integer.parseInt(etATxPort.getText().toString()); - receivePortVoice = - Integer.parseInt(etARxPort.getText().toString()); - - enableNack = cbEnableNack.isChecked(); - enableSpeaker = cbEnableSpeaker.isChecked(); - enableAGC = cbEnableAGC.isChecked(); - enableAECM = cbEnableAECM.isChecked(); - enableNS = cbEnableNS.isChecked(); - - if (loopbackMode) - remoteIp = "127.0.0.1"; - else - remoteIp = etRemoteIp.getText().toString(); - } - - public void onItemSelected(AdapterView adapterView, View view, - int position, long id) { - if ((adapterView==spCodecType || adapterView==spCodecSize) && - viERunning) { - ReadSettings(); - //change the codectype - if (enableVideoReceive) { - if (0 !=ViEAndroidAPI.SetReceiveCodec(channel, codecType, - INIT_BITRATE, codecSizeWidth, - codecSizeHeight, - RECEIVE_CODEC_FRAMERATE)) - DemoLog("ViE set receive codec failed"); - } - if (enableVideoSend) { - if (0!=ViEAndroidAPI.SetSendCodec(channel, codecType, INIT_BITRATE, - codecSizeWidth, - codecSizeHeight, - SEND_CODEC_FRAMERATE)) - DemoLog("ViE set send codec failed"); - } - } - else if ((adapterView==spVoiceCodecType) && voERunning) { - //change voice engine codec - ReadSettings(); - if (0 != ViEAndroidAPI.VoE_SetSendCodec(voiceChannel, voiceCodecType)) { - DemoLog("VoE set send codec failed"); - } - } - } - - public void onNothingSelected(AdapterView arg0) { - DemoLog("No setting selected"); - } - - public int UpdateStats(int in_frameRateI, int in_bitRateI, int in_packetLoss, - int in_frameRateO, int in_bitRateO) { - frameRateI = in_frameRateI; - bitRateI = in_bitRateI; - packetLoss = in_packetLoss; - frameRateO = in_frameRateO; - bitRateO = in_bitRateO; - runOnUiThread(new Runnable() { - public void run() { - tvFrameRateI.setText("Incoming FrameRate - " + - Integer.toString(frameRateI)); - tvBitRateI.setText("Incoming BitRate - " + - Integer.toString(bitRateI)); - tvPacketLoss.setText("Incoming Packet Loss - " + - Integer.toString(packetLoss)); - tvFrameRateO.setText("Send FrameRate - " + - Integer.toString(frameRateO)); - tvBitRateO.setText("Send BitRate - " + - Integer.toString(bitRateO)); + public void onItemSelected(AdapterView adapterView, View view, + int position, long id) { + if ((adapterView==spCodecType || adapterView==spCodecSize) && + 
viERunning) { + ReadSettings(); + //change the codectype + if (enableVideoReceive) { + if (0 !=ViEAndroidAPI.SetReceiveCodec(channel, codecType, + INIT_BITRATE, codecSizeWidth, + codecSizeHeight, + RECEIVE_CODEC_FRAMERATE)) + DemoLog("ViE set receive codec failed"); + } + if (enableVideoSend) { + if (0!=ViEAndroidAPI.SetSendCodec(channel, codecType, INIT_BITRATE, + codecSizeWidth, + codecSizeHeight, + SEND_CODEC_FRAMERATE)) + DemoLog("ViE set send codec failed"); + } } - }); - return 0; - } + else if ((adapterView==spVoiceCodecType) && voERunning) { + //change voice engine codec + ReadSettings(); + if (0 != ViEAndroidAPI.VoE_SetSendCodec(voiceChannel, voiceCodecType)) { + DemoLog("VoE set send codec failed"); + } + } + } + + public void onNothingSelected(AdapterView arg0) { + DemoLog("No setting selected"); + } + + public int UpdateStats(int in_frameRateI, int in_bitRateI, int in_packetLoss, + int in_frameRateO, int in_bitRateO) { + frameRateI = in_frameRateI; + bitRateI = in_bitRateI; + packetLoss = in_packetLoss; + frameRateO = in_frameRateO; + bitRateO = in_bitRateO; + runOnUiThread(new Runnable() { + public void run() { + tvFrameRateI.setText("Incoming FrameRate - " + + Integer.toString(frameRateI)); + tvBitRateI.setText("Incoming BitRate - " + + Integer.toString(bitRateI)); + tvPacketLoss.setText("Incoming Packet Loss - " + + Integer.toString(packetLoss)); + tvFrameRateO.setText("Send FrameRate - " + + Integer.toString(frameRateO)); + tvBitRateO.setText("Send BitRate - " + + Integer.toString(bitRateO)); + } + }); + return 0; + } } diff --git a/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java b/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java index 701f8bd62..a0797c2e6 100644 --- a/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java +++ b/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java @@ -1,5 +1,5 @@ /* - * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 
* * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -18,121 +18,121 @@ import android.view.SurfaceView; public class ViEAndroidJavaAPI { - public ViEAndroidJavaAPI(Context context) { - Log.d("*WEBRTCJ*", "Loading ViEAndroidJavaAPI..."); - System.loadLibrary("webrtc-video-demo-jni"); + public ViEAndroidJavaAPI(Context context) { + Log.d("*WEBRTCJ*", "Loading ViEAndroidJavaAPI..."); + System.loadLibrary("webrtc-video-demo-jni"); - Log.d("*WEBRTCJ*", "Calling native init..."); - if (!NativeInit(context)) { - Log.e("*WEBRTCJ*", "Native init failed"); - throw new RuntimeException("Native init failed"); + Log.d("*WEBRTCJ*", "Calling native init..."); + if (!NativeInit(context)) { + Log.e("*WEBRTCJ*", "Native init failed"); + throw new RuntimeException("Native init failed"); + } + else { + Log.d("*WEBRTCJ*", "Native init successful"); + } + String a = ""; + a.getBytes(); } - else { - Log.d("*WEBRTCJ*", "Native init successful"); - } - String a = ""; - a.getBytes(); - } - // API Native - private native boolean NativeInit(Context context); + // API Native + private native boolean NativeInit(Context context); - // Video Engine API - // Initialization and Termination functions - public native int GetVideoEngine(); - public native int Init(boolean enableTrace); - public native int Terminate(); + // Video Engine API + // Initialization and Termination functions + public native int GetVideoEngine(); + public native int Init(boolean enableTrace); + public native int Terminate(); - public native int StartSend(int channel); - public native int StopRender(int channel); - public native int StopSend(int channel); - public native int StartReceive(int channel); - public native int StopReceive(int channel); - // Channel functions - public native int CreateChannel(int voiceChannel); - // Receiver & Destination functions - public native int SetLocalReceiver(int channel, int port); - public native int SetSendDestination(int channel, int port, byte ipadr[]); - // Codec - public native int SetReceiveCodec(int channel, int codecNum, - int intbitRate, int width, - int height, int frameRate); - public native int SetSendCodec(int channel, int codecNum, - int intbitRate, int width, - int height, int frameRate); - // Rendering - public native int AddRemoteRenderer(int channel, Object glSurface); - public native int RemoveRemoteRenderer(int channel); - public native int StartRender(int channel); + public native int StartSend(int channel); + public native int StopRender(int channel); + public native int StopSend(int channel); + public native int StartReceive(int channel); + public native int StopReceive(int channel); + // Channel functions + public native int CreateChannel(int voiceChannel); + // Receiver & Destination functions + public native int SetLocalReceiver(int channel, int port); + public native int SetSendDestination(int channel, int port, byte ipadr[]); + // Codec + public native int SetReceiveCodec(int channel, int codecNum, + int intbitRate, int width, + int height, int frameRate); + public native int SetSendCodec(int channel, int codecNum, + int intbitRate, int width, + int height, int frameRate); + // Rendering + public native int AddRemoteRenderer(int channel, Object glSurface); + public native int RemoveRemoteRenderer(int channel); + public native int StartRender(int channel); - // Capture - public native int StartCamera(int channel, int cameraNum); - public native int StopCamera(int cameraId); - public native int 
GetCameraOrientation(int cameraNum); - public native int SetRotation(int cameraId,int degrees); + // Capture + public native int StartCamera(int channel, int cameraNum); + public native int StopCamera(int cameraId); + public native int GetCameraOrientation(int cameraNum); + public native int SetRotation(int cameraId,int degrees); - // NACK - public native int EnableNACK(int channel, boolean enable); + // NACK + public native int EnableNACK(int channel, boolean enable); - //PLI for H.264 - public native int EnablePLI(int channel, boolean enable); + //PLI for H.264 + public native int EnablePLI(int channel, boolean enable); - // Enable stats callback - public native int SetCallback(int channel, IViEAndroidCallback callback); + // Enable stats callback + public native int SetCallback(int channel, IViEAndroidCallback callback); - // Voice Engine API - // Create and Delete functions - public native boolean VoE_Create(Activity context); - public native boolean VoE_Delete(); + // Voice Engine API + // Create and Delete functions + public native boolean VoE_Create(Activity context); + public native boolean VoE_Delete(); - // Initialization and Termination functions - public native int VoE_Authenticate(String key); - public native int VoE_Init(boolean enableTrace); - public native int VoE_Terminate(); + // Initialization and Termination functions + public native int VoE_Authenticate(String key); + public native int VoE_Init(boolean enableTrace); + public native int VoE_Terminate(); - // Channel functions - public native int VoE_CreateChannel(); - public native int VoE_DeleteChannel(int channel); + // Channel functions + public native int VoE_CreateChannel(); + public native int VoE_DeleteChannel(int channel); - // Receiver & Destination functions - public native int VoE_SetLocalReceiver(int channel, int port); - public native int VoE_SetSendDestination(int channel, int port, - String ipaddr); + // Receiver & Destination functions + public native int VoE_SetLocalReceiver(int channel, int port); + public native int VoE_SetSendDestination(int channel, int port, + String ipaddr); - // Media functions - public native int VoE_StartListen(int channel); - public native int VoE_StartPlayout(int channel); - public native int VoE_StartSend(int channel); - public native int VoE_StopListen(int channel); - public native int VoE_StopPlayout(int channel); - public native int VoE_StopSend(int channel); + // Media functions + public native int VoE_StartListen(int channel); + public native int VoE_StartPlayout(int channel); + public native int VoE_StartSend(int channel); + public native int VoE_StopListen(int channel); + public native int VoE_StopPlayout(int channel); + public native int VoE_StopSend(int channel); - // Volume - public native int VoE_SetSpeakerVolume(int volume); + // Volume + public native int VoE_SetSpeakerVolume(int volume); - // Hardware - public native int VoE_SetLoudspeakerStatus(boolean enable); + // Hardware + public native int VoE_SetLoudspeakerStatus(boolean enable); - // Playout file locally - public native int VoE_StartPlayingFileLocally(int channel, - String fileName, - boolean loop); - public native int VoE_StopPlayingFileLocally(int channel); + // Playout file locally + public native int VoE_StartPlayingFileLocally(int channel, + String fileName, + boolean loop); + public native int VoE_StopPlayingFileLocally(int channel); - // Play file as microphone - public native int VoE_StartPlayingFileAsMicrophone(int channel, - String fileName, - boolean loop); - public native int 
VoE_StopPlayingFileAsMicrophone(int channel);
- // Codec-setting functions
- public native int VoE_NumOfCodecs();
- public native int VoE_SetSendCodec(int channel, int index);
+ // Codec-setting functions
+ public native int VoE_NumOfCodecs();
+ public native int VoE_SetSendCodec(int channel, int index);
- //VE funtions
- public native int VoE_SetECStatus(boolean enable, int mode,
- int AESmode, int AESattenuation);
- public native int VoE_SetAGCStatus(boolean enable, int mode);
- public native int VoE_SetNSStatus(boolean enable, int mode);
+ // VoE functions
+ public native int VoE_SetECStatus(boolean enable, int mode,
+ int AESmode, int AESattenuation);
+ public native int VoE_SetAGCStatus(boolean enable, int mode);
+ public native int VoE_SetNSStatus(boolean enable, int mode);
}
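
For orientation, the sequence below is a minimal sketch of how the JNI wrapper above is meant to be driven for a video-only call, condensed from the StartCall() path in ViEAndroidDemo.java. The helper name startVideoOnlyCall, the port arguments, and the codec parameters are illustrative assumptions, not part of this patch; the calls themselves are the wrapper methods declared above. Note that the id returned by StartCamera has to be written back into a field (this.cameraId) so that StopCamera and SetRotation can find it later.

    // Hypothetical helper inside an Activity such as ViEAndroidDemo; the name
    // startVideoOnlyCall and the codec/port values are illustrative only.
    private int cameraId;
    private int channel;

    private int startVideoOnlyCall(ViEAndroidJavaAPI api, SurfaceView remoteView,
                                   String remoteIp, int rxPort, int txPort) {
        api.GetVideoEngine();                 // obtain the native video engine
        api.Init(false);                      // no tracing
        channel = api.CreateChannel(-1);      // -1: no voice channel to pair with
        api.SetLocalReceiver(channel, rxPort);
        api.SetSendDestination(channel, txPort, remoteIp.getBytes());
        // codec index 0, 400 kbps, 352x288, 30/15 fps: illustrative defaults
        api.SetReceiveCodec(channel, 0, 400, 352, 288, 30);
        api.SetSendCodec(channel, 0, 400, 352, 288, 15);
        api.AddRemoteRenderer(channel, remoteView);
        api.StartRender(channel);
        api.StartReceive(channel);
        int id = api.StartCamera(channel, 0); // 0: back camera
        if (id > 0) {
            this.cameraId = id;               // keep the id for StopCamera/SetRotation
        }
        return api.StartSend(channel);
    }

Teardown mirrors StopAll() in the demo: StopRender, StopReceive, StopSend, RemoveRemoteRenderer, StopCamera(cameraId), then Terminate.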
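The GetCameraOrientation() helper in the demo maps the current display rotation to the rotation that must be applied to camera frames: (cameraOrientation + degrees) % 360 when the sensor orientation is above 180 (front-facing sensors, whose image is mirrored), and (cameraOrientation - degrees + 360) % 360 otherwise. A self-contained sketch of the same arithmetic, runnable off-device; the sensor mounts of 270 (front) and 90 (back) degrees used in the printed table are assumptions, not values from the patch:

    // Standalone illustration of the rotation arithmetic used by
    // GetCameraOrientation() in ViEAndroidDemo.java.
    public class CameraRotationDemo {
        static int neededRotation(int cameraOrientation, int displayDegrees) {
            if (cameraOrientation > 180) {
                // front camera: compensate in the same direction as the display
                return (cameraOrientation + displayDegrees) % 360;
            }
            // back camera: compensate against the display rotation
            return (cameraOrientation - displayDegrees + 360) % 360;
        }

        public static void main(String[] args) {
            int[] displays = {0, 90, 180, 270};
            for (int cam : new int[] {90, 270}) {
                for (int disp : displays) {
                    System.out.printf("camera %d / display %d -> rotate %d%n",
                                      cam, disp, neededRotation(cam, disp));
                }
            }
        }
    }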