diff --git a/webrtc/build/common.gypi b/webrtc/build/common.gypi
index 3bcf782e4..f340c1882 100644
--- a/webrtc/build/common.gypi
+++ b/webrtc/build/common.gypi
@@ -17,11 +17,13 @@
       # This will be set to zero in the supplement.gypi triggered by a
       # gclient hook in the standalone build.
       'build_with_chromium%': 1,
+      'build_with_libjingle%': 0,
     },
     'build_with_chromium%': '<(build_with_chromium)',
+    'build_with_libjingle%': '<(build_with_libjingle)',
     'conditions': [
-      ['build_with_chromium==1', {
+      ['build_with_chromium==1 or build_with_libjingle==1', {
         'webrtc_root%': '<(DEPTH)/third_party/webrtc',
       }, {
         'webrtc_root%': '<(DEPTH)/webrtc',
@@ -29,12 +31,14 @@
     ],
   },
   'build_with_chromium%': '<(build_with_chromium)',
+  'build_with_libjingle%': '<(build_with_libjingle)',
   'webrtc_root%': '<(webrtc_root)',
   'webrtc_vp8_dir%': '<(webrtc_root)/modules/video_coding/codecs/vp8',
   'include_opus%': 1,
 },
 'build_with_chromium%': '<(build_with_chromium)',
+'build_with_libjingle%': '<(build_with_libjingle)',
 'webrtc_root%': '<(webrtc_root)',
 'webrtc_vp8_dir%': '<(webrtc_vp8_dir)',
 'include_opus%': '<(include_opus)',
@@ -121,6 +125,9 @@
   'build_libvpx%': 0,
   'include_tests%': 0,
 }],
+['build_with_libjingle==1', {
+  'include_tests%': 0,
+}],
 ['target_arch=="arm"', {
   'prefer_fixed_point%': 1,
 }],
diff --git a/webrtc/modules/audio_device/android/org/webrtc/voiceengine/WebRTCAudioDevice.java b/webrtc/modules/audio_device/android/org/webrtc/voiceengine/WebRTCAudioDevice.java
index 3bdfc940f..c324b9c08 100644
--- a/webrtc/modules/audio_device/android/org/webrtc/voiceengine/WebRTCAudioDevice.java
+++ b/webrtc/modules/audio_device/android/org/webrtc/voiceengine/WebRTCAudioDevice.java
@@ -61,10 +61,10 @@ class WebRTCAudioDevice {
     @SuppressWarnings("unused")
     private int InitRecording(int audioSource, int sampleRate) {
         // get the minimum buffer size that can be used
-        int minRecBufSize =
-            AudioRecord.getMinBufferSize(sampleRate,
-                                         AudioFormat.CHANNEL_CONFIGURATION_MONO,
-                                         AudioFormat.ENCODING_PCM_16BIT);
+        int minRecBufSize = AudioRecord.getMinBufferSize(
+                sampleRate,
+                AudioFormat.CHANNEL_IN_MONO,
+                AudioFormat.ENCODING_PCM_16BIT);

         // DoLog("min rec buf size is " + minRecBufSize);

@@ -83,7 +83,7 @@ class WebRTCAudioDevice {
             _audioRecord = new AudioRecord(
                     audioSource,
                     sampleRate,
-                    AudioFormat.CHANNEL_CONFIGURATION_MONO,
+                    AudioFormat.CHANNEL_IN_MONO,
                     AudioFormat.ENCODING_PCM_16BIT,
                     recBufSize);

@@ -127,7 +127,7 @@ class WebRTCAudioDevice {
         // get the minimum buffer size that can be used
         int minPlayBufSize = AudioTrack.getMinBufferSize(
                 sampleRate,
-                AudioFormat.CHANNEL_CONFIGURATION_MONO,
+                AudioFormat.CHANNEL_OUT_MONO,
                 AudioFormat.ENCODING_PCM_16BIT);

         // DoLog("min play buf size is " + minPlayBufSize);

@@ -149,7 +149,7 @@ class WebRTCAudioDevice {
             _audioTrack = new AudioTrack(
                     AudioManager.STREAM_VOICE_CALL,
                     sampleRate,
-                    AudioFormat.CHANNEL_CONFIGURATION_MONO,
+                    AudioFormat.CHANNEL_OUT_MONO,
                     AudioFormat.ENCODING_PCM_16BIT,
                     playBufSize, AudioTrack.MODE_STREAM);
         } catch (Exception e) {
@@ -388,7 +388,7 @@ class WebRTCAudioDevice {
             return -1;
         }

-        int apiLevel = Integer.parseInt(android.os.Build.VERSION.SDK);
+        int apiLevel = android.os.Build.VERSION.SDK_INT;

         if ((3 == apiLevel) || (4 == apiLevel)) {
             // 1.5 and 1.6 devices
@@ -464,7 +464,7 @@ class WebRTCAudioDevice {
     }

     private void SetAudioMode(boolean startCall) {
-        int apiLevel = Integer.parseInt(android.os.Build.VERSION.SDK);
+        int apiLevel = android.os.Build.VERSION.SDK_INT;

         if (_audioManager == null && _context != null) {
             _audioManager = (AudioManager)
diff --git a/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.cc b/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.cc
index 8f3c7c8d4..156ae18e1 100644
--- a/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.cc
+++ b/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.cc
@@ -57,7 +57,7 @@ DllHandle InternalLoadDll(const char dll_name[]) {
 #endif
   if (handle == kInvalidDllHandle) {
     WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1,
-                 "Can't load %s : %d", dll_name, GetDllError());
+                 "Can't load %s : %s", dll_name, GetDllError());
   }
   return handle;
 }
@@ -66,7 +66,7 @@ void InternalUnloadDll(DllHandle handle) {
 #ifdef WEBRTC_LINUX
   if (dlclose(handle) != 0) {
     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
-                 "%d", GetDllError());
+                 "%s", GetDllError());
   }
 #else
 #error Not implemented
diff --git a/webrtc/modules/video_capture/android/device_info_android.cc b/webrtc/modules/video_capture/android/device_info_android.cc
index d119531b5..ca16ebf08 100644
--- a/webrtc/modules/video_capture/android/device_info_android.cc
+++ b/webrtc/modules/video_capture/android/device_info_android.cc
@@ -22,6 +22,13 @@ namespace webrtc
 namespace videocapturemodule
 {
+static jclass g_capabilityClass = NULL;
+
+// static
+void DeviceInfoAndroid::SetAndroidCaptureClasses(jclass capabilityClass) {
+  g_capabilityClass = capabilityClass;
+}
+
 VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo (
     const WebRtc_Word32 id) {
   videocapturemodule::DeviceInfoAndroid *deviceInfo =
@@ -172,23 +179,20 @@ WebRtc_Word32 DeviceInfoAndroid::CreateCapabilityMap(
     return -1;

   // Find the capability class
-  jclass javaCapClassLocal = env->FindClass(AndroidJavaCaptureCapabilityClass);
-  if (javaCapClassLocal == NULL) {
+  jclass javaCapClass = g_capabilityClass;
+  if (javaCapClass == NULL) {
     VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                 "%s: Can't find java class VideoCaptureCapabilityAndroid.",
+                 "%s: SetAndroidCaptureClasses must be called first!",
                  __FUNCTION__);
     return -1;
   }

   // get the method ID for the Android Java GetCapabilityArray .
-  char signature[256];
-  sprintf(signature,
-          "(Ljava/lang/String;)[L%s;",
-          AndroidJavaCaptureCapabilityClass);
-  jmethodID cid = env->GetMethodID(javaCmDevInfoClass,
-                                   "GetCapabilityArray",
-                                   signature);
+  jmethodID cid = env->GetMethodID(
+      javaCmDevInfoClass,
+      "GetCapabilityArray",
+      "(Ljava/lang/String;)[Lorg/webrtc/videoengine/CaptureCapabilityAndroid;");
   if (cid == NULL) {
     VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
@@ -216,9 +220,9 @@ WebRtc_Word32 DeviceInfoAndroid::CreateCapabilityMap(
     return -1;
   }

-  jfieldID widthField = env->GetFieldID(javaCapClassLocal, "width", "I");
-  jfieldID heigtField = env->GetFieldID(javaCapClassLocal, "height", "I");
-  jfieldID maxFpsField = env->GetFieldID(javaCapClassLocal, "maxFPS", "I");
+  jfieldID widthField = env->GetFieldID(javaCapClass, "width", "I");
+  jfieldID heigtField = env->GetFieldID(javaCapClass, "height", "I");
+  jfieldID maxFpsField = env->GetFieldID(javaCapClass, "maxFPS", "I");
   if (widthField == NULL || heigtField == NULL || maxFpsField == NULL) {
     VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
@@ -240,7 +244,7 @@ WebRtc_Word32 DeviceInfoAndroid::CreateCapabilityMap(
     cap->expectedCaptureDelay = _expectedCaptureDelay;
     cap->rawType = kVideoNV21;
     cap->maxFPS = env->GetIntField(capabilityElement, maxFpsField);
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
                  "%s: Cap width %d, height %d, fps %d", __FUNCTION__,
                  cap->width, cap->height, cap->maxFPS);
     _captureCapabilities.Insert(i, cap);
diff --git a/webrtc/modules/video_capture/android/device_info_android.h b/webrtc/modules/video_capture/android/device_info_android.h
index 855a29195..4db3445ac 100644
--- a/webrtc/modules/video_capture/android/device_info_android.h
+++ b/webrtc/modules/video_capture/android/device_info_android.h
@@ -15,9 +15,6 @@
 #include "../video_capture_impl.h"
 #include "../device_info_impl.h"

-#define AndroidJavaCaptureDeviceInfoClass "org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid"
-#define AndroidJavaCaptureCapabilityClass "org/webrtc/videoengine/CaptureCapabilityAndroid"
-
 namespace webrtc
 {
 namespace videocapturemodule
@@ -32,6 +29,7 @@ namespace videocapturemodule
 class DeviceInfoAndroid : public DeviceInfoImpl {
 public:
+    static void SetAndroidCaptureClasses(jclass capabilityClass);
     DeviceInfoAndroid(const WebRtc_Word32 id);
     WebRtc_Word32 Init();
     virtual ~DeviceInfoAndroid();
diff --git a/webrtc/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java b/webrtc/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java
index ef7fc7b56..9999d2384 100644
--- a/webrtc/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java
+++ b/webrtc/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java
@@ -42,7 +42,6 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
     private boolean isCaptureStarted = false;
     private boolean isCaptureRunning = false;
    private boolean isSurfaceReady = false;
-    private SurfaceHolder surfaceHolder = null;

     private final int numCaptureBuffers = 3;
     private int expectedFrameSize = 0;
@@ -61,6 +60,9 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
     public static void DeleteVideoCaptureAndroid(VideoCaptureAndroid captureAndroid) {
         Log.d(TAG, "DeleteVideoCaptureAndroid");
+        if (captureAndroid.camera == null) {
+            return;
+        }

         captureAndroid.StopCapture();
         captureAndroid.camera.release();
@@ -82,53 +84,44 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
             return -1;
         }

-        Log.d(TAG, "tryStartCapture " + width +
-              " height " + height +" frame rate " + frameRate +
-              "isCaptureRunning " + isCaptureRunning +
-              "isSurfaceReady " + isSurfaceReady +
-              "isCaptureStarted " + isCaptureStarted);
+        Log.d(TAG, "tryStartCapture: " + width +
+              "x" + height +", frameRate: " + frameRate +
+              ", isCaptureRunning: " + isCaptureRunning +
+              ", isSurfaceReady: " + isSurfaceReady +
+              ", isCaptureStarted: " + isCaptureStarted);

-        if (isCaptureRunning || !isSurfaceReady || !isCaptureStarted) {
+        if (isCaptureRunning || !isCaptureStarted) {
             return 0;
         }

-        try {
-            camera.setPreviewDisplay(surfaceHolder);
+        CaptureCapabilityAndroid currentCapability =
+            new CaptureCapabilityAndroid();
+        currentCapability.width = width;
+        currentCapability.height = height;
+        currentCapability.maxFPS = frameRate;
+        PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat);

-            CaptureCapabilityAndroid currentCapability =
-                new CaptureCapabilityAndroid();
-            currentCapability.width = width;
-            currentCapability.height = height;
-            currentCapability.maxFPS = frameRate;
-            PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat);
-
-            Camera.Parameters parameters = camera.getParameters();
-            parameters.setPreviewSize(currentCapability.width,
-                                      currentCapability.height);
-            parameters.setPreviewFormat(PIXEL_FORMAT);
-            parameters.setPreviewFrameRate(currentCapability.maxFPS);
-            camera.setParameters(parameters);
-
-            int bufSize = width * height * pixelFormat.bitsPerPixel / 8;
-            byte[] buffer = null;
-            for (int i = 0; i < numCaptureBuffers; i++) {
-                buffer = new byte[bufSize];
-                camera.addCallbackBuffer(buffer);
-            }
-            camera.setPreviewCallbackWithBuffer(this);
-            ownsBuffers = true;
-
-            camera.startPreview();
-            previewBufferLock.lock();
-            expectedFrameSize = bufSize;
-            isCaptureRunning = true;
-            previewBufferLock.unlock();
+        Camera.Parameters parameters = camera.getParameters();
+        parameters.setPreviewSize(currentCapability.width,
+                                  currentCapability.height);
+        parameters.setPreviewFormat(PIXEL_FORMAT);
+        parameters.setPreviewFrameRate(currentCapability.maxFPS);
+        camera.setParameters(parameters);
+        int bufSize = width * height * pixelFormat.bitsPerPixel / 8;
+        byte[] buffer = null;
+        for (int i = 0; i < numCaptureBuffers; i++) {
+            buffer = new byte[bufSize];
+            camera.addCallbackBuffer(buffer);
         }
-        catch (Exception ex) {
-            Log.e(TAG, "Failed to start camera");
-            return -1;
-        }
+        camera.setPreviewCallbackWithBuffer(this);
+        ownsBuffers = true;
+
+        camera.startPreview();
+        previewBufferLock.lock();
+        expectedFrameSize = bufSize;
+        isCaptureRunning = true;
+        previewBufferLock.unlock();

         isCaptureRunning = true;
         return 0;
@@ -140,6 +133,9 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
         // Get the local preview SurfaceHolder from the static render class
         localPreview = ViERenderer.GetLocalRenderer();
         if (localPreview != null) {
+            if (localPreview.getSurface() != null) {
+                surfaceCreated(localPreview);
+            }
             localPreview.addCallback(this);
         }
@@ -163,9 +159,8 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
             previewBufferLock.unlock();
             camera.stopPreview();
             camera.setPreviewCallbackWithBuffer(null);
-        }
-        catch (Exception ex) {
-            Log.e(TAG, "Failed to stop camera");
+        } catch (RuntimeException e) {
+            Log.e(TAG, "Failed to stop camera", e);
             return -1;
         }
@@ -237,22 +232,27 @@ public class VideoCaptureAndroid implements PreviewCallback, Callback {
     public void surfaceChanged(SurfaceHolder holder, int format, int width,
             int height) {
         Log.d(TAG, "VideoCaptureAndroid::surfaceChanged");
-
-        captureLock.lock();
-        isSurfaceReady = true;
-        surfaceHolder = holder;
-
-        tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS);
-        captureLock.unlock();
-        return;
     }

     public void surfaceCreated(SurfaceHolder holder) {
         Log.d(TAG, "VideoCaptureAndroid::surfaceCreated");
+        captureLock.lock();
+        try {
+            camera.setPreviewDisplay(holder);
+        } catch (IOException e) {
+            Log.e(TAG, "Failed to set preview surface!", e);
+        }
+        captureLock.unlock();
     }

     public void surfaceDestroyed(SurfaceHolder holder) {
         Log.d(TAG, "VideoCaptureAndroid::surfaceDestroyed");
-        isSurfaceReady = false;
+        captureLock.lock();
+        try {
+            camera.setPreviewDisplay(null);
+        } catch (IOException e) {
+            Log.e(TAG, "Failed to clear preview surface!", e);
+        }
+        captureLock.unlock();
     }
 }
diff --git a/webrtc/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java b/webrtc/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
index b0e75cc43..af227cd95 100644
--- a/webrtc/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
+++ b/webrtc/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
@@ -88,43 +88,36 @@ public class VideoCaptureDeviceInfoAndroid {
     private int Init() {
         // Populate the deviceList with available cameras and their capabilities.
         Camera camera = null;
-        try{
-            if(android.os.Build.VERSION.SDK_INT > 8) {
-                // From Android 2.3 and onwards
-                for(int i = 0; i < Camera.getNumberOfCameras(); ++i) {
-                    AndroidVideoCaptureDevice newDevice = new AndroidVideoCaptureDevice();
+        if(android.os.Build.VERSION.SDK_INT > 8) {
+            // From Android 2.3 and onwards
+            for(int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+                AndroidVideoCaptureDevice newDevice = new AndroidVideoCaptureDevice();
-                    Camera.CameraInfo info = new Camera.CameraInfo();
-                    Camera.getCameraInfo(i, info);
-                    newDevice.index = i;
-                    newDevice.orientation=info.orientation;
-                    if(info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
-                        newDevice.deviceUniqueName =
-                            "Camera " + i +", Facing back, Orientation "+ info.orientation;
-                        Log.d(TAG, "Camera " + i +", Facing back, Orientation "+ info.orientation);
+                Camera.CameraInfo info = new Camera.CameraInfo();
+                Camera.getCameraInfo(i, info);
+                newDevice.index = i;
+                newDevice.orientation=info.orientation;
+                if(info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
+                    newDevice.deviceUniqueName =
+                        "Camera " + i +", Facing back, Orientation "+ info.orientation;
+                    Log.d(TAG, "Camera " + i +", Facing back, Orientation "+ info.orientation);
-                    }
-                    else {
-                        newDevice.deviceUniqueName =
-                            "Camera " + i +", Facing front, Orientation "+ info.orientation;
-                        newDevice.frontCameraType = FrontFacingCameraType.Android23;
-                        Log.d(TAG, "Camera " + i +", Facing front, Orientation "+ info.orientation);
-                    }
-
-                    camera = Camera.open(i);
-                    Camera.Parameters parameters = camera.getParameters();
-                    AddDeviceInfo(newDevice, parameters);
-                    camera.release();
-                    camera = null;
-                    deviceList.add(newDevice);
                 }
+                else {
+                    newDevice.deviceUniqueName =
+                        "Camera " + i +", Facing front, Orientation "+ info.orientation;
+                    newDevice.frontCameraType = FrontFacingCameraType.Android23;
+                    Log.d(TAG, "Camera " + i +", Facing front, Orientation "+ info.orientation);
+                }
+
+                camera = Camera.open(i);
+                Camera.Parameters parameters = camera.getParameters();
+                AddDeviceInfo(newDevice, parameters);
+                camera.release();
+                camera = null;
+                deviceList.add(newDevice);
             }
         }
-        catch (Exception ex) {
-            Log.e(TAG, "Failed to init VideoCaptureDeviceInfo ex" +
-                    ex.getLocalizedMessage());
-            return -1;
-        }
         VerifyCapabilities();
         return 0;
     }
@@ -149,9 +142,8 @@ public class VideoCaptureDeviceInfoAndroid {
                 newDevice.captureCapabilies[i].height = s.height;
                 newDevice.captureCapabilies[i].width = s.width;
                 newDevice.captureCapabilies[i].maxFPS = maxFPS;
-                Log.v(TAG,
-                        "VideoCaptureDeviceInfo " + "maxFPS:" + maxFPS +
-                        " width:" + s.width + " height:" + s.height);
+                Log.v(TAG, "VideoCaptureDeviceInfo " + ", maxFPS: " + maxFPS +
+                        ", width: " + s.width + ", height: " + s.height);
             }
         }
@@ -187,9 +179,9 @@ public class VideoCaptureDeviceInfoAndroid {
         // even though it reports that it can
         if(android.os.Build.MANUFACTURER.equals("motorola") &&
                 android.os.Build.DEVICE.equals("umts_sholes")) {
-            for(AndroidVideoCaptureDevice device:deviceList) {
-                for(CaptureCapabilityAndroid capability:device.captureCapabilies) {
-                    capability.maxFPS=15;
+            for (AndroidVideoCaptureDevice device : deviceList) {
+                for (CaptureCapabilityAndroid capability : device.captureCapabilies) {
+                    capability.maxFPS = 15;
                 }
             }
         }
@@ -286,10 +278,14 @@ public class VideoCaptureDeviceInfoAndroid {
             Log.v(TAG, "AllocateCamera - creating VideoCaptureAndroid");
            return new VideoCaptureAndroid(id, context, camera, deviceToUse);
-
-        }catch (Exception ex) {
-            Log.e(TAG, "AllocateCamera Failed to open camera- ex " +
-                    ex.getLocalizedMessage());
+        } catch (NoSuchMethodException e) {
+            Log.e(TAG, "AllocateCamera Failed to open camera", e);
+        } catch (ClassNotFoundException e) {
+            Log.e(TAG, "AllocateCamera Failed to open camera", e);
+        } catch (InvocationTargetException e) {
+            Log.e(TAG, "AllocateCamera Failed to open camera", e);
+        } catch (IllegalAccessException e) {
+            Log.e(TAG, "AllocateCamera Failed to open camera", e);
         }
         return null;
     }
@@ -307,20 +303,13 @@ public class VideoCaptureDeviceInfoAndroid {
             String cameraId = parameters.get("camera-id");
             if(cameraId != null && cameraId.equals("1")) {
                 // This might be a Samsung Galaxy S with a front facing camera.
-                try {
-                    parameters.set("camera-id", 2);
-                    camera.setParameters(parameters);
-                    parameters = camera.getParameters();
-                    newDevice.frontCameraType = FrontFacingCameraType.GalaxyS;
-                    newDevice.orientation = 0;
-                    camera.release();
-                    return parameters;
-                }
-                catch (Exception ex) {
-                    // Nope - it did not work.
- Log.e(TAG, "Init Failed to open front camera camera - ex " + - ex.getLocalizedMessage()); - } + parameters.set("camera-id", 2); + camera.setParameters(parameters); + parameters = camera.getParameters(); + newDevice.frontCameraType = FrontFacingCameraType.GalaxyS; + newDevice.orientation = 0; + camera.release(); + return parameters; } camera.release(); diff --git a/webrtc/modules/video_capture/android/video_capture_android.cc b/webrtc/modules/video_capture/android/video_capture_android.cc index f5abbc91c..7321495b2 100644 --- a/webrtc/modules/video_capture/android/video_capture_android.cc +++ b/webrtc/modules/video_capture/android/video_capture_android.cc @@ -112,9 +112,20 @@ WebRtc_Word32 VideoCaptureAndroid::SetAndroidObjects(void* javaVM, return -1; } + jclass capabilityClassLocal = env->FindClass( + "org/webrtc/videoengine/CaptureCapabilityAndroid"); + if (!capabilityClassLocal) { + WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, + "%s: could not find java class", __FUNCTION__); + return -1; + } + jclass capabilityClassGlobal = reinterpret_cast(env->NewGlobalRef( + capabilityClassLocal)); + DeviceInfoAndroid::SetAndroidCaptureClasses(capabilityClassGlobal); + // get java capture class type (note path to class packet) jclass javaCmDevInfoClassLocal = env->FindClass( - AndroidJavaCaptureDeviceInfoClass); + "org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid"); if (!javaCmDevInfoClassLocal) { WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1, "%s: could not find java class", __FUNCTION__); diff --git a/webrtc/modules/video_render/android/java/org/webrtc/videoengine/ViERenderer.java b/webrtc/modules/video_render/android/java/org/webrtc/videoengine/ViERenderer.java index d45fb810a..e2c9921de 100644 --- a/webrtc/modules/video_render/android/java/org/webrtc/videoengine/ViERenderer.java +++ b/webrtc/modules/video_render/android/java/org/webrtc/videoengine/ViERenderer.java @@ -20,7 +20,7 @@ public class ViERenderer { private static SurfaceHolder g_localRenderer; public static SurfaceView CreateRenderer(Context context) { - return CreateRenderer(context,false); + return CreateRenderer(context, false); } public static SurfaceView CreateRenderer(Context context, @@ -48,7 +48,7 @@ public class ViERenderer { SurfaceView localRender = new SurfaceView(context); g_localRenderer = localRender.getHolder(); g_localRenderer.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); - return localRender; + return localRender; } public static SurfaceHolder GetLocalRenderer() { diff --git a/webrtc/modules/video_render/android/video_render_android_impl.cc b/webrtc/modules/video_render/android/video_render_android_impl.cc index 0107afefa..d6f7c5ab5 100644 --- a/webrtc/modules/video_render/android/video_render_android_impl.cc +++ b/webrtc/modules/video_render/android/video_render_android_impl.cc @@ -15,7 +15,7 @@ #include "thread_wrapper.h" #include "tick_util.h" -#ifdef ANDROID_LOG +#ifdef ANDROID #include #include diff --git a/webrtc/voice_engine/voe_video_sync_impl.cc b/webrtc/voice_engine/voe_video_sync_impl.cc index 7df460312..59f5218b5 100644 --- a/webrtc/voice_engine/voe_video_sync_impl.cc +++ b/webrtc/voice_engine/voe_video_sync_impl.cc @@ -51,7 +51,6 @@ int VoEVideoSyncImpl::GetPlayoutTimestamp(int channel, unsigned int& timestamp) { WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetPlayoutTimestamp(channel=%d, timestamp=?)", channel); - ANDROID_NOT_SUPPORTED(_shared->statistics()); IPHONE_NOT_SUPPORTED(_shared->statistics()); if 
(!_shared->statistics().Initialized()) @@ -76,7 +75,6 @@ int VoEVideoSyncImpl::SetInitTimestamp(int channel, WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "SetInitTimestamp(channel=%d, timestamp=%lu)", channel, timestamp); - ANDROID_NOT_SUPPORTED(_shared->statistics()); IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) @@ -101,7 +99,6 @@ int VoEVideoSyncImpl::SetInitSequenceNumber(int channel, WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "SetInitSequenceNumber(channel=%d, sequenceNumber=%hd)", channel, sequenceNumber); - ANDROID_NOT_SUPPORTED(_shared->statistics()); IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) @@ -125,7 +122,6 @@ int VoEVideoSyncImpl::SetMinimumPlayoutDelay(int channel,int delayMs) WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "SetMinimumPlayoutDelay(channel=%d, delayMs=%d)", channel, delayMs); - ANDROID_NOT_SUPPORTED(_shared->statistics()); IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) @@ -172,7 +168,6 @@ int VoEVideoSyncImpl::GetDelayEstimate(int channel, int& delayMs) { WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetDelayEstimate(channel=%d, delayMs=?)", channel); - ANDROID_NOT_SUPPORTED(_shared->statistics()); IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) @@ -195,7 +190,6 @@ int VoEVideoSyncImpl::GetPlayoutBufferSize(int& bufferMs) { WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetPlayoutBufferSize(bufferMs=?)"); - ANDROID_NOT_SUPPORTED(_shared->statistics()); IPHONE_NOT_SUPPORTED(_shared->statistics()); if (!_shared->statistics().Initialized()) @@ -223,7 +217,7 @@ int VoEVideoSyncImpl::GetRtpRtcp(int channel, RtpRtcp* &rtpRtcpModule) { WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1), "GetRtpRtcp(channel=%i)", channel); - + if (!_shared->statistics().Initialized()) { _shared->SetLastError(VE_NOT_INITED, kTraceError);