diff --git a/src/modules/audio_device/android/audio_device_android_jni.cc b/src/modules/audio_device/android/audio_device_android_jni.cc
index a419badf9..23df34aee 100644
--- a/src/modules/audio_device/android/audio_device_android_jni.cc
+++ b/src/modules/audio_device/android/audio_device_android_jni.cc
@@ -55,7 +55,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects(
     globalJNIEnv = reinterpret_cast<JNIEnv*>(env);
     // Get java class type (note path to class packet).
     jclass javaScClassLocal = globalJNIEnv->FindClass(
-        "org/webrtc/voiceengine/AudioDeviceAndroid");
+        "org/webrtc/voiceengine/WebRTCAudioDevice");
     if (!javaScClassLocal) {
         WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
                      "%s: could not find java class", __FUNCTION__);
diff --git a/src/modules/audio_device/android/org/webrtc/voiceengine/AudioDeviceAndroid.java b/src/modules/audio_device/android/org/webrtc/voiceengine/WebRTCAudioDevice.java
similarity index 96%
rename from src/modules/audio_device/android/org/webrtc/voiceengine/AudioDeviceAndroid.java
rename to src/modules/audio_device/android/org/webrtc/voiceengine/WebRTCAudioDevice.java
index 2a31a2da9..3bdfc940f 100644
--- a/src/modules/audio_device/android/org/webrtc/voiceengine/AudioDeviceAndroid.java
+++ b/src/modules/audio_device/android/org/webrtc/voiceengine/WebRTCAudioDevice.java
@@ -20,7 +20,7 @@ import android.media.AudioRecord;
 import android.media.AudioTrack;
 import android.util.Log;
 
-class AudioDeviceAndroid {
+class WebRTCAudioDevice {
     private AudioTrack _audioTrack = null;
     private AudioRecord _audioRecord = null;
 
@@ -44,7 +44,7 @@ class AudioDeviceAndroid {
     private int _bufferedPlaySamples = 0;
     private int _playPosition = 0;
 
-    AudioDeviceAndroid() {
+    WebRTCAudioDevice() {
         try {
             _playBuffer = ByteBuffer.allocateDirect(2 * 480); // Max 10 ms @ 48
                                                               // kHz
@@ -125,10 +125,10 @@ class AudioDeviceAndroid {
     @SuppressWarnings("unused")
     private int InitPlayback(int sampleRate) {
         // get the minimum buffer size that can be used
-        int minPlayBufSize =
-            AudioTrack.getMinBufferSize(sampleRate,
-                                        AudioFormat.CHANNEL_CONFIGURATION_MONO,
-                                        AudioFormat.ENCODING_PCM_16BIT);
+        int minPlayBufSize = AudioTrack.getMinBufferSize(
+            sampleRate,
+            AudioFormat.CHANNEL_CONFIGURATION_MONO,
+            AudioFormat.ENCODING_PCM_16BIT);
 
         // DoLog("min play buf size is " + minPlayBufSize);
 
@@ -479,12 +479,12 @@ class AudioDeviceAndroid {
         // ***IMPORTANT*** When the API level for honeycomb (H) has been
         // decided,
         // the condition should be changed to include API level 8 to H-1.
-        if ((android.os.Build.BRAND.equals("Samsung") || android.os.Build.BRAND
-                .equals("samsung")) && (8 == apiLevel)) {
+        if ((android.os.Build.BRAND.equals("Samsung") ||
+                android.os.Build.BRAND.equals("samsung")) &&
+                (8 == apiLevel)) {
             // Set Samsung specific VoIP mode for 2.2 devices
-            int mode =
-                (startCall ? 4 /* VoIP mode */
-                        : AudioManager.MODE_NORMAL);
+            // 4 is VoIP mode
+            int mode = (startCall ? 4 : AudioManager.MODE_NORMAL);
             _audioManager.setMode(mode);
             if (_audioManager.getMode() != mode) {
                 DoLogErr("Could not set audio mode for Samsung device");
diff --git a/src/voice_engine/voice_engine_core.gypi b/src/voice_engine/voice_engine_core.gypi
index 880d5aadc..79d55cd0a 100644
--- a/src/voice_engine/voice_engine_core.gypi
+++ b/src/voice_engine/voice_engine_core.gypi
@@ -26,7 +26,7 @@
       ],
       'include_dirs': [
         'include',
-        '<(webrtc_root)/modules/audio_device/main/source',
+        '<(webrtc_root)/modules/audio_device',
       ],
       'direct_dependent_settings': {
         'include_dirs': [
diff --git a/src/voice_engine/voice_engine_impl.cc b/src/voice_engine/voice_engine_impl.cc
index 7b3c834c2..f90869f4d 100644
--- a/src/voice_engine/voice_engine_impl.cc
+++ b/src/voice_engine/voice_engine_impl.cc
@@ -9,7 +9,7 @@
  */
 
 #if defined(WEBRTC_ANDROID) && !defined(WEBRTC_ANDROID_OPENSLES)
-#include "modules/audio_device/main/source/android/audio_device_android_jni.h"
+#include "modules/audio_device/android/audio_device_android_jni.h"
 #endif
 
 #include "voice_engine_impl.h"