Make WebRTC build with the audio device Java implementation and add an option to enable it

BUG=
TEST=buildbots

This CL makes the audio device Java implementation build in WebRTC, and adds an
option in gyp so we can switch between the OpenSL ES implementation and the
Java implementation.
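
In practice the new gyp variable would be flipped when regenerating build
files, e.g. by putting enable_android_opensl=0 in GYP_DEFINES before running
gclient runhooks (a hedged note: the exact workflow depends on the checkout
and is not part of this CL).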
Review URL: https://webrtc-codereview.appspot.com/801004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@2783 4adac7df-926f-26a2-2b94-8c16560cd09d
leozwang@webrtc.org
2012-09-18 20:19:00 +00:00
parent deea95f76f
commit 2db85bcba7
9 changed files with 316 additions and 330 deletions

View File

@@ -98,6 +98,10 @@
# flood of chromium-style warnings. Investigate enabling them:
# http://code.google.com/p/webrtc/issues/detail?id=163
'clang_use_chrome_plugins%': 0,
# Switch between the Android audio device OpenSL ES implementation
# and the Java implementation.
'enable_android_opensl%': 1,
}],
['OS=="ios"', {
'enable_video%': 0,
@@ -211,9 +215,15 @@
# with condition and event functions in system_wrappers.
'WEBRTC_CLOCK_TYPE_REALTIME',
'WEBRTC_THREAD_RR',
],
'conditions': [
['enable_android_opensl==1', {
'defines': [
'WEBRTC_ANDROID_OPENSLES',
],
}],
],
}],
], # conditions
}, # target_defaults
}
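
The define set above is what C++ sources key off at compile time. A minimal
sketch of the resulting guard pattern (mirroring the checks that appear later
in this CL):

#if defined(WEBRTC_ANDROID_OPENSLES)
// Compiled against the OpenSL ES audio device implementation.
#else
// Compiled against the Java (JNI) audio device implementation.
#endif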

View File

@@ -24,19 +24,13 @@
#include "thread_wrapper.h"
#include "event_wrapper.h"
// Android logging, uncomment to print trace to logcat instead of
// trace file/callback
//#include <android/log.h>
//#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, \
// "WebRTC AD jni", __VA_ARGS__)
namespace webrtc
{
JavaVM* globalJvm = NULL;
JNIEnv* globalJNIEnv = NULL;
jobject globalSndContext = NULL;
jclass globalScClass = NULL;
JavaVM* AudioDeviceAndroidJni::globalJvm = NULL;
JNIEnv* AudioDeviceAndroidJni::globalJNIEnv = NULL;
jobject AudioDeviceAndroidJni::globalSndContext = NULL;
jclass AudioDeviceAndroidJni::globalScClass = NULL;
// ----------------------------------------------------------------------------
// SetAndroidAudioDeviceObjects
@@ -46,61 +40,38 @@ jclass globalScClass = NULL;
// by the same Java application.
// ----------------------------------------------------------------------------
WebRtc_Word32 SetAndroidAudioDeviceObjects(void* javaVM, void* env,
void* context)
{
WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, -1, "%s",
__FUNCTION__);
WebRtc_Word32 AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects(
void* javaVM,
void* env,
void* context) {
// TODO(leozwang): Make this function thread-safe.
globalJvm = reinterpret_cast<JavaVM*>(javaVM);
globalSndContext = reinterpret_cast<jobject>(context);
globalJvm = (JavaVM*) javaVM;
globalSndContext = (jobject) context;
if (env)
{
globalJNIEnv = (JNIEnv *) env;
WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, -1,
"%s: will find class", __FUNCTION__);
// get java class type (note path to class package)
jclass
javaScClassLocal =
globalJNIEnv->FindClass(
if (env) {
globalJNIEnv = reinterpret_cast<JNIEnv*>(env);
// Get java class type (note path to class package).
jclass javaScClassLocal = globalJNIEnv->FindClass(
"org/webrtc/voiceengine/AudioDeviceAndroid");
if (!javaScClassLocal)
{
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
"%s: could not find java class", __FUNCTION__);
return -1; /* exception thrown */
if (!javaScClassLocal) {
return -1; // exception thrown
}
else {
}
WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, -1,
"%s: will create global reference", __FUNCTION__);
// create a global reference to the class (to tell JNI that we are
// referencing it after this function has returned)
globalScClass
= reinterpret_cast<jclass> (globalJNIEnv->NewGlobalRef(
javaScClassLocal));
if (!globalScClass)
{
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
"%s: could not create reference", __FUNCTION__);
// Create a global reference to the class (to tell JNI that we are
// referencing it after this function has returned).
globalScClass = reinterpret_cast<jclass> (
globalJNIEnv->NewGlobalRef(javaScClassLocal));
if (!globalScClass) {
return -1;
}
// Delete local class ref, we only use the global ref
globalJNIEnv->DeleteLocalRef(javaScClassLocal);
}
else // User is resetting the env variable
{
WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1,
"%s: env is NULL, assuming deinit", __FUNCTION__);
if (!globalJNIEnv)
{
WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1,
"%s: saved env already NULL", __FUNCTION__);
else { // User is resetting the env variable
if (!globalJNIEnv) {
return 0;
}
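
The hunk above follows the usual JNI caching pattern: look up the class,
promote the local reference to a global one so it outlives the call, then
release the local reference. A condensed sketch of that pattern (error
handling shortened; only the class path comes from this CL):

jclass local_class =
    env->FindClass("org/webrtc/voiceengine/AudioDeviceAndroid");
if (!local_class) {
  return -1;  // A Java exception is pending.
}
jclass global_class =
    reinterpret_cast<jclass>(env->NewGlobalRef(local_class));
env->DeleteLocalRef(local_class);  // Only the global ref is used from here on.
if (!global_class) {
  return -1;
}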
@@ -2229,7 +2200,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
}
// create a reference to the object (to tell JNI that we are referencing it
// after this function has returned)
// after this function has returned).
_javaScObj = env->NewGlobalRef(javaScObjLocal);
if (!_javaScObj)
{
@@ -2239,46 +2210,37 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
return -1;
}
// Delete local object ref, we only use the global ref
// Delete local object ref, we only use the global ref.
env->DeleteLocalRef(javaScObjLocal);
//////////////////////
// AUDIO MANAGEMENT
// This is not mandatory functionality
if (_javaContext)
{
// This is not mandatory functionality.
if (_javaContext) {
// Get Context field ID
jfieldID fidContext = env->GetFieldID(_javaScClass, "_context",
"Landroid/content/Context;");
if (!fidContext)
{
if (!fidContext) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"%s: could not get Context fid", __FUNCTION__);
return -1;
}
// Set the Java application Context so we can use AudioManager
// Get Context object and check it
// Get Context object and check it.
jobject javaContext = (jobject) _javaContext;
env->SetObjectField(_javaScObj, fidContext, javaContext);
javaContext = env->GetObjectField(_javaScObj, fidContext);
if (!javaContext)
{
if (!javaContext) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"%s: could not set Context", __FUNCTION__);
return -1;
}
// Delete local object ref
// Delete local object ref.
env->DeleteLocalRef(javaContext);
}
else
{
WEBRTC_TRACE(
kTraceWarning,
kTraceAudioDevice,
_id,
else {
WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
"%s: did not set Context - some functionality is not "
"supported",
__FUNCTION__);
@@ -2287,7 +2249,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
/////////////
// PLAYOUT
// Get play buffer field ID
// Get play buffer field ID.
jfieldID fidPlayBuffer = env->GetFieldID(_javaScClass, "_playBuffer",
"Ljava/nio/ByteBuffer;");
if (!fidPlayBuffer)
@@ -2297,7 +2259,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
return -1;
}
// Get play buffer object
// Get play buffer object.
jobject javaPlayBufferLocal =
env->GetObjectField(_javaScObj, fidPlayBuffer);
if (!javaPlayBufferLocal)
@@ -2309,7 +2271,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
// Create a global reference to the object (to tell JNI that we are
// referencing it after this function has returned)
// NOTE: we are referencing it only through the direct buffer (see below)
// NOTE: we are referencing it only through the direct buffer (see below).
_javaPlayBuffer = env->NewGlobalRef(javaPlayBufferLocal);
if (!_javaPlayBuffer)
{
@@ -2318,10 +2280,10 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
return -1;
}
// Delete local object ref, we only use the global ref
// Delete local object ref, we only use the global ref.
env->DeleteLocalRef(javaPlayBufferLocal);
// Get direct buffer
// Get direct buffer.
_javaDirectPlayBuffer = env->GetDirectBufferAddress(_javaPlayBuffer);
if (!_javaDirectPlayBuffer)
{
@@ -2330,7 +2292,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
return -1;
}
// Get the play audio method ID
// Get the play audio method ID.
_javaMidPlayAudio = env->GetMethodID(_javaScClass, "PlayAudio", "(I)I");
if (!_javaMidPlayAudio)
{
@@ -2342,7 +2304,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
//////////////
// RECORDING
// Get rec buffer field ID
// Get rec buffer field ID.
jfieldID fidRecBuffer = env->GetFieldID(_javaScClass, "_recBuffer",
"Ljava/nio/ByteBuffer;");
if (!fidRecBuffer)
@@ -2352,7 +2314,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
return -1;
}
// Get rec buffer object
// Get rec buffer object.
jobject javaRecBufferLocal = env->GetObjectField(_javaScObj, fidRecBuffer);
if (!javaRecBufferLocal)
{
@@ -2363,7 +2325,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
// Create a global reference to the object (to tell JNI that we are
// referencing it after this function has returned)
// NOTE: we are referencing it only through the direct buffer (see below)
// NOTE: we are referencing it only through the direct buffer (see below).
_javaRecBuffer = env->NewGlobalRef(javaRecBufferLocal);
if (!_javaRecBuffer)
{
@@ -2372,10 +2334,10 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
return -1;
}
// Delete local object ref, we only use the global ref
// Delete local object ref, we only use the global ref.
env->DeleteLocalRef(javaRecBufferLocal);
// Get direct buffer
// Get direct buffer.
_javaDirectRecBuffer = env->GetDirectBufferAddress(_javaRecBuffer);
if (!_javaDirectRecBuffer)
{
@@ -2384,7 +2346,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
return -1;
}
// Get the rec audio method ID
// Get the rec audio method ID.
_javaMidRecAudio = env->GetMethodID(_javaScClass, "RecordAudio", "(I)I");
if (!_javaMidRecAudio)
{
@@ -2393,7 +2355,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
return -1;
}
// Detach this thread if it was attached
// Detach this thread if it was attached.
if (isAttached)
{
if (_javaVM->DetachCurrentThread() < 0)
@@ -2411,7 +2373,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
//
// checks supported sample rates for playback
// and recording and initializes the rates to be used
// Also stores the max playout volume returned from InitPlayout
// Also stores the max playout volume returned from InitPlayout.
// ----------------------------------------------------------------------------
WebRtc_Word32 AudioDeviceAndroidJni::InitSampleRate()

View File

@@ -33,17 +33,17 @@ const WebRtc_UWord32 N_PLAY_CHANNELS = 1; // default is mono playout
const WebRtc_UWord32 REC_BUF_SIZE_IN_SAMPLES = 480; // Handle max 10 ms @ 48 kHz
WebRtc_Word32 SetAndroidAudioDeviceObjects(void* javaVM, void* env,
void* context);
class ThreadWrapper;
class AudioDeviceAndroidJni: public AudioDeviceGeneric
{
public:
class AudioDeviceAndroidJni : public AudioDeviceGeneric {
public:
AudioDeviceAndroidJni(const WebRtc_Word32 id);
~AudioDeviceAndroidJni();
static WebRtc_Word32 SetAndroidAudioDeviceObjects(void* javaVM,
void* env,
void* context);
virtual WebRtc_Word32 ActiveAudioLayer(
AudioDeviceModule::AudioLayer& audioLayer) const;
@@ -53,8 +53,7 @@ public:
virtual WebRtc_Word16 PlayoutDevices();
virtual WebRtc_Word16 RecordingDevices();
virtual WebRtc_Word32 PlayoutDeviceName(
WebRtc_UWord16 index,
virtual WebRtc_Word32 PlayoutDeviceName(WebRtc_UWord16 index,
char name[kAdmMaxDeviceNameSize],
char guid[kAdmMaxGuidSize]);
virtual WebRtc_Word32 RecordingDeviceName(
@@ -110,8 +109,8 @@ public:
virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
virtual WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
virtual WebRtc_Word32 MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize)
const;
virtual WebRtc_Word32 MicrophoneVolumeStepSize(
WebRtc_UWord16& stepSize) const;
virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
virtual WebRtc_Word32 SetSpeakerMute(bool enable);
@@ -160,14 +159,12 @@ public:
virtual WebRtc_Word32 SetLoudspeakerStatus(bool enable);
virtual WebRtc_Word32 GetLoudspeakerStatus(bool& enable) const;
private:
private:
// Lock
void Lock()
{
void Lock() {
_critSect.Enter();
};
void UnLock()
{
void UnLock() {
_critSect.Leave();
};
@@ -261,6 +258,14 @@ private:
void* _javaDirectRecBuffer; // Direct buffer pointer to rec buffer
jmethodID _javaMidPlayAudio; // Method ID of play in AudioDeviceAndroid
jmethodID _javaMidRecAudio; // Method ID of rec in AudioDeviceAndroid
// TODO(leozwang): Android holds only one JVM; all this JNI handling
// will be consolidated into a single place to make it consistent and
// reliable. Chromium has a good example at base/android.
static JavaVM* globalJvm;
static JNIEnv* globalJNIEnv;
static jobject globalSndContext;
static jclass globalScClass;
};
} // namespace webrtc

View File

@@ -110,8 +110,6 @@
'win/audio_device_utility_win.h',
'win/audio_mixer_manager_win.cc',
'win/audio_mixer_manager_win.h',
'android/audio_device_android_opensles.cc',
'android/audio_device_android_opensles.h',
'android/audio_device_utility_android.cc',
'android/audio_device_utility_android.h',
],
@@ -123,6 +121,19 @@
'-lOpenSLES',
],
},
'conditions': [
['enable_android_opensl==1', {
'sources': [
'android/audio_device_android_opensles.cc',
'android/audio_device_android_opensles.h',
],
}, {
'sources': [
'android/audio_device_android_jni.cc',
'android/audio_device_android_jni.h',
],
}],
],
}],
['OS=="linux"', {
'defines': [

View File

@@ -21,7 +21,8 @@ LOCAL_SRC_FILES := \
src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java \
src/org/webrtc/videoengine/ViEAndroidGLES20.java \
src/org/webrtc/videoengine/ViERenderer.java \
src/org/webrtc/videoengine/ViESurfaceRenderer.java
src/org/webrtc/videoengine/ViESurfaceRenderer.java \
src/org/webrtc/voiceengine/AudioDeviceAndroid.java
LOCAL_PACKAGE_NAME := webrtc-video-demo
LOCAL_CERTIFICATE := platform

View File

@@ -891,7 +891,7 @@ Notice for all the files in this folder.
verbose="${verbose}"
classpathref="project.javac.classpath"
fork="${need.javac.fork}">
<src path="${source.absolute.dir}:../../../../modules/video_capture/main/source/android/java:../../../../modules/video_render/main/source/android/java" />
<src path="${source.absolute.dir}:../../../../modules/video_capture/main/source/android/java:../../../../modules/video_render/main/source/android/java:../../../../modules/audio_device/main/source/android" />
<src path="${gen.absolute.dir}" />
<compilerarg line="${java.compilerargs}" />
</javac>

View File

@@ -1103,9 +1103,11 @@ JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopInco
*/
JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Create(
JNIEnv *env,
jobject)
jobject context)
{
__android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "Create");
__android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "Create VoiceEngine");
VoiceEngine::SetAndroidObjects(webrtcGlobalVM, env, context);
// Check if already created
if (voeData.ve) {

View File

@@ -24,7 +24,7 @@ int ViEAutoTestAndroid::RunAutotest(int testSelection, int subTestSelection,
webrtc::VideoEngine::SetAndroidObjects(javaVM, context);
#ifndef WEBRTC_ANDROID_OPENSLES
// voice engine calls into ADM directly
webrtc::VoiceEngine::SetAndroidAudioDeviceObjects(javaVM, env, context);
webrtc::VoiceEngine::SetAndroidObjects(javaVM, env, context);
#endif
if (subTestSelection == 0) {

View File

@@ -8,17 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_ANDROID_OPENSLES)
#include "modules/audio_device/main/source/android/audio_device_android_jni.h"
#endif
#include "voice_engine_impl.h"
#include "trace.h"
#ifdef WEBRTC_ANDROID
extern "C"
{
extern WebRtc_Word32 SetAndroidAudioDeviceObjects(
void* javaVM, void* env, void* context);
} // extern "C"
#endif
namespace webrtc
{
@@ -149,9 +145,8 @@ bool VoiceEngine::Delete(VoiceEngine*& voiceEngine)
int VoiceEngine::SetAndroidObjects(void* javaVM, void* env, void* context)
{
#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_ANDROID_OPENSLES)
// modules/audio_device/main/source/android/audio_device_android_jni.cc
// contains the actual implementation.
return SetAndroidAudioDeviceObjects(javaVM, env, context);
return AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects(
javaVM, env, context);
#else
return -1;
#endif
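
For context, the delegation above is reached from application code roughly as
follows (a hedged sketch: the JNI_OnLoad wiring and the Java-side names are
illustrative assumptions, not part of this CL):

#include <jni.h>

#include "voe_base.h"  // Assumed header exposing webrtc::VoiceEngine.

static JavaVM* g_vm = NULL;

jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/) {
  g_vm = vm;  // Cache the VM; it stays valid for the process lifetime.
  return JNI_VERSION_1_4;
}

// Hypothetical native entry point: hand the VM, JNIEnv and application
// Context to the voice engine before any VoiceEngine is created.
extern "C" JNIEXPORT void JNICALL
Java_org_example_App_nativeInitVoice(JNIEnv* env, jobject, jobject context) {
  webrtc::VoiceEngine::SetAndroidObjects(g_vm, env, context);
}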