Make webrtc build with audio device java impl and add an option to enable it

This CL makes the audio device Java implementation build in webrtc, and adds
an option in gyp so we can switch between the OpenSL implementation and the
Java implementation.

BUG=
TEST=buildbots

Review URL: https://webrtc-codereview.appspot.com/801004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@2783 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
parent deea95f76f
commit 2db85bcba7
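
The new gyp option reaches C++ code through a preprocessor define: setting
enable_android_opensl==1 adds WEBRTC_ANDROID_OPENSLES to 'defines', and its
absence selects the Java (JNI) path. A minimal sketch of the resulting
compile-time switch; the typedef and the AudioDeviceAndroidOpenSLES class
name are illustrative assumptions, not part of this CL:

    // Sketch: choose the Android audio device implementation at compile time.
    // WEBRTC_ANDROID_OPENSLES comes from gyp (enable_android_opensl==1, see
    // the common.gypi hunks below); the typedef and the OpenSL ES class name
    // are assumptions for illustration only.
    #if defined(WEBRTC_ANDROID_OPENSLES)
    #include "android/audio_device_android_opensles.h"
    typedef webrtc::AudioDeviceAndroidOpenSLES PlatformAudioDevice;
    #else
    #include "android/audio_device_android_jni.h"
    typedef webrtc::AudioDeviceAndroidJni PlatformAudioDevice;
    #endif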
@@ -98,6 +98,10 @@
     # flood of chromium-style warnings. Investigate enabling them:
     # http://code.google.com/p/webrtc/issues/detail?id=163
     'clang_use_chrome_plugins%': 0,
+
+    # Switch between Android audio device OpenSL ES implementation
+    # and Java Implementation
+    'enable_android_opensl%': 1,
   }],
   ['OS=="ios"', {
     'enable_video%': 0,
@@ -211,7 +215,13 @@
       # with condition and event functions in system_wrappers.
       'WEBRTC_CLOCK_TYPE_REALTIME',
       'WEBRTC_THREAD_RR',
-      'WEBRTC_ANDROID_OPENSLES',
     ],
+    'conditions': [
+      ['enable_android_opensl==1', {
+        'defines': [
+          'WEBRTC_ANDROID_OPENSLES',
+        ],
+      }],
+    ],
   }],
 ], # conditions
@@ -24,19 +24,13 @@
 #include "thread_wrapper.h"
 #include "event_wrapper.h"
 
 // Android logging, uncomment to print trace to logcat instead of
 // trace file/callback
 //#include <android/log.h>
 //#define WEBRTC_TRACE(a,b,c,...) __android_log_print(ANDROID_LOG_DEBUG, \
 //                   "WebRTC AD jni", __VA_ARGS__)
 
 namespace webrtc
 {
 
-JavaVM* globalJvm = NULL;
-JNIEnv* globalJNIEnv = NULL;
-jobject globalSndContext = NULL;
-jclass globalScClass = NULL;
+JavaVM* AudioDeviceAndroidJni::globalJvm = NULL;
+JNIEnv* AudioDeviceAndroidJni::globalJNIEnv = NULL;
+jobject AudioDeviceAndroidJni::globalSndContext = NULL;
+jclass AudioDeviceAndroidJni::globalScClass = NULL;
 
 // ----------------------------------------------------------------------------
 // SetAndroidAudioDeviceObjects
@@ -46,69 +40,46 @@ jclass globalScClass = NULL;
 // by the same Java application.
 // ----------------------------------------------------------------------------
 
-WebRtc_Word32 SetAndroidAudioDeviceObjects(void* javaVM, void* env,
-                                           void* context)
-{
-    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, -1, "%s",
-                 __FUNCTION__);
-
-    globalJvm = (JavaVM*) javaVM;
-    globalSndContext = (jobject) context;
-
-    if (env)
-    {
-        globalJNIEnv = (JNIEnv *) env;
-
-        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, -1,
-                     "%s: will find class", __FUNCTION__);
-
-        // get java class type (note path to class packet)
-        jclass
-            javaScClassLocal =
-                globalJNIEnv->FindClass(
-                    "org/webrtc/voiceengine/AudioDeviceAndroid");
-        if (!javaScClassLocal)
-        {
-            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
-                         "%s: could not find java class", __FUNCTION__);
-            return -1; /* exception thrown */
-        }
-
-        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, -1,
-                     "%s: will create global reference", __FUNCTION__);
-
-        // create a global reference to the class (to tell JNI that we are
-        // referencing it after this function has returned)
-        globalScClass
-            = reinterpret_cast<jclass> (globalJNIEnv->NewGlobalRef(
-                javaScClassLocal));
-        if (!globalScClass)
-        {
-            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
-                         "%s: could not create reference", __FUNCTION__);
-            return -1;
-        }
-
-        // Delete local class ref, we only use the global ref
-        globalJNIEnv->DeleteLocalRef(javaScClassLocal);
-    }
-    else // User is resetting the env variable
-    {
-        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1,
-                     "%s: env is NULL, assuming deinit", __FUNCTION__);
-
-        if (!globalJNIEnv)
-        {
-            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1,
-                         "%s: saved env already NULL", __FUNCTION__);
-            return 0;
-        }
-
-        globalJNIEnv->DeleteGlobalRef(globalScClass);
-        globalJNIEnv = (JNIEnv *) NULL;
-    }
-
-    return 0;
-}
+WebRtc_Word32 AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects(
+    void* javaVM,
+    void* env,
+    void* context) {
+  // TODO(leozwang): Make this function thread-safe.
+  globalJvm = reinterpret_cast<JavaVM*>(javaVM);
+  globalSndContext = reinterpret_cast<jobject>(context);
+
+  if (env) {
+    globalJNIEnv = reinterpret_cast<JNIEnv*>(env);
+    // Get java class type (note path to class packet).
+    jclass javaScClassLocal = globalJNIEnv->FindClass(
+        "org/webrtc/voiceengine/AudioDeviceAndroid");
+    if (!javaScClassLocal) {
+      return -1; // exception thrown
+    }
+
+    // Create a global reference to the class (to tell JNI that we are
+    // referencing it after this function has returned).
+    globalScClass = reinterpret_cast<jclass> (
+        globalJNIEnv->NewGlobalRef(javaScClassLocal));
+    if (!globalScClass) {
+      return -1;
+    }
+
+    // Delete local class ref, we only use the global ref
+    globalJNIEnv->DeleteLocalRef(javaScClassLocal);
+  }
+  else { // User is resetting the env variable
+    if (!globalJNIEnv) {
+      return 0;
+    }
+
+    globalJNIEnv->DeleteGlobalRef(globalScClass);
+    globalJNIEnv = (JNIEnv *) NULL;
+  }
+
+  return 0;
+}
 
 // ============================================================================
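The rewritten function doubles as init and deinit: a non-NULL env caches a
global reference to the org/webrtc/voiceengine/AudioDeviceAndroid class, and
a NULL env releases it. A minimal caller sketch, assuming the embedder wires
it up from JNI_OnLoad (that placement and the NULL context are assumptions,
not part of this CL):

    // Sketch of the expected call pattern from an embedding application.
    #include <jni.h>
    JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* jvm, void* /*reserved*/) {
      JNIEnv* env = NULL;
      if (jvm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_4) != JNI_OK)
        return -1;
      // Init: caches a global ref to the AudioDeviceAndroid class.
      webrtc::AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects(jvm, env, NULL);
      return JNI_VERSION_1_4;
    }
    // Deinit later: a NULL env drops the cached global ref.
    //   webrtc::AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects(jvm, NULL, NULL);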
@@ -2229,7 +2200,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
     }
 
     // create a reference to the object (to tell JNI that we are referencing it
-    // after this function has returned)
+    // after this function has returned).
     _javaScObj = env->NewGlobalRef(javaScObjLocal);
     if (!_javaScObj)
     {
@@ -2239,55 +2210,46 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
         return -1;
     }
 
-    // Delete local object ref, we only use the global ref
+    // Delete local object ref, we only use the global ref.
     env->DeleteLocalRef(javaScObjLocal);
 
     //////////////////////
     // AUDIO MANAGEMENT
-
-    // This is not mandatory functionality
-    if (_javaContext)
-    {
-        // Get Context field ID
-        jfieldID fidContext = env->GetFieldID(_javaScClass, "_context",
-                                              "Landroid/content/Context;");
-        if (!fidContext)
-        {
-            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
-                         "%s: could not get Context fid", __FUNCTION__);
-            return -1;
-        }
-
-        // Set the Java application Context so we can use AudioManager
-        // Get Context object and check it
-        jobject javaContext = (jobject) _javaContext;
-        env->SetObjectField(_javaScObj, fidContext, javaContext);
-        javaContext = env->GetObjectField(_javaScObj, fidContext);
-        if (!javaContext)
-        {
-            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
-                         "%s: could not set Context", __FUNCTION__);
-            return -1;
-        }
-
-        // Delete local object ref
-        env->DeleteLocalRef(javaContext);
-    }
-    else
-    {
-        WEBRTC_TRACE(
-            kTraceWarning,
-            kTraceAudioDevice,
-            _id,
-            "%s: did not set Context - some functionality is not "
-            "supported",
-            __FUNCTION__);
+    // This is not mandatory functionality.
+    if (_javaContext) {
+      // Get Context field ID
+      jfieldID fidContext = env->GetFieldID(_javaScClass, "_context",
+                                            "Landroid/content/Context;");
+      if (!fidContext) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get Context fid", __FUNCTION__);
+        return -1;
+      }
+
+      // Set the Java application Context so we can use AudioManager
+      // Get Context object and check it.
+      jobject javaContext = (jobject) _javaContext;
+      env->SetObjectField(_javaScObj, fidContext, javaContext);
+      javaContext = env->GetObjectField(_javaScObj, fidContext);
+      if (!javaContext) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not set Context", __FUNCTION__);
+        return -1;
+      }
+      // Delete local object ref.
+      env->DeleteLocalRef(javaContext);
+    }
+    else {
+      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                   "%s: did not set Context - some functionality is not "
+                   "supported",
+                   __FUNCTION__);
     }
 
     /////////////
     // PLAYOUT
 
-    // Get play buffer field ID
+    // Get play buffer field ID.
     jfieldID fidPlayBuffer = env->GetFieldID(_javaScClass, "_playBuffer",
                                              "Ljava/nio/ByteBuffer;");
     if (!fidPlayBuffer)
@@ -2297,7 +2259,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
         return -1;
     }
 
-    // Get play buffer object
+    // Get play buffer object.
     jobject javaPlayBufferLocal =
         env->GetObjectField(_javaScObj, fidPlayBuffer);
     if (!javaPlayBufferLocal)
@@ -2309,7 +2271,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
 
     // Create a global reference to the object (to tell JNI that we are
     // referencing it after this function has returned)
-    // NOTE: we are referencing it only through the direct buffer (see below)
+    // NOTE: we are referencing it only through the direct buffer (see below).
     _javaPlayBuffer = env->NewGlobalRef(javaPlayBufferLocal);
     if (!_javaPlayBuffer)
     {
@@ -2318,10 +2280,10 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
         return -1;
     }
 
-    // Delete local object ref, we only use the global ref
+    // Delete local object ref, we only use the global ref.
     env->DeleteLocalRef(javaPlayBufferLocal);
 
-    // Get direct buffer
+    // Get direct buffer.
     _javaDirectPlayBuffer = env->GetDirectBufferAddress(_javaPlayBuffer);
     if (!_javaDirectPlayBuffer)
     {
@@ -2330,7 +2292,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
         return -1;
     }
 
-    // Get the play audio method ID
+    // Get the play audio method ID.
     _javaMidPlayAudio = env->GetMethodID(_javaScClass, "PlayAudio", "(I)I");
     if (!_javaMidPlayAudio)
     {
@@ -2342,7 +2304,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
     //////////////
     // RECORDING
 
-    // Get rec buffer field ID
+    // Get rec buffer field ID.
     jfieldID fidRecBuffer = env->GetFieldID(_javaScClass, "_recBuffer",
                                             "Ljava/nio/ByteBuffer;");
     if (!fidRecBuffer)
@@ -2352,7 +2314,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
         return -1;
     }
 
-    // Get rec buffer object
+    // Get rec buffer object.
     jobject javaRecBufferLocal = env->GetObjectField(_javaScObj, fidRecBuffer);
     if (!javaRecBufferLocal)
     {
@@ -2363,7 +2325,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
 
     // Create a global reference to the object (to tell JNI that we are
     // referencing it after this function has returned)
-    // NOTE: we are referencing it only through the direct buffer (see below)
+    // NOTE: we are referencing it only through the direct buffer (see below).
     _javaRecBuffer = env->NewGlobalRef(javaRecBufferLocal);
     if (!_javaRecBuffer)
     {
@@ -2372,10 +2334,10 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
         return -1;
     }
 
-    // Delete local object ref, we only use the global ref
+    // Delete local object ref, we only use the global ref.
     env->DeleteLocalRef(javaRecBufferLocal);
 
-    // Get direct buffer
+    // Get direct buffer.
     _javaDirectRecBuffer = env->GetDirectBufferAddress(_javaRecBuffer);
     if (!_javaDirectRecBuffer)
     {
@@ -2384,7 +2346,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
         return -1;
     }
 
-    // Get the rec audio method ID
+    // Get the rec audio method ID.
     _javaMidRecAudio = env->GetMethodID(_javaScClass, "RecordAudio", "(I)I");
     if (!_javaMidRecAudio)
     {
@@ -2393,7 +2355,7 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
         return -1;
     }
 
-    // Detach this thread if it was attached
+    // Detach this thread if it was attached.
     if (isAttached)
     {
        if (_javaVM->DetachCurrentThread() < 0)
@@ -2409,9 +2371,9 @@ WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
 // ----------------------------------------------------------------------------
 // InitSampleRate
 //
-// checks supported sample rates for playback
+// checks supported sample rates for playback
 // and recording and initializes the rates to be used
-// Also stores the max playout volume returned from InitPlayout
+// Also stores the max playout volume returned from InitPlayout.
 // ----------------------------------------------------------------------------
 
 WebRtc_Word32 AudioDeviceAndroidJni::InitSampleRate()
@@ -33,234 +33,239 @@ const WebRtc_UWord32 N_PLAY_CHANNELS = 1; // default is mono playout
 const WebRtc_UWord32 REC_BUF_SIZE_IN_SAMPLES = 480; // Handle max 10 ms @ 48 kHz
 
-WebRtc_Word32 SetAndroidAudioDeviceObjects(void* javaVM, void* env,
-                                           void* context);
-
 class ThreadWrapper;
 
-class AudioDeviceAndroidJni: public AudioDeviceGeneric
-{
-public:
-    AudioDeviceAndroidJni(const WebRtc_Word32 id);
-    ~AudioDeviceAndroidJni();
-
-    virtual WebRtc_Word32 ActiveAudioLayer(
-        AudioDeviceModule::AudioLayer& audioLayer) const;
-
-    virtual WebRtc_Word32 Init();
-    virtual WebRtc_Word32 Terminate();
-    virtual bool Initialized() const;
-
-    virtual WebRtc_Word16 PlayoutDevices();
-    virtual WebRtc_Word16 RecordingDevices();
-    virtual WebRtc_Word32 PlayoutDeviceName(
-        WebRtc_UWord16 index,
-        char name[kAdmMaxDeviceNameSize],
-        char guid[kAdmMaxGuidSize]);
-    virtual WebRtc_Word32 RecordingDeviceName(
-        WebRtc_UWord16 index,
-        char name[kAdmMaxDeviceNameSize],
-        char guid[kAdmMaxGuidSize]);
-
-    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index);
-    virtual WebRtc_Word32 SetPlayoutDevice(
-        AudioDeviceModule::WindowsDeviceType device);
-    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index);
-    virtual WebRtc_Word32 SetRecordingDevice(
-        AudioDeviceModule::WindowsDeviceType device);
-
-    virtual WebRtc_Word32 PlayoutIsAvailable(bool& available);
-    virtual WebRtc_Word32 InitPlayout();
-    virtual bool PlayoutIsInitialized() const;
-    virtual WebRtc_Word32 RecordingIsAvailable(bool& available);
-    virtual WebRtc_Word32 InitRecording();
-    virtual bool RecordingIsInitialized() const;
-
-    virtual WebRtc_Word32 StartPlayout();
-    virtual WebRtc_Word32 StopPlayout();
-    virtual bool Playing() const;
-    virtual WebRtc_Word32 StartRecording();
-    virtual WebRtc_Word32 StopRecording();
-    virtual bool Recording() const;
-
-    virtual WebRtc_Word32 SetAGC(bool enable);
-    virtual bool AGC() const;
-
-    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
-                                           WebRtc_UWord16 volumeRight);
-    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft,
-                                        WebRtc_UWord16& volumeRight) const;
-
-    virtual WebRtc_Word32 SpeakerIsAvailable(bool& available);
-    virtual WebRtc_Word32 InitSpeaker();
-    virtual bool SpeakerIsInitialized() const;
-    virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available);
-    virtual WebRtc_Word32 InitMicrophone();
-    virtual bool MicrophoneIsInitialized() const;
-
-    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
-    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
-    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
-    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
-    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
-    virtual WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
-
-    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
-    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
-    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
-    virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
-    virtual WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
-    virtual WebRtc_Word32 MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize)
-        const;
-
-    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
-    virtual WebRtc_Word32 SetSpeakerMute(bool enable);
-    virtual WebRtc_Word32 SpeakerMute(bool& enabled) const;
-
-    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
-    virtual WebRtc_Word32 SetMicrophoneMute(bool enable);
-    virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const;
-
-    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
-    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable);
-    virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
-
-    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
-    virtual WebRtc_Word32 SetStereoPlayout(bool enable);
-    virtual WebRtc_Word32 StereoPlayout(bool& enabled) const;
-    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
-    virtual WebRtc_Word32 SetStereoRecording(bool enable);
-    virtual WebRtc_Word32 StereoRecording(bool& enabled) const;
-
-    virtual WebRtc_Word32 SetPlayoutBuffer(
-        const AudioDeviceModule::BufferType type, WebRtc_UWord16 sizeMS);
-    virtual WebRtc_Word32 PlayoutBuffer(
-        AudioDeviceModule::BufferType& type, WebRtc_UWord16& sizeMS) const;
-    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const;
-    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const;
-
-    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const;
-
-    virtual bool PlayoutWarning() const;
-    virtual bool PlayoutError() const;
-    virtual bool RecordingWarning() const;
-    virtual bool RecordingError() const;
-    virtual void ClearPlayoutWarning();
-    virtual void ClearPlayoutError();
-    virtual void ClearRecordingWarning();
-    virtual void ClearRecordingError();
-
-    virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
-
-    virtual WebRtc_Word32 SetRecordingSampleRate(
-        const WebRtc_UWord32 samplesPerSec);
-    virtual WebRtc_Word32 SetPlayoutSampleRate(
-        const WebRtc_UWord32 samplesPerSec);
-
-    virtual WebRtc_Word32 SetLoudspeakerStatus(bool enable);
-    virtual WebRtc_Word32 GetLoudspeakerStatus(bool& enable) const;
-
-private:
-    // Lock
-    void Lock()
-    {
-        _critSect.Enter();
-    };
-    void UnLock()
-    {
-        _critSect.Leave();
-    };
-
-    // Init
-    WebRtc_Word32 InitJavaResources();
-    WebRtc_Word32 InitSampleRate();
-
-    // Threads
-    static bool RecThreadFunc(void*);
-    static bool PlayThreadFunc(void*);
-    bool RecThreadProcess();
-    bool PlayThreadProcess();
-
-    // Misc
-    AudioDeviceBuffer* _ptrAudioBuffer;
-    CriticalSectionWrapper& _critSect;
-    WebRtc_Word32 _id;
-
-    // Events
-    EventWrapper& _timeEventRec;
-    EventWrapper& _timeEventPlay;
-    EventWrapper& _recStartStopEvent;
-    EventWrapper& _playStartStopEvent;
-
-    // Threads
-    ThreadWrapper* _ptrThreadPlay;
-    ThreadWrapper* _ptrThreadRec;
-    WebRtc_UWord32 _recThreadID;
-    WebRtc_UWord32 _playThreadID;
-    bool _playThreadIsInitialized;
-    bool _recThreadIsInitialized;
-    bool _shutdownPlayThread;
-    bool _shutdownRecThread;
-
-    // Rec buffer
-    WebRtc_Word8 _recBuffer[2 * REC_BUF_SIZE_IN_SAMPLES];
-
-    // States
-    bool _recordingDeviceIsSpecified;
-    bool _playoutDeviceIsSpecified;
-    bool _initialized;
-    bool _recording;
-    bool _playing;
-    bool _recIsInitialized;
-    bool _playIsInitialized;
-    bool _micIsInitialized;
-    bool _speakerIsInitialized;
-
-    // Signal flags to threads
-    bool _startRec;
-    bool _stopRec;
-    bool _startPlay;
-    bool _stopPlay;
-
-    // Warnings and errors
-    WebRtc_UWord16 _playWarning;
-    WebRtc_UWord16 _playError;
-    WebRtc_UWord16 _recWarning;
-    WebRtc_UWord16 _recError;
-
-    // Delay
-    WebRtc_UWord16 _delayPlayout;
-    WebRtc_UWord16 _delayRecording;
-
-    // AGC state
-    bool _AGC;
-
-    // Stored device properties
-    WebRtc_UWord16 _samplingFreqIn; // Sampling frequency for Mic
-    WebRtc_UWord16 _samplingFreqOut; // Sampling frequency for Speaker
-    WebRtc_UWord32 _maxSpeakerVolume; // The maximum speaker volume value
-    bool _loudSpeakerOn;
-    // Stores the desired audio source to use, set in SetRecordingDevice
-    int _recAudioSource;
-
-    // JNI and Java
-    JavaVM* _javaVM; // denotes a Java VM
-    jobject _javaContext; // the application context
-
-    JNIEnv* _jniEnvPlay; // The JNI env for playout thread
-    JNIEnv* _jniEnvRec; // The JNI env for recording thread
-
-    jclass _javaScClass; // AudioDeviceAndroid class
-    jobject _javaScObj; // AudioDeviceAndroid object
-
-    // The play buffer field in AudioDeviceAndroid object (global ref)
-    jobject _javaPlayBuffer;
-    // The rec buffer field in AudioDeviceAndroid object (global ref)
-    jobject _javaRecBuffer;
-    void* _javaDirectPlayBuffer; // Direct buffer pointer to play buffer
-    void* _javaDirectRecBuffer; // Direct buffer pointer to rec buffer
-    jmethodID _javaMidPlayAudio; // Method ID of play in AudioDeviceAndroid
-    jmethodID _javaMidRecAudio; // Method ID of rec in AudioDeviceAndroid
+class AudioDeviceAndroidJni : public AudioDeviceGeneric {
+ public:
+  AudioDeviceAndroidJni(const WebRtc_Word32 id);
+  ~AudioDeviceAndroidJni();
+
+  static WebRtc_Word32 SetAndroidAudioDeviceObjects(void* javaVM,
+                                                    void* env,
+                                                    void* context);
+
+  virtual WebRtc_Word32 ActiveAudioLayer(
+      AudioDeviceModule::AudioLayer& audioLayer) const;
+
+  virtual WebRtc_Word32 Init();
+  virtual WebRtc_Word32 Terminate();
+  virtual bool Initialized() const;
+
+  virtual WebRtc_Word16 PlayoutDevices();
+  virtual WebRtc_Word16 RecordingDevices();
+  virtual WebRtc_Word32 PlayoutDeviceName(WebRtc_UWord16 index,
+                                          char name[kAdmMaxDeviceNameSize],
+                                          char guid[kAdmMaxGuidSize]);
+  virtual WebRtc_Word32 RecordingDeviceName(
+      WebRtc_UWord16 index,
+      char name[kAdmMaxDeviceNameSize],
+      char guid[kAdmMaxGuidSize]);
+
+  virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index);
+  virtual WebRtc_Word32 SetPlayoutDevice(
+      AudioDeviceModule::WindowsDeviceType device);
+  virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index);
+  virtual WebRtc_Word32 SetRecordingDevice(
+      AudioDeviceModule::WindowsDeviceType device);
+
+  virtual WebRtc_Word32 PlayoutIsAvailable(bool& available);
+  virtual WebRtc_Word32 InitPlayout();
+  virtual bool PlayoutIsInitialized() const;
+  virtual WebRtc_Word32 RecordingIsAvailable(bool& available);
+  virtual WebRtc_Word32 InitRecording();
+  virtual bool RecordingIsInitialized() const;
+
+  virtual WebRtc_Word32 StartPlayout();
+  virtual WebRtc_Word32 StopPlayout();
+  virtual bool Playing() const;
+  virtual WebRtc_Word32 StartRecording();
+  virtual WebRtc_Word32 StopRecording();
+  virtual bool Recording() const;
+
+  virtual WebRtc_Word32 SetAGC(bool enable);
+  virtual bool AGC() const;
+
+  virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
+                                         WebRtc_UWord16 volumeRight);
+  virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft,
+                                      WebRtc_UWord16& volumeRight) const;
+
+  virtual WebRtc_Word32 SpeakerIsAvailable(bool& available);
+  virtual WebRtc_Word32 InitSpeaker();
+  virtual bool SpeakerIsInitialized() const;
+  virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available);
+  virtual WebRtc_Word32 InitMicrophone();
+  virtual bool MicrophoneIsInitialized() const;
+
+  virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+  virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+  virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+  virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+  virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+  virtual WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+  virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+  virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+  virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+  virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+  virtual WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+  virtual WebRtc_Word32 MicrophoneVolumeStepSize(
+      WebRtc_UWord16& stepSize) const;
+
+  virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+  virtual WebRtc_Word32 SetSpeakerMute(bool enable);
+  virtual WebRtc_Word32 SpeakerMute(bool& enabled) const;
+
+  virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+  virtual WebRtc_Word32 SetMicrophoneMute(bool enable);
+  virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+
+  virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+  virtual WebRtc_Word32 SetMicrophoneBoost(bool enable);
+  virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+
+  virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
+  virtual WebRtc_Word32 SetStereoPlayout(bool enable);
+  virtual WebRtc_Word32 StereoPlayout(bool& enabled) const;
+  virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
+  virtual WebRtc_Word32 SetStereoRecording(bool enable);
+  virtual WebRtc_Word32 StereoRecording(bool& enabled) const;
+
+  virtual WebRtc_Word32 SetPlayoutBuffer(
+      const AudioDeviceModule::BufferType type, WebRtc_UWord16 sizeMS);
+  virtual WebRtc_Word32 PlayoutBuffer(
+      AudioDeviceModule::BufferType& type, WebRtc_UWord16& sizeMS) const;
+  virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const;
+  virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const;
+
+  virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const;
+
+  virtual bool PlayoutWarning() const;
+  virtual bool PlayoutError() const;
+  virtual bool RecordingWarning() const;
+  virtual bool RecordingError() const;
+  virtual void ClearPlayoutWarning();
+  virtual void ClearPlayoutError();
+  virtual void ClearRecordingWarning();
+  virtual void ClearRecordingError();
+
+  virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+
+  virtual WebRtc_Word32 SetRecordingSampleRate(
+      const WebRtc_UWord32 samplesPerSec);
+  virtual WebRtc_Word32 SetPlayoutSampleRate(
+      const WebRtc_UWord32 samplesPerSec);
+
+  virtual WebRtc_Word32 SetLoudspeakerStatus(bool enable);
+  virtual WebRtc_Word32 GetLoudspeakerStatus(bool& enable) const;
+
+ private:
+  // Lock
+  void Lock() {
+    _critSect.Enter();
+  };
+  void UnLock() {
+    _critSect.Leave();
+  };
+
+  // Init
+  WebRtc_Word32 InitJavaResources();
+  WebRtc_Word32 InitSampleRate();
+
+  // Threads
+  static bool RecThreadFunc(void*);
+  static bool PlayThreadFunc(void*);
+  bool RecThreadProcess();
+  bool PlayThreadProcess();
+
+  // Misc
+  AudioDeviceBuffer* _ptrAudioBuffer;
+  CriticalSectionWrapper& _critSect;
+  WebRtc_Word32 _id;
+
+  // Events
+  EventWrapper& _timeEventRec;
+  EventWrapper& _timeEventPlay;
+  EventWrapper& _recStartStopEvent;
+  EventWrapper& _playStartStopEvent;
+
+  // Threads
+  ThreadWrapper* _ptrThreadPlay;
+  ThreadWrapper* _ptrThreadRec;
+  WebRtc_UWord32 _recThreadID;
+  WebRtc_UWord32 _playThreadID;
+  bool _playThreadIsInitialized;
+  bool _recThreadIsInitialized;
+  bool _shutdownPlayThread;
+  bool _shutdownRecThread;
+
+  // Rec buffer
+  WebRtc_Word8 _recBuffer[2 * REC_BUF_SIZE_IN_SAMPLES];
+
+  // States
+  bool _recordingDeviceIsSpecified;
+  bool _playoutDeviceIsSpecified;
+  bool _initialized;
+  bool _recording;
+  bool _playing;
+  bool _recIsInitialized;
+  bool _playIsInitialized;
+  bool _micIsInitialized;
+  bool _speakerIsInitialized;
+
+  // Signal flags to threads
+  bool _startRec;
+  bool _stopRec;
+  bool _startPlay;
+  bool _stopPlay;
+
+  // Warnings and errors
+  WebRtc_UWord16 _playWarning;
+  WebRtc_UWord16 _playError;
+  WebRtc_UWord16 _recWarning;
+  WebRtc_UWord16 _recError;
+
+  // Delay
+  WebRtc_UWord16 _delayPlayout;
+  WebRtc_UWord16 _delayRecording;
+
+  // AGC state
+  bool _AGC;
+
+  // Stored device properties
+  WebRtc_UWord16 _samplingFreqIn; // Sampling frequency for Mic
+  WebRtc_UWord16 _samplingFreqOut; // Sampling frequency for Speaker
+  WebRtc_UWord32 _maxSpeakerVolume; // The maximum speaker volume value
+  bool _loudSpeakerOn;
+  // Stores the desired audio source to use, set in SetRecordingDevice
+  int _recAudioSource;
+
+  // JNI and Java
+  JavaVM* _javaVM; // denotes a Java VM
+  jobject _javaContext; // the application context
+
+  JNIEnv* _jniEnvPlay; // The JNI env for playout thread
+  JNIEnv* _jniEnvRec; // The JNI env for recording thread
+
+  jclass _javaScClass; // AudioDeviceAndroid class
+  jobject _javaScObj; // AudioDeviceAndroid object
+
+  // The play buffer field in AudioDeviceAndroid object (global ref)
+  jobject _javaPlayBuffer;
+  // The rec buffer field in AudioDeviceAndroid object (global ref)
+  jobject _javaRecBuffer;
+  void* _javaDirectPlayBuffer; // Direct buffer pointer to play buffer
+  void* _javaDirectRecBuffer; // Direct buffer pointer to rec buffer
+  jmethodID _javaMidPlayAudio; // Method ID of play in AudioDeviceAndroid
+  jmethodID _javaMidRecAudio; // Method ID of rec in AudioDeviceAndroid
+
+  // TODO(leozwang): Android holds only one JVM, all these jni handling
+  // will be consolidated into a single place to make it consistant and
+  // reliable. Chromium has a good example at base/android.
+  static JavaVM* globalJvm;
+  static JNIEnv* globalJNIEnv;
+  static jobject globalSndContext;
+  static jclass globalScClass;
 };
 
 } // namespace webrtc
@@ -110,8 +110,6 @@
         'win/audio_device_utility_win.h',
         'win/audio_mixer_manager_win.cc',
         'win/audio_mixer_manager_win.h',
-        'android/audio_device_android_opensles.cc',
-        'android/audio_device_android_opensles.h',
         'android/audio_device_utility_android.cc',
         'android/audio_device_utility_android.h',
       ],
@@ -123,6 +121,19 @@
             '-lOpenSLES',
           ],
         },
+        'conditions': [
+          ['enable_android_opensl==1', {
+            'sources': [
+              'android/audio_device_android_opensles.cc',
+              'android/audio_device_android_opensles.h',
+            ],
+          }, {
+            'sources': [
+              'android/audio_device_android_jni.cc',
+              'android/audio_device_android_jni.h',
+            ],
+          }],
+        ],
       }],
       ['OS=="linux"', {
         'defines': [
@@ -21,7 +21,8 @@ LOCAL_SRC_FILES := \
     src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java \
     src/org/webrtc/videoengine/ViEAndroidGLES20.java \
    src/org/webrtc/videoengine/ViERenderer.java \
-    src/org/webrtc/videoengine/ViESurfaceRenderer.java
+    src/org/webrtc/videoengine/ViESurfaceRenderer.java \
+    src/org/webrtc/voiceengine/AudioDeviceAndroid.java
 
 LOCAL_PACKAGE_NAME := webrtc-video-demo
 LOCAL_CERTIFICATE := platform
@@ -891,7 +891,7 @@ Notice for all the files in this folder.
             verbose="${verbose}"
             classpathref="project.javac.classpath"
             fork="${need.javac.fork}">
-            <src path="${source.absolute.dir}:../../../../modules/video_capture/main/source/android/java:../../../../modules/video_render/main/source/android/java" />
+            <src path="${source.absolute.dir}:../../../../modules/video_capture/main/source/android/java:../../../../modules/video_render/main/source/android/java:../../../../modules/audio_device/main/source/android" />
             <src path="${gen.absolute.dir}" />
             <compilerarg line="${java.compilerargs}" />
         </javac>
@@ -1103,9 +1103,11 @@ JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopInco
  */
 JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Create(
     JNIEnv *env,
-    jobject)
+    jobject context)
 {
-  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "Create");
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "Create VoiceEngine");
+
+  VoiceEngine::SetAndroidObjects(webrtcGlobalVM, env, context);
 
   // Check if already created
   if (voeData.ve) {
@@ -24,7 +24,7 @@ int ViEAutoTestAndroid::RunAutotest(int testSelection, int subTestSelection,
   webrtc::VideoEngine::SetAndroidObjects(javaVM, context);
 #ifndef WEBRTC_ANDROID_OPENSLES
   // voice engine calls into ADM directly
-  webrtc::VoiceEngine::SetAndroidAudioDeviceObjects(javaVM, env, context);
+  webrtc::VoiceEngine::SetAndroidObjects(javaVM, env, context);
 #endif
 
   if (subTestSelection == 0) {
@@ -8,17 +8,13 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 
+#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_ANDROID_OPENSLES)
+#include "modules/audio_device/main/source/android/audio_device_android_jni.h"
+#endif
+
 #include "voice_engine_impl.h"
 #include "trace.h"
 
-#ifdef WEBRTC_ANDROID
-extern "C"
-{
-extern WebRtc_Word32 SetAndroidAudioDeviceObjects(
-    void* javaVM, void* env, void* context);
-} // extern "C"
-#endif
-
 namespace webrtc
 {
 
@@ -149,11 +145,10 @@ bool VoiceEngine::Delete(VoiceEngine*& voiceEngine)
 int VoiceEngine::SetAndroidObjects(void* javaVM, void* env, void* context)
 {
 #if defined(WEBRTC_ANDROID) && !defined(WEBRTC_ANDROID_OPENSLES)
-    // modules/audio_device/main/source/android/audio_device_android_jni.cc
-    // contains the actual implementation.
-    return SetAndroidAudioDeviceObjects(javaVM, env, context);
+  return AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects(
+      javaVM, env, context);
 #else
-    return -1;
+  return -1;
 #endif
 }
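
Applications are expected to go through this VoiceEngine wrapper rather than
the ADM class directly, as the demo's VoE_1Create JNI glue above does. A
minimal sketch of the application-side sequence, with error handling elided
(the variable names follow the demo code; the ordering is an assumption):

    // Sketch: hand the JavaVM/JNIEnv/Context to the voice engine before
    // creating it, mirroring the VoE_1Create JNI glue above.
    webrtc::VoiceEngine::SetAndroidObjects(webrtcGlobalVM, env, context);
    webrtc::VoiceEngine* voe = webrtc::VoiceEngine::Create();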