AppRTCDemo (Android): enable built-in AEC when the device supports it, in combination with the Java-based audio layer

BUG=4034
R=andrew@webrtc.org, perkj@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/32179004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@7849 4adac7df-926f-26a2-2b94-8c16560cd09d
henrika@webrtc.org 2014-12-09 16:22:09 +00:00
parent 19dd129c69
commit a954c07ee1
18 changed files with 250 additions and 15 deletions
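
For orientation, here is a minimal, hypothetical usage sketch (not part of the commit) of the API this change introduces: a caller checks the new BuiltInAECIsAvailable() on the AudioDeviceModule and, if the device offers a built-in AEC, enables it instead of WebRTC's software echo canceller. The ConfigureEchoCancellation() helper and the already-initialized adm pointer are illustrative assumptions; only BuiltInAECIsAvailable() and EnableBuiltInAEC() come from the diff below.

```cpp
// Hypothetical usage sketch (not part of this commit). Only
// BuiltInAECIsAvailable() and EnableBuiltInAEC() are taken from the
// AudioDeviceModule API changed below; everything else is illustrative.
#include "webrtc/modules/audio_device/include/audio_device.h"  // assumed path

// Prefer the device's built-in AEC (Android, Java audio layer) and fall
// back to WebRTC's software echo canceller otherwise.
void ConfigureEchoCancellation(webrtc::AudioDeviceModule* adm,
                               bool echo_cancellation_requested) {
  if (!echo_cancellation_requested)
    return;
  if (adm->BuiltInAECIsAvailable()) {
    adm->EnableBuiltInAEC(true);
    // The caller would then leave the software AEC disabled, mirroring
    // WebRtcVoiceEngine::ApplyOptions() in this change.
  } else {
    // Enable the software AEC through the usual VoEAudioProcessing path.
  }
}
```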

View File

@ -198,6 +198,8 @@ class FakeAudioCaptureModule
virtual int32_t ResetAudioDevice() OVERRIDE;
virtual int32_t SetLoudspeakerStatus(bool enable) OVERRIDE;
virtual int32_t GetLoudspeakerStatus(bool* enabled) const OVERRIDE;
virtual bool BuiltInAECIsAvailable() const { return false; }
virtual int32_t EnableBuiltInAEC(bool enable) { return -1; }
// End of functions inherited from webrtc::AudioDeviceModule.
// The following function is inherited from rtc::MessageHandler.

View File

@ -1,6 +1,6 @@
/*
* libjingle
- * Copyright 2013, Google Inc.
+ * Copyright 2014, Google Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:

View File

@ -836,6 +836,7 @@ class FakeWebRtcVoiceEngine
}
WEBRTC_STUB(EnableBuiltInAEC, (bool enable));
virtual bool BuiltInAECIsEnabled() const { return true; }
virtual bool BuiltInAECIsAvailable() const { return false; }
// webrtc::VoENetEqStats
WEBRTC_STUB(GetNetworkStatistics, (int, webrtc::NetworkStatistics&));

View File

@ -811,8 +811,23 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
webrtc::VoEAudioProcessing* voep = voe_wrapper_->processing();
- bool echo_cancellation;
+ bool echo_cancellation = false;
if (options.echo_cancellation.Get(&echo_cancellation)) {
// Check if platform supports built-in EC. Currently only supported on
// Android and in combination with Java based audio layer.
// TODO(henrika): investigate possibility to support built-in EC also
// in combination with Open SL ES audio.
const bool built_in_aec = voe_wrapper_->hw()->BuiltInAECIsAvailable();
if (built_in_aec) {
// Set mode of built-in EC according to the audio options.
voe_wrapper_->hw()->EnableBuiltInAEC(echo_cancellation);
if (echo_cancellation) {
// Disable internal software EC if device has its own built-in EC,
// i.e., replace the software EC with the built-in EC.
options.echo_cancellation.Set(false);
LOG(LS_INFO) << "Disabling EC since built-in EC will be used instead";
}
}
if (voep->SetEcStatus(echo_cancellation, ec_mode) == -1) {
LOG_RTCERR2(SetEcStatus, echo_cancellation, ec_mode);
return false;
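
The hunk above is the core policy change: when the caller requests echo cancellation and the device exposes a built-in AEC, the built-in canceller is enabled and the software EC is switched off so only one canceller runs. Below is a standalone sketch of that decision with simplified placeholder types that are not WebRTC's (assumes C++11):

```cpp
// Standalone restatement of the decision made in ApplyOptions() above,
// with the AudioOptions/VoEHardware plumbing replaced by plain booleans.
// Only one canceller ends up active: the built-in AEC replaces the
// software EC when the device provides one.
#include <iostream>

struct EcDecision {
  bool enable_built_in_aec = false;
  bool enable_software_ec = false;
};

EcDecision DecideEchoCancellation(bool echo_cancellation_requested,
                                  bool built_in_aec_available) {
  EcDecision d;
  if (!echo_cancellation_requested)
    return d;  // EC was not requested; leave both cancellers off.
  if (built_in_aec_available) {
    d.enable_built_in_aec = true;  // built-in (hardware) AEC takes over
  } else {
    d.enable_software_ec = true;   // fall back to WebRTC's software EC
  }
  return d;
}

int main() {
  const EcDecision d = DecideEchoCancellation(/*requested=*/true,
                                              /*available=*/true);
  std::cout << "built-in AEC: " << d.enable_built_in_aec
            << ", software EC: " << d.enable_software_ec << std::endl;
  return 0;
}
```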

View File

@ -405,6 +405,14 @@ class AudioDeviceTemplate : public AudioDeviceGeneric {
return output_.GetLoudspeakerStatus(enable);
}
bool BuiltInAECIsAvailable() const {
return input_.BuiltInAECIsAvailable();
}
int32_t EnableBuiltInAEC(bool enable) {
return input_.EnableBuiltInAEC(enable);
}
private:
OutputType output_;
InputType input_;

View File

@ -23,7 +23,6 @@
#include "webrtc/modules/audio_device/android/audio_common.h"
#include "webrtc/modules/audio_device/audio_device_config.h"
#include "webrtc/modules/audio_device/audio_device_utility.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
@ -360,7 +359,6 @@ int32_t AudioRecordJni::RecordingIsAvailable(bool& available) { // NOLINT
int32_t AudioRecordJni::InitRecording() {
CriticalSectionScoped lock(&_critSect);
if (!_initialized)
{
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
@ -799,6 +797,84 @@ int32_t AudioRecordJni::SetRecordingSampleRate(const uint32_t samplesPerSec) {
return 0;
}
bool AudioRecordJni::BuiltInAECIsAvailable() const {
assert(_javaVM);
JNIEnv* env = NULL;
bool isAttached = false;
// Get the JNI env for this thread
if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
jint res = _javaVM->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
return false;
}
isAttached = true;
}
// Get method ID for BuiltInAECIsAvailable
jmethodID builtInAECIsAvailable = env->GetStaticMethodID(
_javaScClass, "BuiltInAECIsAvailable", "()Z");
if (builtInAECIsAvailable == NULL) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"%s: Unable to get BuiltInAECIsAvailable ID", __FUNCTION__);
return false;
}
// Call the static BuiltInAECIsAvailable method
jboolean hw_aec = env->CallStaticBooleanMethod(_javaScClass,
builtInAECIsAvailable);
// Detach this thread if it was attached
if (isAttached) {
if (_javaVM->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
return hw_aec;
}
int32_t AudioRecordJni::EnableBuiltInAEC(bool enable) {
assert(_javaVM);
jint res = 0;
JNIEnv* env = NULL;
bool isAttached = false;
// Get the JNI env for this thread
if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
res = _javaVM->AttachCurrentThread(&env, NULL);
if ((res < 0) || !env) {
return false;
}
isAttached = true;
}
// Get method ID for EnableBuiltInAEC "(argument-types)return-type"
jmethodID enableBuiltInAEC = env->GetMethodID(_javaScClass,
"EnableBuiltInAEC",
"(Z)I");
// Call the EnableBuiltInAEC method
res = env->CallIntMethod(_javaScObj, enableBuiltInAEC, enable);
if (res < 0) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
"EnableBuiltInAEC failed (%d)", res);
}
// Detach this thread if it was attached
if (isAttached) {
if (_javaVM->DetachCurrentThread() < 0) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"%s: Could not detach thread from JVM", __FUNCTION__);
}
}
return res;
}
int32_t AudioRecordJni::InitJavaResources() {
// todo: Check if we already have created the java object
_javaVM = globalJvm;

View File

@ -109,6 +109,9 @@ class AudioRecordJni {
int32_t SetRecordingSampleRate(const uint32_t samplesPerSec);
bool BuiltInAECIsAvailable() const;
int32_t EnableBuiltInAEC(bool enable);
private:
void Lock() EXCLUSIVE_LOCK_FUNCTION(_critSect) {
_critSect.Enter();

View File

@ -15,9 +15,13 @@ import java.util.concurrent.locks.ReentrantLock;
import android.content.Context;
import android.media.AudioFormat;
import android.media.audiofx.AcousticEchoCanceler;
import android.media.audiofx.AudioEffect;
import android.media.audiofx.AudioEffect.Descriptor;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.MediaRecorder.AudioSource;
import android.os.Build;
import android.util.Log;
class WebRtcAudioRecord {
@ -35,6 +39,13 @@ class WebRtcAudioRecord {
private int _bufferedRecSamples = 0;
private AcousticEchoCanceler _aec = null;
private boolean _useBuiltInAEC = false;
private static boolean runningOnJellyBeanOrHigher() {
return Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN;
}
WebRtcAudioRecord() {
try {
_recBuffer = ByteBuffer.allocateDirect(2 * 480); // Max 10 ms @ 48
@ -46,8 +57,42 @@ class WebRtcAudioRecord {
_tempBufRec = new byte[2 * 480];
}
public static boolean BuiltInAECIsAvailable() {
// AcousticEchoCanceler was added in API level 16 (Jelly Bean).
if (!runningOnJellyBeanOrHigher()) {
return false;
}
// TODO(henrika): add black-list based on device name. We could also
// use uuid to exclude devices but that would require a session ID from
// an existing AudioRecord object.
return AcousticEchoCanceler.isAvailable();
}
private int EnableBuiltInAEC(boolean enable) {
DoLog("EnableBuiltInAEC(" + enable + ')');
// AcousticEchoCanceler was added in API level 16 (Jelly Bean).
if (!runningOnJellyBeanOrHigher()) {
return -1;
}
_useBuiltInAEC = enable;
// Set AEC state if AEC has already been created.
if (_aec != null) {
int ret = _aec.setEnabled(enable);
if (ret != AudioEffect.SUCCESS) {
DoLogErr("AcousticEchoCanceler.setEnabled failed");
return -1;
}
DoLog("AcousticEchoCanceler.getEnabled: " + _aec.getEnabled());
}
return 0;
}
@SuppressWarnings("unused")
private int InitRecording(int audioSource, int sampleRate) {
DoLog("InitRecording");
audioSource = AudioSource.VOICE_COMMUNICATION;
// get the minimum buffer size that can be used
int minRecBufSize = AudioRecord.getMinBufferSize(
@ -64,6 +109,11 @@ class WebRtcAudioRecord {
_bufferedRecSamples = sampleRate / 200;
// DoLog("rough rec delay set to " + _bufferedRecSamples);
if (_aec != null) {
_aec.release();
_aec = null;
}
// release the object
if (_audioRecord != null) {
_audioRecord.release();
@ -91,11 +141,36 @@ class WebRtcAudioRecord {
// DoLog("rec sample rate set to " + sampleRate);
DoLog("AcousticEchoCanceler.isAvailable: " + BuiltInAECIsAvailable());
if (!BuiltInAECIsAvailable()) {
return _bufferedRecSamples;
}
_aec = AcousticEchoCanceler.create(_audioRecord.getAudioSessionId());
if (_aec == null) {
DoLogErr("AcousticEchoCanceler.create failed");
return -1;
}
int ret = _aec.setEnabled(_useBuiltInAEC);
if (ret != AudioEffect.SUCCESS) {
DoLogErr("AcousticEchoCanceler.setEnabled failed");
return -1;
}
Descriptor descriptor = _aec.getDescriptor();
DoLog("AcousticEchoCanceler " +
"name: " + descriptor.name + ", " +
"implementor: " + descriptor.implementor + ", " +
"uuid: " + descriptor.uuid);
DoLog("AcousticEchoCanceler.getEnabled: " + _aec.getEnabled());
return _bufferedRecSamples;
}
@SuppressWarnings("unused")
private int StartRecording() {
DoLog("StartRecording");
// start recording
try {
_audioRecord.startRecording();
@ -111,6 +186,7 @@ class WebRtcAudioRecord {
@SuppressWarnings("unused")
private int StopRecording() {
DoLog("StopRecording");
_recLock.lock();
try {
// only stop if we are recording
@ -125,7 +201,13 @@ class WebRtcAudioRecord {
}
}
- // release the object
+ // Release the AEC object.
if (_aec != null) {
_aec.release();
_aec = null;
}
// Release the AudioRecord object.
_audioRecord.release();
_audioRecord = null;
@ -185,7 +267,7 @@ class WebRtcAudioRecord {
return _bufferedRecSamples;
}
final String logTag = "WebRTC AD java";
final String logTag = "WebRtcAudioRecord-Java";
private void DoLog(String msg) {
Log.d(logTag, msg);

View File

@ -118,6 +118,10 @@ class OpenSlesInput {
// Attach audio buffer
void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
// Built-in AEC is only supported in combination with Java/AudioRecord.
bool BuiltInAECIsAvailable() const { return false; }
int32_t EnableBuiltInAEC(bool enable) { return -1; }
private:
enum {
kNumInterfaces = 2,

View File

@ -58,10 +58,16 @@ int32_t AudioDeviceGeneric::SoundDeviceControl(unsigned int par1,
return -1;
}
bool AudioDeviceGeneric::BuiltInAECIsAvailable() const {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
"Built-in AEC not supported on this platform");
return false;
}
int32_t AudioDeviceGeneric::EnableBuiltInAEC(bool enable)
{
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
"Windows AEC not supported on this platform");
"Built-in AEC not supported on this platform");
return -1;
}

View File

@ -155,8 +155,13 @@ class AudioDeviceGeneric
unsigned int par3 = 0,
unsigned int par4 = 0);
- // Windows Core Audio only.
+ // Android only
+ virtual bool BuiltInAECIsAvailable() const;
+ // Windows Core Audio and Android only.
virtual int32_t EnableBuiltInAEC(bool enable);
// Windows Core Audio only.
virtual bool BuiltInAECIsEnabled() const;
public:

View File

@ -1978,9 +1978,8 @@ int32_t AudioDeviceModuleImpl::GetLoudspeakerStatus(bool* enabled) const
int32_t AudioDeviceModuleImpl::EnableBuiltInAEC(bool enable)
{
CHECK_INITIALIZED();
return _ptrAudioDevice->EnableBuiltInAEC(enable);
}
bool AudioDeviceModuleImpl::BuiltInAECIsEnabled() const
@ -1990,6 +1989,11 @@ bool AudioDeviceModuleImpl::BuiltInAECIsEnabled() const
return _ptrAudioDevice->BuiltInAECIsEnabled();
}
bool AudioDeviceModuleImpl::BuiltInAECIsAvailable() const {
CHECK_INITIALIZED_BOOL();
return _ptrAudioDevice->BuiltInAECIsAvailable();
}
// ============================================================================
// Private Methods
// ============================================================================

View File

@ -199,6 +199,8 @@ public:
virtual int32_t SetLoudspeakerStatus(bool enable) OVERRIDE;
virtual int32_t GetLoudspeakerStatus(bool* enabled) const OVERRIDE;
virtual bool BuiltInAECIsAvailable() const OVERRIDE;
virtual int32_t EnableBuiltInAEC(bool enable) OVERRIDE;
virtual bool BuiltInAECIsEnabled() const OVERRIDE;

View File

@ -182,15 +182,20 @@ class AudioDeviceModule : public RefCountedModule {
virtual int32_t SetLoudspeakerStatus(bool enable) = 0;
virtual int32_t GetLoudspeakerStatus(bool* enabled) const = 0;
// *Experimental - not recommended for use.*
- // Enables the Windows Core Audio built-in AEC. Fails on other platforms.
+ // Only supported on Android.
+ virtual bool BuiltInAECIsAvailable() const = 0;
+ // Enables the built-in AEC. Only supported on Windows and Android.
//
// For usage on Windows (requires Core Audio):
// Must be called before InitRecording(). When enabled:
// 1. StartPlayout() must be called before StartRecording().
// 2. StopRecording() should be called before StopPlayout().
// The reverse order may cause garbage audio to be rendered or the
// capture side to halt until StopRecording() is called.
- virtual int32_t EnableBuiltInAEC(bool enable) { return -1; }
+ virtual int32_t EnableBuiltInAEC(bool enable) = 0;
// Don't use.
virtual bool BuiltInAECIsEnabled() const { return false; }
protected:

View File

@ -144,6 +144,7 @@ class FakeAudioDeviceModule : public AudioDeviceModule {
virtual int32_t ResetAudioDevice() { return 0; }
virtual int32_t SetLoudspeakerStatus(bool enable) { return 0; }
virtual int32_t GetLoudspeakerStatus(bool* enabled) const { return 0; }
virtual bool BuiltInAECIsAvailable() const { return false; }
virtual int32_t EnableBuiltInAEC(bool enable) { return -1; }
virtual bool BuiltInAECIsEnabled() const { return false; }
};

View File

@ -89,8 +89,10 @@ public:
virtual int SetPlayoutSampleRate(unsigned int samples_per_sec) = 0;
virtual int PlayoutSampleRate(unsigned int* samples_per_sec) const = 0;
+ virtual bool BuiltInAECIsAvailable() const = 0;
+ virtual int EnableBuiltInAEC(bool enable) = 0;
// To be removed. Don't use.
- virtual int EnableBuiltInAEC(bool enable) { return -1; }
virtual bool BuiltInAECIsEnabled() const { return false; }
virtual int GetRecordingDeviceStatus(bool& isAvailable) { return -1; }
virtual int GetPlayoutDeviceStatus(bool& isAvailable) { return -1; }

View File

@ -568,6 +568,22 @@ int VoEHardwareImpl::PlayoutSampleRate(unsigned int* samples_per_sec) const {
return _shared->audio_device()->PlayoutSampleRate(samples_per_sec);
}
bool VoEHardwareImpl::BuiltInAECIsAvailable() const {
if (!_shared->statistics().Initialized()) {
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return false;
}
return _shared->audio_device()->BuiltInAECIsAvailable();
}
int VoEHardwareImpl::EnableBuiltInAEC(bool enable) {
if (!_shared->statistics().Initialized()) {
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return false;
}
return _shared->audio_device()->EnableBuiltInAEC(enable);
}
#endif // WEBRTC_VOICE_ENGINE_HARDWARE_API
} // namespace webrtc

View File

@ -48,6 +48,9 @@ public:
virtual int SetPlayoutSampleRate(unsigned int samples_per_sec);
virtual int PlayoutSampleRate(unsigned int* samples_per_sec) const;
virtual bool BuiltInAECIsAvailable() const;
virtual int EnableBuiltInAEC(bool enable);
protected:
VoEHardwareImpl(voe::SharedData* shared);
virtual ~VoEHardwareImpl();