APK for opensl loopback.

BUG=N/A
R=andrew@webrtc.org, fischman@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/2212004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4901 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
henrike@webrtc.org 2013-10-02 14:58:19 +00:00
parent de74b64184
commit 1fdc51ae2a
17 changed files with 697 additions and 13 deletions

View File

@ -43,9 +43,9 @@ class AudioManagerJni {
// SetAndroidAudioDeviceObjects.
static void ClearAndroidAudioDeviceObjects();
bool low_latency_supported() { return low_latency_supported_; }
int native_output_sample_rate() { return native_output_sample_rate_; }
int native_buffer_size() { return native_buffer_size_; }
bool low_latency_supported() const { return low_latency_supported_; }
int native_output_sample_rate() const { return native_output_sample_rate_; }
int native_buffer_size() const { return native_buffer_size_; }
private:
bool HasDeviceObjects();

View File

@ -17,7 +17,8 @@ namespace webrtc_opensl {
enum {
kDefaultSampleRate = 44100,
kNumChannels = 1
kNumChannels = 1,
kDefaultBufSizeInSamples = kDefaultSampleRate * 10 / 1000,
};

View File

@ -0,0 +1,22 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    android:versionCode="1" package="org.webrtc.app" android:versionName="1.07">
  <!-- debuggable is intentionally true: this is a developer tool for
       measuring OpenSL loopback delay, not a shippable app. -->
  <application android:icon="@drawable/logo"
               android:label="@string/app_name"
               android:debuggable="true">
    <activity android:name=".OpenSlDemo"
              android:label="@string/app_name"
              android:screenOrientation="landscape"
              >
      <intent-filter>
        <action android:name="android.intent.action.MAIN" />
        <category android:name="android.intent.category.LAUNCHER" />
        <!-- NOTE(review): HEADSET_PLUG is normally a broadcast, not an
             activity action; presumably inert here - confirm it is needed. -->
        <action android:name="android.intent.action.HEADSET_PLUG"/>
      </intent-filter>
    </activity>
  </application>
  <uses-sdk android:minSdkVersion="14" />
  <!-- RECORD_AUDIO: mic capture for the loopback.
       WAKE_LOCK: the activity holds a screen-dim wake lock while running. -->
  <uses-permission android:name="android.permission.RECORD_AUDIO" />
  <uses-permission android:name="android.permission.WAKE_LOCK" />
</manifest>

View File

@ -0,0 +1,23 @@
This directory contains an app for measuring the total delay from the native
OpenSL implementation. Note that it just loops audio back from mic to speakers.
Prerequisites:
- Make sure gclient is checking out tools necessary to target Android: your
.gclient file should contain a line like:
target_os = ['android']
Make sure to re-run gclient sync after adding this to download the tools.
- Env vars need to be set up to target Android; easiest way to do this is to run
(from the libjingle trunk directory):
. ./build/android/envsetup.sh
Note that this clobbers any previously-set $GYP_DEFINES so it must be done
before the next item.
- Set up webrtc-related GYP variables:
export GYP_DEFINES="$GYP_DEFINES java_home=</path/to/JDK>
enable_android_opensl=1"
- Finally, run "gclient runhooks" to generate Android-targeting .ninja files.
Example of building & using the app:
cd <path/to/repository>/trunk
ninja -C out/Debug OpenSlDemo
adb install -r out/Debug/OpenSlDemo-debug.apk

View File

@ -0,0 +1,92 @@
<?xml version="1.0" encoding="UTF-8"?>
<project name="OpenSlDemo" default="help">
<!-- The local.properties file is created and updated by the 'android' tool.
It contains the path to the SDK. It should *NOT* be checked into
Version Control Systems. -->
<property file="local.properties" />
<!-- The ant.properties file can be created by you. It is only edited by the
'android' tool to add properties to it.
This is the place to change some Ant specific build properties.
Here are some properties you may want to change/update:
source.dir
The name of the source directory. Default is 'src'.
out.dir
The name of the output directory. Default is 'bin'.
For other overridable properties, look at the beginning of the rules
files in the SDK, at tools/ant/build.xml
Properties related to the SDK location or the project target should
be updated using the 'android' tool with the 'update' action.
This file is an integral part of the build system for your
application and should be checked into Version Control Systems.
-->
<property file="ant.properties" />
<!-- if sdk.dir was not set from one of the property file, then
get it from the ANDROID_SDK_ROOT env var.
This must be done before we load project.properties since
the proguard config can use sdk.dir -->
<property environment="env" />
<condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
<isset property="env.ANDROID_SDK_ROOT" />
</condition>
<!-- The project.properties file is created and updated by the 'android'
tool, as well as ADT.
This contains project specific properties such as project target, and library
dependencies. Lower level build properties are stored in ant.properties
(or in .classpath for Eclipse projects).
This file is an integral part of the build system for your
application and should be checked into Version Control Systems. -->
<loadproperties srcFile="project.properties" />
<!-- quick check on sdk.dir -->
<fail
message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_SDK_ROOT environment variable."
unless="sdk.dir"
/>
<!--
Import per project custom build rules if present at the root of the project.
This is the place to put custom intermediary targets such as:
-pre-build
-pre-compile
-post-compile (This is typically used for code obfuscation.
Compiled code location: ${out.classes.absolute.dir}
If this is not done in place, override ${out.dex.input.absolute.dir})
-post-package
-post-build
-pre-clean
-->
<import file="custom_rules.xml" optional="true" />
<!-- Import the actual build file.
To customize existing targets, there are two options:
- Customize only one target:
- copy/paste the target into this file, *before* the
<import> task.
- customize it to your needs.
- Customize the whole content of build.xml
- copy/paste the content of the rules files (minus the top node)
into this file, replacing the <import> task.
- customize to your needs.
***********************
****** IMPORTANT ******
***********************
In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
in order to avoid having your file be overridden by tools such as "android update project"
-->
<!-- version-tag: 1 -->
<import file="${sdk.dir}/tools/ant/build.xml" />
</project>

View File

@ -0,0 +1,109 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/audio_device/android/test/fake_audio_device_buffer.h"
#include <assert.h>
#include "webrtc/modules/audio_device/android/opensles_common.h"
using webrtc_opensl::kDefaultBufSizeInSamples;
namespace webrtc {
// Pre-allocates kNumBuffers fixed-size byte buffers and sizes the FIFO to
// hold all of them. Channel counts start at 0 ("unset") and are filled in
// later by SetRecordingChannels()/SetPlayoutChannels().
FakeAudioDeviceBuffer::FakeAudioDeviceBuffer()
    : fifo_(kNumBuffers),
      next_available_buffer_(0),
      record_channels_(0),
      play_channels_(0) {
  buf_.reset(new scoped_array<int8_t>[kNumBuffers]);
  for (int i = 0; i < kNumBuffers; ++i) {
    // Each slot holds one buffer_size_bytes() chunk, i.e. 10 ms of audio at
    // the rate reported by sample_rate().
    buf_[i].reset(new int8_t[buffer_size_bytes()]);
  }
}
// The fake buffer supports only the single rate reported by sample_rate();
// configuring any other recording rate is a programming error (assert).
int32_t FakeAudioDeviceBuffer::SetRecordingSampleRate(uint32_t fsHz) {
  assert(static_cast<int>(fsHz) == sample_rate());
  return 0;
}
// Same contract as SetRecordingSampleRate(): only the rate reported by
// sample_rate() is accepted.
int32_t FakeAudioDeviceBuffer::SetPlayoutSampleRate(uint32_t fsHz) {
  assert(static_cast<int>(fsHz) == sample_rate());
  return 0;
}
// Stores the recording channel count. Once both directions have been
// configured they must agree, since recorded buffers are played out verbatim.
int32_t FakeAudioDeviceBuffer::SetRecordingChannels(uint8_t channels) {
  assert(channels > 0);
  record_channels_ = channels;
  // play_channels_ == 0 means playout has not been configured yet.
  assert((play_channels_ == 0) ||
         (record_channels_ == play_channels_));
  return 0;
}
// Stores the playout channel count; mirror image of SetRecordingChannels().
int32_t FakeAudioDeviceBuffer::SetPlayoutChannels(uint8_t channels) {
  assert(channels > 0);
  play_channels_ = channels;
  // record_channels_ == 0 means recording has not been configured yet.
  assert((record_channels_ == 0) ||
         (record_channels_ == play_channels_));
  return 0;
}
// Copies one 10 ms chunk of recorded audio into the next round-robin slot and
// pushes it onto the FIFO for playout. The capacity assert means the recorder
// must not outrun GetPlayoutData() by more than kNumBuffers chunks.
int32_t FakeAudioDeviceBuffer::SetRecordedBuffer(const void* audioBuffer,
                                                 uint32_t nSamples) {
  assert(audioBuffer);
  assert(fifo_.size() < fifo_.capacity());
  assert(nSamples == kDefaultBufSizeInSamples);
  int8_t* buffer = buf_[next_available_buffer_].get();
  next_available_buffer_ = (next_available_buffer_ + 1) % kNumBuffers;
  // Samples are 16 bit, hence the sizeof(int16_t) scaling of the byte count.
  memcpy(buffer, audioBuffer, nSamples * sizeof(int16_t));
  fifo_.Push(buffer);
  return 0;
}
// No work to do here: playout data is handed out straight from the FIFO in
// GetPlayoutData(). Only sanity-checks the requested chunk size.
int32_t FakeAudioDeviceBuffer::RequestPlayoutData(uint32_t nSamples) {
  assert(nSamples == kDefaultBufSizeInSamples);
  return 0;
}
// Pops the oldest recorded chunk into |audioBuffer|, or writes silence if the
// recorder has not produced anything yet. Returns the number of samples
// written (always one full 10 ms buffer).
int32_t FakeAudioDeviceBuffer::GetPlayoutData(void* audioBuffer) {
  assert(audioBuffer);
  if (fifo_.size() < 1) {
    // Playout silence until there is data available.
    memset(audioBuffer, 0, buffer_size_bytes());
    return buffer_size_samples();
  }
  int8_t* buffer = fifo_.Pop();
  memcpy(audioBuffer, buffer, buffer_size_bytes());
  return buffer_size_samples();
}
// Rate used throughout the fake buffer: the device's native output rate when
// the platform supports low-latency audio, otherwise the OpenSL default.
int FakeAudioDeviceBuffer::sample_rate() const {
  if (audio_manager_.low_latency_supported()) {
    return audio_manager_.native_output_sample_rate();
  }
  return webrtc_opensl::kDefaultSampleRate;
}
// Number of samples in one buffer: 10 ms worth at the current sample rate.
int FakeAudioDeviceBuffer::buffer_size_samples() const {
  const int kBufferDurationMs = 10;
  return sample_rate() * kBufferDurationMs / 1000;
}
// Size in bytes of one buffer: 16-bit samples times the channel count.
int FakeAudioDeviceBuffer::buffer_size_bytes() const {
  const int bytes_per_frame =
      webrtc_opensl::kNumChannels * static_cast<int>(sizeof(int16_t));
  return buffer_size_samples() * bytes_per_frame;
}
// Drains all queued chunks and rewinds the slot cursor so a subsequent
// start begins from a clean state. The popped pointers are not freed here;
// they point into |buf_|, which owns the storage.
void FakeAudioDeviceBuffer::ClearBuffer() {
  while (fifo_.size() != 0) {
    fifo_.Pop();
  }
  next_available_buffer_ = 0;
}
} // namespace webrtc

View File

@ -0,0 +1,67 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_FAKE_AUDIO_DEVICE_BUFFER_H_
#define WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_FAKE_AUDIO_DEVICE_BUFFER_H_
#include "webrtc/modules/audio_device/android/audio_manager_jni.h"
#include "webrtc/modules/audio_device/android/single_rw_fifo.h"
#include "webrtc/modules/audio_device/audio_device_buffer.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
namespace webrtc {
// Fake AudioDeviceBuffer implementation that returns audio data that is pushed
// to it. It implements all APIs used by the OpenSL implementation.
class FakeAudioDeviceBuffer : public AudioDeviceBuffer {
 public:
  FakeAudioDeviceBuffer();
  virtual ~FakeAudioDeviceBuffer() {}

  // The fake supports exactly one sample rate (see sample_rate() in the
  // .cc); these only assert that the caller agrees with it.
  virtual int32_t SetRecordingSampleRate(uint32_t fsHz);
  virtual int32_t SetPlayoutSampleRate(uint32_t fsHz);
  // Recording and playout channel counts must match once both are set.
  virtual int32_t SetRecordingChannels(uint8_t channels);
  virtual int32_t SetPlayoutChannels(uint8_t channels);
  // Queues one 10 ms chunk of recorded audio for later playout.
  virtual int32_t SetRecordedBuffer(const void* audioBuffer,
                                    uint32_t nSamples);
  // VQE data is irrelevant for a pure loopback; deliberately a no-op.
  virtual void SetVQEData(int playDelayMS,
                          int recDelayMS,
                          int clockDrift) {}
  // Nothing to deliver: recorded data is looped back via GetPlayoutData().
  virtual int32_t DeliverRecordedData() { return 0; }
  virtual int32_t RequestPlayoutData(uint32_t nSamples);
  // Returns queued recorded audio, or silence if the queue is empty.
  virtual int32_t GetPlayoutData(void* audioBuffer);

  // Drops all queued audio; called when the loopback is stopped.
  void ClearBuffer();

 private:
  enum {
    // Each buffer contains 10 ms of data since that is what OpenSlesInput
    // delivers. Keep 7 buffers which would cover 70 ms of data. These buffers
    // are needed because of jitter between OpenSl recording and playing.
    kNumBuffers = 7,
  };

  int sample_rate() const;
  int buffer_size_samples() const;
  int buffer_size_bytes() const;

  // Java API handle
  AudioManagerJni audio_manager_;

  SingleRwFifo fifo_;  // Recorded-but-not-yet-played buffers.
  scoped_array<scoped_array<int8_t> > buf_;  // Backing storage for the FIFO.
  int next_available_buffer_;  // Round-robin index into |buf_|.

  uint8_t record_channels_;  // 0 until SetRecordingChannels() is called.
  uint8_t play_channels_;    // 0 until SetPlayoutChannels() is called.
};
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_FAKE_AUDIO_DEVICE_BUFFER_H_

View File

@ -0,0 +1,104 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/audio_device/android/test/jni/opensl_runner.h"
#include <assert.h>
#include "webrtc/modules/audio_device/android/audio_manager_jni.h"
// Java globals
static JavaVM* g_vm = NULL;
static jclass g_osr = NULL;
// Global class implementing native code.
static webrtc::OpenSlRunner* g_runner = NULL;
// Called by the Android runtime when the shared library is loaded. Caches the
// JavaVM, takes a global reference to the Java OpenSlRunner class and binds
// its native methods. Returns the required JNI version, or -1 on failure.
jint JNI_OnLoad(JavaVM* vm, void* reserved) {
  // Only called once.
  assert(!g_vm);
  JNIEnv* env;
  if (vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK) {
    return -1;
  }
  jclass local_osr = env->FindClass("org/webrtc/app/OpenSlRunner");
  assert(local_osr != NULL);
  // Promote to a global ref so the class stays usable after this call.
  g_osr = static_cast<jclass>(env->NewGlobalRef(local_osr));
  JNINativeMethod nativeFunctions[] = {
      {"RegisterApplicationContext", "(Landroid/content/Context;)V",
       reinterpret_cast<void*>(
           &webrtc::OpenSlRunner::RegisterApplicationContext)},
      {"Start", "()V", reinterpret_cast<void*>(&webrtc::OpenSlRunner::Start)},
      {"Stop", "()V", reinterpret_cast<void*>(&webrtc::OpenSlRunner::Stop)}
  };
  // Derive the method count from the table itself instead of hard-coding 3,
  // so the two cannot drift out of sync when methods are added or removed.
  int ret_val = env->RegisterNatives(
      g_osr, nativeFunctions,
      sizeof(nativeFunctions) / sizeof(nativeFunctions[0]));
  if (ret_val != 0) {
    assert(false);
  }
  g_vm = vm;
  return JNI_VERSION_1_6;
}
namespace webrtc {
// Wires both audio directions to the shared fake buffer and fully initializes
// playout and recording. Any initialization failure is fatal: this is a test
// app, so it asserts instead of attempting recovery.
OpenSlRunner::OpenSlRunner()
    : output_(0),
      input_(0, &output_) {  // |input_| is given a pointer to |output_|.
  output_.AttachAudioBuffer(&audio_buffer_);
  if (output_.Init() != 0) {
    assert(false);
  }
  if (output_.InitPlayout() != 0) {
    assert(false);
  }
  input_.AttachAudioBuffer(&audio_buffer_);
  if (input_.Init() != 0) {
    assert(false);
  }
  if (input_.InitRecording() != 0) {
    assert(false);
  }
}
// Starts the loopback; playout is started before recording.
void OpenSlRunner::StartPlayRecord() {
  output_.StartPlayout();
  input_.StartRecording();
}
// Stops the loopback and flushes any audio still queued for playout.
void OpenSlRunner::StopPlayRecord() {
  // There are large enough buffers to compensate for recording and playing
  // jitter such that the timing of stopping playing or recording should not
  // result in over or underrun.
  input_.StopRecording();
  output_.StopPlayout();
  audio_buffer_.ClearBuffer();  // Drop chunks recorded but not yet played.
}
// JNI entry point: hands the Android application context to AudioManagerJni
// and then creates the process-wide runner instance.
JNIEXPORT void JNICALL OpenSlRunner::RegisterApplicationContext(
    JNIEnv * env,
    jobject,
    jobject context) {
  assert(!g_runner);  // Should only be called once.
  AudioManagerJni::SetAndroidAudioDeviceObjects(g_vm, env, context);
  // Might as well create the global instance since everything is set up at this
  // point.
  g_runner = new webrtc::OpenSlRunner();
}
// JNI entry point: starts the loopback on the global runner. Requires that
// RegisterApplicationContext() has already run (|g_runner| non-NULL).
JNIEXPORT void JNICALL OpenSlRunner::Start(JNIEnv * env, jobject) {
  g_runner->StartPlayRecord();
}
// JNI entry point: stops the loopback on the global runner. Requires that
// RegisterApplicationContext() has already run (|g_runner| non-NULL).
JNIEXPORT void JNICALL OpenSlRunner::Stop(JNIEnv * env, jobject) {
  g_runner->StopPlayRecord();
}
} // namespace webrtc

View File

@ -0,0 +1,47 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <jni.h>
#include "webrtc/modules/audio_device/android/test/fake_audio_device_buffer.h"
#include "webrtc/modules/audio_device/android/opensles_input.h"
#include "webrtc/modules/audio_device/android/opensles_output.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#ifndef WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_JNI_OPENSL_RUNNER_H_
#define WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_JNI_OPENSL_RUNNER_H_
namespace webrtc {
class FakeAudioDeviceBuffer;
// Native side of the loopback demo: owns the OpenSL input/output pair and
// the fake buffer that loops recorded audio straight back to playout.
class OpenSlRunner {
 public:
  OpenSlRunner();
  ~OpenSlRunner() {}

  void StartPlayRecord();
  void StopPlayRecord();

  // JNI entry points registered in JNI_OnLoad; they operate on the global
  // singleton created by RegisterApplicationContext (see opensl_runner.cc).
  static JNIEXPORT void JNICALL RegisterApplicationContext(JNIEnv * env,
                                                           jobject,
                                                           jobject context);
  static JNIEXPORT void JNICALL Start(JNIEnv * env, jobject);
  static JNIEXPORT void JNICALL Stop(JNIEnv * env, jobject);

 private:
  // Declaration order matters: |input_| is constructed with a pointer to
  // |output_| (see the constructor's init list in opensl_runner.cc).
  OpenSlesOutput output_;
  OpenSlesInput input_;
  FakeAudioDeviceBuffer audio_buffer_;  // Shared by both directions.
};
} // namespace webrtc
#endif // WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_JNI_OPENSL_RUNNER_H_

View File

@ -0,0 +1,14 @@
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system edit
# "ant.properties", and override values to adapt the script to your
# project structure.
#
# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
# Project target.
target=android-17

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.5 KiB

View File

@ -0,0 +1,22 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:gravity="bottom">
<TextView android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:layout_weight="1"
android:layout_gravity="top"
android:text="About: This application, when started, loops back audio as quickly as the native OpenSL implementation allows. Just starting it will lead to a feedback loop. It can be used to measure delay with the proper hardware. Using it as is has little utility." />
<Button android:id="@+id/btStartStopCall"
android:layout_width="100dip"
android:layout_height="wrap_content"
android:text="@string/startCall"
android:layout_gravity="center"/>
<Button android:id="@+id/btExit"
android:layout_width="100dip"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:text="@string/exit"/>
</LinearLayout >

View File

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">WebRTCOpenSLLoopback</string>
<string name="startCall">StartCall</string>
<string name="stopCall">StopCall</string>
<string name="exit">Exit</string>
</resources>

View File

@ -0,0 +1,91 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.app;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.media.AudioManager;
import android.os.Bundle;
import android.os.PowerManager;
import android.os.PowerManager.WakeLock;
import android.util.Log;
import android.view.View;
import android.widget.Button;
/**
 * Minimal UI for the OpenSL loopback demo: one button toggles the native
 * loopback, another exits. Holds a screen-dim wake lock for the lifetime of
 * the activity so measurements are not interrupted by the screen turning off.
 */
public class OpenSlDemo extends Activity implements View.OnClickListener {
  private static final String TAG = "WEBRTC";

  private Button btStartStopCall;
  private boolean isRunning = false;  // True while the native loopback runs.
  private WakeLock wakeLock;
  private OpenSlRunner runner;

  // Called when activity is created.
  @Override
  public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    PowerManager pm = (PowerManager)this.getSystemService(
        Context.POWER_SERVICE);
    wakeLock = pm.newWakeLock(
        PowerManager.SCREEN_DIM_WAKE_LOCK, TAG);
    wakeLock.acquire(); // Keep screen on until app terminates.

    setContentView(R.layout.open_sl_demo);
    // Direct hardware volume controls to affect the voice call audio stream.
    setVolumeControlStream(AudioManager.STREAM_VOICE_CALL);

    btStartStopCall = (Button) findViewById(R.id.btStartStopCall);
    btStartStopCall.setOnClickListener(this);
    findViewById(R.id.btExit).setOnClickListener(this);

    runner = new OpenSlRunner();
    // Native code calls back into JVM to be able to configure OpenSL to low
    // latency mode. Provide the context needed to do this.
    runner.RegisterApplicationContext(getApplicationContext());
  }

  // Called before activity is destroyed.
  @Override
  public void onDestroy() {
    Log.d(TAG, "onDestroy");
    wakeLock.release();
    super.onDestroy();
  }

  // Toggles the native loopback and updates the button label to match.
  private void startOrStop() {
    if (isRunning) {
      runner.Stop();
      btStartStopCall.setText(R.string.startCall);
      isRunning = false;
    } else {
      // Plain else: the original's "else if (!isRunning)" re-tested the
      // condition the if-branch had already excluded.
      runner.Start();
      btStartStopCall.setText(R.string.stopCall);
      isRunning = true;
    }
  }

  @Override
  public void onClick(View arg0) {
    switch (arg0.getId()) {
      case R.id.btStartStopCall:
        startOrStop();
        break;
      case R.id.btExit:
        finish();
        break;
    }
  }
}

View File

@ -0,0 +1,24 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.app;
import android.content.Context;
/** Thin Java wrapper around the native OpenSL loopback code. */
public class OpenSlRunner {
  // Load the JNI library once per process in a static initializer. The
  // original called System.loadLibrary() in the constructor, which re-ran
  // the (idempotent but unnecessary) load on every instantiation.
  static {
    System.loadLibrary("opensl-demo-jni");
  }

  public OpenSlRunner() {
  }

  // Must be called once, before Start()/Stop(); gives the native code the
  // application context it needs to configure low-latency audio.
  public static native void RegisterApplicationContext(Context context);
  public static native void Start();
  public static native void Stop();
}

View File

@ -253,6 +253,66 @@
},
],
}],
['OS=="android" and enable_android_opensl==1', {
'targets': [
{
'target_name': 'libopensl-demo-jni',
'type': 'loadable_module',
'dependencies': [
'audio_device',
],
'sources': [
'android/test/jni/opensl_runner.cc',
'android/test/fake_audio_device_buffer.cc',
],
'link_settings': {
'libraries': [
'-llog',
'-lOpenSLES',
],
},
},
{
'target_name': 'OpenSlDemo',
'type': 'none',
'dependencies': [
'libopensl-demo-jni',
'<(modules_java_gyp_path):*',
],
'actions': [
{
# TODO(henrik): Convert building of the demo to a proper GYP
# target so this action is not needed once chromium's
# apk-building machinery can be used. (crbug.com/225101)
'action_name': 'build_opensldemo_apk',
'variables': {
'android_opensl_demo_root': '<(webrtc_root)/modules/audio_device/android/test',
},
'inputs' : [
'<(PRODUCT_DIR)/lib.java/audio_device_module_java.jar',
'<(PRODUCT_DIR)/libopensl-demo-jni.so',
'<!@(find <(android_opensl_demo_root)/src -name "*.java")',
'<!@(find <(android_opensl_demo_root)/res -name "*.xml")',
'<!@(find <(android_opensl_demo_root)/res -name "*.png")',
'<(android_opensl_demo_root)/AndroidManifest.xml',
'<(android_opensl_demo_root)/build.xml',
'<(android_opensl_demo_root)/project.properties',
],
'outputs': ['<(PRODUCT_DIR)/OpenSlDemo-debug.apk'],
'action': ['bash', '-ec',
'rm -f <(_outputs) && '
'mkdir -p <(android_opensl_demo_root)/libs/<(android_app_abi) && '
'<(android_strip) -o <(android_opensl_demo_root)/libs/<(android_app_abi)/libopensl-demo-jni.so <(PRODUCT_DIR)/libopensl-demo-jni.so && '
'cp <(PRODUCT_DIR)/lib.java/audio_device_module_java.jar <(android_opensl_demo_root)/libs/ &&'
'cd <(android_opensl_demo_root) && '
'ant debug && '
'cd - && '
'cp <(android_opensl_demo_root)/bin/OpenSlDemo-debug.apk <(_outputs)'
],
},
],
}],
}],
['OS=="android" and enable_android_opensl==1', {
'targets': [
{

View File

@ -36,13 +36,13 @@ public:
int32_t InitPlayout();
int32_t InitRecording();
int32_t SetRecordingSampleRate(uint32_t fsHz);
int32_t SetPlayoutSampleRate(uint32_t fsHz);
virtual int32_t SetRecordingSampleRate(uint32_t fsHz);
virtual int32_t SetPlayoutSampleRate(uint32_t fsHz);
int32_t RecordingSampleRate() const;
int32_t PlayoutSampleRate() const;
int32_t SetRecordingChannels(uint8_t channels);
int32_t SetPlayoutChannels(uint8_t channels);
virtual int32_t SetRecordingChannels(uint8_t channels);
virtual int32_t SetPlayoutChannels(uint8_t channels);
uint8_t RecordingChannels() const;
uint8_t PlayoutChannels() const;
int32_t SetRecordingChannel(
@ -50,12 +50,13 @@ public:
int32_t RecordingChannel(
AudioDeviceModule::ChannelType& channel) const;
int32_t SetRecordedBuffer(const void* audioBuffer, uint32_t nSamples);
virtual int32_t SetRecordedBuffer(const void* audioBuffer,
uint32_t nSamples);
int32_t SetCurrentMicLevel(uint32_t level);
void SetVQEData(int playDelayMS,
int recDelayMS,
int clockDrift);
int32_t DeliverRecordedData();
virtual void SetVQEData(int playDelayMS,
int recDelayMS,
int clockDrift);
virtual int32_t DeliverRecordedData();
uint32_t NewMicLevel() const;
virtual int32_t RequestPlayoutData(uint32_t nSamples);