diff --git a/webrtc/modules/audio_device/android/audio_manager_jni.h b/webrtc/modules/audio_device/android/audio_manager_jni.h
index 6f85e72df..298890e16 100644
--- a/webrtc/modules/audio_device/android/audio_manager_jni.h
+++ b/webrtc/modules/audio_device/android/audio_manager_jni.h
@@ -43,9 +43,9 @@ class AudioManagerJni {
// SetAndroidAudioDeviceObjects.
static void ClearAndroidAudioDeviceObjects();
- bool low_latency_supported() { return low_latency_supported_; }
- int native_output_sample_rate() { return native_output_sample_rate_; }
- int native_buffer_size() { return native_buffer_size_; }
+ bool low_latency_supported() const { return low_latency_supported_; }
+ int native_output_sample_rate() const { return native_output_sample_rate_; }
+ int native_buffer_size() const { return native_buffer_size_; }
private:
bool HasDeviceObjects();
diff --git a/webrtc/modules/audio_device/android/opensles_common.h b/webrtc/modules/audio_device/android/opensles_common.h
index e15217184..15fa9efbe 100644
--- a/webrtc/modules/audio_device/android/opensles_common.h
+++ b/webrtc/modules/audio_device/android/opensles_common.h
@@ -17,7 +17,8 @@ namespace webrtc_opensl {
enum {
kDefaultSampleRate = 44100,
- kNumChannels = 1
+ kNumChannels = 1,
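+  // 10 ms of audio at kDefaultSampleRate (44100 * 10 / 1000 = 441 samples).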
+ kDefaultBufSizeInSamples = kDefaultSampleRate * 10 / 1000,
};
diff --git a/webrtc/modules/audio_device/android/test/AndroidManifest.xml b/webrtc/modules/audio_device/android/test/AndroidManifest.xml
new file mode 100644
index 000000000..3d32a7afc
--- /dev/null
+++ b/webrtc/modules/audio_device/android/test/AndroidManifest.xml
@@ -0,0 +1,22 @@
diff --git a/webrtc/modules/audio_device/android/test/README b/webrtc/modules/audio_device/android/test/README
new file mode 100644
index 000000000..59f6de9db
--- /dev/null
+++ b/webrtc/modules/audio_device/android/test/README
@@ -0,0 +1,23 @@
+This directory contains an app for measuring the total audio delay of the
+native OpenSL implementation. It simply loops audio back from mic to speakers.
+
+Prerequisites:
+- Make sure gclient is checking out tools necessary to target Android: your
+ .gclient file should contain a line like:
+ target_os = ['android']
+ Make sure to re-run gclient sync after adding this to download the tools.
+- Env vars need to be set up to target Android; easiest way to do this is to run
+ (from the libjingle trunk directory):
+ . ./build/android/envsetup.sh
+ Note that this clobbers any previously-set $GYP_DEFINES so it must be done
+ before the next item.
+- Set up webrtc-related GYP variables:
+ export GYP_DEFINES="$GYP_DEFINES java_home=
+ enable_android_opensl=1"
+- Finally, run "gclient runhooks" to generate Android-targeting .ninja files.
+
+Example of building & using the app:
+
+cd /trunk
+ninja -C out/Debug OpenSlDemo
+adb install -r out/Debug/OpenSlDemo-debug.apk
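+
+Once installed, the app can also be launched from the command line (assuming
+the manifest registers the activity under its Java name,
+org.webrtc.app.OpenSlDemo):
+adb shell am start -n org.webrtc.app/.OpenSlDemo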
\ No newline at end of file
diff --git a/webrtc/modules/audio_device/android/test/build.xml b/webrtc/modules/audio_device/android/test/build.xml
new file mode 100644
index 000000000..b6e033a6a
--- /dev/null
+++ b/webrtc/modules/audio_device/android/test/build.xml
@@ -0,0 +1,92 @@
diff --git a/webrtc/modules/audio_device/android/test/fake_audio_device_buffer.cc b/webrtc/modules/audio_device/android/test/fake_audio_device_buffer.cc
new file mode 100644
index 000000000..c636ee694
--- /dev/null
+++ b/webrtc/modules/audio_device/android/test/fake_audio_device_buffer.cc
@@ -0,0 +1,109 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_device/android/test/fake_audio_device_buffer.h"
+
+#include <assert.h>
+#include <string.h>
+
+#include "webrtc/modules/audio_device/android/opensles_common.h"
+
+using webrtc_opensl::kDefaultBufSizeInSamples;
+
+namespace webrtc {
+
+FakeAudioDeviceBuffer::FakeAudioDeviceBuffer()
+ : fifo_(kNumBuffers),
+ next_available_buffer_(0),
+ record_channels_(0),
+ play_channels_(0) {
+  buf_.reset(new scoped_array<int8_t>[kNumBuffers]);
+ for (int i = 0; i < kNumBuffers; ++i) {
+ buf_[i].reset(new int8_t[buffer_size_bytes()]);
+ }
+}
+
+int32_t FakeAudioDeviceBuffer::SetRecordingSampleRate(uint32_t fsHz) {
+  assert(static_cast<int>(fsHz) == sample_rate());
+ return 0;
+}
+
+int32_t FakeAudioDeviceBuffer::SetPlayoutSampleRate(uint32_t fsHz) {
+  assert(static_cast<int>(fsHz) == sample_rate());
+ return 0;
+}
+
+int32_t FakeAudioDeviceBuffer::SetRecordingChannels(uint8_t channels) {
+ assert(channels > 0);
+ record_channels_ = channels;
+ assert((play_channels_ == 0) ||
+ (record_channels_ == play_channels_));
+ return 0;
+}
+
+int32_t FakeAudioDeviceBuffer::SetPlayoutChannels(uint8_t channels) {
+ assert(channels > 0);
+ play_channels_ = channels;
+ assert((record_channels_ == 0) ||
+ (record_channels_ == play_channels_));
+ return 0;
+}
+
+int32_t FakeAudioDeviceBuffer::SetRecordedBuffer(const void* audioBuffer,
+ uint32_t nSamples) {
+ assert(audioBuffer);
+ assert(fifo_.size() < fifo_.capacity());
+ assert(nSamples == kDefaultBufSizeInSamples);
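+  // Copy the recorded audio into the next pre-allocated slot and hand the
+  // FIFO a pointer into buf_; slots are reused round-robin over kNumBuffers.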
+ int8_t* buffer = buf_[next_available_buffer_].get();
+ next_available_buffer_ = (next_available_buffer_ + 1) % kNumBuffers;
+ memcpy(buffer, audioBuffer, nSamples * sizeof(int16_t));
+ fifo_.Push(buffer);
+ return 0;
+}
+
+int32_t FakeAudioDeviceBuffer::RequestPlayoutData(uint32_t nSamples) {
+ assert(nSamples == kDefaultBufSizeInSamples);
+ return 0;
+}
+
+int32_t FakeAudioDeviceBuffer::GetPlayoutData(void* audioBuffer) {
+ assert(audioBuffer);
+ if (fifo_.size() < 1) {
+ // Playout silence until there is data available.
+ memset(audioBuffer, 0, buffer_size_bytes());
+ return buffer_size_samples();
+ }
+ int8_t* buffer = fifo_.Pop();
+ memcpy(audioBuffer, buffer, buffer_size_bytes());
+ return buffer_size_samples();
+}
+
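+// Use the device's native output rate when the low-latency audio path is
+// supported; otherwise fall back to the OpenSL default of 44.1 kHz.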
+int FakeAudioDeviceBuffer::sample_rate() const {
+ return audio_manager_.low_latency_supported() ?
+ audio_manager_.native_output_sample_rate() :
+ webrtc_opensl::kDefaultSampleRate;
+}
+
+int FakeAudioDeviceBuffer::buffer_size_samples() const {
+ return sample_rate() * 10 / 1000;
+}
+
+int FakeAudioDeviceBuffer::buffer_size_bytes() const {
+ return buffer_size_samples() * webrtc_opensl::kNumChannels * sizeof(int16_t);
+}
+
+void FakeAudioDeviceBuffer::ClearBuffer() {
+ while (fifo_.size() != 0) {
+ fifo_.Pop();
+ }
+ next_available_buffer_ = 0;
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_device/android/test/fake_audio_device_buffer.h b/webrtc/modules/audio_device/android/test/fake_audio_device_buffer.h
new file mode 100644
index 000000000..9372e2940
--- /dev/null
+++ b/webrtc/modules/audio_device/android/test/fake_audio_device_buffer.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_FAKE_AUDIO_DEVICE_BUFFER_H_
+#define WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_FAKE_AUDIO_DEVICE_BUFFER_H_
+
+#include "webrtc/modules/audio_device/android/audio_manager_jni.h"
+#include "webrtc/modules/audio_device/android/single_rw_fifo.h"
+#include "webrtc/modules/audio_device/audio_device_buffer.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+// Fake AudioDeviceBuffer implementation that returns audio data that is pushed
+// to it. It implements all APIs used by the OpenSL implementation.
+class FakeAudioDeviceBuffer : public AudioDeviceBuffer {
+ public:
+ FakeAudioDeviceBuffer();
+ virtual ~FakeAudioDeviceBuffer() {}
+
+ virtual int32_t SetRecordingSampleRate(uint32_t fsHz);
+ virtual int32_t SetPlayoutSampleRate(uint32_t fsHz);
+ virtual int32_t SetRecordingChannels(uint8_t channels);
+ virtual int32_t SetPlayoutChannels(uint8_t channels);
+ virtual int32_t SetRecordedBuffer(const void* audioBuffer,
+ uint32_t nSamples);
+ virtual void SetVQEData(int playDelayMS,
+ int recDelayMS,
+ int clockDrift) {}
+ virtual int32_t DeliverRecordedData() { return 0; }
+ virtual int32_t RequestPlayoutData(uint32_t nSamples);
+ virtual int32_t GetPlayoutData(void* audioBuffer);
+
+ void ClearBuffer();
+
+ private:
+ enum {
+ // Each buffer contains 10 ms of data since that is what OpenSlesInput
+    // delivers. Keep 7 buffers, covering 70 ms of data; the extra headroom
+    // absorbs jitter between OpenSL recording and playback.
+ kNumBuffers = 7,
+ };
+ int sample_rate() const;
+ int buffer_size_samples() const;
+ int buffer_size_bytes() const;
+
+ // Java API handle
+ AudioManagerJni audio_manager_;
+
+ SingleRwFifo fifo_;
+  scoped_array<scoped_array<int8_t> > buf_;
+ int next_available_buffer_;
+
+ uint8_t record_channels_;
+ uint8_t play_channels_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_FAKE_AUDIO_DEVICE_BUFFER_H_
diff --git a/webrtc/modules/audio_device/android/test/jni/opensl_runner.cc b/webrtc/modules/audio_device/android/test/jni/opensl_runner.cc
new file mode 100644
index 000000000..ba801a1fd
--- /dev/null
+++ b/webrtc/modules/audio_device/android/test/jni/opensl_runner.cc
@@ -0,0 +1,104 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/modules/audio_device/android/test/jni/opensl_runner.h"
+
+#include <assert.h>
+
+#include "webrtc/modules/audio_device/android/audio_manager_jni.h"
+
+// Java globals
+static JavaVM* g_vm = NULL;
+static jclass g_osr = NULL;
+
+// Global class implementing native code.
+static webrtc::OpenSlRunner* g_runner = NULL;
+
+jint JNI_OnLoad(JavaVM* vm, void* reserved) {
+ // Only called once.
+ assert(!g_vm);
+ JNIEnv* env;
+  if (vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK) {
+ return -1;
+ }
+
+ jclass local_osr = env->FindClass("org/webrtc/app/OpenSlRunner");
+ assert(local_osr != NULL);
+  g_osr = static_cast<jclass>(env->NewGlobalRef(local_osr));
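+  // Bind the native implementations below to the methods declared as native
+  // in OpenSlRunner.java; the JNI signatures must match those declarations.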
+ JNINativeMethod nativeFunctions[] = {
+ {"RegisterApplicationContext", "(Landroid/content/Context;)V",
+     reinterpret_cast<void*>(
+ &webrtc::OpenSlRunner::RegisterApplicationContext)},
+    {"Start", "()V", reinterpret_cast<void*>(&webrtc::OpenSlRunner::Start)},
+    {"Stop", "()V", reinterpret_cast<void*>(&webrtc::OpenSlRunner::Stop)}
+ };
+ int ret_val = env->RegisterNatives(g_osr, nativeFunctions, 3);
+ if (ret_val != 0) {
+ assert(false);
+ }
+ g_vm = vm;
+ return JNI_VERSION_1_6;
+}
+
+namespace webrtc {
+
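+// Wire the OpenSL output and input to the shared fake audio device buffer so
+// that recorded audio is looped straight back to playout.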
+OpenSlRunner::OpenSlRunner()
+ : output_(0),
+ input_(0, &output_) {
+ output_.AttachAudioBuffer(&audio_buffer_);
+ if (output_.Init() != 0) {
+ assert(false);
+ }
+ if (output_.InitPlayout() != 0) {
+ assert(false);
+ }
+ input_.AttachAudioBuffer(&audio_buffer_);
+ if (input_.Init() != 0) {
+ assert(false);
+ }
+ if (input_.InitRecording() != 0) {
+ assert(false);
+ }
+}
+
+void OpenSlRunner::StartPlayRecord() {
+ output_.StartPlayout();
+ input_.StartRecording();
+}
+
+void OpenSlRunner::StopPlayRecord() {
+  // The buffers are large enough to compensate for jitter between recording
+  // and playback, so the order in which recording and playback are stopped
+  // should not cause an over- or underrun.
+ input_.StopRecording();
+ output_.StopPlayout();
+ audio_buffer_.ClearBuffer();
+}
+
+JNIEXPORT void JNICALL OpenSlRunner::RegisterApplicationContext(
+ JNIEnv * env,
+ jobject,
+ jobject context) {
+ assert(!g_runner); // Should only be called once.
+ AudioManagerJni::SetAndroidAudioDeviceObjects(g_vm, env, context);
+ // Might as well create the global instance since everything is set up at this
+ // point.
+ g_runner = new webrtc::OpenSlRunner();
+}
+
+JNIEXPORT void JNICALL OpenSlRunner::Start(JNIEnv * env, jobject) {
+ g_runner->StartPlayRecord();
+}
+
+JNIEXPORT void JNICALL OpenSlRunner::Stop(JNIEnv * env, jobject) {
+ g_runner->StopPlayRecord();
+}
+
+} // namespace webrtc
diff --git a/webrtc/modules/audio_device/android/test/jni/opensl_runner.h b/webrtc/modules/audio_device/android/test/jni/opensl_runner.h
new file mode 100644
index 000000000..5d4f867c6
--- /dev/null
+++ b/webrtc/modules/audio_device/android/test/jni/opensl_runner.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include "webrtc/modules/audio_device/android/test/fake_audio_device_buffer.h"
+#include "webrtc/modules/audio_device/android/opensles_input.h"
+#include "webrtc/modules/audio_device/android/opensles_output.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+
+#ifndef WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_JNI_OPENSL_RUNNER_H_
+#define WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_JNI_OPENSL_RUNNER_H_
+
+namespace webrtc {
+
+class FakeAudioDeviceBuffer;
+
+class OpenSlRunner {
+ public:
+ OpenSlRunner();
+ ~OpenSlRunner() {}
+
+ void StartPlayRecord();
+ void StopPlayRecord();
+
+ static JNIEXPORT void JNICALL RegisterApplicationContext(JNIEnv * env,
+ jobject,
+ jobject context);
+ static JNIEXPORT void JNICALL Start(JNIEnv * env, jobject);
+ static JNIEXPORT void JNICALL Stop(JNIEnv * env, jobject);
+
+ private:
+ OpenSlesOutput output_;
+ OpenSlesInput input_;
+ FakeAudioDeviceBuffer audio_buffer_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_JNI_OPENSL_RUNNER_H_
diff --git a/webrtc/modules/audio_device/android/test/project.properties b/webrtc/modules/audio_device/android/test/project.properties
new file mode 100644
index 000000000..a3ee5ab64
--- /dev/null
+++ b/webrtc/modules/audio_device/android/test/project.properties
@@ -0,0 +1,14 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must be checked in Version Control Systems.
+#
+# To customize properties used by the Ant build system edit
+# "ant.properties", and override values to adapt the script to your
+# project structure.
+#
+# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
+#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
+
+# Project target.
+target=android-17
diff --git a/webrtc/modules/audio_device/android/test/res/drawable/logo.png b/webrtc/modules/audio_device/android/test/res/drawable/logo.png
new file mode 100644
index 000000000..a07c69fa5
Binary files /dev/null and b/webrtc/modules/audio_device/android/test/res/drawable/logo.png differ
diff --git a/webrtc/modules/audio_device/android/test/res/layout/open_sl_demo.xml b/webrtc/modules/audio_device/android/test/res/layout/open_sl_demo.xml
new file mode 100644
index 000000000..1efad7336
--- /dev/null
+++ b/webrtc/modules/audio_device/android/test/res/layout/open_sl_demo.xml
@@ -0,0 +1,22 @@
diff --git a/webrtc/modules/audio_device/android/test/res/values/strings.xml b/webrtc/modules/audio_device/android/test/res/values/strings.xml
new file mode 100644
index 000000000..f51980624
--- /dev/null
+++ b/webrtc/modules/audio_device/android/test/res/values/strings.xml
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+  <string name="appName">WebRTCOpenSLLoopback</string>
+  <string name="startCall">StartCall</string>
+  <string name="stopCall">StopCall</string>
+  <string name="exit">Exit</string>
+</resources>
diff --git a/webrtc/modules/audio_device/android/test/src/org/webrtc/app/OpenSlDemo.java b/webrtc/modules/audio_device/android/test/src/org/webrtc/app/OpenSlDemo.java
new file mode 100644
index 000000000..8060c998d
--- /dev/null
+++ b/webrtc/modules/audio_device/android/test/src/org/webrtc/app/OpenSlDemo.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.app;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.pm.ActivityInfo;
+import android.media.AudioManager;
+import android.os.Bundle;
+import android.os.PowerManager;
+import android.os.PowerManager.WakeLock;
+import android.util.Log;
+import android.view.View;
+import android.widget.Button;
+
+public class OpenSlDemo extends Activity implements View.OnClickListener {
+ private static final String TAG = "WEBRTC";
+
+ private Button btStartStopCall;
+ private boolean isRunning = false;
+
+ private WakeLock wakeLock;
+
+ private OpenSlRunner runner;
+
+ // Called when activity is created.
+ @Override
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ PowerManager pm = (PowerManager)this.getSystemService(
+ Context.POWER_SERVICE);
+ wakeLock = pm.newWakeLock(
+ PowerManager.SCREEN_DIM_WAKE_LOCK, TAG);
+ wakeLock.acquire(); // Keep screen on until app terminates.
+
+ setContentView(R.layout.open_sl_demo);
+
+ // Direct hardware volume controls to affect the voice call audio stream.
+ setVolumeControlStream(AudioManager.STREAM_VOICE_CALL);
+
+ btStartStopCall = (Button) findViewById(R.id.btStartStopCall);
+ btStartStopCall.setOnClickListener(this);
+ findViewById(R.id.btExit).setOnClickListener(this);
+
+ runner = new OpenSlRunner();
+    // Native code calls back into the JVM to configure OpenSL for low-latency
+    // mode. Provide the application context it needs to do this.
+ runner.RegisterApplicationContext(getApplicationContext());
+ }
+
+ // Called before activity is destroyed.
+ @Override
+ public void onDestroy() {
+ Log.d(TAG, "onDestroy");
+ wakeLock.release();
+ super.onDestroy();
+ }
+
+ private void startOrStop() {
+ if (isRunning) {
+ runner.Stop();
+ btStartStopCall.setText(R.string.startCall);
+ isRunning = false;
+    } else {
+ runner.Start();
+ btStartStopCall.setText(R.string.stopCall);
+ isRunning = true;
+ }
+ }
+
+ public void onClick(View arg0) {
+ switch (arg0.getId()) {
+ case R.id.btStartStopCall:
+ startOrStop();
+ break;
+ case R.id.btExit:
+ finish();
+ break;
+ }
+ }
+
+}
diff --git a/webrtc/modules/audio_device/android/test/src/org/webrtc/app/OpenSlRunner.java b/webrtc/modules/audio_device/android/test/src/org/webrtc/app/OpenSlRunner.java
new file mode 100644
index 000000000..489cb55bb
--- /dev/null
+++ b/webrtc/modules/audio_device/android/test/src/org/webrtc/app/OpenSlRunner.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.app;
+
+import android.content.Context;
+
+public class OpenSlRunner {
+ public OpenSlRunner() {
+ System.loadLibrary("opensl-demo-jni");
+ }
+
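+  // Implemented in android/test/jni/opensl_runner.cc and registered through
+  // RegisterNatives() in JNI_OnLoad().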
+ public static native void RegisterApplicationContext(Context context);
+ public static native void Start();
+ public static native void Stop();
+
+}
\ No newline at end of file
diff --git a/webrtc/modules/audio_device/audio_device.gypi b/webrtc/modules/audio_device/audio_device.gypi
index 3df12eb76..a56c81d14 100644
--- a/webrtc/modules/audio_device/audio_device.gypi
+++ b/webrtc/modules/audio_device/audio_device.gypi
@@ -253,6 +253,66 @@
},
],
}],
+ ['OS=="android" and enable_android_opensl==1', {
+ 'targets': [
+ {
+ 'target_name': 'libopensl-demo-jni',
+ 'type': 'loadable_module',
+ 'dependencies': [
+ 'audio_device',
+ ],
+ 'sources': [
+ 'android/test/jni/opensl_runner.cc',
+ 'android/test/fake_audio_device_buffer.cc',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '-llog',
+ '-lOpenSLES',
+ ],
+ },
+ },
+ {
+ 'target_name': 'OpenSlDemo',
+ 'type': 'none',
+ 'dependencies': [
+ 'libopensl-demo-jni',
+ '<(modules_java_gyp_path):*',
+ ],
+ 'actions': [
+ {
+ # TODO(henrik): Convert building of the demo to a proper GYP
+ # target so this action is not needed once chromium's
+ # apk-building machinery can be used. (crbug.com/225101)
+ 'action_name': 'build_opensldemo_apk',
+ 'variables': {
+ 'android_opensl_demo_root': '<(webrtc_root)/modules/audio_device/android/test',
+ },
+ 'inputs' : [
+ '<(PRODUCT_DIR)/lib.java/audio_device_module_java.jar',
+ '<(PRODUCT_DIR)/libopensl-demo-jni.so',
+ '