diff --git a/all.gyp b/all.gyp
index 99ca3abe1..27cc65003 100644
--- a/all.gyp
+++ b/all.gyp
@@ -18,6 +18,13 @@
'talk/libjingle_examples.gyp:*',
'talk/libjingle_tests.gyp:*',
],
+ 'conditions': [
+ ['OS=="android"', {
+ 'dependencies': [
+ 'webrtc/webrtc_examples.gyp:*',
+ ],
+ }],
+ ],
},
],
}
diff --git a/webrtc/video_engine/test/android/OWNERS b/webrtc/examples/android/OWNERS
similarity index 100%
rename from webrtc/video_engine/test/android/OWNERS
rename to webrtc/examples/android/OWNERS
diff --git a/webrtc/video_engine/test/android/AndroidManifest.xml b/webrtc/examples/android/media_demo/AndroidManifest.xml
similarity index 80%
rename from webrtc/video_engine/test/android/AndroidManifest.xml
rename to webrtc/examples/android/media_demo/AndroidManifest.xml
index 39a3c515e..62bf46076 100644
--- a/webrtc/video_engine/test/android/AndroidManifest.xml
+++ b/webrtc/examples/android/media_demo/AndroidManifest.xml
@@ -1,11 +1,12 @@
+ android:versionCode="1" package="org.webrtc.webrtcdemo" android:versionName="1.07">
+ android:label="@string/appName"
+ android:debuggable="true">
@@ -25,4 +26,4 @@
-
+
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/README b/webrtc/examples/android/media_demo/README
new file mode 100644
index 000000000..af8872151
--- /dev/null
+++ b/webrtc/examples/android/media_demo/README
@@ -0,0 +1,24 @@
+This directory contains a sample app for sending and receiving video and audio
+on Android. It further lets you enable and disable some call quality
+enhancements such as echo cancellation, noise suppression etc.
+
+Prerequisites:
+- Make sure gclient is checking out tools necessary to target Android: your
+ .gclient file should contain a line like:
+ target_os = ['android']
+ Make sure to re-run gclient sync after adding this to download the tools.
+- Env vars need to be set up to target Android; easiest way to do this is to run
+ (from the libjingle trunk directory):
+ . ./build/android/envsetup.sh
+ Note that this clobbers any previously-set $GYP_DEFINES so it must be done
+ before the next item.
+- Set up webrtc-related GYP variables:
+  export GYP_DEFINES="$GYP_DEFINES java_home=<path/to/your/JDK>"
+- Finally, run "gclient runhooks" to generate Android-targeting .ninja files.
+
+Example of building the app:
+cd /trunk
+ninja -C out/Debug WebRTCDemo
+
+It can then be installed and run on the device:
+adb install -r out/Debug/WebRTCDemo-debug.apk
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/build.xml b/webrtc/examples/android/media_demo/build.xml
new file mode 100644
index 000000000..c8a51dd5f
--- /dev/null
+++ b/webrtc/examples/android/media_demo/build.xml
@@ -0,0 +1,15 @@
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/webrtc/examples/android/media_demo/jni/jni_helpers.cc b/webrtc/examples/android/media_demo/jni/jni_helpers.cc
new file mode 100644
index 000000000..d7e326766
--- /dev/null
+++ b/webrtc/examples/android/media_demo/jni/jni_helpers.cc
@@ -0,0 +1,82 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
+
+#include <stdint.h>
+
+#include "third_party/icu/source/common/unicode/unistr.h"
+
+using icu::UnicodeString;
+
+jmethodID GetMethodID(JNIEnv* jni, jclass c, const std::string& name,
+ const char* signature) {
+ jmethodID m = jni->GetMethodID(c, name.c_str(), signature);
+ CHECK_EXCEPTION(jni, "error during GetMethodID");
+ return m;
+}
+
+jlong jlongFromPointer(void* ptr) {
+ CHECK(sizeof(intptr_t) <= sizeof(jlong), "Time to rethink the use of jlongs");
+ // Going through intptr_t to be obvious about the definedness of the
+ // conversion from pointer to integral type. intptr_t to jlong is a standard
+  // widening by the CHECK above.
+  jlong ret = reinterpret_cast<intptr_t>(ptr);
+  CHECK(reinterpret_cast<void*>(ret) == ptr,
+ "jlong does not convert back to pointer");
+ return ret;
+}
+
+// Given a (UTF-16) jstring return a new UTF-8 native string.
+std::string JavaToStdString(JNIEnv* jni, const jstring& j_string) {
+ const jchar* jchars = jni->GetStringChars(j_string, NULL);
+ CHECK_EXCEPTION(jni, "Error during GetStringChars");
+ UnicodeString ustr(jchars, jni->GetStringLength(j_string));
+ CHECK_EXCEPTION(jni, "Error during GetStringLength");
+ jni->ReleaseStringChars(j_string, jchars);
+ CHECK_EXCEPTION(jni, "Error during ReleaseStringChars");
+ std::string ret;
+ return ustr.toUTF8String(ret);
+}
+
+ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni, const char** classes,
+ int size) {
+ for (int i = 0; i < size; ++i) {
+ LoadClass(jni, classes[i]);
+ }
+}
+ClassReferenceHolder::~ClassReferenceHolder() {
+ CHECK(classes_.empty(), "Must call FreeReferences() before dtor!");
+}
+
+void ClassReferenceHolder::FreeReferences(JNIEnv* jni) {
+  for (std::map<std::string, jclass>::const_iterator it = classes_.begin();
+ it != classes_.end(); ++it) {
+ jni->DeleteGlobalRef(it->second);
+ }
+ classes_.clear();
+}
+
+jclass ClassReferenceHolder::GetClass(const std::string& name) {
+  std::map<std::string, jclass>::iterator it = classes_.find(name);
+ CHECK(it != classes_.end(), "Could not find class");
+ return it->second;
+}
+
+void ClassReferenceHolder::LoadClass(JNIEnv* jni, const std::string& name) {
+ jclass localRef = jni->FindClass(name.c_str());
+ CHECK_EXCEPTION(jni, "Could not load class");
+ CHECK(localRef, name.c_str());
+  jclass globalRef = reinterpret_cast<jclass>(jni->NewGlobalRef(localRef));
+ CHECK_EXCEPTION(jni, "error during NewGlobalRef");
+ CHECK(globalRef, name.c_str());
+ bool inserted = classes_.insert(std::make_pair(name, globalRef)).second;
+ CHECK(inserted, "Duplicate class name");
+}
diff --git a/webrtc/examples/android/media_demo/jni/jni_helpers.h b/webrtc/examples/android/media_demo/jni/jni_helpers.h
new file mode 100644
index 000000000..25706db4b
--- /dev/null
+++ b/webrtc/examples/android/media_demo/jni/jni_helpers.h
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_JNI_HELPERS_H_
+#define WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_JNI_HELPERS_H_
+
+// TODO(henrike): this file contains duplication with regards to
+// talk/app/webrtc/java/jni/peerconnection_jni.cc. When/if code can be shared
+// between trunk/talk and trunk/webrtc remove the duplication.
+
+#include <android/log.h>
+#include <jni.h>
+
+#include <map>
+#include <string>
+
+#define TAG "WEBRTC-NATIVE"
+
+// Abort the process if |x| is false, emitting |msg| to logcat.
+#define CHECK(x, msg) \
+ if (x) { \
+ } else { \
+ __android_log_print(ANDROID_LOG_ERROR, TAG, "%s:%d: %s", __FILE__, \
+ __LINE__, msg); \
+ abort(); \
+ }
+
+// Abort the process if |jni| has a Java exception pending, emitting |msg| to
+// logcat.
+#define CHECK_EXCEPTION(jni, msg) \
+ if (0) { \
+ } else { \
+ if (jni->ExceptionCheck()) { \
+ jni->ExceptionDescribe(); \
+ jni->ExceptionClear(); \
+ CHECK(0, msg); \
+ } \
+ }
+
+#define ARRAYSIZE(instance) \
+  static_cast<int>(sizeof(instance) / sizeof(instance[0]))
+
+// JNIEnv-helper methods that CHECK success: no Java exception thrown and found
+// object/class/method/field is non-null.
+jmethodID GetMethodID(JNIEnv* jni, jclass c, const std::string& name,
+ const char* signature);
+
+// Return a |jlong| that will automatically convert back to |ptr| when assigned
+// to a |uint64|
+jlong jlongFromPointer(void* ptr);
+
+// Given a (UTF-16) jstring return a new UTF-8 native string.
+std::string JavaToStdString(JNIEnv* jni, const jstring& j_string);
+
+// Android's FindClass() is trickier than usual because the app-specific
+// ClassLoader is not consulted when there is no app-specific frame on the
+// stack. Consequently, we only look up classes once in JNI_OnLoad.
+// http://developer.android.com/training/articles/perf-jni.html#faq_FindClass
+class ClassReferenceHolder {
+ public:
+ ClassReferenceHolder(JNIEnv* jni, const char** classes, int size);
+ ~ClassReferenceHolder();
+
+ void FreeReferences(JNIEnv* jni);
+
+ jclass GetClass(const std::string& name);
+
+ private:
+ void LoadClass(JNIEnv* jni, const std::string& name);
+
+  std::map<std::string, jclass> classes_;
+};
+
+#endif // WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_JNI_HELPERS_H_
diff --git a/webrtc/examples/android/media_demo/jni/media_codec_video_decoder.cc b/webrtc/examples/android/media_demo/jni/media_codec_video_decoder.cc
new file mode 100644
index 000000000..15eb2d720
--- /dev/null
+++ b/webrtc/examples/android/media_demo/jni/media_codec_video_decoder.cc
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "webrtc/examples/android/media_demo/jni/media_codec_video_decoder.h"
+
+#include
+
+#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
+#include "webrtc/modules/utility/interface/helpers_android.h"
+
+namespace webrtc {
+
+MediaCodecVideoDecoder::MediaCodecVideoDecoder(JavaVM* vm, jobject decoder)
+ : vm_(vm), decoder_(NULL), j_start_(NULL), j_push_buffer_(NULL) {
+ AttachThreadScoped ats(vm_);
+ JNIEnv* jni = ats.env();
+ // Make sure that the decoder is not recycled.
+ decoder_ = jni->NewGlobalRef(decoder);
+
+ // Get all function IDs.
+ jclass decoderClass = jni->GetObjectClass(decoder);
+ j_push_buffer_ =
+ jni->GetMethodID(decoderClass, "pushBuffer", "(Ljava/nio/ByteBuffer;J)V");
+ j_start_ = jni->GetMethodID(decoderClass, "start", "(II)Z");
+}
+
+MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
+ AttachThreadScoped ats(vm_);
+ JNIEnv* jni = ats.env();
+ jni->DeleteGlobalRef(decoder_);
+}
+
+int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* codecSettings,
+ int32_t numberOfCores) {
+ AttachThreadScoped ats(vm_);
+ JNIEnv* jni = ats.env();
+ if (!jni->CallBooleanMethod(decoder_, j_start_, codecSettings->width,
+ codecSettings->height)) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoDecoder::Decode(
+ const EncodedImage& inputImage, bool missingFrames,
+ const RTPFragmentationHeader* fragmentation,
+ const CodecSpecificInfo* codecSpecificInfo, int64_t renderTimeMs) {
+
+ AttachThreadScoped ats(vm_);
+ JNIEnv* jni = ats.env();
+ jobject byteBuffer =
+ jni->NewDirectByteBuffer(inputImage._buffer, inputImage._length);
+ jni->CallVoidMethod(decoder_, j_push_buffer_, byteBuffer, renderTimeMs);
+ jni->DeleteLocalRef(byteBuffer);
+ return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
+}
+
+int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback) {
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoDecoder::Release() {
+ // TODO(hellner): this maps nicely to MediaCodecVideoDecoder::dispose().
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoDecoder::Reset() {
+ // TODO(hellner): implement. MediaCodec::stop() followed by
+ // MediaCodec::start()?
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+} // namespace webrtc
diff --git a/webrtc/examples/android/media_demo/jni/media_codec_video_decoder.h b/webrtc/examples/android/media_demo/jni/media_codec_video_decoder.h
new file mode 100644
index 000000000..0aafc0bf1
--- /dev/null
+++ b/webrtc/examples/android/media_demo/jni/media_codec_video_decoder.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_MEDIA_CODEC_VIDEO_DECODER_H_
+#define WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_MEDIA_CODEC_VIDEO_DECODER_H_
+
+#include <jni.h>
+
+#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
+#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+
+namespace webrtc {
+
+class MediaCodecVideoDecoder : public VideoDecoder {
+ public:
+ MediaCodecVideoDecoder(JavaVM* vm, jobject decoder);
+ virtual ~MediaCodecVideoDecoder();
+
+ virtual int32_t InitDecode(const VideoCodec* codecSettings,
+ int32_t numberOfCores);
+
+ virtual int32_t Decode(const EncodedImage& inputImage, bool missingFrames,
+ const RTPFragmentationHeader* fragmentation,
+ const CodecSpecificInfo* codecSpecificInfo,
+ int64_t renderTimeMs);
+
+ virtual int32_t RegisterDecodeCompleteCallback(
+ DecodedImageCallback* callback);
+
+ virtual int32_t Release();
+
+ virtual int32_t Reset();
+
+ virtual int32_t SetCodecConfigParameters(const uint8_t* /*buffer*/,
+ int32_t /*size*/) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ virtual VideoDecoder* Copy() {
+ CHECK(0, "Not implemented");
+ return NULL;
+ }
+
+ private:
+ JavaVM* vm_;
+ // Global reference to a (Java) MediaCodecVideoDecoder object.
+ jobject decoder_;
+ jmethodID j_start_;
+ jmethodID j_push_buffer_;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_MEDIA_CODEC_VIDEO_DECODER_H_
diff --git a/webrtc/examples/android/media_demo/jni/on_load.cc b/webrtc/examples/android/media_demo/jni/on_load.cc
new file mode 100644
index 000000000..27a2394b3
--- /dev/null
+++ b/webrtc/examples/android/media_demo/jni/on_load.cc
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+
+#include
+
+#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
+#include "webrtc/examples/android/media_demo/jni/video_engine_jni.h"
+#include "webrtc/examples/android/media_demo/jni/voice_engine_jni.h"
+#include "webrtc/video_engine/include/vie_base.h"
+#include "webrtc/voice_engine/include/voe_base.h"
+
+// Macro for native functions that can be found by way of jni-auto discovery.
+// Note extern "C" is needed for "discovery" of native methods to work.
+#define JOWW(rettype, name) \
+ extern "C" rettype JNIEXPORT JNICALL Java_org_webrtc_webrtcdemo_##name
+
+static JavaVM* g_vm = NULL;
+
+extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* vm, void* reserved) {
+ // Only called once.
+ CHECK(!g_vm, "OnLoad called more than once");
+ g_vm = vm;
+ return JNI_VERSION_1_4;
+}
+
+JOWW(void, NativeWebRtcContextRegistry_register)(
+ JNIEnv* jni,
+ jclass,
+ jobject context) {
+ webrtc_examples::SetVoeDeviceObjects(g_vm);
+ webrtc_examples::SetVieDeviceObjects(g_vm);
+ CHECK(webrtc::VideoEngine::SetAndroidObjects(g_vm) == 0,
+ "Failed to register android objects to video engine");
+ CHECK(webrtc::VoiceEngine::SetAndroidObjects(g_vm, jni, context) == 0,
+ "Failed to register android objects to voice engine");
+}
+
+JOWW(void, NativeWebRtcContextRegistry_unRegister)(
+ JNIEnv* jni,
+ jclass) {
+ CHECK(webrtc::VoiceEngine::SetAndroidObjects(NULL, NULL, NULL) == 0,
+ "Failed to unregister android objects from voice engine");
+ webrtc_examples::ClearVieDeviceObjects();
+ webrtc_examples::ClearVoeDeviceObjects();
+}
diff --git a/webrtc/examples/android/media_demo/jni/video_engine_jni.cc b/webrtc/examples/android/media_demo/jni/video_engine_jni.cc
new file mode 100644
index 000000000..712e17866
--- /dev/null
+++ b/webrtc/examples/android/media_demo/jni/video_engine_jni.cc
@@ -0,0 +1,711 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains JNI for the video engine interfaces.
+// The native functions are found using jni's auto discovery.
+
+#include "webrtc/examples/android/media_demo/jni/video_engine_jni.h"
+
+#include <map>
+#include <string>
+
+#include "webrtc/common_types.h"
+#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
+#include "webrtc/examples/android/media_demo/jni/media_codec_video_decoder.h"
+#include "webrtc/examples/android/media_demo/jni/voice_engine_jni.h"
+#include "webrtc/modules/utility/interface/helpers_android.h"
+#include "webrtc/test/channel_transport/include/channel_transport.h"
+#include "webrtc/video_engine/include/vie_base.h"
+#include "webrtc/video_engine/include/vie_capture.h"
+#include "webrtc/video_engine/include/vie_codec.h"
+#include "webrtc/video_engine/include/vie_external_codec.h"
+#include "webrtc/video_engine/include/vie_network.h"
+#include "webrtc/video_engine/include/vie_render.h"
+#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
+
+// Macro for native functions that can be found by way of jni-auto discovery.
+// Note extern "C" is needed for "discovery" of native methods to work.
+#define JOWW(rettype, name) \
+ extern "C" rettype JNIEXPORT JNICALL Java_org_webrtc_webrtcdemo_##name
+
+namespace {
+
+static JavaVM* g_vm = NULL;
+static ClassReferenceHolder* g_class_reference_holder = NULL;
+
+jclass GetClass(const char* name) {
+ CHECK(g_class_reference_holder, "Class reference holder NULL");
+ return g_class_reference_holder->GetClass(name);
+}
+
+// C(++) description of a camera. This class is created by Java native calls
+// and associated with the CameraDesc Java class. The Java class is used in the
+// Java code but it is just a thin wrapper of the C(++) class that contain the
+// actual information. The information is stored in C(++) as it is used to
+// call video engine APIs.
+struct CameraDesc {
+ // The name and id corresponds to ViECapture's |device_nameUTF8| and
+ // |unique_idUTF8|.
+ char name[64];
+ char unique_id[64];
+};
+
+// C++ callback class that can be used to register for callbacks from the
+// video engine. It further propagates the callbacks to
+// VideoDecodeEncodeObserver.java interface. The memory associated with this
+// class is managed globally by the VideoEngineData class when registering and
+// unregistering VideoDecodeEncodeObserver.java to receive callbacks.
+class VideoDecodeEncodeObserver : public webrtc::ViEDecoderObserver,
+ public webrtc::ViEEncoderObserver {
+ public:
+ explicit VideoDecodeEncodeObserver(jobject j_observer)
+ : j_observer_(j_observer) {
+ webrtc::AttachThreadScoped ats(g_vm);
+ JNIEnv* jni = ats.env();
+ jclass j_observer_class = jni->GetObjectClass(j_observer_);
+ incoming_rate_ =
+ GetMethodID(jni, j_observer_class, "incomingRate", "(III)V");
+ incoming_codec_changed_ =
+ GetMethodID(jni, j_observer_class, "incomingCodecChanged",
+ "(ILorg/webrtc/webrtcdemo/VideoCodecInst;)V");
+ request_new_keyframe_ =
+ GetMethodID(jni, j_observer_class, "requestNewKeyFrame", "(I)V");
+ outgoing_rate_ =
+ GetMethodID(jni, j_observer_class, "outgoingRate", "(III)V");
+ j_observer_ = jni->NewGlobalRef(j_observer_);
+ }
+
+ ~VideoDecodeEncodeObserver() {
+ webrtc::AttachThreadScoped ats(g_vm);
+ JNIEnv* jni = ats.env();
+ jni->DeleteGlobalRef(j_observer_);
+ }
+
+ virtual void IncomingRate(const int video_channel,
+ const unsigned int framerate,
+ const unsigned int bitrate) {
+ webrtc::AttachThreadScoped ats(g_vm);
+ JNIEnv* jni = ats.env();
+ jni->CallVoidMethod(j_observer_, incoming_rate_, video_channel,
+                        static_cast<int>(framerate), static_cast<int>(bitrate));
+ }
+
+ virtual void DecoderTiming(int decode_ms, int max_decode_ms,
+ int current_delay_ms, int target_delay_ms,
+ int jitter_buffer_ms, int min_playout_delay_ms,
+ int render_delay_ms) {
+ // TODO(fischman): consider plumbing this through to Java.
+ }
+
+ virtual void IncomingCodecChanged(const int video_channel,
+ const webrtc::VideoCodec& video_codec) {
+ webrtc::AttachThreadScoped ats(g_vm);
+ JNIEnv* jni = ats.env();
+ webrtc::VideoCodec* codec = new webrtc::VideoCodec(video_codec);
+ jclass j_codec_class =
+ GetClass("org/webrtc/webrtcdemo/VideoCodecInst");
+    jmethodID j_codec_ctor = GetMethodID(jni, j_codec_class, "<init>", "(J)V");
+ jobject j_codec =
+ jni->NewObject(j_codec_class, j_codec_ctor, jlongFromPointer(codec));
+ CHECK_EXCEPTION(jni, "error during NewObject");
+ jni->CallVoidMethod(j_observer_, incoming_codec_changed_, video_channel,
+ j_codec);
+ }
+
+ virtual void RequestNewKeyFrame(const int video_channel) {
+ webrtc::AttachThreadScoped ats(g_vm);
+ JNIEnv* jni = ats.env();
+ jni->CallVoidMethod(j_observer_, request_new_keyframe_, video_channel);
+ }
+
+ virtual void OutgoingRate(const int video_channel,
+ const unsigned int framerate,
+ const unsigned int bitrate) {
+ webrtc::AttachThreadScoped ats(g_vm);
+ JNIEnv* jni = ats.env();
+ jni->CallVoidMethod(j_observer_, outgoing_rate_, video_channel,
+                        static_cast<int>(framerate), static_cast<int>(bitrate));
+ }
+
+ virtual void SuspendChange(int video_channel, bool is_suspended) {}
+
+ private:
+ jobject j_observer_;
+ jmethodID incoming_rate_;
+ jmethodID incoming_codec_changed_;
+ jmethodID request_new_keyframe_;
+ jmethodID outgoing_rate_;
+};
+
+template <typename T>
+void ReleaseSubApi(T instance) {
+ CHECK(instance->Release() == 0, "failed to release instance")
+}
+
+class VideoEngineData {
+ public:
+ VideoEngineData()
+ : vie(webrtc::VideoEngine::Create()),
+ base(webrtc::ViEBase::GetInterface(vie)),
+ codec(webrtc::ViECodec::GetInterface(vie)),
+ network(webrtc::ViENetwork::GetInterface(vie)),
+ rtp(webrtc::ViERTP_RTCP::GetInterface(vie)),
+ render(webrtc::ViERender::GetInterface(vie)),
+ capture(webrtc::ViECapture::GetInterface(vie)),
+ externalCodec(webrtc::ViEExternalCodec::GetInterface(vie)) {
+ CHECK(vie != NULL, "Video engine instance failed to be created");
+ CHECK(base != NULL, "Failed to acquire base interface");
+ CHECK(codec != NULL, "Failed to acquire codec interface");
+ CHECK(network != NULL, "Failed to acquire network interface");
+ CHECK(rtp != NULL, "Failed to acquire rtp interface");
+ CHECK(render != NULL, "Failed to acquire render interface");
+ CHECK(capture != NULL, "Failed to acquire capture interface");
+ CHECK(externalCodec != NULL, "Failed to acquire externalCodec interface");
+ }
+
+ ~VideoEngineData() {
+ CHECK(channel_transports_.empty(),
+ "ViE transports must be deleted before terminating");
+ CHECK(observers_.empty(),
+ "ViE observers must be deleted before terminating");
+ CHECK(external_decoders_.empty(),
+ "ViE external decoders must be deleted before terminating");
+ ReleaseSubApi(externalCodec);
+ ReleaseSubApi(capture);
+ ReleaseSubApi(render);
+ ReleaseSubApi(rtp);
+ ReleaseSubApi(network);
+ ReleaseSubApi(codec);
+ ReleaseSubApi(base);
+ webrtc::VideoEngine* vie_pointer = vie;
+ CHECK(webrtc::VideoEngine::Delete(vie_pointer), "ViE failed to be deleted");
+ }
+
+ int CreateChannel() {
+ int channel;
+ CHECK(base->CreateChannel(channel) == 0, "Failed to create channel");
+ CreateTransport(channel);
+ return channel;
+ }
+
+ int DeleteChannel(int channel) {
+ if (base->DeleteChannel(channel) != 0) {
+ return -1;
+ }
+ DeleteTransport(channel);
+ return 0;
+ }
+
+ webrtc::test::VideoChannelTransport* GetTransport(int channel) {
+ ChannelTransports::iterator found = channel_transports_.find(channel);
+ if (found == channel_transports_.end()) {
+ return NULL;
+ }
+ return found->second;
+ }
+
+ int RegisterObserver(int channel, jobject j_observer) {
+ CHECK(observers_.find(channel) == observers_.end(),
+ "Observer already created for channel, inconsistent state");
+ observers_[channel] = new VideoDecodeEncodeObserver(j_observer);
+ int ret_val = codec->RegisterDecoderObserver(channel, *observers_[channel]);
+ ret_val |= codec->RegisterEncoderObserver(channel, *observers_[channel]);
+ return ret_val;
+ }
+
+ int DeregisterObserver(int channel) {
+ Observers::iterator found = observers_.find(channel);
+ if (observers_.find(channel) == observers_.end()) {
+ return -1;
+ }
+ int ret_val = codec->DeregisterDecoderObserver(channel);
+ ret_val |= codec->DeregisterEncoderObserver(channel);
+ delete found->second;
+ observers_.erase(found);
+ return ret_val;
+ }
+
+ int RegisterExternalReceiveCodec(jint channel, jint pl_type, jobject decoder,
+ bool internal_source) {
+ CHECK(external_decoders_.find(channel) == external_decoders_.end(),
+ "External decoder already created for channel, inconsistent state");
+ external_decoders_[channel] =
+ new webrtc::MediaCodecVideoDecoder(g_vm, decoder);
+ return externalCodec->RegisterExternalReceiveCodec(
+ channel, pl_type, external_decoders_[channel], internal_source);
+ }
+
+ int DeRegisterExternalReceiveCodec(jint channel, jint pl_type) {
+ ExternalDecoders::iterator found = external_decoders_.find(channel);
+ CHECK(found != external_decoders_.end(),
+ "ViE channel missing external decoder, inconsistent state");
+ CHECK(externalCodec->DeRegisterExternalReceiveCodec(channel, pl_type) == 0,
+ "Failed to register external receive decoder");
+ delete found->second;
+ external_decoders_.erase(found);
+ return 0;
+ }
+
+ webrtc::VideoEngine* const vie;
+ webrtc::ViEBase* const base;
+ webrtc::ViECodec* const codec;
+ webrtc::ViENetwork* const network;
+ webrtc::ViERTP_RTCP* const rtp;
+ webrtc::ViERender* const render;
+ webrtc::ViECapture* const capture;
+ webrtc::ViEExternalCodec* const externalCodec;
+
+ private:
+ // Video engine no longer provides a socket implementation. There is,
+ // however, a socket implementation in webrtc::test.
+  typedef std::map<int, webrtc::test::VideoChannelTransport*>
+      ChannelTransports;
+  typedef std::map<int, VideoDecodeEncodeObserver*> Observers;
+  typedef std::map<int, webrtc::MediaCodecVideoDecoder*> ExternalDecoders;
+
+ void CreateTransport(int channel) {
+ CHECK(GetTransport(channel) == NULL,
+ "Transport already created for ViE channel, inconsistent state");
+ channel_transports_[channel] =
+ new webrtc::test::VideoChannelTransport(network, channel);
+ }
+ void DeleteTransport(int channel) {
+ CHECK(GetTransport(channel) != NULL,
+ "ViE channel missing transport, inconsistent state");
+ delete channel_transports_[channel];
+ channel_transports_.erase(channel);
+ }
+
+ ChannelTransports channel_transports_;
+ Observers observers_;
+ ExternalDecoders external_decoders_;
+};
+
+webrtc::VideoCodec* GetCodecInst(JNIEnv* jni, jobject j_codec) {
+ jclass j_codec_class = jni->GetObjectClass(j_codec);
+ jfieldID native_codec_id =
+ jni->GetFieldID(j_codec_class, "nativeCodecInst", "J");
+ jlong j_p = jni->GetLongField(j_codec, native_codec_id);
+ return reinterpret_cast(j_p);
+}
+
+CameraDesc* GetCameraDesc(JNIEnv* jni, jobject j_camera) {
+ jclass j_camera_class = jni->GetObjectClass(j_camera);
+ jfieldID native_camera_id =
+ jni->GetFieldID(j_camera_class, "nativeCameraDesc", "J");
+ jlong j_p = jni->GetLongField(j_camera, native_camera_id);
+ return reinterpret_cast(j_p);
+}
+
+VideoEngineData* GetVideoEngineData(JNIEnv* jni, jobject j_vie) {
+ jclass j_vie_class = jni->GetObjectClass(j_vie);
+ jfieldID native_vie_id =
+ jni->GetFieldID(j_vie_class, "nativeVideoEngine", "J");
+ jlong j_p = jni->GetLongField(j_vie, native_vie_id);
+ return reinterpret_cast(j_p);
+}
+
+} // namespace
+
+namespace webrtc_examples {
+
+static const char* g_classes[] = {
+ "org/webrtc/webrtcdemo/CameraDesc",
+ "org/webrtc/webrtcdemo/RtcpStatistics",
+ "org/webrtc/webrtcdemo/VideoCodecInst",
+ "org/webrtc/webrtcdemo/VideoDecodeEncodeObserver",
+ "org/webrtc/webrtcdemo/MediaCodecVideoDecoder"};
+
+void SetVieDeviceObjects(JavaVM* vm) {
+ CHECK(vm, "Trying to register NULL vm");
+ CHECK(!g_vm, "Trying to re-register vm");
+ g_vm = vm;
+ webrtc::AttachThreadScoped ats(g_vm);
+ JNIEnv* jni = ats.env();
+ g_class_reference_holder = new ClassReferenceHolder(
+ jni, g_classes, ARRAYSIZE(g_classes));
+}
+
+void ClearVieDeviceObjects() {
+ CHECK(g_vm, "Clearing vm without it being set");
+ {
+ webrtc::AttachThreadScoped ats(g_vm);
+ g_class_reference_holder->FreeReferences(ats.env());
+ }
+ g_vm = NULL;
+ delete g_class_reference_holder;
+ g_class_reference_holder = NULL;
+}
+
+} // namespace webrtc_examples
+
+JOWW(jlong, VideoEngine_create)(JNIEnv* jni, jclass) {
+ VideoEngineData* vie_data = new VideoEngineData();
+ return jlongFromPointer(vie_data);
+}
+
+JOWW(jint, VideoEngine_init)(JNIEnv* jni, jobject j_vie) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->base->Init();
+}
+
+JOWW(jint, VideoEngine_setVoiceEngine)(JNIEnv* jni, jobject j_vie,
+ jobject j_voe) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ webrtc::VoiceEngine* voe = GetVoiceEngine(jni, j_voe);
+ return vie_data->base->SetVoiceEngine(voe);
+}
+
+JOWW(void, VideoEngine_dispose)(JNIEnv* jni, jobject j_vie) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ delete vie_data;
+}
+
+JOWW(jint, VideoEngine_startSend)(JNIEnv* jni, jobject j_vie, jint channel) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->base->StartSend(channel);
+}
+
+JOWW(jint, VideoEngine_stopRender)(JNIEnv* jni, jobject j_vie, jint channel) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->render->StopRender(channel);
+}
+
+JOWW(jint, VideoEngine_stopSend)(JNIEnv* jni, jobject j_vie, jint channel) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->base->StopSend(channel);
+}
+
+JOWW(jint, VideoEngine_startReceive)(JNIEnv* jni, jobject j_vie, jint channel) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->base->StartReceive(channel);
+}
+
+JOWW(jint, VideoEngine_stopReceive)(JNIEnv* jni, jobject j_vie, jint channel) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->base->StopReceive(channel);
+}
+
+JOWW(jint, VideoEngine_createChannel)(JNIEnv* jni, jobject j_vie) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->CreateChannel();
+}
+
+JOWW(jint, VideoEngine_deleteChannel)(JNIEnv* jni, jobject j_vie,
+ jint channel) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->DeleteChannel(channel);
+}
+
+JOWW(jint, VideoEngine_connectAudioChannel)(JNIEnv* jni, jobject j_vie,
+                                            jint video_channel,
+                                            jint audio_channel) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->base->ConnectAudioChannel(video_channel, audio_channel);
+}
+
+JOWW(jint, VideoEngine_setLocalReceiver)(JNIEnv* jni, jobject j_vie,
+ jint channel, jint port) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->GetTransport(channel)->SetLocalReceiver(port);
+}
+
+JOWW(jint, VideoEngine_setSendDestination)(JNIEnv* jni, jobject j_vie,
+ jint channel, jint port,
+ jstring j_addr) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ std::string addr = JavaToStdString(jni, j_addr);
+ webrtc::test::VideoChannelTransport* transport =
+ vie_data->GetTransport(channel);
+ return transport->SetSendDestination(addr.c_str(), port);
+}
+
+JOWW(jint, VideoEngine_setReceiveCodec)(JNIEnv* jni, jobject j_vie,
+ jint channel, jobject j_codec) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ webrtc::VideoCodec* codec = GetCodecInst(jni, j_codec);
+ return vie_data->codec->SetReceiveCodec(channel, *codec);
+}
+
+JOWW(jint, VideoEngine_setSendCodec)(JNIEnv* jni, jobject j_vie, jint channel,
+ jobject j_codec) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ webrtc::VideoCodec* codec = GetCodecInst(jni, j_codec);
+ return vie_data->codec->SetSendCodec(channel, *codec);
+}
+
+JOWW(jint, VideoEngine_numberOfCodecs)(JNIEnv* jni, jobject j_vie) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->codec->NumberOfCodecs();
+}
+
+JOWW(jobject, VideoEngine_getCodec)(JNIEnv* jni, jobject j_vie, jint index) {
+  VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+  // Ownership of |codec| is transferred to the Java VideoCodecInst object,
+  // which stores the pointer and frees it in VideoCodecInst_dispose.
+  webrtc::VideoCodec* codec = new webrtc::VideoCodec();
+  CHECK(vie_data->codec->GetCodec(index, *codec) == 0,
+        "getCodec must be called with valid index");
+  jclass j_codec_class = GetClass("org/webrtc/webrtcdemo/VideoCodecInst");
+  // "<init>" is the JNI method name for a constructor; here the ctor takes
+  // the native pointer as a long.
+  jmethodID j_codec_ctor = GetMethodID(jni, j_codec_class, "<init>", "(J)V");
+  jobject j_codec =
+      jni->NewObject(j_codec_class, j_codec_ctor, jlongFromPointer(codec));
+  CHECK_EXCEPTION(jni, "error during NewObject");
+  return j_codec;
+}
+
+JOWW(jint, VideoEngine_addRenderer)(JNIEnv* jni, jobject j_vie, jint channel,
+ jobject gl_surface, jint z_order,
+ jfloat left, jfloat top, jfloat right,
+ jfloat bottom) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->render->AddRenderer(channel, gl_surface, z_order, left, top,
+ right, bottom);
+}
+
+JOWW(jint, VideoEngine_removeRenderer)(JNIEnv* jni, jobject j_vie,
+ jint channel) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->render->RemoveRenderer(channel);
+}
+
+JOWW(jint, VideoEngine_registerExternalReceiveCodec)(JNIEnv* jni, jobject j_vie,
+                                                     jint channel, jint pl_type,
+                                                     jobject decoder,
+                                                     jboolean internal_source) {
+  VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+  // Bug fix: forward the caller's |internal_source| flag instead of
+  // hard-coding true (the parameter was previously ignored). The parameter
+  // is jboolean to match the JNI calling convention for Java booleans.
+  return vie_data->RegisterExternalReceiveCodec(channel, pl_type, decoder,
+                                                internal_source);
+}
+
+JOWW(jint,
+ VideoEngine_deRegisterExternalReceiveCodec)(JNIEnv* jni, jobject j_vie,
+ jint channel, jint pl_type) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->DeRegisterExternalReceiveCodec(channel, pl_type);
+}
+
+JOWW(jint, VideoEngine_startRender)(JNIEnv* jni, jobject j_vie, jint channel) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->render->StartRender(channel);
+}
+
+JOWW(jint, VideoEngine_numberOfCaptureDevices)(JNIEnv* jni, jobject j_vie) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->capture->NumberOfCaptureDevices();
+}
+
+// Returns a Java CameraDesc wrapping a heap-allocated CameraDesc, or NULL if
+// |index| is out of range. The Java object owns the pointer and frees it in
+// CameraDesc_dispose. Macro invocation normalized to the JOWW(ret, name)(args)
+// form used elsewhere in this file.
+JOWW(jobject,
+     VideoEngine_getCaptureDevice)(JNIEnv* jni, jobject j_vie, jint index) {
+  VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+  CameraDesc* camera_info = new CameraDesc();
+  if (vie_data->capture->GetCaptureDevice(
+          index, camera_info->name, sizeof(camera_info->name),
+          camera_info->unique_id, sizeof(camera_info->unique_id)) != 0) {
+    delete camera_info;
+    return NULL;
+  }
+  jclass j_camera_class = GetClass("org/webrtc/webrtcdemo/CameraDesc");
+  // "<init>" is the JNI method name for a constructor.
+  jmethodID j_camera_ctor = GetMethodID(jni, j_camera_class, "<init>", "(J)V");
+  jobject j_camera = jni->NewObject(j_camera_class, j_camera_ctor,
+                                    jlongFromPointer(camera_info));
+  CHECK_EXCEPTION(jni, "error during NewObject");
+  return j_camera;
+}
+
+JOWW(jint, VideoEngine_allocateCaptureDevice)(JNIEnv* jni, jobject j_vie,
+ jobject j_camera) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ CameraDesc* camera_info = GetCameraDesc(jni, j_camera);
+ jint capture_id;
+ if (vie_data->capture->AllocateCaptureDevice(camera_info->unique_id,
+ sizeof(camera_info->unique_id),
+ capture_id) != 0) {
+ return -1;
+ }
+ return capture_id;
+}
+
+JOWW(jint, VideoEngine_connectCaptureDevice)(JNIEnv* jni, jobject j_vie,
+ jint camera_num, jint channel) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->capture->ConnectCaptureDevice(camera_num, channel);
+}
+
+JOWW(jint, VideoEngine_startCapture)(JNIEnv* jni, jobject j_vie,
+ jint camera_num) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->capture->StartCapture(camera_num);
+}
+
+JOWW(jint, VideoEngine_stopCapture)(JNIEnv* jni, jobject j_vie,
+ jint camera_id) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->capture->StopCapture(camera_id);
+}
+
+JOWW(jint, VideoEngine_releaseCaptureDevice)(JNIEnv* jni, jobject j_vie,
+ jint camera_id) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->capture->ReleaseCaptureDevice(camera_id);
+}
+
+// Returns the camera's orientation as a jint (RotateCapturedFrame enum
+// value), or -1 on failure.
+JOWW(jint, VideoEngine_getOrientation)(JNIEnv* jni, jobject j_vie,
+                                       jobject j_camera) {
+  VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+  CameraDesc* camera_info = GetCameraDesc(jni, j_camera);
+  webrtc::RotateCapturedFrame orientation;
+  if (vie_data->capture->GetOrientation(camera_info->unique_id, orientation) !=
+      0) {
+    return -1;
+  }
+  return static_cast<jint>(orientation);
+}
+
+JOWW(jint, VideoEngine_setRotateCapturedFrames)(JNIEnv* jni, jobject j_vie,
+                                                jint capture_id, jint degrees) {
+  VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+  // NOTE(review): assumes |degrees| carries a webrtc::RotateCapturedFrame
+  // enum value from the Java side -- confirm against the Java caller.
+  return vie_data->capture->SetRotateCapturedFrames(
+      capture_id, static_cast<webrtc::RotateCapturedFrame>(degrees));
+}
+
+JOWW(jint, VideoEngine_setNackStatus)(JNIEnv* jni, jobject j_vie, jint channel,
+ jboolean enable) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->rtp->SetNACKStatus(channel, enable);
+}
+
+JOWW(jint, VideoEngine_setKeyFrameRequestMethod)(JNIEnv* jni, jobject j_vie,
+                                                 jint channel,
+                                                 jint request_method) {
+  VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+  // |request_method| carries a webrtc::ViEKeyFrameRequestMethod enum value.
+  return vie_data->rtp->SetKeyFrameRequestMethod(
+      channel, static_cast<webrtc::ViEKeyFrameRequestMethod>(request_method));
+}
+
+// Returns a Java RtcpStatistics object with the channel's received RTCP
+// stats, or NULL if the query fails.
+JOWW(jobject, VideoEngine_getReceivedRtcpStatistics)(JNIEnv* jni, jobject j_vie,
+                                                     jint channel) {
+  unsigned short fraction_lost;  // NOLINT
+  unsigned int cumulative_lost;  // NOLINT
+  unsigned int extended_max;     // NOLINT
+  unsigned int jitter;           // NOLINT
+  int rtt_ms;
+  VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+  if (vie_data->rtp->GetReceivedRTCPStatistics(channel, fraction_lost,
+                                               cumulative_lost, extended_max,
+                                               jitter, rtt_ms) != 0) {
+    return NULL;
+  }
+  jclass j_rtcp_statistics_class =
+      GetClass("org/webrtc/webrtcdemo/RtcpStatistics");
+  // "<init>" is the JNI method name for a constructor.
+  jmethodID j_rtcp_statistics_ctor =
+      GetMethodID(jni, j_rtcp_statistics_class, "<init>", "(IIIII)V");
+  jobject j_rtcp_statistics =
+      jni->NewObject(j_rtcp_statistics_class, j_rtcp_statistics_ctor,
+                     fraction_lost, cumulative_lost, extended_max, jitter,
+                     rtt_ms);
+  CHECK_EXCEPTION(jni, "error during NewObject");
+  return j_rtcp_statistics;
+}
+
+JOWW(jint, VideoEngine_registerObserver)(JNIEnv* jni, jobject j_vie,
+ jint channel, jobject callback) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->RegisterObserver(channel, callback);
+}
+
+JOWW(jint, VideoEngine_deregisterObserver)(JNIEnv* jni, jobject j_vie,
+ jint channel) {
+ VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+ return vie_data->DeregisterObserver(channel);
+}
+
+JOWW(jint, VideoEngine_setTraceFile)(JNIEnv* jni, jobject, jstring j_filename,
+ jboolean file_counter) {
+ std::string filename = JavaToStdString(jni, j_filename);
+ return webrtc::VideoEngine::SetTraceFile(filename.c_str(), file_counter);
+}
+
+JOWW(jint, VideoEngine_setTraceFilter)(JNIEnv* jni, jobject, jint filter) {
+ return webrtc::VideoEngine::SetTraceFilter(filter);
+}
+
+JOWW(jint, VideoEngine_startRtpDump)(JNIEnv* jni, jobject j_vie, jint channel,
+                                     jstring j_filename, jint direction) {
+  VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+  std::string filename = JavaToStdString(jni, j_filename);
+  // |direction| carries a webrtc::RTPDirections enum value.
+  return vie_data->rtp->StartRTPDump(
+      channel, filename.c_str(), static_cast<webrtc::RTPDirections>(direction));
+}
+
+JOWW(jint, VideoEngine_stopRtpDump)(JNIEnv* jni, jobject j_vie, jint channel,
+                                    jint direction) {
+  VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
+  // |direction| carries a webrtc::RTPDirections enum value.
+  return vie_data->rtp->StopRTPDump(
+      channel, static_cast<webrtc::RTPDirections>(direction));
+}
+
+JOWW(void, VideoCodecInst_dispose)(JNIEnv* jni, jobject j_codec) {
+ delete GetCodecInst(jni, j_codec);
+}
+
+JOWW(jint, VideoCodecInst_plType)(JNIEnv* jni, jobject j_codec) {
+ return GetCodecInst(jni, j_codec)->plType;
+}
+
+JOWW(jstring, VideoCodecInst_name)(JNIEnv* jni, jobject j_codec) {
+ return jni->NewStringUTF(GetCodecInst(jni, j_codec)->plName);
+}
+
+JOWW(jint, VideoCodecInst_width)(JNIEnv* jni, jobject j_codec) {
+ return GetCodecInst(jni, j_codec)->width;
+}
+
+JOWW(void, VideoCodecInst_setWidth)(JNIEnv* jni, jobject j_codec, jint width) {
+ GetCodecInst(jni, j_codec)->width = width;
+}
+
+JOWW(jint, VideoCodecInst_height)(JNIEnv* jni, jobject j_codec) {
+ return GetCodecInst(jni, j_codec)->height;
+}
+
+JOWW(void, VideoCodecInst_setHeight)(JNIEnv* jni, jobject j_codec,
+ jint height) {
+ GetCodecInst(jni, j_codec)->height = height;
+}
+
+JOWW(jint, VideoCodecInst_startBitRate)(JNIEnv* jni, jobject j_codec) {
+ return GetCodecInst(jni, j_codec)->startBitrate;
+}
+
+JOWW(void, VideoCodecInst_setStartBitRate)(JNIEnv* jni, jobject j_codec,
+ jint bitrate) {
+ GetCodecInst(jni, j_codec)->startBitrate = bitrate;
+}
+
+JOWW(jint, VideoCodecInst_maxBitRate)(JNIEnv* jni, jobject j_codec) {
+ return GetCodecInst(jni, j_codec)->maxBitrate;
+}
+
+JOWW(void, VideoCodecInst_setMaxBitRate)(JNIEnv* jni, jobject j_codec,
+ jint bitrate) {
+ GetCodecInst(jni, j_codec)->maxBitrate = bitrate;
+}
+
+JOWW(jint, VideoCodecInst_maxFrameRate)(JNIEnv* jni, jobject j_codec) {
+ return GetCodecInst(jni, j_codec)->maxFramerate;
+}
+
+JOWW(void, VideoCodecInst_setMaxFrameRate)(JNIEnv* jni, jobject j_codec,
+ jint framerate) {
+ GetCodecInst(jni, j_codec)->maxFramerate = framerate;
+}
+
+JOWW(void, CameraDesc_dispose)(JNIEnv* jni, jobject j_camera) {
+ delete GetCameraDesc(jni, j_camera);
+}
diff --git a/webrtc/examples/android/media_demo/jni/video_engine_jni.h b/webrtc/examples/android/media_demo/jni/video_engine_jni.h
new file mode 100644
index 000000000..5228f3bdd
--- /dev/null
+++ b/webrtc/examples/android/media_demo/jni/video_engine_jni.h
@@ -0,0 +1,23 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VIDEO_ENGINE_H_
+#define WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VIDEO_ENGINE_H_
+
+#include <jni.h>
+
+namespace webrtc_examples {
+
+// Registers the JavaVM and caches the global class references the video
+// engine JNI layer needs. Call before any other function in this header.
+void SetVieDeviceObjects(JavaVM* vm);
+// Releases the cached class references; call during shutdown.
+void ClearVieDeviceObjects();
+
+}  // namespace webrtc_examples
+
+#endif  // WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VIDEO_ENGINE_H_
diff --git a/webrtc/examples/android/media_demo/jni/voice_engine_jni.cc b/webrtc/examples/android/media_demo/jni/voice_engine_jni.cc
new file mode 100644
index 000000000..72df498f3
--- /dev/null
+++ b/webrtc/examples/android/media_demo/jni/voice_engine_jni.cc
@@ -0,0 +1,444 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains JNI for the voice engine interfaces.
+// The native functions are found using jni's auto discovery.
+
+#include "webrtc/examples/android/media_demo/jni/voice_engine_jni.h"
+
+#include <map>
+#include <string>
+
+#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
+#include "webrtc/modules/utility/interface/helpers_android.h"
+#include "webrtc/test/channel_transport/include/channel_transport.h"
+#include "webrtc/voice_engine/include/voe_audio_processing.h"
+#include "webrtc/voice_engine/include/voe_base.h"
+#include "webrtc/voice_engine/include/voe_codec.h"
+#include "webrtc/voice_engine/include/voe_file.h"
+#include "webrtc/voice_engine/include/voe_hardware.h"
+#include "webrtc/voice_engine/include/voe_network.h"
+#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
+#include "webrtc/voice_engine/include/voe_volume_control.h"
+
+// Macro for native functions that can be found by way of jni-auto discovery.
+// Note extern "C" is needed for "discovery" of native methods to work.
+#define JOWW(rettype, name) \
+ extern "C" rettype JNIEXPORT JNICALL Java_org_webrtc_webrtcdemo_##name
+
+namespace {
+
+static JavaVM* g_vm = NULL;
+static ClassReferenceHolder* g_class_reference_holder = NULL;
+
+jclass GetClass(JNIEnv* jni, const char* name) {
+ CHECK(g_class_reference_holder, "Class reference holder NULL");
+ return g_class_reference_holder->GetClass(name);
+}
+
+static const char* g_classes[] = {"org/webrtc/webrtcdemo/CodecInst"};
+
+// Releases one VoE sub-API interface, asserting the refcount stayed sane.
+template <typename T>
+void ReleaseSubApi(T instance) {
+  CHECK(instance->Release() >= 0, "failed to release instance");
+}
+
+// Owns a VoiceEngine instance, all of its sub-API interfaces, and the
+// per-channel test transports. Constructed/destroyed from Java through the
+// VoiceEngine_create/VoiceEngine_dispose natives below.
+class VoiceEngineData {
+ public:
+  VoiceEngineData()
+      : ve(webrtc::VoiceEngine::Create()),
+        base(webrtc::VoEBase::GetInterface(ve)),
+        codec(webrtc::VoECodec::GetInterface(ve)),
+        file(webrtc::VoEFile::GetInterface(ve)),
+        netw(webrtc::VoENetwork::GetInterface(ve)),
+        apm(webrtc::VoEAudioProcessing::GetInterface(ve)),
+        volume(webrtc::VoEVolumeControl::GetInterface(ve)),
+        hardware(webrtc::VoEHardware::GetInterface(ve)),
+        rtp(webrtc::VoERTP_RTCP::GetInterface(ve)) {
+    CHECK(ve != NULL, "Voice engine instance failed to be created");
+    CHECK(base != NULL, "Failed to acquire base interface");
+    CHECK(codec != NULL, "Failed to acquire codec interface");
+    CHECK(file != NULL, "Failed to acquire file interface");
+    CHECK(netw != NULL, "Failed to acquire netw interface");
+    CHECK(apm != NULL, "Failed to acquire apm interface");
+    CHECK(volume != NULL, "Failed to acquire volume interface");
+    CHECK(hardware != NULL, "Failed to acquire hardware interface");
+    CHECK(rtp != NULL, "Failed to acquire rtp interface");
+  }
+
+  ~VoiceEngineData() {
+    // Channels (and their transports) must have been torn down first;
+    // Terminate() would otherwise leave live transports dangling.
+    CHECK(channel_transports_.empty(),
+          "VoE transports must be deleted before terminating");
+    CHECK(base->Terminate() == 0, "VoE failed to terminate");
+    ReleaseSubApi(base);
+    ReleaseSubApi(codec);
+    ReleaseSubApi(file);
+    ReleaseSubApi(netw);
+    ReleaseSubApi(apm);
+    ReleaseSubApi(volume);
+    ReleaseSubApi(hardware);
+    ReleaseSubApi(rtp);
+    webrtc::VoiceEngine* ve_instance = ve;
+    CHECK(webrtc::VoiceEngine::Delete(ve_instance), "VoE failed to be deleted");
+  }
+
+  // Creates a VoE channel plus its transport. Returns the channel id, or -1.
+  int CreateChannel() {
+    int channel = base->CreateChannel();
+    if (channel == -1) {
+      return -1;
+    }
+    CreateTransport(channel);
+    return channel;
+  }
+
+  // Deletes a VoE channel and its transport. Returns 0 on success, -1 else.
+  int DeleteChannel(int channel) {
+    if (base->DeleteChannel(channel) != 0) {
+      return -1;
+    }
+    DeleteTransport(channel);
+    return 0;
+  }
+
+  // Returns the transport for |channel|, or NULL if none exists.
+  webrtc::test::VoiceChannelTransport* GetTransport(int channel) {
+    ChannelTransports::iterator found = channel_transports_.find(channel);
+    if (found == channel_transports_.end()) {
+      return NULL;
+    }
+    return found->second;
+  }
+
+  webrtc::VoiceEngine* const ve;
+  webrtc::VoEBase* const base;
+  webrtc::VoECodec* const codec;
+  webrtc::VoEFile* const file;
+  webrtc::VoENetwork* const netw;
+  webrtc::VoEAudioProcessing* const apm;
+  webrtc::VoEVolumeControl* const volume;
+  webrtc::VoEHardware* const hardware;
+  webrtc::VoERTP_RTCP* const rtp;
+
+ private:
+  // Voice engine no longer provides a socket implementation. There is,
+  // however, a socket implementation in webrtc::test.
+  typedef std::map<int, webrtc::test::VoiceChannelTransport*>
+      ChannelTransports;
+
+  void CreateTransport(int channel) {
+    CHECK(GetTransport(channel) == NULL,
+          "Transport already created for VoE channel, inconsistent state");
+    channel_transports_[channel] =
+        new webrtc::test::VoiceChannelTransport(netw, channel);
+  }
+  void DeleteTransport(int channel) {
+    CHECK(GetTransport(channel) != NULL,
+          "VoE channel missing transport, inconsistent state");
+    delete channel_transports_[channel];
+    channel_transports_.erase(channel);
+  }
+
+  ChannelTransports channel_transports_;
+};
+
+// Returns the native webrtc::CodecInst stored in the Java CodecInst object's
+// nativeCodecInst long field.
+webrtc::CodecInst* GetCodecInst(JNIEnv* jni, jobject j_codec) {
+  jclass j_codec_class = jni->GetObjectClass(j_codec);
+  jfieldID native_codec_id =
+      jni->GetFieldID(j_codec_class, "nativeCodecInst", "J");
+  jlong j_p = jni->GetLongField(j_codec, native_codec_id);
+  return reinterpret_cast<webrtc::CodecInst*>(j_p);
+}
+
+} // namespace
+
+namespace webrtc_examples {
+
+void SetVoeDeviceObjects(JavaVM* vm) {
+ CHECK(vm, "Trying to register NULL vm");
+ g_vm = vm;
+ webrtc::AttachThreadScoped ats(g_vm);
+ JNIEnv* jni = ats.env();
+ g_class_reference_holder = new ClassReferenceHolder(
+ jni, g_classes, ARRAYSIZE(g_classes));
+}
+
+void ClearVoeDeviceObjects() {
+ CHECK(g_vm, "Clearing vm without it being set");
+ {
+ webrtc::AttachThreadScoped ats(g_vm);
+ g_class_reference_holder->FreeReferences(ats.env());
+ }
+ g_vm = NULL;
+ delete g_class_reference_holder;
+ g_class_reference_holder = NULL;
+}
+
+} // namespace webrtc_examples
+
+// Returns the native VoiceEngineData stored in the Java VoiceEngine object's
+// nativeVoiceEngine long field.
+VoiceEngineData* GetVoiceEngineData(JNIEnv* jni, jobject j_voe) {
+  jclass j_voe_class = jni->GetObjectClass(j_voe);
+  jfieldID native_voe_id =
+      jni->GetFieldID(j_voe_class, "nativeVoiceEngine", "J");
+  jlong j_p = jni->GetLongField(j_voe, native_voe_id);
+  return reinterpret_cast<VoiceEngineData*>(j_p);
+}
+
+webrtc::VoiceEngine* GetVoiceEngine(JNIEnv* jni, jobject j_voe) {
+ return GetVoiceEngineData(jni, j_voe)->ve;
+}
+
+JOWW(jlong, VoiceEngine_create)(JNIEnv* jni, jclass) {
+ VoiceEngineData* voe_data = new VoiceEngineData();
+ return jlongFromPointer(voe_data);
+}
+
+JOWW(void, VoiceEngine_dispose)(JNIEnv* jni, jobject j_voe) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ delete voe_data;
+}
+
+JOWW(jint, VoiceEngine_init)(JNIEnv* jni, jobject j_voe) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->base->Init();
+}
+
+JOWW(jint, VoiceEngine_createChannel)(JNIEnv* jni, jobject j_voe) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->CreateChannel();
+}
+
+JOWW(jint, VoiceEngine_deleteChannel)(JNIEnv* jni, jobject j_voe,
+ jint channel) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->DeleteChannel(channel);
+}
+
+JOWW(jint, VoiceEngine_setLocalReceiver)(JNIEnv* jni, jobject j_voe,
+ jint channel, jint port) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ webrtc::test::VoiceChannelTransport* transport =
+ voe_data->GetTransport(channel);
+ return transport->SetLocalReceiver(port);
+}
+
+JOWW(jint, VoiceEngine_setSendDestination)(JNIEnv* jni, jobject j_voe,
+ jint channel, jint port,
+ jstring j_addr) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ std::string addr = JavaToStdString(jni, j_addr);
+ webrtc::test::VoiceChannelTransport* transport =
+ voe_data->GetTransport(channel);
+ return transport->SetSendDestination(addr.c_str(), port);
+}
+
+JOWW(jint, VoiceEngine_startListen)(JNIEnv* jni, jobject j_voe, jint channel) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->base->StartReceive(channel);
+}
+
+JOWW(jint, VoiceEngine_startPlayout)(JNIEnv* jni, jobject j_voe, jint channel) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->base->StartPlayout(channel);
+}
+
+JOWW(jint, VoiceEngine_startSend)(JNIEnv* jni, jobject j_voe, jint channel) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->base->StartSend(channel);
+}
+
+JOWW(jint, VoiceEngine_stopListen)(JNIEnv* jni, jobject j_voe, jint channel) {
+  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+  // Bug fix: this previously called StartReceive(), so stopListen never
+  // actually stopped reception on the channel.
+  return voe_data->base->StopReceive(channel);
+}
+
+JOWW(jint, VoiceEngine_stopPlayout)(JNIEnv* jni, jobject j_voe, jint channel) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->base->StopPlayout(channel);
+}
+
+JOWW(jint, VoiceEngine_stopSend)(JNIEnv* jni, jobject j_voe, jint channel) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->base->StopSend(channel);
+}
+
+JOWW(jint, VoiceEngine_setSpeakerVolume)(JNIEnv* jni, jobject j_voe,
+ jint level) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->volume->SetSpeakerVolume(level);
+}
+
+JOWW(jint, VoiceEngine_setLoudspeakerStatus)(JNIEnv* jni, jobject j_voe,
+ jboolean enable) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->hardware->SetLoudspeakerStatus(enable);
+}
+
+JOWW(jint, VoiceEngine_startPlayingFileLocally)(JNIEnv* jni, jobject j_voe,
+ jint channel,
+ jstring j_filename,
+ jboolean loop) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ std::string filename = JavaToStdString(jni, j_filename);
+ return voe_data->file->StartPlayingFileLocally(channel,
+ filename.c_str(),
+ loop);
+}
+
+JOWW(jint, VoiceEngine_stopPlayingFileLocally)(JNIEnv* jni, jobject j_voe,
+ jint channel) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->file->StopPlayingFileLocally(channel);
+}
+
+JOWW(jint, VoiceEngine_startPlayingFileAsMicrophone)(JNIEnv* jni, jobject j_voe,
+ jint channel,
+ jstring j_filename,
+ jboolean loop) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ std::string filename = JavaToStdString(jni, j_filename);
+ return voe_data->file->StartPlayingFileAsMicrophone(channel,
+ filename.c_str(),
+ loop);
+}
+
+JOWW(jint, VoiceEngine_stopPlayingFileAsMicrophone)(JNIEnv* jni, jobject j_voe,
+ jint channel) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->file->StopPlayingFileAsMicrophone(channel);
+}
+
+JOWW(jint, VoiceEngine_numOfCodecs)(JNIEnv* jni, jobject j_voe) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->codec->NumOfCodecs();
+}
+
+JOWW(jobject, VoiceEngine_getCodec)(JNIEnv* jni, jobject j_voe, jint index) {
+  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+  // Ownership of |codec| is transferred to the Java CodecInst object, which
+  // stores the pointer and frees it in CodecInst_dispose.
+  webrtc::CodecInst* codec = new webrtc::CodecInst();
+  CHECK(voe_data->codec->GetCodec(index, *codec) == 0,
+        "getCodec must be called with valid index");
+  jclass j_codec_class = GetClass(jni, "org/webrtc/webrtcdemo/CodecInst");
+  // "<init>" is the JNI method name for a constructor.
+  jmethodID j_codec_ctor = GetMethodID(jni, j_codec_class, "<init>", "(J)V");
+  jobject j_codec =
+      jni->NewObject(j_codec_class, j_codec_ctor, jlongFromPointer(codec));
+  CHECK_EXCEPTION(jni, "error during NewObject");
+  return j_codec;
+}
+
+JOWW(jint, VoiceEngine_setSendCodec)(JNIEnv* jni, jobject j_voe, jint channel,
+ jobject j_codec) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ webrtc::CodecInst* inst = GetCodecInst(jni, j_codec);
+ return voe_data->codec->SetSendCodec(channel, *inst);
+}
+
+JOWW(jint, VoiceEngine_setEcStatus)(JNIEnv* jni, jobject j_voe, jboolean enable,
+                                    jint ec_mode) {
+  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+  // |ec_mode| carries a webrtc::EcModes enum value from Java.
+  return voe_data->apm->SetEcStatus(enable,
+                                    static_cast<webrtc::EcModes>(ec_mode));
+}
+
+JOWW(jint, VoiceEngine_setAecmMode)(JNIEnv* jni, jobject j_voe, jint aecm_mode,
+                                    jboolean cng) {
+  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+  // |aecm_mode| carries a webrtc::AecmModes enum value from Java.
+  return voe_data->apm->SetAecmMode(static_cast<webrtc::AecmModes>(aecm_mode),
+                                    cng);
+}
+
+JOWW(jint, VoiceEngine_setAgcStatus)(JNIEnv* jni, jobject j_voe,
+                                     jboolean enable, jint agc_mode) {
+  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+  // |agc_mode| carries a webrtc::AgcModes enum value from Java.
+  return voe_data->apm->SetAgcStatus(enable,
+                                     static_cast<webrtc::AgcModes>(agc_mode));
+}
+
+// Reads the fields of the Java AgcConfig object |j_config| into the native
+// webrtc::AgcConfig |agc_config|. (Parameter renamed from the misleading
+// |j_codec| -- it is an AGC configuration object, not a codec.)
+void GetNativeAgcConfig(JNIEnv* jni, jobject j_config,
+                        webrtc::AgcConfig* agc_config) {
+  jclass j_config_class = jni->GetObjectClass(j_config);
+  jfieldID dBOv_id = jni->GetFieldID(j_config_class, "targetLevelDbOv", "I");
+  agc_config->targetLeveldBOv = jni->GetIntField(j_config, dBOv_id);
+  jfieldID gain_id =
+      jni->GetFieldID(j_config_class, "digitalCompressionGaindB", "I");
+  agc_config->digitalCompressionGaindB = jni->GetIntField(j_config, gain_id);
+  jfieldID limiter_id = jni->GetFieldID(j_config_class, "limiterEnable", "Z");
+  agc_config->limiterEnable = jni->GetBooleanField(j_config, limiter_id);
+}
+
+JOWW(jint, VoiceEngine_setAgcConfig)(JNIEnv* jni, jobject j_voe,
+ jobject j_config) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ webrtc::AgcConfig config;
+ GetNativeAgcConfig(jni, j_config, &config);
+ return voe_data->apm->SetAgcConfig(config);
+}
+
+JOWW(jint, VoiceEngine_setNsStatus)(JNIEnv* jni, jobject j_voe, jboolean enable,
+                                    jint ns_mode) {
+  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+  // |ns_mode| carries a webrtc::NsModes enum value from Java.
+  return voe_data->apm->SetNsStatus(enable,
+                                    static_cast<webrtc::NsModes>(ns_mode));
+}
+
+JOWW(jint, VoiceEngine_startDebugRecording)(JNIEnv* jni, jobject j_voe,
+ jstring j_filename) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ std::string filename = JavaToStdString(jni, j_filename);
+ return voe_data->apm->StartDebugRecording(filename.c_str());
+}
+
+JOWW(jint, VoiceEngine_stopDebugRecording)(JNIEnv* jni, jobject j_voe) {
+ VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+ return voe_data->apm->StopDebugRecording();
+}
+
+JOWW(jint, VoiceEngine_startRtpDump)(JNIEnv* jni, jobject j_voe, jint channel,
+                                     jstring j_filename, jint direction) {
+  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+  std::string filename = JavaToStdString(jni, j_filename);
+  // |direction| carries a webrtc::RTPDirections enum value.
+  return voe_data->rtp->StartRTPDump(
+      channel, filename.c_str(),
+      static_cast<webrtc::RTPDirections>(direction));
+}
+
+JOWW(jint, VoiceEngine_stopRtpDump)(JNIEnv* jni, jobject j_voe, jint channel,
+                                    jint direction) {
+  VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
+  // |direction| carries a webrtc::RTPDirections enum value.
+  return voe_data->rtp->StopRTPDump(
+      channel, static_cast<webrtc::RTPDirections>(direction));
+}
+
+JOWW(void, CodecInst_dispose)(JNIEnv* jni, jobject j_codec) {
+ delete GetCodecInst(jni, j_codec);
+}
+
+JOWW(jint, CodecInst_plType)(JNIEnv* jni, jobject j_codec) {
+ return GetCodecInst(jni, j_codec)->pltype;
+}
+
+JOWW(jstring, CodecInst_name)(JNIEnv* jni, jobject j_codec) {
+ return jni->NewStringUTF(GetCodecInst(jni, j_codec)->plname);
+}
+
+JOWW(jint, CodecInst_plFrequency)(JNIEnv* jni, jobject j_codec) {
+ return GetCodecInst(jni, j_codec)->plfreq;
+}
+
+JOWW(jint, CodecInst_pacSize)(JNIEnv* jni, jobject j_codec) {
+ return GetCodecInst(jni, j_codec)->pacsize;
+}
+
+JOWW(jint, CodecInst_channels)(JNIEnv* jni, jobject j_codec) {
+ return GetCodecInst(jni, j_codec)->channels;
+}
+
+JOWW(jint, CodecInst_rate)(JNIEnv* jni, jobject j_codec) {
+ return GetCodecInst(jni, j_codec)->rate;
+}
diff --git a/webrtc/examples/android/media_demo/jni/voice_engine_jni.h b/webrtc/examples/android/media_demo/jni/voice_engine_jni.h
new file mode 100644
index 000000000..57ef50765
--- /dev/null
+++ b/webrtc/examples/android/media_demo/jni/voice_engine_jni.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VOICE_ENGINE_H_
+#define WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VOICE_ENGINE_H_
+
+#include <jni.h>
+
+namespace webrtc {
+
+class VoiceEngine;
+
+}  // namespace webrtc
+
+namespace webrtc_examples {
+
+// Registers the JavaVM and caches the global class references the voice
+// engine JNI layer needs. Call before any other function in this header.
+void SetVoeDeviceObjects(JavaVM* vm);
+// Releases the cached class references; call during shutdown.
+void ClearVoeDeviceObjects();
+
+}  // namespace webrtc_examples
+
+// Returns the native VoiceEngine backing the Java VoiceEngine object |j_voe|.
+webrtc::VoiceEngine* GetVoiceEngine(JNIEnv* jni, jobject j_voe);
+
+#endif  // WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VOICE_ENGINE_H_
diff --git a/webrtc/video_engine/test/android/project.properties b/webrtc/examples/android/media_demo/project.properties
similarity index 100%
rename from webrtc/video_engine/test/android/project.properties
rename to webrtc/examples/android/media_demo/project.properties
diff --git a/webrtc/examples/android/media_demo/res/drawable/logo.png b/webrtc/examples/android/media_demo/res/drawable/logo.png
new file mode 100644
index 000000000..1ff07d110
Binary files /dev/null and b/webrtc/examples/android/media_demo/res/drawable/logo.png differ
diff --git a/webrtc/examples/android/media_demo/res/layout/audiomenu.xml b/webrtc/examples/android/media_demo/res/layout/audiomenu.xml
new file mode 100644
index 000000000..f35547062
--- /dev/null
+++ b/webrtc/examples/android/media_demo/res/layout/audiomenu.xml
@@ -0,0 +1,80 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/layout/dropdownitems.xml b/webrtc/examples/android/media_demo/res/layout/dropdownitems.xml
new file mode 100644
index 000000000..101461200
--- /dev/null
+++ b/webrtc/examples/android/media_demo/res/layout/dropdownitems.xml
@@ -0,0 +1,17 @@
+
+
+
+
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/layout/mainmenu.xml b/webrtc/examples/android/media_demo/res/layout/mainmenu.xml
new file mode 100644
index 000000000..8a119dc01
--- /dev/null
+++ b/webrtc/examples/android/media_demo/res/layout/mainmenu.xml
@@ -0,0 +1,39 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/layout/settingsmenu.xml b/webrtc/examples/android/media_demo/res/layout/settingsmenu.xml
new file mode 100644
index 000000000..365bb1490
--- /dev/null
+++ b/webrtc/examples/android/media_demo/res/layout/settingsmenu.xml
@@ -0,0 +1,71 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/layout/videomenu.xml b/webrtc/examples/android/media_demo/res/layout/videomenu.xml
new file mode 100644
index 000000000..761710ff0
--- /dev/null
+++ b/webrtc/examples/android/media_demo/res/layout/videomenu.xml
@@ -0,0 +1,64 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/menu/main_activity_actions.xml b/webrtc/examples/android/media_demo/res/menu/main_activity_actions.xml
new file mode 100644
index 000000000..a4921a6bb
--- /dev/null
+++ b/webrtc/examples/android/media_demo/res/menu/main_activity_actions.xml
@@ -0,0 +1,5 @@
+
+
+
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/values/bools.xml b/webrtc/examples/android/media_demo/res/values/bools.xml
new file mode 100644
index 000000000..d4f3fc0e9
--- /dev/null
+++ b/webrtc/examples/android/media_demo/res/values/bools.xml
@@ -0,0 +1,13 @@
+
+
+ false
+ true
+ true
+ true
+ true
+ false
+ true
+ true
+ true
+ true
+
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/values/integers.xml b/webrtc/examples/android/media_demo/res/values/integers.xml
new file mode 100644
index 000000000..562643b5f
--- /dev/null
+++ b/webrtc/examples/android/media_demo/res/values/integers.xml
@@ -0,0 +1,13 @@
+
+
+ 11113
+ 11113
+ 0
+ 1
+ 2
+ 0
+ 0
+ 0
+ 11111
+ 11111
+
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/res/values/strings.xml b/webrtc/examples/android/media_demo/res/values/strings.xml
new file mode 100644
index 000000000..297d289b0
--- /dev/null
+++ b/webrtc/examples/android/media_demo/res/values/strings.xml
@@ -0,0 +1,41 @@
+
+
+ AECM
+ WebRTC Engine Demo
+ Audio Rx Port
+ Audio Tx Port
+ AGC
+ SwitchToBack
+ Codec Size
+ Codec Type
+ APMRecord
+ Video Engine Android Demo
+ Video Receive
+ Video Send
+ Audio
+ Error
+ Camera Error
+ Exit
+ SwitchToFront
+ Global Settings
+ Loopback
+ 127.0.0.1
+ NACK
+ NS
+ Remote IP address
+ rtpdump
+ Speaker
+ Start Both
+ StartCall
+ Start Listen
+ Start Send
+ Stats
+ Stats on
+ Stats off
+ StopCall
+ SurfaceView
+ WEBRTC
+ Video Rx Port
+ Video Settings
+ Video Tx Port
+
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/AudioMenuFragment.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/AudioMenuFragment.java
new file mode 100644
index 000000000..94e23c246
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/AudioMenuFragment.java
@@ -0,0 +1,156 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.app.Activity;
+import android.app.Fragment;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.AdapterView;
+import android.widget.AdapterView.OnItemSelectedListener;
+import android.widget.CheckBox;
+import android.widget.EditText;
+import android.widget.Spinner;
+import android.widget.TextView;
+import java.lang.Integer;
+
+public class AudioMenuFragment extends Fragment {
+
+ private String TAG;
+ private MenuStateProvider stateProvider;
+
+ @Override
+ public View onCreateView(LayoutInflater inflater, ViewGroup container,
+ Bundle savedInstanceState) {
+ View v = inflater.inflate(R.layout.audiomenu, container, false);
+
+ TAG = getResources().getString(R.string.tag);
+
+ String[] audioCodecsStrings = getEngine().audioCodecsAsString();
+ Spinner spAudioCodecType = (Spinner) v.findViewById(R.id.spAudioCodecType);
+ spAudioCodecType.setAdapter(new SpinnerAdapter(getActivity(),
+ R.layout.dropdownitems,
+ audioCodecsStrings,
+ inflater));
+ spAudioCodecType.setSelection(getEngine().audioCodecIndex());
+ spAudioCodecType.setOnItemSelectedListener(new OnItemSelectedListener() {
+ public void onItemSelected(AdapterView> adapterView, View view,
+ int position, long id) {
+ getEngine().setAudioCodec(position);
+ }
+ public void onNothingSelected(AdapterView> arg0) {
+ Log.d(TAG, "No setting selected");
+ }
+ });
+
+ EditText etATxPort = (EditText) v.findViewById(R.id.etATxPort);
+ etATxPort.setText(Integer.toString(getEngine().audioTxPort()));
+ etATxPort.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View editText) {
+ EditText etATxPort = (EditText) editText;
+ getEngine()
+ .setAudioTxPort(Integer.parseInt(etATxPort.getText().toString()));
+ etATxPort.setText(Integer.toString(getEngine().audioTxPort()));
+ }
+ });
+ EditText etARxPort = (EditText) v.findViewById(R.id.etARxPort);
+ etARxPort.setText(Integer.toString(getEngine().audioRxPort()));
+ etARxPort.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View editText) {
+ EditText etARxPort = (EditText) editText;
+ getEngine()
+ .setAudioRxPort(Integer.parseInt(etARxPort.getText().toString()));
+ etARxPort.setText(Integer.toString(getEngine().audioRxPort()));
+
+ }
+ });
+
+ CheckBox cbEnableAecm = (CheckBox) v.findViewById(R.id.cbAecm);
+ cbEnableAecm.setChecked(getEngine().aecmEnabled());
+ cbEnableAecm.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableAecm = (CheckBox) checkBox;
+ getEngine().setEc(cbEnableAecm.isChecked());
+ cbEnableAecm.setChecked(getEngine().aecmEnabled());
+ }
+ });
+ CheckBox cbEnableNs = (CheckBox) v.findViewById(R.id.cbNoiseSuppression);
+ cbEnableNs.setChecked(getEngine().nsEnabled());
+ cbEnableNs.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableNs = (CheckBox) checkBox;
+ getEngine().setNs(cbEnableNs.isChecked());
+ cbEnableNs.setChecked(getEngine().nsEnabled());
+ }
+ });
+ CheckBox cbEnableAgc = (CheckBox) v.findViewById(R.id.cbAutoGainControl);
+ cbEnableAgc.setChecked(getEngine().agcEnabled());
+ cbEnableAgc.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableAgc = (CheckBox) checkBox;
+ getEngine().setAgc(cbEnableAgc.isChecked());
+ cbEnableAgc.setChecked(getEngine().agcEnabled());
+ }
+ });
+ CheckBox cbEnableSpeaker = (CheckBox) v.findViewById(R.id.cbSpeaker);
+ cbEnableSpeaker.setChecked(getEngine().speakerEnabled());
+ cbEnableSpeaker.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableSpeaker = (CheckBox) checkBox;
+ getEngine().setSpeaker(cbEnableSpeaker.isChecked());
+ cbEnableSpeaker.setChecked(getEngine().speakerEnabled());
+ }
+ });
+ CheckBox cbEnableDebugAPM =
+ (CheckBox) v.findViewById(R.id.cbDebugRecording);
+ cbEnableDebugAPM.setChecked(getEngine().apmRecord());
+ cbEnableDebugAPM.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableDebugAPM = (CheckBox) checkBox;
+ getEngine().setDebuging(cbEnableDebugAPM.isChecked());
+ cbEnableDebugAPM.setChecked(getEngine().apmRecord());
+ }
+ });
+ CheckBox cbEnableAudioRTPDump =
+ (CheckBox) v.findViewById(R.id.cbAudioRTPDump);
+ cbEnableAudioRTPDump.setChecked(getEngine().audioRtpDump());
+ cbEnableAudioRTPDump.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableAudioRTPDump = (CheckBox) checkBox;
+ getEngine().setIncomingVoeRtpDump(cbEnableAudioRTPDump.isChecked());
+ cbEnableAudioRTPDump.setChecked(getEngine().audioRtpDump());
+ }
+ });
+ return v;
+ }
+
+ @Override
+ public void onAttach(Activity activity) {
+ super.onAttach(activity);
+
+ // This makes sure that the container activity has implemented
+ // the callback interface. If not, it throws an exception.
+ try {
+ stateProvider = (MenuStateProvider) activity;
+ } catch (ClassCastException e) {
+ throw new ClassCastException(activity +
+ " must implement MenuStateProvider");
+ }
+ }
+
+ private MediaEngine getEngine() {
+ return stateProvider.getEngine();
+ }
+
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CameraDesc.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CameraDesc.java
new file mode 100644
index 000000000..28cc4fb35
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CameraDesc.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+public class CameraDesc {
+ private final long nativeCameraDesc;
+
+ // CameraDesc can only be created from the native layer.
+ private CameraDesc(long nativeCameraDesc) {
+ this.nativeCameraDesc = nativeCameraDesc;
+ }
+
+ // Dispose must be called before all references to CameraDesc are lost as it
+ // will free memory allocated in the native layer.
+ public native void dispose();
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CodecInst.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CodecInst.java
new file mode 100644
index 000000000..133d63926
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/CodecInst.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+public class CodecInst {
+ private final long nativeCodecInst;
+
+ // CodecInst can only be created from the native layer.
+ private CodecInst(long nativeCodecInst) {
+ this.nativeCodecInst = nativeCodecInst;
+ }
+
+ public String toString() {
+ return name() + " " +
+ "PlType: " + plType() + " " +
+ "PlFreq: " + plFrequency() + " " +
+ "Size: " + pacSize() + " " +
+ "Channels: " + channels() + " " +
+ "Rate: " + rate();
+ }
+
+ // Dispose must be called before all references to CodecInst are lost as it
+ // will free memory allocated in the native layer.
+ public native void dispose();
+ public native int plType();
+ public native String name();
+ public native int plFrequency();
+ public native int pacSize();
+ public native int channels();
+ public native int rate();
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MainMenuFragment.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MainMenuFragment.java
new file mode 100644
index 000000000..30daef33b
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MainMenuFragment.java
@@ -0,0 +1,183 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.app.Activity;
+import android.app.Fragment;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.SurfaceView;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.Button;
+import android.widget.LinearLayout;
+import android.widget.TextView;
+
+public class MainMenuFragment extends Fragment implements MediaEngineObserver {
+
+ private String TAG;
+ private MenuStateProvider stateProvider;
+
+ private Button btStartStopCall;
+ private TextView tvStats;
+
+ // Remote and local stream displays.
+ private LinearLayout llRemoteSurface;
+ private LinearLayout llLocalSurface;
+
+ @Override
+ public View onCreateView(LayoutInflater inflater, ViewGroup container,
+ Bundle savedInstanceState) {
+ View v = inflater.inflate(R.layout.mainmenu, container, false);
+
+ TAG = getResources().getString(R.string.tag);
+
+ llRemoteSurface = (LinearLayout) v.findViewById(R.id.llRemoteView);
+ llLocalSurface = (LinearLayout) v.findViewById(R.id.llLocalView);
+
+ Button btStats = (Button) v.findViewById(R.id.btStats);
+ boolean stats = getResources().getBoolean(R.bool.stats_enabled_default);
+ enableStats(btStats, stats);
+ btStats.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View button) {
+ boolean turnOnStats = ((Button) button).getText().equals(
+ getResources().getString(R.string.statsOn));
+ enableStats((Button) button, turnOnStats);
+ }
+ });
+ tvStats = (TextView) v.findViewById(R.id.tvStats);
+
+ Button btSwitchCamera = (Button) v.findViewById(R.id.btSwitchCamera);
+ if (getEngine().hasMultipleCameras()) {
+ btSwitchCamera.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View button) {
+ toggleCamera((Button) button);
+ }
+ });
+ } else {
+ btSwitchCamera.setEnabled(false);
+ }
+ btSwitchCamera.setText(getEngine().frontCameraIsSet() ?
+ R.string.backCamera :
+ R.string.frontCamera);
+
+ btStartStopCall = (Button) v.findViewById(R.id.btStartStopCall);
+ btStartStopCall.setText(getEngine().isRunning() ?
+ R.string.stopCall :
+ R.string.startCall);
+ btStartStopCall.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View button) {
+ toggleStart();
+ }
+ });
+ return v;
+ }
+
+ @Override
+ public void onAttach(Activity activity) {
+ super.onAttach(activity);
+
+ // This makes sure that the container activity has implemented
+ // the callback interface. If not, it throws an exception.
+ try {
+ stateProvider = (MenuStateProvider) activity;
+ } catch (ClassCastException e) {
+ throw new ClassCastException(activity +
+ " must implement MenuStateProvider");
+ }
+ }
+
+ // tvStats need to be updated on the UI thread.
+ public void newStats(final String stats) {
+ getActivity().runOnUiThread(new Runnable() {
+ public void run() {
+ tvStats.setText(stats);
+ }
+ });
+ }
+
+ private MediaEngine getEngine() {
+ return stateProvider.getEngine();
+ }
+
+ private void setViews() {
+ SurfaceView remoteSurfaceView = getEngine().getRemoteSurfaceView();
+ if (remoteSurfaceView != null) {
+ llRemoteSurface.addView(remoteSurfaceView);
+ }
+ SurfaceView svLocal = getEngine().getLocalSurfaceView();
+ if (svLocal != null) {
+ llLocalSurface.addView(svLocal);
+ }
+ }
+
+ private void clearViews() {
+ SurfaceView remoteSurfaceView = getEngine().getRemoteSurfaceView();
+ if (remoteSurfaceView != null) {
+ llRemoteSurface.removeView(remoteSurfaceView);
+ }
+ SurfaceView svLocal = getEngine().getLocalSurfaceView();
+ if (svLocal != null) {
+ llLocalSurface.removeView(svLocal);
+ }
+ }
+
+ private void enableStats(Button btStats, boolean enable) {
+ if (enable) {
+ getEngine().setObserver(this);
+ } else {
+ getEngine().setObserver(null);
+ // Clear old stats text by posting empty stats.
+ newStats("");
+ }
+ // If |enable| is true, stats were just turned on. This means that
+ // clicking the button again should turn off stats.
+ btStats.setText(enable ? R.string.statsOff : R.string.statsOn);
+ }
+
+ private void toggleCamera(Button btSwitchCamera) {
+ SurfaceView svLocal = getEngine().getLocalSurfaceView();
+ boolean resetLocalView = svLocal != null;
+ if (resetLocalView) {
+ llLocalSurface.removeView(svLocal);
+ }
+ getEngine().toggleCamera();
+ if (resetLocalView) {
+ svLocal = getEngine().getLocalSurfaceView();
+ llLocalSurface.addView(svLocal);
+ }
+ btSwitchCamera.setText(getEngine().frontCameraIsSet() ?
+ R.string.backCamera :
+ R.string.frontCamera);
+ }
+
+ public void toggleStart() {
+ if (getEngine().isRunning()) {
+ stopAll();
+ } else {
+ startCall();
+ }
+ btStartStopCall.setText(getEngine().isRunning() ?
+ R.string.stopCall :
+ R.string.startCall);
+ }
+
+ public void stopAll() {
+ clearViews();
+ getEngine().stop();
+ }
+
+ private void startCall() {
+ getEngine().start();
+ setViews();
+ }
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaCodecVideoDecoder.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaCodecVideoDecoder.java
new file mode 100644
index 000000000..ba811d0d2
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaCodecVideoDecoder.java
@@ -0,0 +1,338 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.app.AlertDialog;
+import android.content.Context;
+import android.content.DialogInterface;
+import android.media.MediaCodec;
+import android.media.MediaCrypto;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.util.Log;
+import android.view.Surface;
+import android.view.SurfaceView;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.LinkedList;
+
+class MediaCodecVideoDecoder {
+ public static final int DECODE = 0;
+ private enum CodecName { ON2_VP8, GOOGLE_VPX, EXYNOX_VP8 }
+
+ private void check(boolean value, String message) {
+ if (value) {
+ return;
+ }
+ Log.e("WEBRTC-CHECK", message);
+ AlertDialog alertDialog = new AlertDialog.Builder(context).create();
+ alertDialog.setTitle("WebRTC Error");
+ alertDialog.setMessage(message);
+ alertDialog.setButton(DialogInterface.BUTTON_POSITIVE,
+ "OK",
+ new DialogInterface.OnClickListener() {
+ public void onClick(DialogInterface dialog, int which) {
+ return;
+ }
+ }
+ );
+ alertDialog.show();
+ }
+
+ class Frame {
+ public ByteBuffer buffer;
+ public long timestampUs;
+
+ Frame(ByteBuffer buffer, long timestampUs) {
+ this.buffer = buffer;
+ this.timestampUs = timestampUs;
+ }
+ }
+
+ // This class enables decoding to be run on a separate thread.
+ class DecodeHandler extends Handler {
+ @Override
+ public void handleMessage(Message msg) {
+ // TODO(dwkang): figure out why exceptions just make this thread finish.
+ try {
+ switch (msg.what) {
+ case DECODE:
+ decodePendingBuffers();
+ long delayMillis = 5; // Don't busy wait.
+ handler.sendMessageDelayed(
+ handler.obtainMessage(DECODE), delayMillis);
+ break;
+ default:
+ break;
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ private static String TAG;
+ private Context context;
+ private SurfaceView surfaceView;
+
+ private DecodeHandler handler;
+ private Thread looperThread;
+
+ MediaCodec codec;
+ MediaFormat format;
+
+ // Buffers supplied by MediaCodec for pushing encoded data to and pulling
+ // decoded data from.
+ private ByteBuffer[] codecInputBuffers;
+ private ByteBuffer[] codecOutputBuffers;
+
+ // Frames from the native layer.
+ private LinkedList frameQueue;
+ // Indexes to MediaCodec buffers
+ private LinkedList availableInputBufferIndices;
+ private LinkedList availableOutputBufferIndices;
+ private LinkedList availableOutputBufferInfos;
+
+ // Offset between system time and media time.
+ private long deltaTimeUs;
+
+ public MediaCodecVideoDecoder(Context context) {
+ TAG = context.getString(R.string.tag);
+ this.context = context;
+ surfaceView = new SurfaceView(context);
+ frameQueue = new LinkedList ();
+ availableInputBufferIndices = new LinkedList();
+ availableOutputBufferIndices = new LinkedList();
+ availableOutputBufferInfos = new LinkedList();
+ }
+
+ public void dispose() {
+ codec.stop();
+ codec.release();
+ }
+
+ // Return view that is written to by MediaCodec.
+ public SurfaceView getView() { return surfaceView; }
+
+ // Entry point from the native layer. Called when the class should be ready
+ // to start receiving raw frames.
+ private boolean start(int width, int height) {
+ deltaTimeUs = -1;
+ if (!setCodecState(width, height, CodecName.ON2_VP8)) {
+ return false;
+ }
+ startLooperThread();
+ // The decoding must happen on |looperThread| thread.
+ handler.sendMessage(handler.obtainMessage(DECODE));
+ return true;
+ }
+
+ private boolean setCodecState(int width, int height, CodecName codecName) {
+ // TODO(henrike): enable more than ON2_VP8 codec.
+ format = new MediaFormat();
+ format.setInteger(MediaFormat.KEY_WIDTH, width);
+ format.setInteger(MediaFormat.KEY_HEIGHT, height);
+ try {
+ switch (codecName) {
+ case ON2_VP8:
+ format.setString(MediaFormat.KEY_MIME, "video/x-vnd.on2.vp8");
+ codec = MediaCodec.createDecoderByType("video/x-vnd.on2.vp8");
+ break;
+ case GOOGLE_VPX:
+ // SW VP8 decoder
+ codec = MediaCodec.createByCodecName("OMX.google.vpx.decoder");
+ break;
+ case EXYNOX_VP8:
+ // Nexus10 HW VP8 decoder
+ codec = MediaCodec.createByCodecName("OMX.Exynos.VP8.Decoder");
+ break;
+ default:
+ return false;
+ }
+ } catch (Exception e) {
+ // TODO(dwkang): replace this instanceof/throw with a narrower catch
+ // clause once the SDK advances.
+ if (e instanceof IOException) {
+ Log.e(TAG, "Failed to create MediaCodec for VP8.", e);
+ return false;
+ }
+ throw new RuntimeException(e);
+ }
+ Surface surface = surfaceView.getHolder().getSurface();
+ MediaCrypto crypto = null; // No crypto.
+ int flags = 0; // Decoder (1 for encoder)
+ codec.configure(format, surface, crypto, flags);
+ codec.start();
+ codecInputBuffers = codec.getInputBuffers();
+ codecOutputBuffers = codec.getOutputBuffers();
+ return true;
+ }
+
+ private void startLooperThread() {
+ looperThread = new Thread() {
+ @Override
+ public void run() {
+ Looper.prepare();
+ // Handler that is run by this thread.
+ handler = new DecodeHandler();
+ // Notify that the thread has created a handler.
+ synchronized(MediaCodecVideoDecoder.this) {
+ MediaCodecVideoDecoder.this.notify();
+ }
+ Looper.loop();
+ }
+ };
+ looperThread.start();
+ // Wait for thread to notify that Handler has been set up.
+ synchronized(this) {
+ try {
+ wait();
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ // Entry point from the native layer. It pushes the raw buffer to this class.
+ private void pushBuffer(ByteBuffer buffer, long renderTimeMs) {
+ // TODO(dwkang): figure out why exceptions just make this thread finish.
+ try {
+ final long renderTimeUs = renderTimeMs * 1000;
+ synchronized(frameQueue) {
+ frameQueue.add(new Frame(buffer, renderTimeUs));
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private boolean hasFrame() {
+ synchronized(frameQueue) {
+ return !frameQueue.isEmpty();
+ }
+ }
+
+ private Frame dequeueFrame() {
+ synchronized(frameQueue) {
+ return frameQueue.removeFirst();
+ }
+ }
+
+ private void flush() {
+ availableInputBufferIndices.clear();
+ availableOutputBufferIndices.clear();
+ availableOutputBufferInfos.clear();
+
+ codec.flush();
+ }
+
+ // Media time is relative to previous frame.
+ private long mediaTimeToSystemTime(long mediaTimeUs) {
+ if (deltaTimeUs == -1) {
+ long nowUs = System.currentTimeMillis() * 1000;
+ deltaTimeUs = nowUs - mediaTimeUs;
+ }
+ return deltaTimeUs + mediaTimeUs;
+ }
+
+ private void decodePendingBuffers() {
+ int timeoutUs = 0; // Don't block on dequeuing input buffer.
+
+ int index = codec.dequeueInputBuffer(timeoutUs);
+ if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
+ availableInputBufferIndices.add(index);
+ }
+ while (feedInputBuffer()) {}
+
+ MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+ index = codec.dequeueOutputBuffer(info, timeoutUs);
+ if (index > 0) {
+ availableOutputBufferIndices.add(index);
+ availableOutputBufferInfos.add(info);
+ }
+ if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+ codecOutputBuffers = codec.getOutputBuffers();
+ }
+
+ while (drainOutputBuffer()) {}
+ }
+
+ // Returns true if MediaCodec is ready for more data and there was data
+ // available from the native layer.
+ private boolean feedInputBuffer() {
+ if (availableInputBufferIndices.isEmpty()) {
+ return false;
+ }
+ if (!hasFrame()) {
+ return false;
+ }
+ Frame frame = dequeueFrame();
+ ByteBuffer buffer = frame.buffer;
+
+ int index = availableInputBufferIndices.pollFirst();
+ ByteBuffer codecData = codecInputBuffers[index];
+ check(codecData.capacity() >= buffer.capacity(),
+ "Buffer is too small to copy a frame.");
+ buffer.rewind();
+ codecData.rewind();
+ codecData.put(buffer);
+
+ try {
+ int offset = 0;
+ int flags = 0;
+ codec.queueInputBuffer(index, offset, buffer.capacity(),
+ frame.timestampUs, flags);
+ } catch (MediaCodec.CryptoException e) {
+ check(false, "CryptoException w/ errorCode " + e.getErrorCode() +
+ ", '" + e.getMessage() + "'");
+ }
+ return true;
+ }
+
+ // Returns true if more output data could be drained, i.e. MediaCodec has
+ // more data to deliver.
+ private boolean drainOutputBuffer() {
+ if (availableOutputBufferIndices.isEmpty()) {
+ return false;
+ }
+
+ int index = availableOutputBufferIndices.peekFirst();
+ MediaCodec.BufferInfo info = availableOutputBufferInfos.peekFirst();
+ if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+ // End of stream is unexpected with streamed video.
+ check(false, "Saw output end of stream.");
+ return false;
+ }
+ long realTimeUs = mediaTimeToSystemTime(info.presentationTimeUs);
+ long nowUs = System.currentTimeMillis() * 1000;
+ long lateUs = nowUs - realTimeUs;
+ if (lateUs < -10000) {
+ // Frame should not be presented yet.
+ return false;
+ }
+
+ // TODO(dwkang): For some extreme cases, just not doing rendering is not
+ // enough. Need to seek to the next key frame.
+ boolean render = lateUs <= 30000;
+ if (!render) {
+ Log.d(TAG, "video late by " + lateUs + " us. Skipping...");
+ }
+ // Decode and render to surface if desired.
+ codec.releaseOutputBuffer(index, render);
+ availableOutputBufferIndices.removeFirst();
+ availableOutputBufferInfos.removeFirst();
+ return true;
+ }
+}
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java
new file mode 100644
index 000000000..1808a3a94
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngine.java
@@ -0,0 +1,739 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import org.webrtc.videoengine.ViERenderer;
+
+import android.app.AlertDialog;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.DialogInterface;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.hardware.Camera;
+import android.hardware.Camera.CameraInfo;
+import android.hardware.SensorManager;
+import android.os.Environment;
+import android.util.Log;
+import android.view.OrientationEventListener;
+import android.view.SurfaceView;
+import java.io.File;
+
+public class MediaEngine implements VideoDecodeEncodeObserver {
+ // TODO(henrike): Most of these should be moved to xml (since static).
+ private static final int VCM_VP8_PAYLOAD_TYPE = 100;
+ private static final int SEND_CODEC_FPS = 30;
+ // TODO(henrike): increase INIT_BITRATE_KBPS to 2000 and ensure that
+ // 720p30fps can be achieved (on hardware that can handle it). Note that
+ // setting 2000 currently leads to failure, so that has to be resolved first.
+ private static final int INIT_BITRATE_KBPS = 500;
+ private static final int MAX_BITRATE_KBPS = 3000;
+ private static final String LOG_DIR = "webrtc";
+ private static final int WIDTH_IDX = 0;
+ private static final int HEIGHT_IDX = 1;
+ private static final int[][] RESOLUTIONS = {
+ {176,144}, {320,240}, {352,288}, {640,480}, {1280,720}
+ };
+ // Arbitrary choice of 4/5 volume (204/256).
+ private static final int volumeLevel = 204;
+
+ public static int numberOfResolutions() { return RESOLUTIONS.length; }
+
+ public static String[] resolutionsAsString() {
+ String[] retVal = new String[numberOfResolutions()];
+ for (int i = 0; i < numberOfResolutions(); ++i) {
+ retVal[i] = RESOLUTIONS[i][0] + "x" + RESOLUTIONS[i][1];
+ }
+ return retVal;
+ }
+
+ // Checks for and communicates failures to the user (logcat and popup).
+ private void check(boolean value, String message) {
+ if (value) {
+ return;
+ }
+ Log.e("WEBRTC-CHECK", message);
+ AlertDialog alertDialog = new AlertDialog.Builder(context).create();
+ alertDialog.setTitle("WebRTC Error");
+ alertDialog.setMessage(message);
+ alertDialog.setButton(DialogInterface.BUTTON_POSITIVE,
+ "OK",
+ new DialogInterface.OnClickListener() {
+ public void onClick(DialogInterface dialog, int which) {
+ dialog.dismiss();
+ return;
+ }
+ }
+ );
+ alertDialog.show();
+ }
+
+ // This class represent the cameras available on the device.
+ private class WebrtcCamera {
+ private final CameraInfo info;
+
+ WebrtcCamera(CameraInfo info) {
+ this.info = info;
+ }
+
+ // Converts device rotation to camera rotation. Rotation depends on
+ // whether the camera is back facing (rotating with the device) or front
+ // facing (rotating in the opposite direction of the device).
+ public int rotationFromRealWorldUp(int deviceRotation) {
+ int coarseDeviceOrientation = roundRotation(deviceRotation);
+ if (frontFacing()) {
+ // The front camera rotates in the opposite direction of the
+ // device.
+ int inverseDeviceOrientation = 360 - coarseDeviceOrientation;
+ return (inverseDeviceOrientation + orientation()) % 360;
+ }
+ return (coarseDeviceOrientation + orientation()) % 360;
+ }
+
+ // Rounds rotation to the nearest 90 degree rotation.
+ private int roundRotation(int rotation) {
+ return (int)(Math.round((double)rotation / 90) * 90) % 360;
+ }
+
+ public boolean frontFacing() {
+ return info.facing == CameraInfo.CAMERA_FACING_FRONT;
+ }
+
+ // Rotation of camera with respect to device up.
+ private int orientation() {
+ return info.orientation;
+ }
+ }
+
+ // Shared Audio/Video members.
+ private final Context context;
+ private String remoteIp;
+ private boolean enableTrace;
+
+ // Audio
+ private VoiceEngine voe;
+ private int audioChannel;
+ private boolean audioEnabled;
+ private boolean voeRunning;
+ private int audioCodecIndex;
+ private int audioTxPort;
+ private int audioRxPort;
+
+ private boolean speakerEnabled;
+ private boolean headsetPluggedIn;
+ private boolean enableAgc;
+ private boolean enableNs;
+ private boolean enableAecm;
+
+ private BroadcastReceiver headsetListener;
+
+ private boolean audioRtpDump;
+ private boolean apmRecord;
+
+ // Video
+ private VideoEngine vie;
+ private int videoChannel;
+ private boolean receiveVideo;
+ private boolean sendVideo;
+ private boolean vieRunning;
+ private int videoCodecIndex;
+ private int resolutionIndex;
+ private int videoTxPort;
+ private int videoRxPort;
+
+ private WebrtcCamera cameras[];
+ private boolean useFrontCamera;
+ private int currentCameraHandle;
+ private boolean enableNack;
+ // openGl, surfaceView or mediaCodec (integers.xml)
+ private int viewSelection;
+ private boolean videoRtpDump;
+
+ private SurfaceView svLocal;
+ private SurfaceView svRemote;
+ MediaCodecVideoDecoder externalCodec;
+
+ private int inFps;
+ private int inKbps;
+ private int outFps;
+ private int outKbps;
+ private int inWidth;
+ private int inHeight;
+
+ private OrientationEventListener orientationListener;
+ private int deviceOrientation = OrientationEventListener.ORIENTATION_UNKNOWN;
+
+ public MediaEngine(Context context) {
+ this.context = context;
+ voe = new VoiceEngine();
+ check(voe.init() == 0, "Failed voe Init");
+ audioChannel = voe.createChannel();
+ check(audioChannel >= 0, "Failed voe CreateChannel");
+ vie = new VideoEngine();
+ check(vie.init() == 0, "Failed voe Init");
+ check(vie.setVoiceEngine(voe) == 0, "Failed setVoiceEngine");
+ videoChannel = vie.createChannel();
+ check(audioChannel >= 0, "Failed voe CreateChannel");
+ check(vie.connectAudioChannel(videoChannel, audioChannel) == 0,
+ "Failed ConnectAudioChannel");
+
+ cameras = new WebrtcCamera[Camera.getNumberOfCameras()];
+ CameraInfo info = new CameraInfo();
+ for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+ Camera.getCameraInfo(i, info);
+ cameras[info.facing] = new WebrtcCamera(info);
+ }
+ setDefaultCamera();
+ check(voe.setSpeakerVolume(volumeLevel) == 0,
+ "Failed setSpeakerVolume");
+ check(voe.setAecmMode(VoiceEngine.AecmModes.SPEAKERPHONE, false) == 0,
+ "VoE set Aecm speakerphone mode failed");
+ check(vie.setKeyFrameRequestMethod(videoChannel,
+ VideoEngine.VieKeyFrameRequestMethod.
+ KEY_FRAME_REQUEST_PLI_RTCP) == 0,
+ "Failed setKeyFrameRequestMethod");
+ check(vie.registerObserver(videoChannel, this) == 0,
+ "Failed registerObserver");
+
+ // TODO(hellner): SENSOR_DELAY_NORMAL?
+ // Listen to changes in device orientation.
+ orientationListener =
+ new OrientationEventListener(context, SensorManager.SENSOR_DELAY_UI) {
+ public void onOrientationChanged (int orientation) {
+ deviceOrientation = orientation;
+ compensateRotation();
+ }
+ };
+ orientationListener.enable();
+ // Listen to headset being plugged in/out.
+ IntentFilter receiverFilter = new IntentFilter(Intent.ACTION_HEADSET_PLUG);
+ headsetListener = new BroadcastReceiver() {
+ @Override
+ public void onReceive(Context context, Intent intent) {
+ if (intent.getAction().compareTo(Intent.ACTION_HEADSET_PLUG) == 0) {
+ headsetPluggedIn = intent.getIntExtra("state", 0) == 1;
+ updateAudioOutput();
+ }
+ }
+ };
+ context.registerReceiver(headsetListener, receiverFilter);
+ }
+
+ public void dispose() {
+    check(!voeRunning && !vieRunning, "Engines must be stopped before dispose");
+ context.unregisterReceiver(headsetListener);
+ orientationListener.disable();
+ check(vie.deregisterObserver(videoChannel) == 0,
+ "Failed deregisterObserver");
+ if (externalCodec != null) {
+ check(vie.deRegisterExternalReceiveCodec(videoChannel,
+ VCM_VP8_PAYLOAD_TYPE) == 0,
+ "Failed to deregister external decoder");
+ externalCodec = null;
+ }
+ check(vie.deleteChannel(videoChannel) == 0, "DeleteChannel");
+ vie.dispose();
+ check(voe.deleteChannel(audioChannel) == 0, "VoE delete channel failed");
+ voe.dispose();
+ }
+
+ public void start() {
+ if (audioEnabled) {
+ startVoE();
+ }
+ if (receiveVideo || sendVideo) {
+ startViE();
+ }
+ }
+
+ public void stop() {
+ stopVoe();
+ stopVie();
+ }
+
+ public boolean isRunning() {
+ return voeRunning || vieRunning;
+ }
+
+ public void setRemoteIp(String remoteIp) { this.remoteIp = remoteIp; }
+
+ public String remoteIp() { return remoteIp; }
+
+ public void setTrace(boolean enable) {
+ if (enable) {
+ vie.setTraceFile("/sdcard/trace.txt", false);
+ vie.setTraceFilter(VideoEngine.TraceLevel.TRACE_ERROR);
+ return;
+ }
+ vie.setTraceFilter(VideoEngine.TraceLevel.TRACE_NONE);
+ }
+
+ private String getDebugDirectory() {
+    // Should create a folder in /sdcard/|LOG_DIR|
+ return Environment.getExternalStorageDirectory().toString() + "/" +
+ LOG_DIR;
+ }
+
+ private boolean createDebugDirectory() {
+ File webrtc_dir = new File(getDebugDirectory());
+ if (!webrtc_dir.exists()) {
+ return webrtc_dir.mkdir();
+ }
+ return webrtc_dir.isDirectory();
+ }
+
+ public void startVoE() {
+ check(!voeRunning, "VoE already started");
+ check(voe.startListen(audioChannel) == 0, "Failed StartListen");
+ check(voe.startPlayout(audioChannel) == 0, "VoE start playout failed");
+ check(voe.startSend(audioChannel) == 0, "VoE start send failed");
+ voeRunning = true;
+ }
+
+ private void stopVoe() {
+ check(voeRunning, "VoE not started");
+ check(voe.stopSend(audioChannel) == 0, "VoE stop send failed");
+ check(voe.stopPlayout(audioChannel) == 0, "VoE stop playout failed");
+ check(voe.stopListen(audioChannel) == 0, "VoE stop listen failed");
+ voeRunning = false;
+ }
+
+ public void setAudio(boolean audioEnabled) {
+ this.audioEnabled = audioEnabled;
+ }
+
+ public boolean audioEnabled() { return audioEnabled; }
+
+ public int audioCodecIndex() { return audioCodecIndex; }
+
+ public void setAudioCodec(int codecNumber) {
+ audioCodecIndex = codecNumber;
+ CodecInst codec = voe.getCodec(codecNumber);
+ check(voe.setSendCodec(audioChannel, codec) == 0, "Failed setSendCodec");
+ codec.dispose();
+ }
+
+ public String[] audioCodecsAsString() {
+ String[] retVal = new String[voe.numOfCodecs()];
+ for (int i = 0; i < voe.numOfCodecs(); ++i) {
+ CodecInst codec = voe.getCodec(i);
+ retVal[i] = codec.toString();
+ codec.dispose();
+ }
+ return retVal;
+ }
+
+ private CodecInst[] defaultAudioCodecs() {
+ CodecInst[] retVal = new CodecInst[voe.numOfCodecs()];
+ for (int i = 0; i < voe.numOfCodecs(); ++i) {
+ retVal[i] = voe.getCodec(i);
+ }
+ return retVal;
+ }
+
+ public int getIsacIndex() {
+ CodecInst[] codecs = defaultAudioCodecs();
+ for (int i = 0; i < codecs.length; ++i) {
+ if (codecs[i].name().contains("ISAC")) {
+ return i;
+ }
+ }
+ return 0;
+ }
+
+ public void setAudioTxPort(int audioTxPort) {
+ this.audioTxPort = audioTxPort;
+ check(remoteIp != null,
+ "remoteIP must have been set before setting audio send port");
+ check(voe.setSendDestination(audioChannel, audioTxPort,
+ remoteIp) == 0, "VoE set send destination failed");
+ }
+
+ public int audioTxPort() { return audioTxPort; }
+
+ public void setAudioRxPort(int audioRxPort) {
+ check(voe.setLocalReceiver(audioChannel, audioRxPort) == 0,
+ "Failed setLocalReceiver");
+ this.audioRxPort = audioRxPort;
+ }
+
+ public int audioRxPort() { return audioRxPort; }
+
+ public boolean agcEnabled() { return enableAgc; }
+
+ public void setAgc(boolean enable) {
+ enableAgc = enable;
+ VoiceEngine.AgcConfig agc_config =
+ new VoiceEngine.AgcConfig(3, 9, true);
+ check(voe.setAgcConfig(agc_config) == 0, "VoE set AGC Config failed");
+ check(voe.setAgcStatus(enableAgc, VoiceEngine.AgcModes.FIXED_DIGITAL) == 0,
+ "VoE set AGC Status failed");
+ }
+
+ public boolean nsEnabled() { return enableNs; }
+
+ public void setNs(boolean enable) {
+ enableNs = enable;
+ check(voe.setNsStatus(enableNs,
+ VoiceEngine.NsModes.MODERATE_SUPPRESSION) == 0,
+ "VoE set NS Status failed");
+ }
+
+ public boolean aecmEnabled() { return enableAecm; }
+
+ public void setEc(boolean enable) {
+ enableAecm = enable;
+ check(voe.setEcStatus(enable, VoiceEngine.EcModes.AECM) == 0,
+ "voe setEcStatus");
+ }
+
+ public boolean speakerEnabled() {
+ return speakerEnabled;
+ }
+
+ public void setSpeaker(boolean enable) {
+ speakerEnabled = enable;
+ updateAudioOutput();
+ }
+
+ // Debug helpers.
+ public boolean apmRecord() { return apmRecord; }
+
+ public boolean audioRtpDump() { return audioRtpDump; }
+
+ public void setDebuging(boolean enable) {
+ apmRecord = enable;
+ if (!enable) {
+ check(voe.stopDebugRecording() == 0, "Failed stopping debug");
+ return;
+ }
+ if (!createDebugDirectory()) {
+ check(false, "Unable to create debug directory.");
+ return;
+ }
+ String debugDirectory = getDebugDirectory();
+ check(voe.startDebugRecording(debugDirectory + String.format("/apm_%d.dat",
+ System.currentTimeMillis())) == 0,
+ "Failed starting debug");
+ }
+
+ public void setIncomingVoeRtpDump(boolean enable) {
+ audioRtpDump = enable;
+ if (!enable) {
+      check(voe.stopRtpDump(audioChannel,
+ VoiceEngine.RtpDirections.INCOMING) == 0,
+ "voe stopping rtp dump");
+ return;
+ }
+ String debugDirectory = getDebugDirectory();
+    check(voe.startRtpDump(audioChannel, debugDirectory +
+ String.format("/voe_%d.rtp", System.currentTimeMillis()),
+ VoiceEngine.RtpDirections.INCOMING) == 0,
+ "voe starting rtp dump");
+ }
+
+ private void updateAudioOutput() {
+ boolean useSpeaker = !headsetPluggedIn && speakerEnabled;
+ check(voe.setLoudspeakerStatus(useSpeaker) == 0,
+ "Failed updating loudspeaker");
+ }
+
+ public void startViE() {
+ check(!vieRunning, "ViE already started");
+
+ if (receiveVideo) {
+ if (viewSelection ==
+ context.getResources().getInteger(R.integer.openGl)) {
+ svRemote = ViERenderer.CreateRenderer(context, true);
+ } else if (viewSelection ==
+ context.getResources().getInteger(R.integer.surfaceView)) {
+ svRemote = ViERenderer.CreateRenderer(context, false);
+ } else {
+ externalCodec = new MediaCodecVideoDecoder(context);
+ svRemote = externalCodec.getView();
+ }
+ if (externalCodec != null) {
+ check(vie.registerExternalReceiveCodec(videoChannel,
+ VCM_VP8_PAYLOAD_TYPE, externalCodec, true) == 0,
+ "Failed to register external decoder");
+ } else {
+ check(vie.addRenderer(videoChannel, svRemote,
+ 0, 0, 0, 1, 1) == 0, "Failed AddRenderer");
+ check(vie.startRender(videoChannel) == 0, "Failed StartRender");
+ }
+ check(vie.startReceive(videoChannel) == 0, "Failed StartReceive");
+ }
+ if (sendVideo) {
+ startCamera();
+ check(vie.startSend(videoChannel) == 0, "Failed StartSend");
+ }
+ vieRunning = true;
+ }
+
+ private void stopVie() {
+ if (!vieRunning) {
+ return;
+ }
+ check(vie.stopSend(videoChannel) == 0, "StopSend");
+ stopCamera();
+ check(vie.stopReceive(videoChannel) == 0, "StopReceive");
+ if (externalCodec != null) {
+ check(vie.deRegisterExternalReceiveCodec(videoChannel,
+ VCM_VP8_PAYLOAD_TYPE) == 0,
+ "Failed to deregister external decoder");
+ externalCodec.dispose();
+ externalCodec = null;
+ } else {
+ check(vie.stopRender(videoChannel) == 0, "StopRender");
+ check(vie.removeRenderer(videoChannel) == 0, "RemoveRenderer");
+ }
+ svRemote = null;
+ vieRunning = false;
+ }
+
+ public void setReceiveVideo(boolean receiveVideo) {
+ this.receiveVideo = receiveVideo;
+ }
+
+ public boolean receiveVideo() { return receiveVideo; }
+
+ public void setSendVideo(boolean sendVideo) { this.sendVideo = sendVideo; }
+
+ public boolean sendVideo() { return sendVideo; }
+
+ public int videoCodecIndex() { return videoCodecIndex; }
+
+ public void setVideoCodec(int codecNumber) {
+ videoCodecIndex = codecNumber;
+ updateVideoCodec();
+ }
+
+ public String[] videoCodecsAsString() {
+ String[] retVal = new String[vie.numberOfCodecs()];
+ for (int i = 0; i < vie.numberOfCodecs(); ++i) {
+ VideoCodecInst codec = vie.getCodec(i);
+ retVal[i] = codec.toString();
+ codec.dispose();
+ }
+ return retVal;
+ }
+
+ public int resolutionIndex() { return resolutionIndex; }
+
+ public void setResolutionIndex(int resolution) {
+ resolutionIndex = resolution;
+ updateVideoCodec();
+ }
+
+ private void updateVideoCodec() {
+ VideoCodecInst codec = getVideoCodec(videoCodecIndex, resolutionIndex);
+    check(vie.setSendCodec(videoChannel, codec) == 0, "Failed setSendCodec");
+ codec.dispose();
+ }
+
+ private VideoCodecInst getVideoCodec(int codecNumber, int resolution) {
+ VideoCodecInst retVal = vie.getCodec(codecNumber);
+ retVal.setStartBitRate(INIT_BITRATE_KBPS);
+ retVal.setMaxBitRate(MAX_BITRATE_KBPS);
+ retVal.setWidth(RESOLUTIONS[resolution][WIDTH_IDX]);
+ retVal.setHeight(RESOLUTIONS[resolution][HEIGHT_IDX]);
+ retVal.setMaxFrameRate(SEND_CODEC_FPS);
+ return retVal;
+ }
+
+ public void setVideoRxPort(int videoRxPort) {
+ this.videoRxPort = videoRxPort;
+ check(vie.setLocalReceiver(videoChannel, videoRxPort) == 0,
+ "Failed setLocalReceiver");
+ }
+
+ public int videoRxPort() { return videoRxPort; }
+
+ public void setVideoTxPort(int videoTxPort) {
+ this.videoTxPort = videoTxPort;
+ check(remoteIp != null,
+        "remoteIP must have been set before setting video send port");
+ check(vie.setSendDestination(videoChannel, videoTxPort, remoteIp) == 0,
+ "Failed setSendDestination");
+ }
+
+ public int videoTxPort() {
+ return videoTxPort;
+ }
+
+ public boolean hasMultipleCameras() {
+ return cameras.length > 1;
+ }
+
+ public boolean frontCameraIsSet() {
+ return useFrontCamera;
+ }
+
+ // Set camera to front if one exists otherwise use back camera.
+ private void setDefaultCamera() {
+ useFrontCamera = hasFrontCamera();
+ }
+
+ public void toggleCamera() {
+ if (vieRunning) {
+ stopCamera();
+ }
+ useFrontCamera = !useFrontCamera;
+ if (vieRunning) {
+ startCamera();
+ }
+ }
+
+ private void startCamera() {
+ CameraDesc cameraInfo = vie.getCaptureDevice(getCameraId());
+ currentCameraHandle = vie.allocateCaptureDevice(cameraInfo);
+ cameraInfo.dispose();
+ check(vie.connectCaptureDevice(currentCameraHandle, videoChannel) == 0,
+ "Failed to connect capture device");
+ // Camera and preview surface. Note, renderer must be created before
+ // calling StartCapture or |svLocal| won't be able to render.
+ svLocal = ViERenderer.CreateLocalRenderer(context);
+ check(vie.startCapture(currentCameraHandle) == 0, "Failed StartCapture");
+ compensateRotation();
+ }
+
+ private void stopCamera() {
+ check(vie.stopCapture(currentCameraHandle) == 0, "Failed StopCapture");
+ svLocal = null;
+ check(vie.releaseCaptureDevice(currentCameraHandle) == 0,
+ "Failed ReleaseCaptureDevice");
+ }
+
+ private boolean hasFrontCamera() {
+ for (int i = 0; i < cameras.length; ++i) {
+ if (cameras[i].frontFacing()) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ public SurfaceView getRemoteSurfaceView() {
+ return svRemote;
+ }
+
+ public SurfaceView getLocalSurfaceView() {
+ return svLocal;
+ }
+
+ public void setViewSelection(int viewSelection) {
+ this.viewSelection = viewSelection;
+ }
+
+ public int viewSelection() { return viewSelection; }
+
+ public boolean nackEnabled() { return enableNack; }
+
+ public void setNack(boolean enable) {
+ enableNack = enable;
+ check(vie.setNackStatus(videoChannel, enableNack) == 0,
+ "Failed setNackStatus");
+ }
+
+ // Collates current state into a multiline string.
+ public String sendReceiveState() {
+ int packetLoss = 0;
+ if (vieRunning) {
+ RtcpStatistics stats = vie.getReceivedRtcpStatistics(videoChannel);
+ if (stats != null) {
+ // Calculate % lost from fraction lost.
+ // Definition of fraction lost can be found in RFC3550.
+ packetLoss = (stats.fractionLost * 100) >> 8;
+ }
+ }
+ String retVal =
+ "fps in/out: " + inFps + "/" + outFps + "\n" +
+ "kBps in/out: " + inKbps / 1024 + "/ " + outKbps / 1024 + "\n" +
+ "resolution: " + inWidth + "x" + inHeight + "\n" +
+ "loss: " + packetLoss + "%";
+ return retVal;
+ }
+
+ MediaEngineObserver observer;
+ public void setObserver(MediaEngineObserver observer) {
+ this.observer = observer;
+ }
+
+ // Callbacks from the VideoDecodeEncodeObserver interface.
+ public void incomingRate(int videoChannel, int framerate, int bitrate) {
+ inFps = framerate;
+ inKbps = bitrate;
+ newStats();
+ }
+
+ public void incomingCodecChanged(int videoChannel,
+ VideoCodecInst videoCodec) {
+ inWidth = videoCodec.width();
+ inHeight = videoCodec.height();
+ videoCodec.dispose();
+ newStats();
+ }
+
+ public void requestNewKeyFrame(int videoChannel) {}
+
+ public void outgoingRate(int videoChannel, int framerate, int bitrate) {
+ outFps = framerate;
+ outKbps = bitrate;
+ newStats();
+ }
+
+ private void newStats() {
+ if (observer != null) {
+ observer.newStats(sendReceiveState());
+ }
+ }
+
+ // Debug helpers.
+ public boolean videoRtpDump() { return videoRtpDump; }
+
+ public void setIncomingVieRtpDump(boolean enable) {
+ videoRtpDump = enable;
+ if (!enable) {
+ check(vie.stopRtpDump(videoChannel,
+ VideoEngine.RtpDirections.INCOMING) == 0,
+ "vie StopRTPDump");
+ return;
+ }
+ String debugDirectory = getDebugDirectory();
+ check(vie.startRtpDump(videoChannel, debugDirectory +
+ String.format("/vie_%d.rtp", System.currentTimeMillis()),
+ VideoEngine.RtpDirections.INCOMING) == 0,
+ "vie StartRtpDump");
+ }
+
+ private int getCameraId() {
+ return useFrontCamera ? Camera.CameraInfo.CAMERA_FACING_FRONT :
+ Camera.CameraInfo.CAMERA_FACING_BACK;
+ }
+
+ private void compensateRotation() {
+ if (svLocal == null) {
+ // Not rendering (or sending).
+ return;
+ }
+ if (deviceOrientation == OrientationEventListener.ORIENTATION_UNKNOWN) {
+ return;
+ }
+ int cameraRotation =
+ cameras[getCameraId()].rotationFromRealWorldUp(
+ deviceOrientation);
+ // Egress streams should have real world up as up.
+ check(
+ vie.setRotateCapturedFrames(currentCameraHandle, cameraRotation) == 0,
+ "Failed setRotateCapturedFrames: camera " + currentCameraHandle +
+        " rotation " + cameraRotation);
+ }
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngineObserver.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngineObserver.java
new file mode 100644
index 000000000..3ea91b5e9
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MediaEngineObserver.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+public interface MediaEngineObserver {
+ void newStats(String stats);
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MenuStateProvider.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MenuStateProvider.java
new file mode 100644
index 000000000..08cb50866
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/MenuStateProvider.java
@@ -0,0 +1,15 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+public interface MenuStateProvider {
+ public MediaEngine getEngine();
+}
\ No newline at end of file
diff --git a/webrtc/video_engine/test/android/src/org/webrtc/videoengineapp/IViEAndroidCallback.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/NativeWebRtcContextRegistry.java
similarity index 52%
rename from webrtc/video_engine/test/android/src/org/webrtc/videoengineapp/IViEAndroidCallback.java
rename to webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/NativeWebRtcContextRegistry.java
index 53cfe4744..3d4f00a4f 100644
--- a/webrtc/video_engine/test/android/src/org/webrtc/videoengineapp/IViEAndroidCallback.java
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/NativeWebRtcContextRegistry.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -8,12 +8,15 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-package org.webrtc.videoengineapp;
+package org.webrtc.webrtcdemo;
-public interface IViEAndroidCallback {
- public int updateStats(int frameRateI, int bitRateI,
- int packetLoss, int frameRateO,
- int bitRateO);
+import android.content.Context;
- public int newIncomingResolution(int width, int height);
-}
+public class NativeWebRtcContextRegistry {
+ static {
+ System.loadLibrary("webrtcdemo-jni");
+ }
+
+ public native void register(Context context);
+ public native void unRegister();
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/RtcpStatistics.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/RtcpStatistics.java
new file mode 100644
index 000000000..dbe817b1a
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/RtcpStatistics.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+public class RtcpStatistics {
+ // Definition of fraction lost can be found in RFC3550.
+ // It is equivalent to taking the integer part after multiplying the loss
+ // fraction by 256.
+ public final int fractionLost;
+ public final int cumulativeLost;
+ public final int extendedMax;
+ public final int jitter;
+ public final int rttMs;
+
+ // Only allowed to be created by the native layer.
+ private RtcpStatistics(int fractionLost, int cumulativeLost, int extendedMax,
+ int jitter, int rttMs) {
+ this.fractionLost = fractionLost;
+ this.cumulativeLost = cumulativeLost;
+ this.extendedMax = extendedMax;
+ this.jitter = jitter;
+ this.rttMs = rttMs;
+ }
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SettingsMenuFragment.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SettingsMenuFragment.java
new file mode 100644
index 000000000..f6cac96e6
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SettingsMenuFragment.java
@@ -0,0 +1,173 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.app.Activity;
+import android.app.Fragment;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.CheckBox;
+import android.widget.EditText;
+import android.widget.RadioGroup;
+import android.widget.TextView;
+import java.net.InetAddress;
+import java.net.NetworkInterface;
+import java.net.SocketException;
+import java.util.Enumeration;
+
+public class SettingsMenuFragment extends Fragment
+ implements RadioGroup.OnCheckedChangeListener {
+
+ private String TAG;
+ private MenuStateProvider stateProvider;
+
+ EditText etRemoteIp;
+
+ @Override
+ public View onCreateView(LayoutInflater inflater, ViewGroup container,
+ Bundle savedInstanceState) {
+ View v = inflater.inflate(R.layout.settingsmenu, container, false);
+
+ TAG = getResources().getString(R.string.tag);
+
+ CheckBox cbVideoReceive = (CheckBox) v.findViewById(R.id.cbVideoReceive);
+ cbVideoReceive.setChecked(getEngine().receiveVideo());
+ cbVideoReceive.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbVideoReceive = (CheckBox) checkBox;
+ getEngine().setReceiveVideo(cbVideoReceive.isChecked());
+ cbVideoReceive.setChecked(getEngine().receiveVideo());
+ }
+ });
+ CheckBox cbVideoSend = (CheckBox) v.findViewById(R.id.cbVideoSend);
+ cbVideoSend.setChecked(getEngine().sendVideo());
+ cbVideoSend.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbVideoSend = (CheckBox) checkBox;
+ getEngine().setSendVideo(cbVideoSend.isChecked());
+ cbVideoSend.setChecked(getEngine().sendVideo());
+ }
+ });
+ CheckBox cbAudio = (CheckBox) v.findViewById(R.id.cbAudio);
+ cbAudio.setChecked(getEngine().audioEnabled());
+ cbAudio.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbAudio = (CheckBox) checkBox;
+ getEngine().setAudio(cbAudio.isChecked());
+ cbAudio.setChecked(getEngine().audioEnabled());
+ }
+ });
+ boolean loopback =
+ getResources().getBoolean(R.bool.loopback_enabled_default);
+ CheckBox cbLoopback = (CheckBox) v.findViewById(R.id.cbLoopback);
+ cbLoopback.setChecked(loopback);
+ cbLoopback.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ loopbackChanged((CheckBox) checkBox);
+ }
+ });
+ etRemoteIp = (EditText) v.findViewById(R.id.etRemoteIp);
+ etRemoteIp.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View editText) {
+ getEngine().setRemoteIp(etRemoteIp.getText().toString());
+ }
+ });
+ // Has to be after remote IP as loopback changes it.
+ loopbackChanged(cbLoopback);
+ RadioGroup rRenderMechanism =
+ (RadioGroup) v.findViewById(R.id.rRenderMechanism);
+ rRenderMechanism.clearCheck();
+ if (getEngine().viewSelection() ==
+ getResources().getInteger(R.integer.openGl)) {
+ rRenderMechanism.check(R.id.rOpenGl);
+ } else if (getEngine().viewSelection() ==
+ getResources().getInteger(R.integer.surfaceView)) {
+ rRenderMechanism.check(R.id.rSurfaceView);
+ } else {
+ rRenderMechanism.check(R.id.rMediaCodec);
+ }
+ rRenderMechanism.setOnCheckedChangeListener(this);
+ return v;
+ }
+
+ @Override
+ public void onAttach(Activity activity) {
+ super.onAttach(activity);
+
+ // This makes sure that the container activity has implemented
+ // the callback interface. If not, it throws an exception.
+ try {
+ stateProvider = (MenuStateProvider) activity;
+ } catch (ClassCastException e) {
+ throw new ClassCastException(activity +
+ " must implement MenuStateProvider");
+ }
+ }
+
+ private void loopbackChanged(CheckBox cbLoopback) {
+ boolean loopback = cbLoopback.isChecked();
+ etRemoteIp.setText(loopback ? getLoopbackIPString() : getLocalIpAddress());
+ getEngine().setRemoteIp(etRemoteIp.getText().toString());
+ }
+
+ private String getLoopbackIPString() {
+ return getResources().getString(R.string.loopbackIp);
+ }
+
+ private String getLocalIpAddress() {
+ String localIp = "";
+ try {
+ for (Enumeration en = NetworkInterface
+ .getNetworkInterfaces(); en.hasMoreElements();) {
+ NetworkInterface intf = en.nextElement();
+ for (Enumeration enumIpAddr =
+ intf.getInetAddresses();
+ enumIpAddr.hasMoreElements(); ) {
+ InetAddress inetAddress = enumIpAddr.nextElement();
+ if (!inetAddress.isLoopbackAddress()) {
+ // Set the remote ip address the same as
+ // the local ip address of the last netif
+ localIp = inetAddress.getHostAddress().toString();
+ }
+ }
+ }
+ } catch (SocketException e) {
+ Log.e(TAG, "Unable to get local IP address. Not the end of the world", e);
+ }
+ return localIp;
+ }
+
+ private MediaEngine getEngine() {
+ return stateProvider.getEngine();
+ }
+
+ public void onCheckedChanged(RadioGroup group, int checkedId) {
+ switch (checkedId) {
+ case R.id.rOpenGl:
+ getEngine().setViewSelection(
+ getResources().getInteger(R.integer.openGl));
+ break;
+ case R.id.rSurfaceView:
+ getEngine().setViewSelection(
+ getResources().getInteger(R.integer.surfaceView));
+ break;
+ case R.id.rMediaCodec:
+ getEngine().setViewSelection(
+ getResources().getInteger(R.integer.mediaCodec));
+ break;
+ default:
+ break;
+ }
+ }
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SpinnerAdapter.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SpinnerAdapter.java
new file mode 100644
index 000000000..fb04a7aac
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/SpinnerAdapter.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.widget.ArrayAdapter;
+import android.content.Context;
+import android.widget.TextView;
+import android.view.View;
+import android.view.ViewGroup;
+import android.view.LayoutInflater;
+
+public class SpinnerAdapter extends ArrayAdapter {
+ private String[] menuItems;
+ LayoutInflater inflater;
+ int textViewResourceId;
+
+ public SpinnerAdapter(Context context, int textViewResourceId,
+ String[] objects, LayoutInflater inflater) {
+ super(context, textViewResourceId, objects);
+ menuItems = objects;
+ this.inflater = inflater;
+ this.textViewResourceId = textViewResourceId;
+ }
+
+ @Override public View getDropDownView(int position, View convertView,
+ ViewGroup parent) {
+ return getCustomView(position, convertView, parent);
+ }
+
+ @Override public View getView(int position, View convertView,
+ ViewGroup parent) {
+ return getCustomView(position, convertView, parent);
+ }
+
+ private View getCustomView(int position, View v, ViewGroup parent) {
+ View row = inflater.inflate(textViewResourceId, parent, false);
+ TextView label = (TextView) row.findViewById(R.id.spinner_row);
+ label.setText(menuItems[position]);
+ return row;
+ }
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VideoCodecInst.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VideoCodecInst.java
new file mode 100644
index 000000000..452acfbf0
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VideoCodecInst.java
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+public class VideoCodecInst {
+ private final long nativeCodecInst;
+
+ // VideoCodecInst can only be created from the native layer.
+ private VideoCodecInst(long nativeCodecInst) {
+ this.nativeCodecInst = nativeCodecInst;
+ }
+
+ public String toString() {
+ return name() + " " +
+ "PlType: " + plType() + " " +
+ "Width: " + width() + " " +
+ "Height: " + height() + " " +
+ "StartBitRate: " + startBitRate() + " " +
+ "MaxFrameRate: " + maxFrameRate();
+ }
+
+ // Dispose must be called before all references to VideoCodecInst are lost as
+ // it will free memory allocated in the native layer.
+ public native void dispose();
+ public native int plType();
+ public native String name();
+ public native int width();
+ public native void setWidth(int width);
+ public native int height();
+ public native void setHeight(int height);
+ public native int startBitRate();
+ public native void setStartBitRate(int bitrate);
+ public native int maxBitRate();
+ public native void setMaxBitRate(int bitrate);
+ public native int maxFrameRate();
+ public native void setMaxFrameRate(int framerate);
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VideoDecodeEncodeObserver.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VideoDecodeEncodeObserver.java
new file mode 100644
index 000000000..b6408c7e6
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VideoDecodeEncodeObserver.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+public interface VideoDecodeEncodeObserver {
+ void incomingRate(int videoChannel, int framerate, int bitrate);
+
+ // VideoCodecInst.dispose must be called for |videoCodec| before all
+ // references to it are lost as it will free memory allocated in the native
+ // layer.
+ void incomingCodecChanged(int videoChannel, VideoCodecInst videoCodec);
+
+ void requestNewKeyFrame(int videoChannel);
+
+ void outgoingRate(int videoChannel, int framerate, int bitrate);
+}
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VideoEngine.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VideoEngine.java
new file mode 100644
index 000000000..885f88be2
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VideoEngine.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+public class VideoEngine {
+ private final long nativeVideoEngine;
+
+ // Keep in sync (including this comment) with webrtc/common_types.h:TraceLevel
+ public enum TraceLevel {
+ TRACE_NONE(0x0000),
+ TRACE_STATE_INFO(0x0001),
+ TRACE_WARNING(0x0002),
+ TRACE_ERROR(0x0004),
+ TRACE_CRITICAL(0x0008),
+ TRACE_API_CALL(0x0010),
+ TRACE_DEFAULT(0x00ff),
+ TRACE_MODULE_CALL(0x0020),
+ TRACE_MEMORY(0x0100),
+ TRACE_TIMER(0x0200),
+ TRACE_STREAM(0x0400),
+ TRACE_DEBUG(0x0800),
+ TRACE_INFO(0x1000),
+ TRACE_TERSE_INFO(0x2000),
+ TRACE_ALL(0xfff);
+
+ public final int level;
+ TraceLevel(int level) {
+ this.level = level;
+ }
+ };
+
+ // Keep in sync (including this comment) with
+ // webrtc/video_engine/include/vie_rtp_rtcp.h:ViEKeyFrameRequestMethod
+ public enum VieKeyFrameRequestMethod {
+ KEY_FRAME_REQUEST_NONE, KEY_FRAME_REQUEST_PLI_RTCP,
+ KEY_FRAME_REQUEST_FIR_RTP, KEY_FRAME_REQUEST_FIR_RTCP
+ }
+
+ // Keep in sync (including this comment) with
+ // webrtc/common_types.h:RtpDirections
+ public enum RtpDirections { INCOMING, OUTGOING }
+
+ public VideoEngine() {
+ nativeVideoEngine = create();
+ }
+
+ // API comments can be found in VideoEngine's native APIs. Not all native
+ // APIs are available.
+ private static native long create();
+ public native int init();
+ public native int setVoiceEngine(VoiceEngine voe);
+ public native void dispose();
+ public native int startSend(int channel);
+ public native int stopRender(int channel);
+ public native int stopSend(int channel);
+ public native int startReceive(int channel);
+ public native int stopReceive(int channel);
+ public native int createChannel();
+ public native int deleteChannel(int channel);
+ public native int connectAudioChannel(int videoChannel, int voiceChannel);
+ public native int setLocalReceiver(int channel, int port);
+ public native int setSendDestination(int channel, int port, String ipAddr);
+ public native int numberOfCodecs();
+ public native VideoCodecInst getCodec(int index);
+ public native int setReceiveCodec(int channel, VideoCodecInst codec);
+ public native int setSendCodec(int channel, VideoCodecInst codec);
+ public native int addRenderer(int channel, Object glSurface, int zOrder,
+ float left, float top,
+ float right, float bottom);
+ public native int removeRenderer(int channel);
+ public native int registerExternalReceiveCodec(int channel, int plType,
+ MediaCodecVideoDecoder decoder, boolean internal_source);
+ public native int deRegisterExternalReceiveCodec(int channel, int plType);
+ public native int startRender(int channel);
+ public native int numberOfCaptureDevices();
+ public native CameraDesc getCaptureDevice(int index);
+ public native int allocateCaptureDevice(CameraDesc camera);
+ public native int connectCaptureDevice(int cameraId, int channel);
+ public native int startCapture(int cameraId);
+ public native int stopCapture(int cameraId);
+ public native int releaseCaptureDevice(int cameraId);
+ public native int getOrientation(CameraDesc camera);
+ public native int setRotateCapturedFrames(int cameraId, int degrees);
+ public native int setNackStatus(int channel, boolean enable);
+ public int setKeyFrameRequestMethod(int channel,
+ VieKeyFrameRequestMethod requestMethod) {
+ return setKeyFrameRequestMethod(channel, requestMethod.ordinal());
+ }
+ private native int setKeyFrameRequestMethod(int channel,
+ int requestMethod);
+ public native RtcpStatistics getReceivedRtcpStatistics(int channel);
+ public native int registerObserver(int channel,
+ VideoDecodeEncodeObserver callback);
+ public native int deregisterObserver(int channel);
+ public native int setTraceFile(String fileName,
+ boolean fileCounter);
+ public int setTraceFilter(TraceLevel filter) {
+ return setTraceFilter(filter.level);
+ }
+ private native int setTraceFilter(int filter);
+ public int startRtpDump(int channel, String file,
+ RtpDirections direction) {
+ return startRtpDump(channel, file, direction.ordinal());
+ }
+ private native int startRtpDump(int channel, String file,
+ int direction);
+ public int stopRtpDump(int channel, RtpDirections direction) {
+ return stopRtpDump(channel, direction.ordinal());
+ }
+ private native int stopRtpDump(int channel, int direction);
+}
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VideoMenuFragment.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VideoMenuFragment.java
new file mode 100644
index 000000000..1990be695
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VideoMenuFragment.java
@@ -0,0 +1,135 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.app.Activity;
+import android.app.Fragment;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.AdapterView;
+import android.widget.AdapterView.OnItemSelectedListener;
+import android.widget.CheckBox;
+import android.widget.EditText;
+import android.widget.Spinner;
+import android.widget.TextView;
+import java.lang.Integer;
+
+public class VideoMenuFragment extends Fragment {
+
+ private String TAG;
+ private MenuStateProvider stateProvider;
+
+ @Override
+ public View onCreateView(LayoutInflater inflater, ViewGroup container,
+ Bundle savedInstanceState) {
+ View v = inflater.inflate(R.layout.videomenu, container, false);
+
+ TAG = getResources().getString(R.string.tag);
+
+ String[] videoCodecsString = getEngine().videoCodecsAsString();
+ Spinner spCodecType = (Spinner) v.findViewById(R.id.spCodecType);
+ spCodecType.setAdapter(new SpinnerAdapter(getActivity(),
+ R.layout.dropdownitems,
+ videoCodecsString,
+ inflater));
+ spCodecType.setSelection(getEngine().videoCodecIndex());
+ spCodecType.setOnItemSelectedListener(new OnItemSelectedListener() {
+ public void onItemSelected(AdapterView<?> adapterView, View view,
+ int position, long id) {
+ getEngine().setVideoCodec(position);
+ }
+ public void onNothingSelected(AdapterView<?> arg0) {
+ Log.d(TAG, "No setting selected");
+ }
+ });
+ Spinner spCodecSize = (Spinner) v.findViewById(R.id.spCodecSize);
+ spCodecSize.setAdapter(new SpinnerAdapter(getActivity(),
+ R.layout.dropdownitems,
+ MediaEngine.resolutionsAsString(),
+ inflater));
+ // -2 means selecting the 2nd highest resolution. This maintains legacy
+ // behavior. Also higher resolutions lead to lower framerate at same
+ // bit rate.
+ // TODO(hellner): make configuration in the form [width]x[height] instead of
+ // an opaque index. Also configuration should happen in a res/values xml
+ // file rather than inline.
+ spCodecSize.setSelection(getEngine().resolutionIndex() - 2);
+ spCodecSize.setOnItemSelectedListener(new OnItemSelectedListener() {
+ public void onItemSelected(AdapterView<?> adapterView, View view,
+ int position, long id) {
+ getEngine().setResolutionIndex(position);
+ }
+ public void onNothingSelected(AdapterView<?> arg0) {
+ Log.d(TAG, "No setting selected");
+ }
+ });
+
+ EditText etVTxPort = (EditText) v.findViewById(R.id.etVTxPort);
+ etVTxPort.setText(Integer.toString(getEngine().videoTxPort()));
+ etVTxPort.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View editText) {
+ EditText etVTxPort = (EditText) editText;
+ getEngine()
+ .setVideoTxPort(Integer.parseInt(etVTxPort.getText().toString()));
+ }
+ });
+ EditText etVRxPort = (EditText) v.findViewById(R.id.etVRxPort);
+ etVRxPort.setText(Integer.toString(getEngine().videoRxPort()));
+ etVRxPort.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View editText) {
+ EditText etVRxPort = (EditText) editText;
+ getEngine()
+ .setVideoRxPort(Integer.parseInt(etVRxPort.getText().toString()));
+ }
+ });
+
+ CheckBox cbEnableNack = (CheckBox) v.findViewById(R.id.cbNack);
+ cbEnableNack.setChecked(getEngine().nackEnabled());
+ cbEnableNack.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableNack = (CheckBox) checkBox;
+ getEngine().setNack(cbEnableNack.isChecked());
+ }
+ });
+
+ CheckBox cbEnableVideoRTPDump =
+ (CheckBox) v.findViewById(R.id.cbVideoRTPDump);
+ cbEnableVideoRTPDump.setChecked(getEngine().videoRtpDump());
+ cbEnableVideoRTPDump.setOnClickListener(new View.OnClickListener() {
+ public void onClick(View checkBox) {
+ CheckBox cbEnableVideoRTPDump = (CheckBox) checkBox;
+ getEngine().setIncomingVieRtpDump(cbEnableVideoRTPDump.isChecked());
+ }
+ });
+ return v;
+ }
+
+ @Override
+ public void onAttach(Activity activity) {
+ super.onAttach(activity);
+
+ // This makes sure that the container activity has implemented
+ // the callback interface. If not, it throws an exception.
+ try {
+ stateProvider = (MenuStateProvider) activity;
+ } catch (ClassCastException e) {
+ throw new ClassCastException(activity +
+ " must implement MenuStateProvider");
+ }
+ }
+
+ private MediaEngine getEngine() {
+ return stateProvider.getEngine();
+ }
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VoiceEngine.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VoiceEngine.java
new file mode 100644
index 000000000..900355ad8
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/VoiceEngine.java
@@ -0,0 +1,117 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+public class VoiceEngine {
+ private final long nativeVoiceEngine;
+
+ // Keep in sync (including this comment) with
+ // webrtc/common_types.h:NsModes
+ public enum NsModes {
+ UNCHANGED, DEFAULT, CONFERENCE, LOW_SUPPRESSION,
+ MODERATE_SUPPRESSION, HIGH_SUPPRESSION, VERY_HIGH_SUPPRESSION
+ }
+
+ // Keep in sync (including this comment) with
+ // webrtc/common_types.h:AgcModes
+ public enum AgcModes {
+ UNCHANGED, DEFAULT, ADAPTIVE_ANALOG, ADAPTIVE_DIGITAL,
+ FIXED_DIGITAL
+ }
+
+ // Keep in sync (including this comment) with
+ // webrtc/common_types.h:AecmModes
+ public enum AecmModes {
+ QUIET_EARPIECE_OR_HEADSET, EARPIECE, LOUD_EARPIECE,
+ SPEAKERPHONE, LOUD_SPEAKERPHONE
+ }
+
+ // Keep in sync (including this comment) with
+ // webrtc/common_types.h:EcModes
+ public enum EcModes { UNCHANGED, DEFAULT, CONFERENCE, AEC, AECM }
+
+ // Keep in sync (including this comment) with
+ // webrtc/common_types.h:RtpDirections
+ public enum RtpDirections { INCOMING, OUTGOING }
+
+ public static class AgcConfig {
+ AgcConfig(int targetLevelDbOv, int digitalCompressionGaindB,
+ boolean limiterEnable) {
+ this.targetLevelDbOv = targetLevelDbOv;
+ this.digitalCompressionGaindB = digitalCompressionGaindB;
+ this.limiterEnable = limiterEnable;
+ }
+ private final int targetLevelDbOv;
+ private final int digitalCompressionGaindB;
+ private final boolean limiterEnable;
+ }
+
+ public VoiceEngine() {
+ nativeVoiceEngine = create();
+ }
+ private static native long create();
+ public native int init();
+ public native void dispose();
+ public native int createChannel();
+ public native int deleteChannel(int channel);
+ public native int setLocalReceiver(int channel, int port);
+ public native int setSendDestination(int channel, int port, String ipaddr);
+ public native int startListen(int channel);
+ public native int startPlayout(int channel);
+ public native int startSend(int channel);
+ public native int stopListen(int channel);
+ public native int stopPlayout(int channel);
+ public native int stopSend(int channel);
+ public native int setSpeakerVolume(int volume);
+ public native int setLoudspeakerStatus(boolean enable);
+ public native int startPlayingFileLocally(
+ int channel,
+ String fileName,
+ boolean loop);
+ public native int stopPlayingFileLocally(int channel);
+ public native int startPlayingFileAsMicrophone(
+ int channel,
+ String fileName,
+ boolean loop);
+ public native int stopPlayingFileAsMicrophone(int channel);
+ public native int numOfCodecs();
+ public native CodecInst getCodec(int index);
+ public native int setSendCodec(int channel, CodecInst codec);
+ public int setEcStatus(boolean enable, EcModes mode) {
+ return setEcStatus(enable, mode.ordinal());
+ }
+ private native int setEcStatus(boolean enable, int ec_mode);
+ public int setAecmMode(AecmModes aecm_mode, boolean cng) {
+ return setAecmMode(aecm_mode.ordinal(), cng);
+ }
+ private native int setAecmMode(int aecm_mode, boolean cng);
+ public int setAgcStatus(boolean enable, AgcModes agc_mode) {
+ return setAgcStatus(enable, agc_mode.ordinal());
+ }
+ private native int setAgcStatus(boolean enable, int agc_mode);
+ public native int setAgcConfig(AgcConfig agc_config);
+ public int setNsStatus(boolean enable, NsModes ns_mode) {
+ return setNsStatus(enable, ns_mode.ordinal());
+ }
+ private native int setNsStatus(boolean enable, int ns_mode);
+ public native int startDebugRecording(String file);
+ public native int stopDebugRecording();
+ public int startRtpDump(int channel, String file,
+ RtpDirections direction) {
+ return startRtpDump(channel, file, direction.ordinal());
+ }
+ private native int startRtpDump(int channel, String file,
+ int direction);
+ public int stopRtpDump(int channel, RtpDirections direction) {
+ return stopRtpDump(channel, direction.ordinal());
+ }
+ private native int stopRtpDump(int channel, int direction);
+}
\ No newline at end of file
diff --git a/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/WebRTCDemo.java b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/WebRTCDemo.java
new file mode 100644
index 000000000..3badf18c7
--- /dev/null
+++ b/webrtc/examples/android/media_demo/src/org/webrtc/webrtcdemo/WebRTCDemo.java
@@ -0,0 +1,234 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.webrtcdemo;
+
+import android.app.ActionBar.Tab;
+import android.app.ActionBar.TabListener;
+import android.app.ActionBar;
+import android.app.Activity;
+import android.app.Fragment;
+import android.app.FragmentTransaction;
+import android.content.pm.ActivityInfo;
+import android.media.AudioManager;
+import android.os.Bundle;
+import android.os.Handler;
+import android.view.KeyEvent;
+import android.view.Menu;
+import android.view.MenuInflater;
+import android.view.MenuItem;
+import android.view.WindowManager;
+
+public class WebRTCDemo extends Activity implements MenuStateProvider {
+
+ // From http://developer.android.com/guide/topics/ui/actionbar.html
+ public static class TabListener<T extends Fragment>
+ implements ActionBar.TabListener {
+ private Fragment fragment;
+ private final Activity activity;
+ private final String tag;
+ private final Class<T> instance;
+ private final Bundle args;
+
+ public TabListener(Activity activity, String tag, Class<T> clz) {
+ this(activity, tag, clz, null);
+ }
+
+ public TabListener(Activity activity, String tag, Class<T> clz,
+ Bundle args) {
+ this.activity = activity;
+ this.tag = tag;
+ this.instance = clz;
+ this.args = args;
+ }
+
+ public void onTabSelected(Tab tab, FragmentTransaction ft) {
+ // Check if the fragment is already initialized
+ if (fragment == null) {
+ // If not, instantiate and add it to the activity
+ fragment = Fragment.instantiate(activity, instance.getName(), args);
+ ft.add(android.R.id.content, fragment, tag);
+ } else {
+ // If it exists, simply attach it in order to show it
+ ft.attach(fragment);
+ }
+ }
+
+ public void onTabUnselected(Tab tab, FragmentTransaction ft) {
+ if (fragment != null) {
+ // Detach the fragment, because another one is being attached
+ ft.detach(fragment);
+ }
+ }
+
+ public void onTabReselected(Tab tab, FragmentTransaction ft) {
+ // User selected the already selected tab. Do nothing.
+ }
+ }
+
+ private NativeWebRtcContextRegistry contextRegistry = null;
+ private MediaEngine mediaEngine = null;
+ private Handler handler;
+ public MediaEngine getEngine() { return mediaEngine; }
+
+ @Override
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ // Global settings.
+ getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
+ getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
+
+ // State.
+ // Must be instantiated before MediaEngine.
+ contextRegistry = new NativeWebRtcContextRegistry();
+ contextRegistry.register(this);
+
+ // Load all settings dictated in xml.
+ mediaEngine = new MediaEngine(this);
+ mediaEngine.setRemoteIp(getResources().getString(R.string.loopbackIp));
+ mediaEngine.setTrace(getResources().getBoolean(
+ R.bool.trace_enabled_default));
+
+ mediaEngine.setAudio(getResources().getBoolean(
+ R.bool.audio_enabled_default));
+ mediaEngine.setAudioCodec(mediaEngine.getIsacIndex());
+ mediaEngine.setAudioRxPort(getResources().getInteger(
+ R.integer.aRxPortDefault));
+ mediaEngine.setAudioTxPort(getResources().getInteger(
+ R.integer.aTxPortDefault));
+ mediaEngine.setSpeaker(getResources().getBoolean(
+ R.bool.speaker_enabled_default));
+ mediaEngine.setDebuging(getResources().getBoolean(
+ R.bool.apm_debug_enabled_default));
+
+ mediaEngine.setReceiveVideo(getResources().getBoolean(
+ R.bool.video_receive_enabled_default));
+ mediaEngine.setSendVideo(getResources().getBoolean(
+ R.bool.video_send_enabled_default));
+ mediaEngine.setVideoCodec(getResources().getInteger(
+ R.integer.video_codec_default));
+ // TODO(hellner): resolutions should probably be in the xml as well.
+ mediaEngine.setResolutionIndex(MediaEngine.numberOfResolutions() - 2);
+ mediaEngine.setVideoTxPort(getResources().getInteger(
+ R.integer.vTxPortDefault));
+ mediaEngine.setVideoRxPort(getResources().getInteger(
+ R.integer.vRxPortDefault));
+ mediaEngine.setNack(getResources().getBoolean(R.bool.nack_enabled_default));
+ mediaEngine.setViewSelection(getResources().getInteger(
+ R.integer.defaultView));
+
+ // Create action bar with all tabs.
+ ActionBar actionBar = getActionBar();
+ actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);
+ actionBar.setDisplayShowTitleEnabled(false);
+
+ Tab tab = actionBar.newTab()
+ .setText("Main")
+ .setTabListener(new TabListener(
+ this, "main", MainMenuFragment.class));
+ actionBar.addTab(tab);
+
+ tab = actionBar.newTab()
+ .setText("Settings")
+ .setTabListener(new TabListener(
+ this, "Settings", SettingsMenuFragment.class));
+ actionBar.addTab(tab);
+
+ tab = actionBar.newTab()
+ .setText("Video")
+ .setTabListener(new TabListener(
+ this, "video", VideoMenuFragment.class));
+ actionBar.addTab(tab);
+
+ tab = actionBar.newTab()
+ .setText("Audio")
+ .setTabListener(new TabListener(
+ this, "Audio", AudioMenuFragment.class));
+ actionBar.addTab(tab);
+
+ enableTimedStartStop();
+
+ // Hint that voice call audio stream should be used for hardware volume
+ // controls.
+ setVolumeControlStream(AudioManager.STREAM_VOICE_CALL);
+ }
+
+ @Override
+ public boolean onCreateOptionsMenu(Menu menu) {
+ MenuInflater inflater = getMenuInflater();
+ inflater.inflate(R.menu.main_activity_actions, menu);
+ return super.onCreateOptionsMenu(menu);
+ }
+
+ @Override
+ public boolean onOptionsItemSelected(MenuItem item) {
+ // Handle presses on the action bar items
+ switch (item.getItemId()) {
+ case R.id.action_exit:
+ MainMenuFragment main = (MainMenuFragment)getFragmentManager()
+ .findFragmentByTag("main");
+ main.stopAll();
+ finish();
+ return true;
+ default:
+ return super.onOptionsItemSelected(item);
+ }
+ }
+
+ @Override
+ public void onDestroy() {
+ disableTimedStartStop();
+ mediaEngine.dispose();
+ contextRegistry.unRegister();
+ super.onDestroy();
+ }
+
+ @Override
+ public boolean onKeyDown(int keyCode, KeyEvent event) {
+ if (keyCode == KeyEvent.KEYCODE_BACK) {
+ // Prevent app from running in the background.
+ MainMenuFragment main = (MainMenuFragment)getFragmentManager()
+ .findFragmentByTag("main");
+ main.stopAll();
+ finish();
+ return true;
+ }
+ return super.onKeyDown(keyCode, event);
+ }
+
+ private int getCallRestartPeriodicity() {
+ return getResources().getInteger(R.integer.call_restart_periodicity_ms);
+ }
+
+ // Thread repeatedly calling start/stop.
+ void enableTimedStartStop() {
+ if (getCallRestartPeriodicity() > 0) {
+ // Periodicity == 0 <-> Disabled.
+ handler = new Handler();
+ handler.postDelayed(startOrStopCallback, getCallRestartPeriodicity());
+ }
+ }
+
+ void disableTimedStartStop() {
+ if (handler != null) {
+ handler.removeCallbacks(startOrStopCallback);
+ }
+ }
+
+ private Runnable startOrStopCallback = new Runnable() {
+ public void run() {
+ MainMenuFragment main = (MainMenuFragment)getFragmentManager()
+ .findFragmentByTag("main");
+ main.toggleStart();
+ handler.postDelayed(startOrStopCallback, getCallRestartPeriodicity());
+ }
+ };
+}
\ No newline at end of file
diff --git a/webrtc/video_engine/test/android/android_video_demo.gypi b/webrtc/video_engine/test/android/android_video_demo.gypi
deleted file mode 100644
index 4b2105532..000000000
--- a/webrtc/video_engine/test/android/android_video_demo.gypi
+++ /dev/null
@@ -1,72 +0,0 @@
-# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS. All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-{
- 'targets': [
- {
- 'target_name': 'libwebrtc-video-demo-jni',
- 'type': 'loadable_module',
- 'dependencies': [
- '<(webrtc_root)/modules/modules.gyp:*',
- '<(webrtc_root)/test/test.gyp:channel_transport',
- '<(webrtc_root)/video_engine/video_engine.gyp:video_engine_core',
- '<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine',
- ],
- 'sources': [
- 'jni/android_media_codec_decoder.cc',
- 'jni/vie_android_java_api.cc',
- ],
- 'link_settings': {
- 'libraries': [
- '-llog',
- '-lGLESv2',
- '-lOpenSLES',
- ],
- }
- },
- {
- 'target_name': 'WebRTCDemo',
- 'type': 'none',
- 'dependencies': [
- 'libwebrtc-video-demo-jni',
- '<(modules_java_gyp_path):*',
- ],
- 'actions': [
- {
- # TODO(yujie.mao): Convert building of the demo to a proper GYP target
- # so this action is not needed once chromium's apk-building machinery
- # can be used. (crbug.com/225101)
- 'action_name': 'build_webrtcdemo_apk',
- 'variables': {
- 'android_webrtc_demo_root': '<(webrtc_root)/video_engine/test/android',
- },
- 'inputs' : [
- '<(PRODUCT_DIR)/lib.java/audio_device_module_java.jar',
- '<(PRODUCT_DIR)/lib.java/video_capture_module_java.jar',
- '<(PRODUCT_DIR)/lib.java/video_render_module_java.jar',
- '<(PRODUCT_DIR)/libwebrtc-video-demo-jni.so',
- '
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/webrtc/video_engine/test/android/jni/android_media_codec_decoder.cc b/webrtc/video_engine/test/android/jni/android_media_codec_decoder.cc
deleted file mode 100644
index b9712ead1..000000000
--- a/webrtc/video_engine/test/android/jni/android_media_codec_decoder.cc
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <android/log.h>
-#define LOG_TAG "AndroidMediaCodecDecoder"
-
-#include <jni.h>
-
-#include "webrtc/video_engine/test/android/jni/android_media_codec_decoder.h"
-
-namespace webrtc {
-
-AndroidMediaCodecDecoder::AndroidMediaCodecDecoder(
- JavaVM* vm, jobject surface, jclass decoderClass)
- : vm_(vm),
- surface_(NULL),
- mediaCodecDecoder_(NULL),
- decoderClass_(NULL),
- env_(NULL),
- setEncodedImageID_(NULL),
- vm_attached_(false) {
- Initialize(vm, surface, decoderClass);
-}
-
-AndroidMediaCodecDecoder::~AndroidMediaCodecDecoder() {
- env_->DeleteGlobalRef(decoderClass_);
- env_->DeleteGlobalRef(surface_);
-}
-
-void AndroidMediaCodecDecoder::Initialize(
- JavaVM* vm, jobject surface, jclass decoderClass) {
- int ret = vm->GetEnv(reinterpret_cast<void**>(&env_), JNI_VERSION_1_4);
- if ((ret < 0) || !env_) {
- __android_log_print(ANDROID_LOG_ERROR, LOG_TAG,
- "Could not get JNI env (%d, %p)", ret, env_);
- assert(false);
- }
- surface_ = env_->NewGlobalRef(surface);
- decoderClass_ = reinterpret_cast<jclass>(env_->NewGlobalRef(decoderClass));
-}
-
-int32_t AndroidMediaCodecDecoder::InitDecode(
- const VideoCodec* codecSettings, int32_t numberOfCores) {
- __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);
-
- // TODO(dwkang): Detach this thread from VM. => this leads to a crash on
- // "StopCall".
- int ret = vm_->AttachCurrentThread(&env_, NULL);
- // Get the JNI env for this thread
- if ((ret < 0) || !env_) {
- __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG,
- "Could not attach thread to JVM (%d, %p)", ret,
- env_);
- return WEBRTC_VIDEO_CODEC_ERROR;
- } else {
- vm_attached_ = true;
- }
-
- // Initialize the media codec java decoder class.
- jmethodID mid = env_->GetMethodID(decoderClass_, "<init>", "()V");
- mediaCodecDecoder_ = env_->NewGlobalRef(env_->NewObject(decoderClass_, mid));
-
- mid = env_->GetMethodID(
- decoderClass_, "configure", "(Landroid/view/SurfaceView;II)Z");
- bool success = env_->CallBooleanMethod(
- mediaCodecDecoder_, mid, surface_, codecSettings->width,
- codecSettings->height);
- if (!success) {
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
-
- setEncodedImageID_ = env_->GetMethodID(
- decoderClass_, "setEncodedImage", "(Ljava/nio/ByteBuffer;J)V");
-
- // Call start()
- jmethodID startID = env_->GetMethodID(decoderClass_, "start", "()V");
- env_->CallVoidMethod(mediaCodecDecoder_, startID);
- return WEBRTC_VIDEO_CODEC_OK;
-}
-
-int32_t AndroidMediaCodecDecoder::Decode(
- const EncodedImage& inputImage,
- bool missingFrames,
- const RTPFragmentationHeader* fragmentation,
- const CodecSpecificInfo* codecSpecificInfo,
- int64_t renderTimeMs) {
- if (!vm_attached_) {
- return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
- }
-
- jobject byteBuffer =
- env_->NewDirectByteBuffer(inputImage._buffer, inputImage._length);
- env_->CallVoidMethod(
- mediaCodecDecoder_, setEncodedImageID_, byteBuffer, renderTimeMs);
- env_->DeleteLocalRef(byteBuffer);
-
- return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
-}
-
-int32_t AndroidMediaCodecDecoder::RegisterDecodeCompleteCallback(
- DecodedImageCallback* callback) {
- __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);
- return WEBRTC_VIDEO_CODEC_OK;
-}
-
-int32_t AndroidMediaCodecDecoder::Release() {
- __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);
- env_->DeleteGlobalRef(mediaCodecDecoder_);
- mediaCodecDecoder_ = NULL;
-
- return WEBRTC_VIDEO_CODEC_OK;
-}
-
-int32_t AndroidMediaCodecDecoder::Reset() {
- __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);
- return WEBRTC_VIDEO_CODEC_OK;
-}
-
-} // namespace webrtc
diff --git a/webrtc/video_engine/test/android/jni/android_media_codec_decoder.h b/webrtc/video_engine/test/android/jni/android_media_codec_decoder.h
deleted file mode 100644
index 5fd242142..000000000
--- a/webrtc/video_engine/test/android/jni/android_media_codec_decoder.h
+++ /dev/null
@@ -1,108 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_VIDEO_ENGINE_TEST_ANDROID_JNI_ANDROID_MEDIA_CODEC_DECODER_H_
-#define WEBRTC_VIDEO_ENGINE_TEST_ANDROID_JNI_ANDROID_MEDIA_CODEC_DECODER_H_
-
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
-
-namespace webrtc {
-
-class AndroidMediaCodecDecoder : public VideoDecoder {
- public:
- AndroidMediaCodecDecoder(JavaVM* vm, jobject surface, jclass decoderClass);
- virtual ~AndroidMediaCodecDecoder();
-
- // Initialize the decoder with the information from the VideoCodec.
- //
- // Input:
- // - inst : Codec settings
- // - numberOfCores : Number of cores available for the decoder
- //
- // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
- virtual int32_t InitDecode(
- const VideoCodec* codecSettings, int32_t numberOfCores);
-
- // Decode encoded image (as a part of a video stream). The decoded image
- // will be returned to the user through the decode complete callback.
- //
- // Input:
- // - inputImage : Encoded image to be decoded
- // - missingFrames : True if one or more frames have been lost
- // since the previous decode call.
- // - fragmentation : Specifies where the encoded frame can be
- // split into separate fragments. The meaning
- // of fragment is codec specific, but often
- // means that each fragment is decodable by
- // itself.
- // - codecSpecificInfo : Pointer to codec specific data
- // - renderTimeMs : System time to render in milliseconds. Only
- // used by decoders with internal rendering.
- //
- // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
- virtual int32_t
- Decode(const EncodedImage& inputImage,
- bool missingFrames,
- const RTPFragmentationHeader* fragmentation,
- const CodecSpecificInfo* codecSpecificInfo = NULL,
- int64_t renderTimeMs = -1);
-
- // Register an decode complete callback object.
- //
- // Input:
- // - callback : Callback object which handles decoded images.
- //
- // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
- virtual int32_t RegisterDecodeCompleteCallback(
- DecodedImageCallback* callback);
-
- // Free decoder memory.
- //
- // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
- virtual int32_t Release();
-
- // Reset decoder state and prepare for a new call.
- //
- // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
- virtual int32_t Reset();
-
- // Codec configuration data sent out-of-band, i.e. in SIP call setup
- //
- // Input/Output:
- // - buffer : Buffer pointer to the configuration data
- // - size : The size of the configuration data in
- // bytes
- //
- // Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
- virtual int32_t SetCodecConfigParameters(
- const uint8_t* /*buffer*/, int32_t /*size*/) {
- return WEBRTC_VIDEO_CODEC_ERROR;
- }
-
- // Create a copy of the codec and its internal state.
- //
- // Return value : A copy of the instance if OK, NULL otherwise.
- virtual VideoDecoder* Copy() { return NULL; }
-
- private:
- void Initialize(JavaVM* vm, jobject surface, jclass decoderClass);
-
- JavaVM* vm_;
- jobject surface_;
- jobject mediaCodecDecoder_;
- jclass decoderClass_;
- JNIEnv* env_;
- jmethodID setEncodedImageID_;
- bool vm_attached_;
-};
-
-} // namespace webrtc
-
-#endif // WEBRTC_VIDEO_ENGINE_TEST_ANDROID_JNI_ANDROID_MEDIA_CODEC_DECODER_H_
diff --git a/webrtc/video_engine/test/android/jni/org_webrtc_videoengineapp_vie_android_java_api.h b/webrtc/video_engine/test/android/jni/org_webrtc_videoengineapp_vie_android_java_api.h
deleted file mode 100644
index 7fad82b41..000000000
--- a/webrtc/video_engine/test/android/jni/org_webrtc_videoengineapp_vie_android_java_api.h
+++ /dev/null
@@ -1,495 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-/* DO NOT EDIT THIS FILE - it is machine generated */
-#include
-/* Header for class org_webrtc_videoengineapp_ViEAndroidJavaAPI */
-
-#ifndef _Included_org_webrtc_videoengineapp_ViEAndroidJavaAPI
-#define _Included_org_webrtc_videoengineapp_ViEAndroidJavaAPI
-#ifdef __cplusplus
-extern "C" {
-#endif
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: NativeInit
- * Signature: (Landroid/content/Context;)Z
- */
-JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_NativeInit
- (JNIEnv *, jobject, jobject);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: GetVideoEngine
- * Signature: ()I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetVideoEngine
- (JNIEnv *, jobject);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: Init
- * Signature: (Z)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Init
- (JNIEnv *, jobject, jboolean);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: Terminate
- * Signature: ()I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Terminate
- (JNIEnv *, jobject);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StartSend
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartSend
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StopRender
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopRender
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StopSend
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopSend
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StartReceive
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartReceive
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StopReceive
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopReceive
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: CreateChannel
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_CreateChannel
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: SetLocalReceiver
- * Signature: (II)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetLocalReceiver
- (JNIEnv *, jobject, jint, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: SetSendDestination
- * Signature: (IILjava/lang/String;)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendDestination
- (JNIEnv *, jobject, jint, jint, jstring);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: GetCodecs
- * Signature: ()[Ljava/lang/String;
- */
-JNIEXPORT jobjectArray JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCodecs
- (JNIEnv *, jobject);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: SetReceiveCodec
- * Signature: (IIIIII)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetReceiveCodec
- (JNIEnv *, jobject, jint, jint, jint, jint, jint, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: SetSendCodec
- * Signature: (IIIIII)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendCodec
- (JNIEnv *, jobject, jint, jint, jint, jint, jint, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: AddRemoteRenderer
- * Signature: (ILjava/lang/Object;)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_AddRemoteRenderer
- (JNIEnv *, jobject, jint, jobject);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: RemoveRemoteRenderer
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_RemoveRemoteRenderer
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StartRender
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartRender
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StartCamera
- * Signature: (II)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartCamera
- (JNIEnv *, jobject, jint, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StopCamera
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopCamera
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: GetCameraOrientation
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCameraOrientation
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: SetRotation
- * Signature: (II)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetRotation
- (JNIEnv *, jobject, jint, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: SetExternalMediaCodecDecoderRenderer
- * Signature: (ILjava/lang/Object;)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetExternalMediaCodecDecoderRenderer
- (JNIEnv *, jobject, jint, jobject);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: EnableNACK
- * Signature: (IZ)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnableNACK
- (JNIEnv *, jobject, jint, jboolean);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: EnablePLI
- * Signature: (IZ)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnablePLI
- (JNIEnv *, jobject, jint, jboolean);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: SetCallback
- * Signature: (ILorg/webrtc/videoengineapp/IViEAndroidCallback;)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetCallback
- (JNIEnv *, jobject, jint, jobject);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StartIncomingRTPDump
- * Signature: (ILjava/lang/String;)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartIncomingRTPDump
- (JNIEnv *, jobject, jint, jstring);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StopIncomingRTPDump
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopIncomingRTPDump
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_Create
- * Signature: (Landroid/content/Context;)Z
- */
-JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Create
- (JNIEnv *, jobject, jobject);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_Delete
- * Signature: ()Z
- */
-JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Delete
- (JNIEnv *, jobject);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_Init
- * Signature: (Z)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Init
- (JNIEnv *, jobject, jboolean);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_Terminate
- * Signature: ()I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Terminate
- (JNIEnv *, jobject);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_CreateChannel
- * Signature: ()I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1CreateChannel
- (JNIEnv *, jobject);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_DeleteChannel
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1DeleteChannel
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: ViE_DeleteChannel
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_ViE_1DeleteChannel
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_SetLocalReceiver
- * Signature: (II)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetLocalReceiver
- (JNIEnv *, jobject, jint, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_SetSendDestination
- * Signature: (IILjava/lang/String;)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSendDestination
- (JNIEnv *, jobject, jint, jint, jstring);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StartListen
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartListen
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StartPlayout
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayout
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StartSend
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartSend
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StopListen
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopListen
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StopPlayout
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayout
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StopSend
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopSend
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_SetSpeakerVolume
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSpeakerVolume
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_SetLoudspeakerStatus
- * Signature: (Z)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetLoudspeakerStatus
- (JNIEnv *, jobject, jboolean);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StartPlayingFileLocally
- * Signature: (ILjava/lang/String;Z)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayingFileLocally
- (JNIEnv *, jobject, jint, jstring, jboolean);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StopPlayingFileLocally
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayingFileLocally
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StartPlayingFileAsMicrophone
- * Signature: (ILjava/lang/String;Z)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayingFileAsMicrophone
- (JNIEnv *, jobject, jint, jstring, jboolean);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StopPlayingFileAsMicrophone
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayingFileAsMicrophone
- (JNIEnv *, jobject, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_NumOfCodecs
- * Signature: ()I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1NumOfCodecs
- (JNIEnv *, jobject);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_GetCodecs
- * Signature: ()[Ljava/lang/String;
- */
-JNIEXPORT jobjectArray JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1GetCodecs
- (JNIEnv *, jobject);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_SetSendCodec
- * Signature: (II)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSendCodec
- (JNIEnv *, jobject, jint, jint);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_SetECStatus
- * Signature: (Z)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetECStatus
- (JNIEnv *, jobject, jboolean);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_SetAGCStatus
- * Signature: (Z)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetAGCStatus
- (JNIEnv *, jobject, jboolean);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_SetNSStatus
- * Signature: (Z)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetNSStatus
- (JNIEnv *, jobject, jboolean);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StartDebugRecording
- * Signature: (Ljava/lang/String;)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartDebugRecording
- (JNIEnv *, jobject, jstring);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StopDebugRecording
- * Signature: ()I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopDebugRecording
- (JNIEnv *, jobject);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StartIncomingRTPDump
- * Signature: (ILjava/lang/String;)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartIncomingRTPDump
- (JNIEnv *, jobject, jint, jstring);
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StopIncomingRTPDump
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopIncomingRTPDump
- (JNIEnv *, jobject, jint);
-
-#ifdef __cplusplus
-}
-#endif
-#endif
diff --git a/webrtc/video_engine/test/android/jni/vie_android_java_api.cc b/webrtc/video_engine/test/android/jni/vie_android_java_api.cc
deleted file mode 100644
index 61c731809..000000000
--- a/webrtc/video_engine/test/android/jni/vie_android_java_api.cc
+++ /dev/null
@@ -1,2017 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include
-#include
-#include
-
-#include "webrtc/video_engine/test/android/jni/org_webrtc_videoengineapp_vie_android_java_api.h"
-
-#include "webrtc/voice_engine/include/voe_audio_processing.h"
-#include "webrtc/voice_engine/include/voe_base.h"
-#include "webrtc/voice_engine/include/voe_codec.h"
-#include "webrtc/voice_engine/include/voe_file.h"
-#include "webrtc/voice_engine/include/voe_hardware.h"
-#include "webrtc/voice_engine/include/voe_network.h"
-#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
-#include "webrtc/voice_engine/include/voe_volume_control.h"
-
-#include "webrtc/video_engine/include/vie_base.h"
-#include "webrtc/video_engine/include/vie_capture.h"
-#include "webrtc/video_engine/include/vie_codec.h"
-#include "webrtc/video_engine/include/vie_external_codec.h"
-#include "webrtc/video_engine/include/vie_network.h"
-#include "webrtc/video_engine/include/vie_render.h"
-#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
-
-#include "webrtc/common_types.h"
-#include "webrtc/video_engine/test/android/jni/android_media_codec_decoder.h"
-
-#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
-#include "webrtc/system_wrappers/interface/scoped_ptr.h"
-#include "webrtc/test/channel_transport/include/channel_transport.h"
-
-#define WEBRTC_LOG_TAG "*WEBRTCN*"
-#define VALIDATE_BASE_POINTER \
- if (!voeData.base) \
- { \
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
- "Base pointer doesn't exist"); \
- return -1; \
- }
-#define VALIDATE_CODEC_POINTER \
- if (!voeData.codec) \
- { \
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
- "Codec pointer doesn't exist"); \
- return -1; \
- }
-#define VALIDATE_FILE_POINTER \
- if (!voeData.file) \
- { \
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
- "File pointer doesn't exist"); \
- return -1; \
- }
-#define VALIDATE_APM_POINTER \
- if (!voeData.codec) \
- { \
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
- "Apm pointer doesn't exist"); \
- return -1; \
- }
-#define VALIDATE_HARDWARE_POINTER \
- if (!voeData.hardware) \
- { \
- __android_log_write( \
- ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
- "Hardware pointer doesn't exist"); \
- return -1; \
- }
-#define VALIDATE_VOLUME_POINTER \
- if (!voeData.volume) \
- { \
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
- "Volume pointer doesn't exist"); \
- return -1; \
- }
-
-#define VALIDATE_RTP_POINTER \
- if (!voeData.rtp) \
- { \
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
- "rtp pointer doesn't exist"); \
- return -1; \
- }
-
-using namespace webrtc;
-
-//Forward declaration.
-class VideoCallbackAndroid;
-
-// VoiceEngine data struct
-typedef struct
-{
- // VoiceEngine
- VoiceEngine* ve;
- // Sub-APIs
- VoEBase* base;
- VoECodec* codec;
- VoEFile* file;
- VoENetwork* netw;
- VoEAudioProcessing* apm;
- VoEVolumeControl* volume;
- VoEHardware* hardware;
- VoERTP_RTCP* rtp;
-
- JavaVM* jvm;
- scoped_ptr transport;
-} VoiceEngineData;
-
-class AndroidVideoRenderCallback;
-// VideoEngine data struct
-typedef struct
-{
- VideoEngine* vie;
- ViEBase* base;
- ViECodec* codec;
- ViENetwork* netw;
- ViERTP_RTCP* rtp;
- ViERender* render;
- ViECapture* capture;
- ViEExternalCodec* externalCodec;
-
- VideoCallbackAndroid* callback;
- scoped_ptr transport;
-} VideoEngineData;
-
-// Global variables
-JavaVM* webrtcGlobalVM;
-
-// Global variables visible in this file
-static VoiceEngineData voeData;
-static VideoEngineData vieData;
-
-// "Local" functions (i.e. not Java accessible)
-#define WEBRTC_TRACE_MAX_MESSAGE_SIZE 1024
-static bool VE_GetSubApis();
-static bool VE_ReleaseSubApis();
-
-#define CHECK_API_RETURN(ret) \
- if (ret!=0) \
- { \
- __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
- "Return error %d",ret); \
- break; \
- }
-
-class VideoCallbackAndroid: public ViEDecoderObserver,
- public ViEEncoderObserver
-{
-
- // Implements ViEDecoderObserver
- virtual void IncomingRate(const int videoChannel,
- const unsigned int framerate,
- const unsigned int bitrate)
- {
- // Let's print out the network statistics from this call back as well
- unsigned short fraction_lost;
- unsigned int dummy;
- int intdummy;
- _vieData.rtp->GetReceivedRTCPStatistics(videoChannel, fraction_lost,
- dummy, dummy, dummy, intdummy);
- unsigned short packetLossRate = 0;
- if (fraction_lost > 0)
- {
- // Change from frac to %
- packetLossRate = (fraction_lost * 100) >> 8;
- }
-
- JNIEnv* threadEnv = NULL;
- int ret = webrtcGlobalVM->AttachCurrentThread(&threadEnv, NULL);
- // Get the JNI env for this thread
- if ((ret < 0) || !threadEnv)
- {
- __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "Could not attach thread to JVM (%d, %p)", ret,
- threadEnv);
- return;
- }
- threadEnv->CallIntMethod(_callbackObj, _callbackId, framerate, bitrate,
- packetLossRate, _frameRateO, _bitRateO);
- webrtcGlobalVM->DetachCurrentThread();
- }
-
- virtual void DecoderTiming(int decode_ms,
- int max_decode_ms,
- int current_delay_ms,
- int target_delay_ms,
- int jitter_buffer_ms,
- int min_playout_delay_ms,
- int render_delay_ms)
- {
- // TODO(fischman): consider plumbing this through to Java.
- }
-
- virtual void IncomingCodecChanged(const int videoChannel,
- const webrtc::VideoCodec& videoCodec)
- {
- JNIEnv* threadEnv = NULL;
- int ret = webrtcGlobalVM->AttachCurrentThread(&threadEnv, NULL);
- // Get the JNI env for this thread
- if ((ret < 0) || !threadEnv)
- {
- __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "Could not attach thread to JVM (%d, %p)", ret,
- threadEnv);
- return;
- }
- threadEnv->CallIntMethod(_callbackObj, _incomingResolutionId,
- videoCodec.width, videoCodec.height);
- webrtcGlobalVM->DetachCurrentThread();
- }
-
- virtual void RequestNewKeyFrame(const int videoChannel)
- {
- }
-
- virtual void OutgoingRate(const int videoChannel,
- const unsigned int framerate,
- const unsigned int bitrate)
- {
- _frameRateO = framerate;
- _bitRateO = bitrate;
- //__android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- // "SendRate frameRate %d bitrate %d\n",frameRate,bitrate);
- }
-
- virtual void SuspendChange(int video_channel, bool is_suspended) {}
-
-public:
- VideoEngineData& _vieData;
- JNIEnv * _env;
- jobject _callbackObj;
- jclass _callbackCls;
- jmethodID _callbackId;
- jmethodID _incomingResolutionId;
- int _frameRateO, _bitRateO;
- VideoCallbackAndroid(VideoEngineData& vieData, JNIEnv * env,
- jobject callback) :
- _vieData(vieData), _env(env), _callbackObj(callback),
- _frameRateO(0), _bitRateO(0) {
- _callbackCls = _env->GetObjectClass(_callbackObj);
- _callbackId
- = _env->GetMethodID(_callbackCls, "updateStats", "(IIIII)I");
- _incomingResolutionId
- = _env->GetMethodID(_callbackCls, "newIncomingResolution", "(II)I");
- if (_callbackId == NULL) {
- __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Failed to get jid");
- }
- _callbackObj = _env->NewGlobalRef(_callbackObj);
- }
-};
-
-// JNI_OnLoad
-jint JNI_OnLoad(JavaVM* vm, void* reserved) {
- webrtcGlobalVM = vm;
- if (!webrtcGlobalVM)
- {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "JNI_OnLoad did not receive a valid VM pointer");
- return -1;
- }
-
- // Get JNI
- JNIEnv* env;
- if (JNI_OK != vm->GetEnv(reinterpret_cast (&env),
- JNI_VERSION_1_4)) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "JNI_OnLoad could not get JNI env");
- return -1;
- }
-
- // Init VoiceEngine data
- memset(&voeData, 0, sizeof(voeData));
- // Store the JVM
- voeData.jvm = vm;
-
- // Init VideoEngineData data
- memset(&vieData, 0, sizeof(vieData));
-
- return JNI_VERSION_1_4;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: NativeInit
- * Signature: (Landroid/content/Context;)Z
- */
-JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_NativeInit(
- JNIEnv * env,
- jobject,
- jobject context)
-{
- return true;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: GetVideoEngine
- * Signature: ()I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetVideoEngine(
- JNIEnv *,
- jobject context) {
-
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "GetVideoEngine");
-
- // Check if already got
- if (vieData.vie) {
- __android_log_write(ANDROID_LOG_INFO, WEBRTC_LOG_TAG,
- "ViE already got");
- return 0;
- }
-
- VideoEngine::SetAndroidObjects(webrtcGlobalVM);
-
- // Create
- vieData.vie = VideoEngine::Create();
- if (!vieData.vie) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, "Get ViE failed");
- return -1;
- }
- vieData.base = ViEBase::GetInterface(vieData.vie);
- if (!vieData.base) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Get base sub-API failed");
- return -1;
- }
-
- vieData.codec = ViECodec::GetInterface(vieData.vie);
- if (!vieData.codec) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Get codec sub-API failed");
- return -1;
- }
-
- vieData.netw = ViENetwork::GetInterface(vieData.vie);
- if (!vieData.netw) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Get network sub-API failed");
- return -1;
- }
-
- vieData.rtp = ViERTP_RTCP::GetInterface(vieData.vie);
- if (!vieData.rtp) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Get RTP sub-API failed");
- return -1;
- }
-
- vieData.render = ViERender::GetInterface(vieData.vie);
- if (!vieData.render) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Get Render sub-API failed");
- return -1;
- }
-
- vieData.capture = ViECapture::GetInterface(vieData.vie);
- if (!vieData.capture) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Get Capture sub-API failed");
- return -1;
- }
-
- vieData.externalCodec = ViEExternalCodec::GetInterface(vieData.vie);
- if (!vieData.capture) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Get External Codec sub-API failed");
- return -1;
- }
-
- return 0;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: Init
- * Signature: (Z)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Init(
- JNIEnv *,
- jobject,
- jboolean enableTrace)
-{
- if (vieData.vie) {
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "Init");
-
- int ret = vieData.base->Init();
- __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "Init return %d", ret);
- if (enableTrace)
- {
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "SetTraceFile");
- if (0 != vieData.vie->SetTraceFile(("/sdcard/trace.txt"), false))
- {
- __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Video Engine could not enable trace");
- }
-
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "SetTraceFilter");
- if (0 != vieData.vie->SetTraceFilter(webrtc::kTraceError))
- {
- __android_log_write(ANDROID_LOG_WARN, WEBRTC_LOG_TAG,
- "Could not set trace filter");
- }
- }
- else
- {
- if (0 != vieData.vie->SetTraceFilter(webrtc::kTraceNone))
- {
- __android_log_write(ANDROID_LOG_WARN, WEBRTC_LOG_TAG,
- "Could not set trace filter");
- }
- }
- if (voeData.ve) // VoiceEngine is enabled
- {
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "SetVoiceEngine");
- if (0 != vieData.base->SetVoiceEngine(voeData.ve))
- {
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "SetVoiceEngine failed");
- }
- }
- return ret;
- }
- else
- {
- return -1;
- }
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: Terminate
- * Signature: ()I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Terminate(
- JNIEnv *,
- jobject)
-{
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "Terminate");
-
- if (vieData.vie) {
- if (!vieData.rtp || vieData.rtp->Release() != 0) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Failed to release RTP sub-API");
- }
-
- if (!vieData.netw || vieData.netw->Release() != 0) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Failed to release Network sub-API");
- }
-
- if (!vieData.codec || vieData.codec->Release() != 0) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Failed to release Codec sub-API");
- }
-
- if (!vieData.render || vieData.render->Release()) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Failed to release Render sub-API");
- }
-
- if (!vieData.capture || vieData.capture->Release()) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Failed to release Capture sub-API");
- }
-
- if (!vieData.base || vieData.base->Release() != 0) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Failed to release Base sub-API");
- }
-
- if (!vieData.externalCodec || vieData.externalCodec->Release()) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Failed to release External Codec sub-API");
- }
-
- // Delete Vie
- if (!VideoEngine::Delete(vieData.vie)) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Failed to delete ViE ");
- return -1;
- }
- memset(&vieData, 0, sizeof(vieData));
- return 0;
- }
- else {
- return -1;
- }
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StartSend
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartSend(
- JNIEnv *,
- jobject,
- jint channel)
-{
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartSend");
-
- if (vieData.base) {
- int ret = vieData.base->StartSend(channel);
- return ret;
- }
- else {
- return -1;
- }
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StopRender
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopRender(
- JNIEnv *,
- jobject,
- jint channel)
-{
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StopRender");
-
- if (vieData.render) {
- return vieData.render->StopRender(channel);
- }
- else {
- return -1;
- }
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StopSend
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopSend(
- JNIEnv *,
- jobject,
- jint channel)
-{
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StopSend");
-
- if (vieData.base) {
- return vieData.base->StopSend(channel);
- }
- else {
- return -1;
- }
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StartReceive
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartReceive(
- JNIEnv *,
- jobject,
- jint channel)
-{
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartReceive");
-
- if (vieData.base) {
- return vieData.base->StartReceive(channel);
- }
- else {
- return -1;
- }
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StopReceive
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopReceive(
- JNIEnv *,
- jobject,
- jint channel)
-{
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StopReceive");
- if (vieData.base) {
- return vieData.base->StopReceive(channel);
- }
- else {
- return -1;
- }
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: CreateChannel
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_CreateChannel(
- JNIEnv *,
- jobject,
- jint voiceChannel)
-{
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "CreateChannel");
-
- if (vieData.vie) {
- int channel = 0;
- if (vieData.base->CreateChannel(channel) != 0) {
- return -1;
- }
- if (voiceChannel >= 0) {
- vieData.base->ConnectAudioChannel(channel, voiceChannel);
- }
- vieData.transport.reset(new test::VideoChannelTransport(vieData.netw,
- channel));
- return channel;
- }
- else {
- return -1;
- }
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: SetLocalReceiver
- * Signature: (II)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetLocalReceiver(
- JNIEnv *,
- jobject,
- jint channel,
- jint port)
-{
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetLocalReceiver");
-
- if (vieData.transport.get()) {
- return vieData.transport->SetLocalReceiver(port);
- }
- return -1;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: SetSendDestination
- * Signature: (IILjava/lang/String)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendDestination(
- JNIEnv * env,
- jobject,
- jint channel,
- jint port,
- jstring ipaddr)
-{
-
- if (NULL == vieData.vie)
- return -1;
-
- const char* ip = env->GetStringUTFChars(ipaddr, NULL);
- if (!ip) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Could not get UTF string");
- return -1;
- }
-
- __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "SetSendDestination: channel=%d, port=%d, ip=%s\n",
- channel, port, ip);
-
- if (vieData.transport.get()) {
- return vieData.transport->SetSendDestination(ip, port);
- }
- return -1;
-}
-
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: SetReceiveCodec
- * Signature: (IIIIII)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetReceiveCodec(
- JNIEnv *,
- jobject,
- jint channel,
- jint codecNum,
- jint intbitRate,
- jint width,
- jint height,
- jint frameRate)
-{
- if (NULL == vieData.codec)
- return -1;
-
- //Create codec
- webrtc::VideoCodec codec;
- vieData.codec->GetCodec(codecNum, codec);
-
- __android_log_print(
- ANDROID_LOG_DEBUG,
- WEBRTC_LOG_TAG,
- "SetReceiveCodec %s, pltype=%d, bitRate=%d, maxBitRate=%d,"
- " width=%d, height=%d, frameRate=%d \n",
- codec.plName, codec.plType, codec.startBitrate,
- codec.maxBitrate, codec.width, codec.height,
- codec.maxFramerate);
- int ret = vieData.codec->SetReceiveCodec(channel, codec);
- __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "SetReceiveCodec return %d", ret);
- return ret;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: SetSendCodec
- * Signature: (IIIIII)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendCodec(
- JNIEnv *,
- jobject,
- jint channel,
- jint codecNum,
- jint intbitRate,
- jint width,
- jint height,
- jint frameRate)
-{
- if (NULL == vieData.codec)
- return -1;
-
- //Create codec
- webrtc::VideoCodec codec;
- vieData.codec->GetCodec(codecNum, codec);
- codec.startBitrate = intbitRate;
- codec.maxBitrate = 600;
- codec.width = width;
- codec.height = height;
- codec.maxFramerate = frameRate;
-
- for (int i = 0; i < vieData.codec->NumberOfCodecs(); ++i) {
- webrtc::VideoCodec codecToList;
- vieData.codec->GetCodec(i, codecToList);
- __android_log_print(
- ANDROID_LOG_DEBUG,
- WEBRTC_LOG_TAG,
- "Codec list %s, pltype=%d, bitRate=%d, maxBitRate=%d,"
- " width=%d, height=%d, frameRate=%d\n",
- codecToList.plName, codecToList.plType,
- codecToList.startBitrate, codecToList.maxBitrate,
- codecToList.width, codecToList.height,
- codecToList.maxFramerate);
- }
- __android_log_print(
- ANDROID_LOG_DEBUG,
- WEBRTC_LOG_TAG,
- "SetSendCodec %s, pltype=%d, bitRate=%d, maxBitRate=%d, "
- "width=%d, height=%d, frameRate=%d\n",
- codec.plName, codec.plType, codec.startBitrate,
- codec.maxBitrate, codec.width, codec.height,
- codec.maxFramerate);
-
- return vieData.codec->SetSendCodec(channel, codec);
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: SetSendCodec
- * Signature: ()Z
- */
-JNIEXPORT jobjectArray JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCodecs(
- JNIEnv *env,
- jobject)
-{
- if (NULL == vieData.codec) {
- return NULL;
- }
-
- jobjectArray ret;
- int num = vieData.codec->NumberOfCodecs();
- char info[32];
-
- ret = (jobjectArray)env->NewObjectArray(
- num,
- env->FindClass("java/lang/String"),
- env->NewStringUTF(""));
-
- for (int i = 0; i < num; ++i) {
- webrtc::VideoCodec codecToList;
- vieData.codec->GetCodec(i, codecToList);
- sprintf(info, "%s pltype:%d", codecToList.plName, codecToList.plType);
- env->SetObjectArrayElement(ret, i, env->NewStringUTF( info ));
-
- __android_log_print(
- ANDROID_LOG_DEBUG,
- WEBRTC_LOG_TAG,
- "Codec[%d] %s, pltype=%d, bitRate=%d, maxBitRate=%d,"
- " width=%d, height=%d, frameRate=%d\n",
- i, codecToList.plName, codecToList.plType,
- codecToList.startBitrate, codecToList.maxBitrate,
- codecToList.width, codecToList.height,
- codecToList.maxFramerate);
- }
-
- return ret;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: AddRemoteRenderer
- * Signature: (ILjava/lang/Object;)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_AddRemoteRenderer(
- JNIEnv *,
- jobject,
- jint channel,
- jobject glSurface)
-{
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "AddRemoteRenderer");
- if (vieData.vie) {
- return vieData.render->AddRenderer(channel, glSurface, 0, 0, 0, 1, 1);
- }
- else {
- return -1;
- }
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: RemoveRemoteRenderer
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_RemoveRemoteRenderer(
- JNIEnv *,
- jobject,
- jint channel)
-{
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "RemoveRemoteRenderer");
-
- if (vieData.vie) {
- return vieData.render->RemoveRenderer(channel);
- }
- else {
- return -1;
- }
- return 0;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StartRender
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartRender(
- JNIEnv *,
- jobject,
- jint channel)
-{
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartRender");
-
- if (vieData.render) {
- return vieData.render->StartRender(channel);
- }
- else {
- return -1;
- }
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StartCamera
- * Signature: (II)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartCamera(
- JNIEnv * env,
- jobject,
- jint channel,
- jint cameraNum)
-{
- if (NULL == vieData.vie)
- return -1;
-
- int i = 0;
- char deviceName[64];
- char deviceUniqueName[64];
- int re;
- do {
- re = vieData.capture->GetCaptureDevice(i, deviceName,
- sizeof(deviceName),
- deviceUniqueName,
- sizeof(deviceUniqueName));
- __android_log_print(
- ANDROID_LOG_DEBUG,
- WEBRTC_LOG_TAG,
- "GetCaptureDevice ret %d devicenum %d deviceUniqueName %s",
- re, i, deviceUniqueName);
- i++;
- } while (re == 0);
-
- int ret;
- int cameraId;
- vieData.capture->GetCaptureDevice(cameraNum, deviceName,
- sizeof(deviceName), deviceUniqueName,
- sizeof(deviceUniqueName));
- vieData.capture->AllocateCaptureDevice(deviceUniqueName,
- sizeof(deviceUniqueName), cameraId);
-
- if (cameraId >= 0) { //Connect the
- ret = vieData.capture->ConnectCaptureDevice(cameraId, channel);
- __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "ConnectCaptureDevice ret %d ", ret);
-
- ret = vieData.capture->StartCapture(cameraId);
- __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "StartCapture ret %d ", ret);
- }
-
- return cameraId;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StopCamera
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopCamera(
- JNIEnv *,
- jobject,
- jint cameraId)
-{
- if (NULL == vieData.capture)
- return -1;
-
- int ret = vieData.capture->StopCapture(cameraId);
- __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "StopCapture ret %d ", ret);
- ret = vieData.capture->ReleaseCaptureDevice(cameraId);
- __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "ReleaseCaptureDevice ret %d ", ret);
-
- return ret;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: GetCameraOrientation
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCameraOrientation(
- JNIEnv *,
- jobject,
- jint cameraNum)
-{
- char deviceName[64];
- char deviceUniqueName[64];
- int ret;
-
- ret = vieData.capture->GetCaptureDevice(cameraNum, deviceName,
- sizeof(deviceName),
- deviceUniqueName,
- sizeof(deviceUniqueName));
- if (ret != 0) {
- return -1;
- }
-
- RotateCapturedFrame orientation;
- ret = vieData.capture->GetOrientation(deviceUniqueName, orientation);
- __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "GetOrientation ret %d orientation %d", ret,
- orientation);
-
- return (jint) orientation;
-
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: SetRotation
- * Signature: (II)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetRotation(
- JNIEnv *,
- jobject,
- jint captureId,
- jint degrees)
-{
-
- if (NULL == vieData.capture)
- return -1;
- RotateCapturedFrame rotation = RotateCapturedFrame_0;
- if (degrees == 90)
- rotation = RotateCapturedFrame_90;
- else if (degrees == 180)
- rotation = RotateCapturedFrame_180;
- else if (degrees == 270)
- rotation = RotateCapturedFrame_270;
-
- int ret = vieData.capture->SetRotateCapturedFrames(captureId, rotation);
- return ret;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: SetExternalMediaCodecDecoderRenderer
- * Signature: (ILjava/lang/Object;)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetExternalMediaCodecDecoderRenderer(
- JNIEnv *env,
- jobject,
- jint channel,
- jobject glSurface)
-{
- __android_log_write(
- ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetExternalMediaCodecDecoder");
-
- jclass cls = env->FindClass("org/webrtc/videoengine/ViEMediaCodecDecoder");
-
- AndroidMediaCodecDecoder* mediaCodecDecoder =
- new AndroidMediaCodecDecoder(webrtcGlobalVM, glSurface, cls);
-
- // TODO(dwkang): Check the ownership of decoder object and release it
- // if needed.
- return vieData.externalCodec->RegisterExternalReceiveCodec(
- channel, VCM_VP8_PAYLOAD_TYPE, mediaCodecDecoder, true);
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: EnableNACK
- * Signature: (IZ)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnableNACK(
- JNIEnv *,
- jobject,
- jint channel,
- jboolean enable)
-{
- if (NULL == vieData.rtp)
- return -1;
-
- int ret = vieData.rtp->SetNACKStatus(channel, enable);
- __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "EnableNACK(%d) ret:%d", enable, ret);
- return ret;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: EnablePLI
- * Signature: (IZ)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnablePLI(
- JNIEnv *,
- jobject,
- jint channel,
- jboolean enable)
-{
- if (NULL == vieData.rtp)
- return -1;
-
- if (enable)
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "EnablePLI enable");
- else
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "EnablePLI disable");
-
- int ret = vieData.rtp->SetKeyFrameRequestMethod(channel,
- kViEKeyFrameRequestPliRtcp);
- return ret;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: SetCallback
- * Signature: (ILorg/webrtc/videoengineapp/IViEAndroidCallback;)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetCallback(
- JNIEnv * env,
- jobject,
- jint channel,
- jobject callback)
-{
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetCallback");
-
- if (NULL == vieData.codec)
- return -1;
- if (vieData.callback == NULL) {
- vieData.callback = new VideoCallbackAndroid(vieData, env, callback);
- }
- else if (vieData.codec) {
- vieData.codec->DeregisterDecoderObserver(channel); // Wrong channel?
- vieData.codec->DeregisterEncoderObserver(channel);
- }
-
- vieData.codec->RegisterDecoderObserver(channel, *vieData.callback);
- vieData.codec->RegisterEncoderObserver(channel, *vieData.callback);
-
- return 0;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StartIncomingRTPDump
- * Signature: (ILjava/lang/String;)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartIncomingRTPDump(
- JNIEnv* env,
- jobject,
- jint channel,
- jstring filename) {
- if (NULL == vieData.rtp) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "video RTP_RTCP interface is null");
- return -1;
- }
- const char* file = env->GetStringUTFChars(filename, NULL);
- if (!file) {
- __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Video StartRTPDump file name error");
- return -1;
- }
- if (vieData.rtp->StartRTPDump(channel, file, kRtpIncoming) != 0) {
- __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Video StartRTPDump error");
- return -1;
- }
- return 0;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: StopIncomingRTPDump
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopIncomingRTPDump(
- JNIEnv *,
- jobject,
- jint channel) {
- if (NULL == vieData.rtp) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "video RTP_RTCP interface is null");
- return -1;
- }
- if (vieData.rtp->StopRTPDump(channel, kRtpIncoming) != 0) {
- __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Video StopRTPDump error");
- return -1;
- }
- return 0;
-}
-
-//
-// VoiceEngine API wrapper functions
-//
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_Create
- * Signature: (Landroid/content/Context)Z
- */
-JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Create(
- JNIEnv *env,
- jobject context,
- jobject ctx) {
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "Create VoiceEngine");
-
- VoiceEngine::SetAndroidObjects(webrtcGlobalVM, env, ctx);
-
- // Check if already created
- if (voeData.ve) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "VoE already created");
- return false;
- }
-
- // Create
- voeData.ve = VoiceEngine::Create();
- if (!voeData.ve) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Create VoE failed");
- return false;
- }
-
- // Get sub-APIs
- if (!VE_GetSubApis()) {
- // If not OK, release all sub-APIs and delete VoE
- VE_ReleaseSubApis();
- if (!VoiceEngine::Delete(voeData.ve)) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Delete VoE failed");
- }
- return false;
- }
-
- return true;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_Delete
- * Signature: ()Z
- */
-JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Delete(
- JNIEnv *,
- jobject)
-{
- // Check if exists
- if (!voeData.ve) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "VoE does not exist");
- return false;
- }
-
- // Release sub-APIs
- VE_ReleaseSubApis();
-
- // Delete
- if (!VoiceEngine::Delete(voeData.ve)) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Delete VoE failed");
- return false;
- }
-
- voeData.ve = NULL;
-
- // Clear instance independent Java objects
- VoiceEngine::SetAndroidObjects(NULL, NULL, NULL);
-
- return true;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_Init
- * Signature: (Z)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Init(
- JNIEnv *,
- jobject,
- jboolean enableTrace)
-{
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "VE_Init");
-
- VALIDATE_BASE_POINTER;
-
- return voeData.base->Init();
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_Terminate
- * Signature: ()I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Terminate(
- JNIEnv *,
- jobject)
-{
- VALIDATE_BASE_POINTER;
-
- jint retVal = voeData.base->Terminate();
- return retVal;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_CreateChannel
- * Signature: ()I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1CreateChannel(
- JNIEnv *,
- jobject)
-{
- VALIDATE_BASE_POINTER;
-
- webrtc::CodecInst voiceCodec;
- int numOfVeCodecs = voeData.codec->NumOfCodecs();
-
- //enum all the supported codec
- __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "Supported Voice Codec:\n");
- for (int i = 0; i < numOfVeCodecs; ++i) {
- if (voeData.codec->GetCodec(i, voiceCodec) != -1) {
- __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "num: %d name: %s\n", i, voiceCodec.plname);
- }
- }
-
- jint channel = voeData.base->CreateChannel();
- voeData.transport.reset(new test::VoiceChannelTransport(voeData.netw,
- channel));
- return channel;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_DeleteChannel
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1DeleteChannel(
- JNIEnv *,
- jobject,
- jint channel)
-{
- VALIDATE_BASE_POINTER;
- voeData.transport.reset(NULL);
- return voeData.base->DeleteChannel(channel);
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: ViE_DeleteChannel
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_ViE_1DeleteChannel(
- JNIEnv *,
- jobject,
- jint channel)
-{
- VALIDATE_BASE_POINTER;
- vieData.transport.reset(NULL);
- return vieData.base->DeleteChannel(channel);
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_SetLocalReceiver
- * Signature: (II)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetLocalReceiver(
- JNIEnv *,
- jobject,
- jint channel,
- jint port)
-{
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetLocalReceiver");
- VALIDATE_BASE_POINTER;
- if (voeData.transport.get()) {
- return voeData.transport->SetLocalReceiver(port);
- }
- return -1;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_SetSendDestination
- * Signature: (IILjava/lang/String;)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSendDestination(
- JNIEnv *env,
- jobject,
- jint channel,
- jint port,
- jstring ipaddr)
-{
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetSendDestination");
- VALIDATE_BASE_POINTER;
-
- const char* ipaddrNative = env->GetStringUTFChars(ipaddr, NULL);
- if (!ipaddrNative) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Could not get UTF string");
- return -1;
- }
- if (voeData.transport.get()) {
- jint retVal = voeData.transport->SetSendDestination(ipaddrNative, port);
- env->ReleaseStringUTFChars(ipaddr, ipaddrNative);
- return retVal;
- }
- env->ReleaseStringUTFChars(ipaddr, ipaddrNative);
- return -1;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StartListen
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartListen(
- JNIEnv *,
- jobject,
- jint channel)
-{
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartListen");
- VALIDATE_BASE_POINTER;
- return voeData.base->StartReceive(channel);
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StartPlayout
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayout(
- JNIEnv *,
- jobject,
- jint channel)
-{
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartPlayout");
- VALIDATE_BASE_POINTER;
- return voeData.base->StartPlayout(channel);
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StartSend
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartSend(
- JNIEnv *,
- jobject,
- jint channel)
-{
- __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartSend");
- VALIDATE_BASE_POINTER;
- return voeData.base->StartSend(channel);
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StopListen
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopListen(
- JNIEnv *,
- jobject,
- jint channel)
-{
- VALIDATE_BASE_POINTER;
- return voeData.base->StartReceive(channel);
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StopPlayout
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayout(
- JNIEnv *,
- jobject,
- jint channel)
-{
- VALIDATE_BASE_POINTER;
- return voeData.base->StopPlayout(channel);
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StopSend
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopSend(
- JNIEnv *,
- jobject,
- jint channel)
-{
- VALIDATE_BASE_POINTER;
- return voeData.base->StopSend(channel);
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_SetSpeakerVolume
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSpeakerVolume(
- JNIEnv *,
- jobject,
- jint level)
-{
- VALIDATE_VOLUME_POINTER;
-
- if (voeData.volume->SetSpeakerVolume(level) != 0) {
- return -1;
- }
- return 0;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_SetLoudspeakerStatus
- * Signature: (Z)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetLoudspeakerStatus(
- JNIEnv *,
- jobject,
- jboolean enable) {
- VALIDATE_HARDWARE_POINTER;
- if (voeData.hardware->SetLoudspeakerStatus(enable) != 0) {
- __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "SetLoudspeakerStatus(%d) failed", enable);
- return -1;
- }
- return 0;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StartPlayingFileLocally
- * Signature: (ILjava/lang/String;Z)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayingFileLocally(
- JNIEnv * env,
- jobject,
- jint channel,
- jstring fileName,
- jboolean loop)
-{
- VALIDATE_FILE_POINTER;
-
- const char* fileNameNative = env->GetStringUTFChars(fileName, NULL);
- if (!fileNameNative) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Could not get UTF string");
- return -1;
- }
-
- jint retVal = voeData.file->StartPlayingFileLocally(channel,
- fileNameNative,
- loop);
-
- env->ReleaseStringUTFChars(fileName, fileNameNative);
-
- return retVal;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StopPlayingFileLocally
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayingFileLocally(
- JNIEnv *,
- jobject,
- jint channel)
-{
- VALIDATE_FILE_POINTER;
- return voeData.file->StopPlayingFileLocally(channel);
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StartPlayingFileAsMicrophone
- * Signature: (ILjava/lang/String;Z)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayingFileAsMicrophone(
- JNIEnv *env,
- jobject,
- jint channel,
- jstring fileName,
- jboolean loop)
-{
- VALIDATE_FILE_POINTER;
-
- const char* fileNameNative = env->GetStringUTFChars(fileName, NULL);
- if (!fileNameNative) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Could not get UTF string");
- return -1;
- }
-
- jint retVal = voeData.file->StartPlayingFileAsMicrophone(channel,
- fileNameNative,
- loop);
-
- env->ReleaseStringUTFChars(fileName, fileNameNative);
-
- return retVal;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StopPlayingFileAsMicrophone
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayingFileAsMicrophone(
- JNIEnv *,
- jobject,
- jint channel)
-{
- VALIDATE_FILE_POINTER;
- return voeData.file->StopPlayingFileAsMicrophone(channel);
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_NumOfCodecs
- * Signature: ()I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1NumOfCodecs(
- JNIEnv *,
- jobject)
-{
- VALIDATE_CODEC_POINTER;
- return voeData.codec->NumOfCodecs();
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_NumOfCodecs
- * Signature: ()I
- */
-JNIEXPORT jobjectArray JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1GetCodecs(
- JNIEnv *env,
- jobject)
-{
- if (!voeData.codec) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Codec pointer doesn't exist");
- return NULL;
- }
-
- jobjectArray ret;
- int i;
- int num = voeData.codec->NumOfCodecs();
- char info[256];
-
- ret = (jobjectArray)env->NewObjectArray(
- num,
- env->FindClass("java/lang/String"),
- env->NewStringUTF(""));
-
- for(i = 0; i < num; i++) {
- webrtc::CodecInst codecToList;
- voeData.codec->GetCodec(i, codecToList);
- int written = snprintf(info, sizeof(info),
- "%s type:%d freq:%d pac:%d ch:%d rate:%d",
- codecToList.plname, codecToList.pltype,
- codecToList.plfreq, codecToList.pacsize,
- codecToList.channels, codecToList.rate);
- if (written < 0 || written >= static_cast(sizeof(info))) {
- assert(false);
- }
- __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "VoiceEgnine Codec[%d] %s", i, info);
- env->SetObjectArrayElement(ret, i, env->NewStringUTF( info ));
- }
-
- return ret;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_SetSendCodec
- * Signature: (II)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSendCodec(
- JNIEnv *,
- jobject,
- jint channel,
- jint index)
-{
- VALIDATE_CODEC_POINTER;
-
- webrtc::CodecInst codec;
-
- for (int i = 0; i < voeData.codec->NumOfCodecs(); ++i) {
- webrtc::CodecInst codecToList;
- voeData.codec->GetCodec(i, codecToList);
- __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
- "VE Codec list %s, pltype=%d\n",
- codecToList.plname, codecToList.pltype);
- }
-
- if (voeData.codec->GetCodec(index, codec) != 0) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Failed to get codec");
- return -1;
- }
- __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetSendCodec %s\n",
- codec.plname);
-
- return voeData.codec->SetSendCodec(channel, codec);
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_SetECStatus
- * Signature: (Z)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetECStatus(
- JNIEnv *,
- jobject,
- jboolean enable) {
- VALIDATE_APM_POINTER;
- if (voeData.apm->SetEcStatus(enable, kEcAecm) < 0) {
- __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Failed SetECStatus(%d,%d)", enable, kEcAecm);
- return -1;
- }
- if (voeData.apm->SetAecmMode(kAecmSpeakerphone, false) != 0) {
- __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Failed SetAecmMode(%d,%d)", kAecmSpeakerphone, 0);
- return -1;
- }
- return 0;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_SetAGCStatus
- * Signature: (Z)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetAGCStatus(
- JNIEnv *,
- jobject,
- jboolean enable) {
- VALIDATE_APM_POINTER;
- if (voeData.apm->SetAgcStatus(enable, kAgcFixedDigital) < 0) {
- __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Failed SetAgcStatus(%d,%d)", enable, kAgcFixedDigital);
- return -1;
- }
- webrtc::AgcConfig config;
- // The following settings are by default, explicitly set here.
- config.targetLeveldBOv = 3;
- config.digitalCompressionGaindB = 9;
- config.limiterEnable = true;
- if (voeData.apm->SetAgcConfig(config) != 0) {
- __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Failed SetAgcConfig(%d,%d,%d)",
- config.targetLeveldBOv,
- config.digitalCompressionGaindB,
- config.limiterEnable);
- return -1;
- }
- return 0;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_SetNSStatus
- * Signature: (Z)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetNSStatus(
- JNIEnv *,
- jobject,
- jboolean enable) {
- VALIDATE_APM_POINTER;
- if (voeData.apm->SetNsStatus(enable, kNsModerateSuppression) < 0) {
- __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Failed SetNsStatus(%d,%d)",
- enable, kNsModerateSuppression);
- return -1;
- }
- return 0;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StartDebugRecording
- * Signature: (Ljava/lang/String)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartDebugRecording(
- JNIEnv* env,
- jobject,
- jstring filename) {
- VALIDATE_APM_POINTER;
-
- const char* file = env->GetStringUTFChars(filename, NULL);
- if (!file) {
- __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Voice StartDebugRecording file error");
- return -1;
- }
- if (voeData.apm->StartDebugRecording(file) != 0) {
- __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Voice StartDebugRecording error");
- return -1;
- }
- return 0;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StopDebugRecording
- * Signature: ()I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopDebugRecording(
- JNIEnv *,
- jobject) {
- VALIDATE_APM_POINTER;
- if (voeData.apm->StopDebugRecording() < 0) {
- __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Voice StopDebugRecording error");
- return -1;
- }
- return 0;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StartIncomingRTPDump
- * Signature: (ILjava/lang/String;)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartIncomingRTPDump(
- JNIEnv* env,
- jobject,
- jint channel,
- jstring filename) {
- VALIDATE_RTP_POINTER;
- const char* file = env->GetStringUTFChars(filename, NULL);
- if (!file) {
- __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Voice StartRTPDump file error");
- return -1;
- }
- if (voeData.rtp->StartRTPDump(channel, file, kRtpIncoming) != 0) {
- __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Voice StartRTPDump error");
- return -1;
- }
- return 0;
-}
-
-/*
- * Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
- * Method: VoE_StopIncomingRTPDump
- * Signature: (I)I
- */
-JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopIncomingRTPDump(
- JNIEnv *,
- jobject,
- jint channel) {
- VALIDATE_RTP_POINTER;
- if (voeData.rtp->StopRTPDump(channel) < 0) {
- __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Voice StopRTPDump error");
- return -1;
- }
- return 0;
-}
-
-//
-// local function
-//
-
-// Get all sub-APIs
-bool VE_GetSubApis() {
- bool getOK = true;
-
- // Base
- voeData.base = VoEBase::GetInterface(voeData.ve);
- if (!voeData.base) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Get base sub-API failed");
- getOK = false;
- }
-
- // Codec
- voeData.codec = VoECodec::GetInterface(voeData.ve);
- if (!voeData.codec) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Get codec sub-API failed");
- getOK = false;
- }
-
- // File
- voeData.file = VoEFile::GetInterface(voeData.ve);
- if (!voeData.file) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Get file sub-API failed");
- getOK = false;
- }
-
- // Network
- voeData.netw = VoENetwork::GetInterface(voeData.ve);
- if (!voeData.netw) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Get network sub-API failed");
- getOK = false;
- }
-
- // audioprocessing
- voeData.apm = VoEAudioProcessing::GetInterface(voeData.ve);
- if (!voeData.apm) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Get VoEAudioProcessing sub-API failed");
- getOK = false;
- }
-
- // Volume
- voeData.volume = VoEVolumeControl::GetInterface(voeData.ve);
- if (!voeData.volume) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Get volume sub-API failed");
- getOK = false;
- }
-
- // Hardware
- voeData.hardware = VoEHardware::GetInterface(voeData.ve);
- if (!voeData.hardware) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Get hardware sub-API failed");
- getOK = false;
- }
-
- // RTP
- voeData.rtp = VoERTP_RTCP::GetInterface(voeData.ve);
- if (!voeData.rtp) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Get rtp sub-API failed");
- getOK = false;
- }
-
- return getOK;
-}
-
-// Release all sub-APIs
-bool VE_ReleaseSubApis() {
- bool releaseOK = true;
-
- // Base
- if (voeData.base) {
- if (0 != voeData.base->Release()) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Release base sub-API failed");
- releaseOK = false;
- }
- else {
- voeData.base = NULL;
- }
- }
-
- // Codec
- if (voeData.codec) {
- if (0 != voeData.codec->Release()) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Release codec sub-API failed");
- releaseOK = false;
- }
- else {
- voeData.codec = NULL;
- }
- }
-
- // File
- if (voeData.file) {
- if (0 != voeData.file->Release()) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Release file sub-API failed");
- releaseOK = false;
- }
- else {
- voeData.file = NULL;
- }
- }
-
- // Network
- if (voeData.netw) {
- if (0 != voeData.netw->Release()) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Release network sub-API failed");
- releaseOK = false;
- }
- else {
- voeData.netw = NULL;
- }
- }
-
- // apm
- if (voeData.apm) {
- if (0 != voeData.apm->Release()) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Release apm sub-API failed");
- releaseOK = false;
- }
- else {
- voeData.apm = NULL;
- }
- }
-
- // Volume
- if (voeData.volume) {
- if (0 != voeData.volume->Release()) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Release volume sub-API failed");
- releaseOK = false;
- }
- else {
- voeData.volume = NULL;
- }
- }
-
- // Hardware
- if (voeData.hardware) {
- if (0 != voeData.hardware->Release()) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Release hardware sub-API failed");
- releaseOK = false;
- }
- else {
- voeData.hardware = NULL;
- }
- }
-
- if (voeData.rtp) {
- if (0 != voeData.rtp->Release()) {
- __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
- "Release rtp sub-API failed");
- releaseOK = false;
- }
- else {
- voeData.rtp = NULL;
- }
- }
-
- return releaseOK;
-}
diff --git a/webrtc/video_engine/test/android/res/drawable/logo.png b/webrtc/video_engine/test/android/res/drawable/logo.png
deleted file mode 100644
index a07c69fa5..000000000
Binary files a/webrtc/video_engine/test/android/res/drawable/logo.png and /dev/null differ
diff --git a/webrtc/video_engine/test/android/res/layout/aconfig.xml b/webrtc/video_engine/test/android/res/layout/aconfig.xml
deleted file mode 100644
index 5f995c6fb..000000000
--- a/webrtc/video_engine/test/android/res/layout/aconfig.xml
+++ /dev/null
@@ -1,87 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/webrtc/video_engine/test/android/res/layout/both.xml b/webrtc/video_engine/test/android/res/layout/both.xml
deleted file mode 100644
index ab174ba4d..000000000
--- a/webrtc/video_engine/test/android/res/layout/both.xml
+++ /dev/null
@@ -1,43 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/webrtc/video_engine/test/android/res/layout/main.xml b/webrtc/video_engine/test/android/res/layout/main.xml
deleted file mode 100644
index a845a860a..000000000
--- a/webrtc/video_engine/test/android/res/layout/main.xml
+++ /dev/null
@@ -1,94 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/webrtc/video_engine/test/android/res/layout/row.xml b/webrtc/video_engine/test/android/res/layout/row.xml
deleted file mode 100644
index aa4f0ca44..000000000
--- a/webrtc/video_engine/test/android/res/layout/row.xml
+++ /dev/null
@@ -1,18 +0,0 @@
-
-
-
-
diff --git a/webrtc/video_engine/test/android/res/layout/send.xml b/webrtc/video_engine/test/android/res/layout/send.xml
deleted file mode 100644
index ee230f507..000000000
--- a/webrtc/video_engine/test/android/res/layout/send.xml
+++ /dev/null
@@ -1,17 +0,0 @@
-
-
-
-
-
-
-
-
-
-
diff --git a/webrtc/video_engine/test/android/res/layout/tabhost.xml b/webrtc/video_engine/test/android/res/layout/tabhost.xml
deleted file mode 100644
index 42383fd6d..000000000
--- a/webrtc/video_engine/test/android/res/layout/tabhost.xml
+++ /dev/null
@@ -1,24 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/webrtc/video_engine/test/android/res/layout/vconfig.xml b/webrtc/video_engine/test/android/res/layout/vconfig.xml
deleted file mode 100644
index d72f25720..000000000
--- a/webrtc/video_engine/test/android/res/layout/vconfig.xml
+++ /dev/null
@@ -1,73 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/webrtc/video_engine/test/android/res/values/strings.xml b/webrtc/video_engine/test/android/res/values/strings.xml
deleted file mode 100644
index 106d691a7..000000000
--- a/webrtc/video_engine/test/android/res/values/strings.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-
-
- WebRTC
- Error
- Camera Error
-Choose a codec type
-Video Engine Android Demo
-Codec Type
-Codec Size
-Remote IP address
-Loopback
-Stats
-Start Listen
-Start Send
-Start Both
-Voice
-Video Receive
-Video Send
-Global Settings
-Video Settings
-Video Tx Port
-Video Rx Port
-Audio Tx Port
-Audio Rx Port
-AGC
-VAD
-AECM
-NS
-NACK
-SwitchToFront
-SwitchToBack
-StartCall
-StopCall
-Exit
-Speaker
-APMRecord
-rtpdump
-SurfaceView
-MediaCodec Decoder/Renderer
-OpenGL
-
diff --git a/webrtc/video_engine/test/android/src/org/webrtc/videoengine/ViEMediaCodecDecoder.java b/webrtc/video_engine/test/android/src/org/webrtc/videoengine/ViEMediaCodecDecoder.java
deleted file mode 100644
index 1d5bb3957..000000000
--- a/webrtc/video_engine/test/android/src/org/webrtc/videoengine/ViEMediaCodecDecoder.java
+++ /dev/null
@@ -1,417 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.videoengine;
-
-import android.media.MediaCodec;
-import android.media.MediaExtractor;
-import android.media.MediaFormat;
-import android.os.Handler;
-import android.os.Looper;
-import android.os.Message;
-import android.util.Log;
-import android.view.Surface;
-import android.view.SurfaceView;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.LinkedList;
-
-class CodecState {
- private static final String TAG = "CodecState";
-
- private ViEMediaCodecDecoder mView;
- private MediaFormat mFormat;
- private boolean mSawInputEOS, mSawOutputEOS;
-
- private MediaCodec mCodec;
- private MediaFormat mOutputFormat;
- private ByteBuffer[] mCodecInputBuffers;
- private ByteBuffer[] mCodecOutputBuffers;
-
- private LinkedList mAvailableInputBufferIndices;
- private LinkedList mAvailableOutputBufferIndices;
- private LinkedList mAvailableOutputBufferInfos;
-
- private long mLastMediaTimeUs;
-
- public CodecState(
- ViEMediaCodecDecoder view,
- MediaFormat format,
- MediaCodec codec) {
- mView = view;
- mFormat = format;
- mSawInputEOS = mSawOutputEOS = false;
-
- mCodec = codec;
-
- mCodec.start();
- mCodecInputBuffers = mCodec.getInputBuffers();
- mCodecOutputBuffers = mCodec.getOutputBuffers();
-
- mAvailableInputBufferIndices = new LinkedList();
- mAvailableOutputBufferIndices = new LinkedList();
- mAvailableOutputBufferInfos = new LinkedList();
-
- mLastMediaTimeUs = 0;
- }
-
- public void release() {
- mCodec.stop();
- mCodecInputBuffers = null;
- mCodecOutputBuffers = null;
- mOutputFormat = null;
-
- mAvailableOutputBufferInfos = null;
- mAvailableOutputBufferIndices = null;
- mAvailableInputBufferIndices = null;
-
- mCodec.release();
- mCodec = null;
- }
-
- public void start() {
- }
-
- public void pause() {
- }
-
- public long getCurrentPositionUs() {
- return mLastMediaTimeUs;
- }
-
- public void flush() {
- mAvailableInputBufferIndices.clear();
- mAvailableOutputBufferIndices.clear();
- mAvailableOutputBufferInfos.clear();
-
- mSawInputEOS = false;
- mSawOutputEOS = false;
-
- mCodec.flush();
- }
-
- public void doSomeWork() {
- int index = mCodec.dequeueInputBuffer(0 /* timeoutUs */);
-
- if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
- mAvailableInputBufferIndices.add(new Integer(index));
- }
-
- while (feedInputBuffer()) {}
-
- MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
- index = mCodec.dequeueOutputBuffer(info, 0 /* timeoutUs */);
-
- if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
- mOutputFormat = mCodec.getOutputFormat();
- } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
- mCodecOutputBuffers = mCodec.getOutputBuffers();
- } else if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
- mAvailableOutputBufferIndices.add(new Integer(index));
- mAvailableOutputBufferInfos.add(info);
- }
-
- while (drainOutputBuffer()) {}
- }
-
- /** returns true if more input data could be fed */
- private boolean feedInputBuffer() {
- if (mSawInputEOS || mAvailableInputBufferIndices.isEmpty()) {
- return false;
- }
-
- int index = mAvailableInputBufferIndices.peekFirst().intValue();
-
- ByteBuffer codecData = mCodecInputBuffers[index];
-
- if (mView.hasFrame()) {
- Frame frame = mView.dequeueFrame();
- ByteBuffer buffer = frame.mBuffer;
- if (buffer == null) {
- return false;
- }
- if (codecData.capacity() < buffer.capacity()) {
- Log.e(TAG, "Buffer is too small to copy a frame.");
- // TODO(dwkang): split the frame into the multiple buffer.
- }
- buffer.rewind();
- codecData.rewind();
- codecData.put(buffer);
- codecData.rewind();
-
- try {
- mCodec.queueInputBuffer(
- index, 0 /* offset */, buffer.capacity(), frame.mTimeStampUs,
- 0 /* flags */);
-
- mAvailableInputBufferIndices.removeFirst();
- } catch (MediaCodec.CryptoException e) {
- Log.d(TAG, "CryptoException w/ errorCode "
- + e.getErrorCode() + ", '" + e.getMessage() + "'");
- }
-
- return true;
- }
- return false;
- }
-
-
- /** returns true if more output data could be drained */
- private boolean drainOutputBuffer() {
- if (mSawOutputEOS || mAvailableOutputBufferIndices.isEmpty()) {
- return false;
- }
-
- int index = mAvailableOutputBufferIndices.peekFirst().intValue();
- MediaCodec.BufferInfo info = mAvailableOutputBufferInfos.peekFirst();
-
- if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
- Log.d(TAG, "saw output EOS.");
-
- mSawOutputEOS = true;
- return false;
- }
-
- long realTimeUs =
- mView.getRealTimeUsForMediaTime(info.presentationTimeUs);
- long nowUs = System.currentTimeMillis() * 1000;
- long lateUs = nowUs - realTimeUs;
-
- // video
- boolean render;
-
- // TODO(dwkang): For some extreme cases, just not doing rendering is not enough.
- // Need to seek to the next key frame.
- if (lateUs < -10000) {
- // too early;
- return false;
- } else if (lateUs > 30000) {
- Log.d(TAG, "video late by " + lateUs + " us. Skipping...");
- render = false;
- } else {
- render = true;
- mLastMediaTimeUs = info.presentationTimeUs;
- }
-
- MediaFormat format= mCodec.getOutputFormat();
- Log.d(TAG, "Video output format :" + format.getInteger(MediaFormat.KEY_COLOR_FORMAT));
- mCodec.releaseOutputBuffer(index, render);
-
- mAvailableOutputBufferIndices.removeFirst();
- mAvailableOutputBufferInfos.removeFirst();
- return true;
- }
-}
-
-class Frame {
- public ByteBuffer mBuffer;
- public long mTimeStampUs;
-
- Frame(ByteBuffer buffer, long timeStampUs) {
- mBuffer = buffer;
- mTimeStampUs = timeStampUs;
- }
-}
-
-class ViEMediaCodecDecoder {
- private static final String TAG = "ViEMediaCodecDecoder";
-
- private MediaExtractor mExtractor;
-
- private CodecState mCodecState;
-
- private int mState;
- private static final int STATE_IDLE = 1;
- private static final int STATE_PREPARING = 2;
- private static final int STATE_PLAYING = 3;
- private static final int STATE_PAUSED = 4;
-
- private Handler mHandler;
- private static final int EVENT_PREPARE = 1;
- private static final int EVENT_DO_SOME_WORK = 2;
-
- private long mDeltaTimeUs;
- private long mDurationUs;
-
- private SurfaceView mSurfaceView;
- private LinkedList mFrameQueue = new LinkedList ();
-
- private Thread mLooperThread;
-
- public boolean configure(SurfaceView surfaceView, int width, int height) {
- mSurfaceView = surfaceView;
- Log.d(TAG, "configure " + "width" + width + "height" + height + mSurfaceView.toString());
-
- MediaFormat format = new MediaFormat();
- format.setString(MediaFormat.KEY_MIME, "video/x-vnd.on2.vp8");
- format.setInteger(MediaFormat.KEY_WIDTH, width);
- format.setInteger(MediaFormat.KEY_HEIGHT, height);
-
- Surface surface = mSurfaceView.getHolder().getSurface();
- Log.d(TAG, "Surface " + surface.isValid());
- MediaCodec codec;
- try {
- codec = MediaCodec.createDecoderByType("video/x-vnd.on2.vp8");
- // SW VP8 decoder
- // codec = MediaCodec.createByCodecName("OMX.google.vpx.decoder");
- // Nexus10 HW VP8 decoder
- // codec = MediaCodec.createByCodecName("OMX.Exynos.VP8.Decoder");
- } catch (Exception e) {
- // TODO(dwkang): replace this instanceof/throw with a narrower catch clause
- // once the SDK advances.
- if (e instanceof IOException) {
- Log.e(TAG, "Failed to create MediaCodec for VP8.", e);
- return false;
- }
- throw new RuntimeException(e);
- }
-
- codec.configure(format, surface, null, 0);
- mCodecState = new CodecState(this, format, codec);
- initMediaCodecView();
- return true;
- }
-
- public void setEncodedImage(ByteBuffer buffer, long renderTimeMs) {
- // TODO(dwkang): figure out why exceptions just make this thread finish.
- try {
- final long renderTimeUs = renderTimeMs * 1000;
- ByteBuffer buf = ByteBuffer.allocate(buffer.capacity());
- buf.put(buffer);
- buf.rewind();
- synchronized(mFrameQueue) {
- mFrameQueue.add(new Frame(buf, renderTimeUs));
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- public boolean hasFrame() {
- synchronized(mFrameQueue) {
- return !mFrameQueue.isEmpty();
- }
- }
-
- public Frame dequeueFrame() {
- synchronized(mFrameQueue) {
- return mFrameQueue.removeFirst();
- }
- }
-
- private void initMediaCodecView() {
- Log.d(TAG, "initMediaCodecView");
- mState = STATE_IDLE;
-
- mLooperThread = new Thread()
- {
- @Override
- public void run() {
- Log.d(TAG, "Looper prepare");
- Looper.prepare();
- mHandler = new Handler() {
- @Override
- public void handleMessage(Message msg) {
- // TODO(dwkang): figure out exceptions just make this thread finish.
- try {
- switch (msg.what) {
- case EVENT_PREPARE:
- {
- mState = STATE_PAUSED;
- ViEMediaCodecDecoder.this.start();
- break;
- }
-
- case EVENT_DO_SOME_WORK:
- {
- ViEMediaCodecDecoder.this.doSomeWork();
-
- mHandler.sendMessageDelayed(
- mHandler.obtainMessage(EVENT_DO_SOME_WORK), 5);
- break;
- }
-
- default:
- break;
- }
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
- };
- Log.d(TAG, "Looper loop");
- synchronized(ViEMediaCodecDecoder.this) {
- ViEMediaCodecDecoder.this.notify();
- }
- Looper.loop();
- }
- };
- mLooperThread.start();
-
- // Wait until handler is set up.
- synchronized(ViEMediaCodecDecoder.this) {
- try {
- ViEMediaCodecDecoder.this.wait(1000);
- } catch (InterruptedException e) {
- e.printStackTrace();
- }
- }
- Log.d(TAG, "initMediaCodecView end");
- }
-
- public void start() {
- Log.d(TAG, "start");
-
- if (mState == STATE_PLAYING || mState == STATE_PREPARING) {
- return;
- } else if (mState == STATE_IDLE) {
- mState = STATE_PREPARING;
- Log.d(TAG, "Sending EVENT_PREPARE");
- mHandler.sendMessage(mHandler.obtainMessage(EVENT_PREPARE));
- return;
- } else if (mState != STATE_PAUSED) {
- throw new IllegalStateException();
- }
-
- mCodecState.start();
-
- mHandler.sendMessage(mHandler.obtainMessage(EVENT_DO_SOME_WORK));
-
- mDeltaTimeUs = -1;
- mState = STATE_PLAYING;
-
- Log.d(TAG, "start end");
- }
-
- public void reset() {
- if (mState == STATE_PLAYING) {
- mCodecState.pause();
- }
-
- mCodecState.release();
-
- mDurationUs = -1;
- mState = STATE_IDLE;
- }
-
- private void doSomeWork() {
- mCodecState.doSomeWork();
- }
-
- public long getRealTimeUsForMediaTime(long mediaTimeUs) {
- if (mDeltaTimeUs == -1) {
- long nowUs = System.currentTimeMillis() * 1000;
- mDeltaTimeUs = nowUs - mediaTimeUs;
- }
-
- return mDeltaTimeUs + mediaTimeUs;
- }
-}
diff --git a/webrtc/video_engine/test/android/src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java b/webrtc/video_engine/test/android/src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java
deleted file mode 100644
index 9ce4128ec..000000000
--- a/webrtc/video_engine/test/android/src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.videoengineapp;
-
-import android.app.Activity;
-import android.content.Context;
-import android.util.Log;
-import android.view.SurfaceHolder;
-import android.view.SurfaceView;
-
-public class ViEAndroidJavaAPI {
-
- public ViEAndroidJavaAPI(Context context) {
- Log.d("*WEBRTCJ*", "Loading ViEAndroidJavaAPI...");
- System.loadLibrary("webrtc-video-demo-jni");
-
- Log.d("*WEBRTCJ*", "Calling native init...");
- if (!NativeInit(context)) {
- Log.e("*WEBRTCJ*", "Native init failed");
- throw new RuntimeException("Native init failed");
- }
- else {
- Log.d("*WEBRTCJ*", "Native init successful");
- }
- String a = "";
- a.getBytes();
- }
-
- // API Native
- private native boolean NativeInit(Context context);
-
- // Video Engine API
- // Initialization and Termination functions
- public native int GetVideoEngine();
- public native int Init(boolean enableTrace);
- public native int Terminate();
-
- public native int StartSend(int channel);
- public native int StopRender(int channel);
- public native int StopSend(int channel);
- public native int StartReceive(int channel);
- public native int StopReceive(int channel);
- // Channel functions
- public native int CreateChannel(int voiceChannel);
- // Receiver & Destination functions
- public native int SetLocalReceiver(int channel, int port);
- public native int SetSendDestination(int channel, int port, String ipaddr);
- // Codec
- public native String[] GetCodecs();
- public native int SetReceiveCodec(int channel, int codecNum,
- int intbitRate, int width,
- int height, int frameRate);
- public native int SetSendCodec(int channel, int codecNum,
- int intbitRate, int width,
- int height, int frameRate);
- // Rendering
- public native int AddRemoteRenderer(int channel, Object glSurface);
- public native int RemoveRemoteRenderer(int channel);
- public native int StartRender(int channel);
-
- // Capture
- public native int StartCamera(int channel, int cameraNum);
- public native int StopCamera(int cameraId);
- public native int GetCameraOrientation(int cameraNum);
- public native int SetRotation(int cameraId,int degrees);
-
- // External Codec
- public native int SetExternalMediaCodecDecoderRenderer(
- int channel, Object glSurface);
-
- // NACK
- public native int EnableNACK(int channel, boolean enable);
-
- // PLI
- public native int EnablePLI(int channel, boolean enable);
-
- // Enable stats callback
- public native int SetCallback(int channel, IViEAndroidCallback callback);
-
- public native int StartIncomingRTPDump(int channel, String file);
- public native int StopIncomingRTPDump(int channel);
-
- // Voice Engine API
- // Create and Delete functions
- public native boolean VoE_Create(Context context);
- public native boolean VoE_Delete();
-
- // Initialization and Termination functions
- public native int VoE_Init(boolean enableTrace);
- public native int VoE_Terminate();
-
- // Channel functions
- public native int VoE_CreateChannel();
- public native int VoE_DeleteChannel(int channel);
- public native int ViE_DeleteChannel(int channel);
-
- // Receiver & Destination functions
- public native int VoE_SetLocalReceiver(int channel, int port);
- public native int VoE_SetSendDestination(int channel, int port,
- String ipaddr);
-
- // Media functions
- public native int VoE_StartListen(int channel);
- public native int VoE_StartPlayout(int channel);
- public native int VoE_StartSend(int channel);
- public native int VoE_StopListen(int channel);
- public native int VoE_StopPlayout(int channel);
- public native int VoE_StopSend(int channel);
-
- // Volume
- public native int VoE_SetSpeakerVolume(int volume);
-
- // Hardware
- public native int VoE_SetLoudspeakerStatus(boolean enable);
-
- // Playout file locally
- public native int VoE_StartPlayingFileLocally(
- int channel,
- String fileName,
- boolean loop);
- public native int VoE_StopPlayingFileLocally(int channel);
-
- // Play file as microphone
- public native int VoE_StartPlayingFileAsMicrophone(
- int channel,
- String fileName,
- boolean loop);
- public native int VoE_StopPlayingFileAsMicrophone(int channel);
-
- // Codec-setting functions
- public native int VoE_NumOfCodecs();
- public native String[] VoE_GetCodecs();
- public native int VoE_SetSendCodec(int channel, int index);
-
- //VoiceEngine funtions
- public native int VoE_SetECStatus(boolean enable);
- public native int VoE_SetAGCStatus(boolean enable);
- public native int VoE_SetNSStatus(boolean enable);
- public native int VoE_StartDebugRecording(String file);
- public native int VoE_StopDebugRecording();
- public native int VoE_StartIncomingRTPDump(int channel, String file);
- public native int VoE_StopIncomingRTPDump(int channel);
-}
diff --git a/webrtc/video_engine/test/android/src/org/webrtc/videoengineapp/WebRTCDemo.java b/webrtc/video_engine/test/android/src/org/webrtc/videoengineapp/WebRTCDemo.java
deleted file mode 100644
index 84291d7c6..000000000
--- a/webrtc/video_engine/test/android/src/org/webrtc/videoengineapp/WebRTCDemo.java
+++ /dev/null
@@ -1,1078 +0,0 @@
-/*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-package org.webrtc.videoengineapp;
-
-import android.app.AlertDialog;
-import android.app.TabActivity;
-import android.content.BroadcastReceiver;
-import android.content.Context;
-import android.content.DialogInterface;
-import android.content.Intent;
-import android.content.IntentFilter;
-import android.content.pm.ActivityInfo;
-import android.content.res.Configuration;
-import android.graphics.Canvas;
-import android.graphics.Paint;
-import android.graphics.PixelFormat;
-import android.hardware.Camera;
-import android.hardware.Camera.CameraInfo;
-import android.hardware.SensorManager;
-import android.media.AudioManager;
-import android.media.MediaPlayer;
-import android.net.Uri;
-import android.os.Bundle;
-import android.os.Environment;
-import android.os.Handler;
-import android.util.Log;
-import android.view.Display;
-import android.view.Gravity;
-import android.view.KeyEvent;
-import android.view.LayoutInflater;
-import android.view.OrientationEventListener;
-import android.view.Surface;
-import android.view.SurfaceView;
-import android.view.View;
-import android.view.ViewGroup;
-import android.view.Window;
-import android.view.WindowManager;
-import android.widget.AdapterView;
-import android.widget.AdapterView.OnItemSelectedListener;
-import android.widget.ArrayAdapter;
-import android.widget.Button;
-import android.widget.CheckBox;
-import android.widget.EditText;
-import android.widget.LinearLayout;
-import android.widget.RadioGroup;
-import android.widget.Spinner;
-import android.widget.TabHost;
-import android.widget.TabHost.TabSpec;
-import android.widget.TextView;
-
-import org.webrtc.videoengine.ViERenderer;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.NetworkInterface;
-import java.net.SocketException;
-import java.util.Enumeration;
-
-public class WebRTCDemo extends TabActivity implements IViEAndroidCallback,
- View.OnClickListener,
- OnItemSelectedListener {
- private ViEAndroidJavaAPI vieAndroidAPI = null;
-
- // remote renderer
- private SurfaceView remoteSurfaceView = null;
-
- // local renderer and camera
- private SurfaceView svLocal = null;
-
- // channel number
- private int channel = -1;
- private int cameraId;
- private int voiceChannel = -1;
-
- // flags
- private boolean viERunning = false;
- private boolean voERunning = false;
-
- // debug
- private boolean enableTrace = true;
-
- // Constant
- private static final String TAG = "WEBRTC";
- private static final int RECEIVE_CODEC_FRAMERATE = 15;
- private static final int SEND_CODEC_FRAMERATE = 15;
- private static final int INIT_BITRATE = 500;
- private static final String LOOPBACK_IP = "127.0.0.1";
- // Zero means don't automatically start/stop calls.
- private static final long AUTO_CALL_RESTART_DELAY_MS = 0;
-
- private Handler handler = new Handler();
- private Runnable startOrStopCallback = new Runnable() {
- public void run() {
- startOrStop();
- }
- };
-
- private int volumeLevel = 204;
-
- private TabHost mTabHost = null;
-
- private TabSpec mTabSpecConfig;
- private TabSpec mTabSpecVideo;
-
- private LinearLayout mLlRemoteSurface = null;
- private LinearLayout mLlLocalSurface = null;
-
- private Button btStartStopCall;
- private Button btSwitchCamera;
-
- // Global Settings
- private CheckBox cbVideoSend;
- private boolean enableVideoSend = true;
- private CheckBox cbVideoReceive;
- private boolean enableVideoReceive = true;
- private boolean enableVideo = true;
- private CheckBox cbVoice;
- private boolean enableVoice = true;
- private EditText etRemoteIp;
- private String remoteIp = "";
- private CheckBox cbLoopback;
- private boolean loopbackMode = true;
- private CheckBox cbStats;
- private boolean isStatsOn = true;
- public enum RenderType {
- OPENGL,
- SURFACE,
- MEDIACODEC
- }
- RenderType renderType = RenderType.OPENGL;
-
- // Video settings
- private Spinner spCodecType;
- private int codecType = 0;
- private Spinner spCodecSize;
- private int codecSizeWidth = 0;
- private int codecSizeHeight = 0;
- private TextView etVRxPort;
- private int receivePortVideo = 11111;
- private TextView etVTxPort;
- private int destinationPortVideo = 11111;
- private CheckBox cbEnableNack;
- private boolean enableNack = true;
- private CheckBox cbEnableVideoRTPDump;
-
- // Audio settings
- private Spinner spVoiceCodecType;
- private int voiceCodecType = 0;
- private TextView etARxPort;
- private int receivePortVoice = 11113;
- private TextView etATxPort;
- private int destinationPortVoice = 11113;
- private CheckBox cbEnableSpeaker;
- private CheckBox cbEnableAGC;
- private boolean enableAGC = false;
- private CheckBox cbEnableAECM;
- private boolean enableAECM = false;
- private CheckBox cbEnableNS;
- private boolean enableNS = false;
- private CheckBox cbEnableDebugAPM;
- private CheckBox cbEnableVoiceRTPDump;
-
- // Stats variables
- private int frameRateI;
- private int bitRateI;
- private int packetLoss;
- private int frameRateO;
- private int bitRateO;
- private int numCalls = 0;
-
- private int widthI;
- private int heightI;
-
- // Variable for storing variables
- private String webrtcName = "/webrtc";
- private String webrtcDebugDir = null;
-
- private boolean usingFrontCamera = true;
- // The orientations (in degrees) of each of the cameras CCW-relative to the
- // device, indexed by CameraInfo.CAMERA_FACING_{BACK,FRONT}, and -1
- // for unrepresented |facing| values (i.e. single-camera device).
- private int[] cameraOrientations = new int[] { -1, -1 };
-
- private String[] mVideoCodecsStrings = null;
- private String[] mVideoCodecsSizeStrings = { "176x144", "320x240",
- "352x288", "640x480" };
- private String[] mVoiceCodecsStrings = null;
-
- private OrientationEventListener orientationListener;
- int currentDeviceOrientation = OrientationEventListener.ORIENTATION_UNKNOWN;
-
- private StatsView statsView = null;
-
- private BroadcastReceiver receiver;
-
- // Rounds rotation to the nearest 90 degree rotation.
- private static int roundRotation(int rotation) {
- return (int)(Math.round((double)rotation / 90) * 90) % 360;
- }
-
- // Populate |cameraOrientations| with the first cameras that have each of
- // the facing values.
- private void populateCameraOrientations() {
- CameraInfo info = new CameraInfo();
- for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
- Camera.getCameraInfo(i, info);
- if (cameraOrientations[info.facing] != -1) {
- continue;
- }
- cameraOrientations[info.facing] = info.orientation;
- }
- }
-
- // Return the |CameraInfo.facing| value appropriate for |usingFrontCamera|.
- private static int facingOf(boolean usingFrontCamera) {
- return usingFrontCamera ? CameraInfo.CAMERA_FACING_FRONT
- : CameraInfo.CAMERA_FACING_BACK;
- }
-
- // This function ensures that egress streams always send real world up
- // streams.
- // Note: There are two components of the camera rotation. The rotation of
- // the capturer relative to the device. I.e. up for the camera might not be
- // device up. When rotating the device the camera is also rotated.
- // The former is called orientation and the second is called rotation here.
- public void compensateCameraRotation() {
- int cameraOrientation = cameraOrientations[facingOf(usingFrontCamera)];
- // The device orientation is the device's rotation relative to its
- // natural position.
- int cameraRotation = roundRotation(currentDeviceOrientation);
-
- int totalCameraRotation = 0;
- if (usingFrontCamera) {
- // The front camera rotates in the opposite direction of the
- // device.
- int inverseCameraRotation = (360 - cameraRotation) % 360;
- totalCameraRotation =
- (inverseCameraRotation + cameraOrientation) % 360;
- } else {
- totalCameraRotation =
- (cameraRotation + cameraOrientation) % 360;
- }
- vieAndroidAPI.SetRotation(cameraId, totalCameraRotation);
- }
-
- // Called when the activity is first created.
- @Override
- public void onCreate(Bundle savedInstanceState) {
- Log.d(TAG, "onCreate");
-
- super.onCreate(savedInstanceState);
- requestWindowFeature(Window.FEATURE_NO_TITLE);
- getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
- getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
- setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
-
- populateCameraOrientations();
-
- setContentView(R.layout.tabhost);
-
- IntentFilter receiverFilter = new IntentFilter(Intent.ACTION_HEADSET_PLUG);
-
- receiver = new BroadcastReceiver() {
- @Override
- public void onReceive(Context context, Intent intent) {
- if (intent.getAction().compareTo(Intent.ACTION_HEADSET_PLUG)
- == 0) {
- int state = intent.getIntExtra("state", 0);
- Log.v(TAG, "Intent.ACTION_HEADSET_PLUG state: " + state +
- " microphone: " + intent.getIntExtra("microphone", 0));
- if (voERunning) {
- routeAudio(state == 0 && cbEnableSpeaker.isChecked());
- }
- }
- }
- };
- registerReceiver(receiver, receiverFilter);
-
- mTabHost = getTabHost();
-
- // Main tab
- mTabSpecVideo = mTabHost.newTabSpec("tab_video");
- mTabSpecVideo.setIndicator("Main");
- mTabSpecVideo.setContent(R.id.tab_video);
- mTabHost.addTab(mTabSpecVideo);
-
- // Shared config tab
- mTabHost = getTabHost();
- mTabSpecConfig = mTabHost.newTabSpec("tab_config");
- mTabSpecConfig.setIndicator("Settings");
- mTabSpecConfig.setContent(R.id.tab_config);
- mTabHost.addTab(mTabSpecConfig);
-
- TabSpec mTabv;
- mTabv = mTabHost.newTabSpec("tab_vconfig");
- mTabv.setIndicator("Video");
- mTabv.setContent(R.id.tab_vconfig);
- mTabHost.addTab(mTabv);
- TabSpec mTaba;
- mTaba = mTabHost.newTabSpec("tab_aconfig");
- mTaba.setIndicator("Audio");
- mTaba.setContent(R.id.tab_aconfig);
- mTabHost.addTab(mTaba);
-
- int childCount = mTabHost.getTabWidget().getChildCount();
- for (int i = 0; i < childCount; i++) {
- mTabHost.getTabWidget().getChildAt(i).getLayoutParams().height = 50;
- }
- orientationListener =
- new OrientationEventListener(this, SensorManager.SENSOR_DELAY_UI) {
- public void onOrientationChanged (int orientation) {
- if (orientation != ORIENTATION_UNKNOWN) {
- currentDeviceOrientation = orientation;
- compensateCameraRotation();
- }
- }
- };
- orientationListener.enable ();
-
- // Create a folder named webrtc in /scard for debugging
- webrtcDebugDir = Environment.getExternalStorageDirectory().toString() +
- webrtcName;
- File webrtcDir = new File(webrtcDebugDir);
- if (!webrtcDir.exists() && webrtcDir.mkdir() == false) {
- Log.v(TAG, "Failed to create " + webrtcDebugDir);
- } else if (!webrtcDir.isDirectory()) {
- Log.v(TAG, webrtcDebugDir + " exists but not a folder");
- webrtcDebugDir = null;
- }
-
- startMain();
-
- if (AUTO_CALL_RESTART_DELAY_MS > 0)
- startOrStop();
- }
-
- // Called before the activity is destroyed.
- @Override
- public void onDestroy() {
- Log.d(TAG, "onDestroy");
- handler.removeCallbacks(startOrStopCallback);
- unregisterReceiver(receiver);
- super.onDestroy();
- }
-
- private class StatsView extends View{
- public StatsView(Context context){
- super(context);
- }
-
- @Override protected void onDraw(Canvas canvas) {
- super.onDraw(canvas);
- // Only draw Stats in Main tab.
- if(mTabHost.getCurrentTabTag() == "tab_video") {
- Paint loadPaint = new Paint();
- loadPaint.setAntiAlias(true);
- loadPaint.setTextSize(16);
- loadPaint.setARGB(255, 255, 255, 255);
-
- canvas.drawText("#calls " + numCalls, 4, 222, loadPaint);
-
- String loadText;
- loadText = "> " + frameRateI + " fps/" +
- bitRateI/1024 + " kbps/ " + packetLoss;
- canvas.drawText(loadText, 4, 242, loadPaint);
- loadText = "< " + frameRateO + " fps/ " +
- bitRateO/1024 + " kbps";
- canvas.drawText(loadText, 4, 262, loadPaint);
- loadText = "Incoming resolution " + widthI + "x" + heightI;
- canvas.drawText(loadText, 4, 282, loadPaint);
- }
- updateDisplay();
- }
-
- void updateDisplay() {
- invalidate();
- }
- }
-
- private String getLocalIpAddress() {
- String localIPs = "";
- try {
- for (Enumeration en = NetworkInterface
- .getNetworkInterfaces(); en.hasMoreElements();) {
- NetworkInterface intf = en.nextElement();
- for (Enumeration enumIpAddr =
- intf.getInetAddresses();
- enumIpAddr.hasMoreElements(); ) {
- InetAddress inetAddress = enumIpAddr.nextElement();
- if (!inetAddress.isLoopbackAddress()) {
- localIPs +=
- inetAddress.getHostAddress().toString() + " ";
- // Set the remote ip address the same as
- // the local ip address of the last netif
- remoteIp = inetAddress.getHostAddress().toString();
- }
- }
- }
- } catch (SocketException ex) {
- Log.e(TAG, ex.toString());
- }
- return localIPs;
- }
-
- @Override
- public boolean onKeyDown(int keyCode, KeyEvent event) {
- if (keyCode == KeyEvent.KEYCODE_BACK) {
- if (viERunning) {
- stopAll();
- startMain();
- }
- finish();
- return true;
- }
- return super.onKeyDown(keyCode, event);
- }
-
- private void stopAll() {
- Log.d(TAG, "stopAll");
-
- if (vieAndroidAPI != null) {
-
- if (voERunning) {
- voERunning = false;
- stopVoiceEngine();
- }
-
- if (viERunning) {
- viERunning = false;
- vieAndroidAPI.StopRender(channel);
- vieAndroidAPI.StopReceive(channel);
- vieAndroidAPI.StopSend(channel);
- vieAndroidAPI.RemoveRemoteRenderer(channel);
- vieAndroidAPI.ViE_DeleteChannel(channel);
- channel = -1;
- vieAndroidAPI.StopCamera(cameraId);
- vieAndroidAPI.Terminate();
- mLlRemoteSurface.removeView(remoteSurfaceView);
- mLlLocalSurface.removeView(svLocal);
- remoteSurfaceView = null;
- svLocal = null;
- }
- }
- }
-
- /** {@ArrayAdapter} */
- public class SpinnerAdapter extends ArrayAdapter {
- private String[] mCodecString = null;
- public SpinnerAdapter(Context context, int textViewResourceId, String[] objects) {
- super(context, textViewResourceId, objects);
- mCodecString = objects;
- }
-
- @Override public View getDropDownView(int position, View convertView, ViewGroup parent) {
- return getCustomView(position, convertView, parent);
- }
-
- @Override public View getView(int position, View convertView, ViewGroup parent) {
- return getCustomView(position, convertView, parent);
- }
-
- public View getCustomView(int position, View convertView, ViewGroup parent) {
- LayoutInflater inflater = getLayoutInflater();
- View row = inflater.inflate(R.layout.row, parent, false);
- TextView label = (TextView) row.findViewById(R.id.spinner_row);
- label.setText(mCodecString[position]);
- return row;
- }
- }
-
- private void startMain() {
- mTabHost.setCurrentTab(0);
-
- mLlRemoteSurface = (LinearLayout) findViewById(R.id.llRemoteView);
- mLlLocalSurface = (LinearLayout) findViewById(R.id.llLocalView);
-
- if (null == vieAndroidAPI) {
- vieAndroidAPI = new ViEAndroidJavaAPI(this);
- }
- if (0 > setupVoE() || 0 > vieAndroidAPI.GetVideoEngine() ||
- 0 > vieAndroidAPI.Init(enableTrace)) {
- // Show dialog
- AlertDialog alertDialog = new AlertDialog.Builder(this).create();
- alertDialog.setTitle("WebRTC Error");
- alertDialog.setMessage("Can not init video engine.");
- alertDialog.setButton(
- DialogInterface.BUTTON_POSITIVE,
- "OK", new DialogInterface.OnClickListener() {
- public void onClick(DialogInterface dialog, int which) {
- return;
- } });
- alertDialog.show();
- }
-
- btSwitchCamera = (Button) findViewById(R.id.btSwitchCamera);
- if (cameraOrientations[0] != -1 && cameraOrientations[1] != -1) {
- btSwitchCamera.setOnClickListener(this);
- } else {
- btSwitchCamera.setEnabled(false);
- }
- btStartStopCall = (Button) findViewById(R.id.btStartStopCall);
- btStartStopCall.setOnClickListener(this);
- findViewById(R.id.btExit).setOnClickListener(this);
-
- // cleaning
- remoteSurfaceView = null;
- svLocal = null;
-
- // Video codec
- mVideoCodecsStrings = vieAndroidAPI.GetCodecs();
- spCodecType = (Spinner) findViewById(R.id.spCodecType);
- spCodecType.setOnItemSelectedListener(this);
- spCodecType.setAdapter(new SpinnerAdapter(this,
- R.layout.row,
- mVideoCodecsStrings));
- spCodecType.setSelection(0);
-
- // Video Codec size
- spCodecSize = (Spinner) findViewById(R.id.spCodecSize);
- spCodecSize.setOnItemSelectedListener(this);
- spCodecSize.setAdapter(new SpinnerAdapter(this,
- R.layout.row,
- mVideoCodecsSizeStrings));
- spCodecSize.setSelection(mVideoCodecsSizeStrings.length - 1);
-
- // Voice codec
- mVoiceCodecsStrings = vieAndroidAPI.VoE_GetCodecs();
- spVoiceCodecType = (Spinner) findViewById(R.id.spVoiceCodecType);
- spVoiceCodecType.setOnItemSelectedListener(this);
- spVoiceCodecType.setAdapter(new SpinnerAdapter(this,
- R.layout.row,
- mVoiceCodecsStrings));
- spVoiceCodecType.setSelection(0);
- // Find ISAC and use it
- for (int i = 0; i < mVoiceCodecsStrings.length; ++i) {
- if (mVoiceCodecsStrings[i].contains("ISAC")) {
- spVoiceCodecType.setSelection(i);
- break;
- }
- }
-
- RadioGroup radioGroup = (RadioGroup) findViewById(R.id.radio_group1);
- radioGroup.clearCheck();
- if (renderType == RenderType.OPENGL) {
- radioGroup.check(R.id.radio_opengl);
- } else if (renderType == RenderType.SURFACE) {
- radioGroup.check(R.id.radio_surface);
- } else if (renderType == RenderType.MEDIACODEC) {
- radioGroup.check(R.id.radio_mediacodec);
- }
-
- etRemoteIp = (EditText) findViewById(R.id.etRemoteIp);
- etRemoteIp.setText(remoteIp);
-
- cbLoopback = (CheckBox) findViewById(R.id.cbLoopback);
- cbLoopback.setChecked(loopbackMode);
-
- cbStats = (CheckBox) findViewById(R.id.cbStats);
- cbStats.setChecked(isStatsOn);
-
- cbVoice = (CheckBox) findViewById(R.id.cbVoice);
- cbVoice.setChecked(enableVoice);
-
- cbVideoSend = (CheckBox) findViewById(R.id.cbVideoSend);
- cbVideoSend.setChecked(enableVideoSend);
- cbVideoReceive = (CheckBox) findViewById(R.id.cbVideoReceive);
- cbVideoReceive.setChecked(enableVideoReceive);
-
- etVTxPort = (EditText) findViewById(R.id.etVTxPort);
- etVTxPort.setText(Integer.toString(destinationPortVideo));
-
- etVRxPort = (EditText) findViewById(R.id.etVRxPort);
- etVRxPort.setText(Integer.toString(receivePortVideo));
-
- etATxPort = (EditText) findViewById(R.id.etATxPort);
- etATxPort.setText(Integer.toString(destinationPortVoice));
-
- etARxPort = (EditText) findViewById(R.id.etARxPort);
- etARxPort.setText(Integer.toString(receivePortVoice));
-
- cbEnableNack = (CheckBox) findViewById(R.id.cbNack);
- cbEnableNack.setChecked(enableNack);
-
- cbEnableSpeaker = (CheckBox) findViewById(R.id.cbSpeaker);
- cbEnableAGC = (CheckBox) findViewById(R.id.cbAutoGainControl);
- cbEnableAGC.setChecked(enableAGC);
- cbEnableAECM = (CheckBox) findViewById(R.id.cbAECM);
- cbEnableAECM.setChecked(enableAECM);
- cbEnableNS = (CheckBox) findViewById(R.id.cbNoiseSuppression);
- cbEnableNS.setChecked(enableNS);
-
- cbEnableDebugAPM = (CheckBox) findViewById(R.id.cbDebugRecording);
- cbEnableDebugAPM.setChecked(false); // Disable APM debugging by default
-
- cbEnableVideoRTPDump = (CheckBox) findViewById(R.id.cbVideoRTPDump);
- cbEnableVideoRTPDump.setChecked(false); // Disable Video RTP Dump
-
- cbEnableVoiceRTPDump = (CheckBox) findViewById(R.id.cbVoiceRTPDump);
- cbEnableVoiceRTPDump.setChecked(false); // Disable Voice RTP Dump
-
- etRemoteIp.setOnClickListener(this);
- cbLoopback.setOnClickListener(this);
- cbStats.setOnClickListener(this);
- cbEnableNack.setOnClickListener(this);
- cbEnableSpeaker.setOnClickListener(this);
- cbEnableAECM.setOnClickListener(this);
- cbEnableAGC.setOnClickListener(this);
- cbEnableNS.setOnClickListener(this);
- cbEnableDebugAPM.setOnClickListener(this);
- cbEnableVideoRTPDump.setOnClickListener(this);
- cbEnableVoiceRTPDump.setOnClickListener(this);
-
- if (loopbackMode) {
- remoteIp = LOOPBACK_IP;
- etRemoteIp.setText(remoteIp);
- } else {
- getLocalIpAddress();
- etRemoteIp.setText(remoteIp);
- }
-
- // Read settings to refresh each configuration
- readSettings();
- }
-
- private String getRemoteIPString() {
- return etRemoteIp.getText().toString();
- }
-
- private void startCall() {
- int ret = 0;
-
- if (enableVoice) {
- startVoiceEngine();
- }
-
- if (enableVideo) {
- if (enableVideoSend) {
- // camera and preview surface
- svLocal = ViERenderer.CreateLocalRenderer(this);
- }
-
- channel = vieAndroidAPI.CreateChannel(voiceChannel);
- ret = vieAndroidAPI.SetLocalReceiver(channel,
- receivePortVideo);
- ret = vieAndroidAPI.SetSendDestination(channel,
- destinationPortVideo,
- getRemoteIPString());
-
- if (enableVideoReceive) {
- if (renderType == RenderType.OPENGL) {
- Log.v(TAG, "Create OpenGL Render");
- remoteSurfaceView = ViERenderer.CreateRenderer(this, true);
- } else if (renderType == RenderType.SURFACE) {
- Log.v(TAG, "Create SurfaceView Render");
- remoteSurfaceView = ViERenderer.CreateRenderer(this, false);
- } else if (renderType == RenderType.MEDIACODEC) {
- Log.v(TAG, "Create MediaCodec Decoder/Renderer");
- remoteSurfaceView = new SurfaceView(this);
- }
-
- if (mLlRemoteSurface != null) {
- mLlRemoteSurface.addView(remoteSurfaceView);
- }
-
- if (renderType == RenderType.MEDIACODEC) {
- ret = vieAndroidAPI.SetExternalMediaCodecDecoderRenderer(
- channel, remoteSurfaceView);
- } else {
- ret = vieAndroidAPI.AddRemoteRenderer(channel, remoteSurfaceView);
- }
-
- ret = vieAndroidAPI.SetReceiveCodec(channel,
- codecType,
- INIT_BITRATE,
- codecSizeWidth,
- codecSizeHeight,
- RECEIVE_CODEC_FRAMERATE);
- ret = vieAndroidAPI.StartRender(channel);
- ret = vieAndroidAPI.StartReceive(channel);
- }
-
- if (enableVideoSend) {
- ret = vieAndroidAPI.SetSendCodec(channel, codecType, INIT_BITRATE,
- codecSizeWidth, codecSizeHeight, SEND_CODEC_FRAMERATE);
- int camId = vieAndroidAPI.StartCamera(channel, usingFrontCamera ? 1 : 0);
-
- if (camId >= 0) {
- cameraId = camId;
- compensateCameraRotation();
- } else {
- ret = camId;
- }
- ret = vieAndroidAPI.StartSend(channel);
- }
-
- // TODO(leozwang): Add more options besides PLI, currently use pli
- // as the default. Also check return value.
- ret = vieAndroidAPI.EnablePLI(channel, true);
- ret = vieAndroidAPI.EnableNACK(channel, enableNack);
- ret = vieAndroidAPI.SetCallback(channel, this);
-
- if (enableVideoSend) {
- if (mLlLocalSurface != null) {
- mLlLocalSurface.addView(svLocal);
- }
- }
-
- isStatsOn = cbStats.isChecked();
- if (isStatsOn) {
- addStatusView();
- } else {
- removeStatusView();
- }
-
- viERunning = true;
- }
- }
-
- private void stopVoiceEngine() {
- // Stop send
- if (0 != vieAndroidAPI.VoE_StopSend(voiceChannel)) {
- Log.d(TAG, "VoE stop send failed");
- }
-
- // Stop listen
- if (0 != vieAndroidAPI.VoE_StopListen(voiceChannel)) {
- Log.d(TAG, "VoE stop listen failed");
- }
-
- // Stop playout
- if (0 != vieAndroidAPI.VoE_StopPlayout(voiceChannel)) {
- Log.d(TAG, "VoE stop playout failed");
- }
-
- if (0 != vieAndroidAPI.VoE_DeleteChannel(voiceChannel)) {
- Log.d(TAG, "VoE delete channel failed");
- }
- voiceChannel = -1;
-
- // Terminate
- if (0 != vieAndroidAPI.VoE_Terminate()) {
- Log.d(TAG, "VoE terminate failed");
- }
- }
-
- private int setupVoE() {
- // Create VoiceEngine
- // Error logging is done in native API wrapper
- vieAndroidAPI.VoE_Create(getApplicationContext());
-
- // Initialize
- if (0 != vieAndroidAPI.VoE_Init(enableTrace)) {
- Log.d(TAG, "VoE init failed");
- return -1;
- }
-
- // Suggest to use the voice call audio stream for hardware volume controls
- setVolumeControlStream(AudioManager.STREAM_VOICE_CALL);
- return 0;
- }
-
- private int startVoiceEngine() {
- // Create channel
- voiceChannel = vieAndroidAPI.VoE_CreateChannel();
- if (0 > voiceChannel) {
- Log.d(TAG, "VoE create channel failed");
- return -1;
- }
-
- // Set local receiver
- if (0 != vieAndroidAPI.VoE_SetLocalReceiver(voiceChannel,
- receivePortVoice)) {
- Log.d(TAG, "VoE set local receiver failed");
- }
-
- if (0 != vieAndroidAPI.VoE_StartListen(voiceChannel)) {
- Log.d(TAG, "VoE start listen failed");
- }
-
- // Route audio
- routeAudio(cbEnableSpeaker.isChecked());
-
- // set volume to default value
- if (0 != vieAndroidAPI.VoE_SetSpeakerVolume(volumeLevel)) {
- Log.d(TAG, "VoE set speaker volume failed");
- }
-
- // Start playout
- if (0 != vieAndroidAPI.VoE_StartPlayout(voiceChannel)) {
- Log.d(TAG, "VoE start playout failed");
- }
-
- if (0 != vieAndroidAPI.VoE_SetSendDestination(voiceChannel,
- destinationPortVoice,
- getRemoteIPString())) {
- Log.d(TAG, "VoE set send destination failed");
- }
-
- if (0 != vieAndroidAPI.VoE_SetSendCodec(voiceChannel, voiceCodecType)) {
- Log.d(TAG, "VoE set send codec failed");
- }
-
- if (0 != vieAndroidAPI.VoE_SetECStatus(enableAECM)) {
- Log.d(TAG, "VoE set EC Status failed");
- }
-
- if (0 != vieAndroidAPI.VoE_SetAGCStatus(enableAGC)) {
- Log.d(TAG, "VoE set AGC Status failed");
- }
-
- if (0 != vieAndroidAPI.VoE_SetNSStatus(enableNS)) {
- Log.d(TAG, "VoE set NS Status failed");
- }
-
- if (0 != vieAndroidAPI.VoE_StartSend(voiceChannel)) {
- Log.d(TAG, "VoE start send failed");
- }
-
- voERunning = true;
- return 0;
- }
-
- private void routeAudio(boolean enableSpeaker) {
- if (0 != vieAndroidAPI.VoE_SetLoudspeakerStatus(enableSpeaker)) {
- Log.d(TAG, "VoE set louspeaker status failed");
- }
- }
-
- private void startOrStop() {
- readSettings();
- if (viERunning || voERunning) {
- stopAll();
- startMain();
- btStartStopCall.setText(R.string.startCall);
- } else if (enableVoice || enableVideo){
- ++numCalls;
- startCall();
- btStartStopCall.setText(R.string.stopCall);
- }
- if (AUTO_CALL_RESTART_DELAY_MS > 0) {
- handler.postDelayed(startOrStopCallback, AUTO_CALL_RESTART_DELAY_MS);
- }
- }
-
- public void onClick(View arg0) {
- switch (arg0.getId()) {
- case R.id.btSwitchCamera:
- if (usingFrontCamera) {
- btSwitchCamera.setText(R.string.frontCamera);
- } else {
- btSwitchCamera.setText(R.string.backCamera);
- }
- usingFrontCamera = !usingFrontCamera;
-
- if (viERunning) {
- vieAndroidAPI.StopCamera(cameraId);
- mLlLocalSurface.removeView(svLocal);
-
- vieAndroidAPI.StartCamera(channel, usingFrontCamera ? 1 : 0);
- mLlLocalSurface.addView(svLocal);
- compensateCameraRotation();
- }
- break;
- case R.id.btStartStopCall:
- startOrStop();
- break;
- case R.id.btExit:
- stopAll();
- finish();
- break;
- case R.id.cbLoopback:
- loopbackMode = cbLoopback.isChecked();
- if (loopbackMode) {
- remoteIp = LOOPBACK_IP;
- etRemoteIp.setText(LOOPBACK_IP);
- } else {
- getLocalIpAddress();
- etRemoteIp.setText(remoteIp);
- }
- break;
- case R.id.etRemoteIp:
- remoteIp = etRemoteIp.getText().toString();
- break;
- case R.id.cbStats:
- isStatsOn = cbStats.isChecked();
- if (isStatsOn) {
- addStatusView();
- } else {
- removeStatusView();
- }
- break;
- case R.id.radio_surface:
- renderType = RenderType.SURFACE;
- break;
- case R.id.radio_opengl:
- renderType = RenderType.OPENGL;
- break;
- case R.id.radio_mediacodec:
- renderType = RenderType.MEDIACODEC;
- break;
- case R.id.cbNack:
- enableNack = cbEnableNack.isChecked();
- if (viERunning) {
- vieAndroidAPI.EnableNACK(channel, enableNack);
- }
- break;
- case R.id.cbSpeaker:
- if (voERunning) {
- routeAudio(cbEnableSpeaker.isChecked());
- }
- break;
- case R.id.cbDebugRecording:
- if (voERunning && webrtcDebugDir != null) {
- if (cbEnableDebugAPM.isChecked()) {
- vieAndroidAPI.VoE_StartDebugRecording(
- webrtcDebugDir + String.format("/apm_%d.dat",
- System.currentTimeMillis()));
- } else {
- vieAndroidAPI.VoE_StopDebugRecording();
- }
- }
- break;
- case R.id.cbVoiceRTPDump:
- if (voERunning && webrtcDebugDir != null) {
- if (cbEnableVoiceRTPDump.isChecked()) {
- vieAndroidAPI.VoE_StartIncomingRTPDump(channel,
- webrtcDebugDir + String.format("/voe_%d.rtp",
- System.currentTimeMillis()));
- } else {
- vieAndroidAPI.VoE_StopIncomingRTPDump(channel);
- }
- }
- break;
- case R.id.cbVideoRTPDump:
- if (viERunning && webrtcDebugDir != null) {
- if (cbEnableVideoRTPDump.isChecked()) {
- vieAndroidAPI.StartIncomingRTPDump(channel,
- webrtcDebugDir + String.format("/vie_%d.rtp",
- System.currentTimeMillis()));
- } else {
- vieAndroidAPI.StopIncomingRTPDump(channel);
- }
- }
- break;
- case R.id.cbAutoGainControl:
- enableAGC = cbEnableAGC.isChecked();
- if (voERunning) {
- vieAndroidAPI.VoE_SetAGCStatus(enableAGC);
- }
- break;
- case R.id.cbNoiseSuppression:
- enableNS = cbEnableNS.isChecked();
- if (voERunning) {
- vieAndroidAPI.VoE_SetNSStatus(enableNS);
- }
- break;
- case R.id.cbAECM:
- enableAECM = cbEnableAECM.isChecked();
- if (voERunning) {
- vieAndroidAPI.VoE_SetECStatus(enableAECM);
- }
- break;
- }
- }
-
- private void readSettings() {
- codecType = spCodecType.getSelectedItemPosition();
- voiceCodecType = spVoiceCodecType.getSelectedItemPosition();
-
- String sCodecSize = spCodecSize.getSelectedItem().toString();
- String[] aCodecSize = sCodecSize.split("x");
- codecSizeWidth = Integer.parseInt(aCodecSize[0]);
- codecSizeHeight = Integer.parseInt(aCodecSize[1]);
-
- loopbackMode = cbLoopback.isChecked();
- enableVoice = cbVoice.isChecked();
- enableVideoSend = cbVideoSend.isChecked();
- enableVideoReceive = cbVideoReceive.isChecked();
- enableVideo = enableVideoSend || enableVideoReceive;
-
- destinationPortVideo =
- Integer.parseInt(etVTxPort.getText().toString());
- receivePortVideo =
- Integer.parseInt(etVRxPort.getText().toString());
- destinationPortVoice =
- Integer.parseInt(etATxPort.getText().toString());
- receivePortVoice =
- Integer.parseInt(etARxPort.getText().toString());
-
- enableNack = cbEnableNack.isChecked();
- enableAGC = cbEnableAGC.isChecked();
- enableAECM = cbEnableAECM.isChecked();
- enableNS = cbEnableNS.isChecked();
- }
-
- public void onItemSelected(AdapterView> adapterView, View view,
- int position, long id) {
- if ((adapterView == spCodecType || adapterView == spCodecSize) &&
- viERunning) {
- readSettings();
- // change the codectype
- if (enableVideoReceive) {
- if (0 != vieAndroidAPI.SetReceiveCodec(channel, codecType,
- INIT_BITRATE, codecSizeWidth,
- codecSizeHeight,
- RECEIVE_CODEC_FRAMERATE)) {
- Log.d(TAG, "ViE set receive codec failed");
- }
- }
- if (enableVideoSend) {
- if (0 != vieAndroidAPI.SetSendCodec(channel, codecType,
- INIT_BITRATE, codecSizeWidth, codecSizeHeight,
- SEND_CODEC_FRAMERATE)) {
- Log.d(TAG, "ViE set send codec failed");
- }
- }
- } else if ((adapterView == spVoiceCodecType) && voERunning) {
- // change voice engine codec
- readSettings();
- if (0 != vieAndroidAPI.VoE_SetSendCodec(voiceChannel, voiceCodecType)) {
- Log.d(TAG, "VoE set send codec failed");
- }
- }
- }
-
- public void onNothingSelected(AdapterView> arg0) {
- Log.d(TAG, "No setting selected");
- }
-
- public int updateStats(int inFrameRateI, int inBitRateI,
- int inPacketLoss, int inFrameRateO, int inBitRateO) {
- frameRateI = inFrameRateI;
- bitRateI = inBitRateI;
- packetLoss = inPacketLoss;
- frameRateO = inFrameRateO;
- bitRateO = inBitRateO;
- return 0;
- }
-
- public int newIncomingResolution(int width, int height) {
- widthI = width;
- heightI = height;
- return 0;
- }
-
- private void addStatusView() {
- if (statsView != null) {
- return;
- }
- statsView = new StatsView(this);
- WindowManager.LayoutParams params = new WindowManager.LayoutParams(
- WindowManager.LayoutParams.MATCH_PARENT,
- WindowManager.LayoutParams.WRAP_CONTENT,
- WindowManager.LayoutParams.TYPE_SYSTEM_OVERLAY,
- WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE |
- WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE,
- PixelFormat.TRANSLUCENT);
- params.gravity = Gravity.RIGHT | Gravity.TOP;
- params.setTitle("Load Average");
- mTabHost.addView(statsView, params);
- statsView.setBackgroundColor(0);
- }
-
- private void removeStatusView() {
- mTabHost.removeView(statsView);
- statsView = null;
- }
-
-}
diff --git a/webrtc/video_engine/video_engine.gyp b/webrtc/video_engine/video_engine.gyp
index c0be24585..6e72dd551 100644
--- a/webrtc/video_engine/video_engine.gyp
+++ b/webrtc/video_engine/video_engine.gyp
@@ -18,13 +18,6 @@
'test/libvietest/libvietest.gypi',
'test/auto_test/vie_auto_test.gypi',
],
- 'conditions': [
- ['OS=="android"', {
- 'includes': [
- 'test/android/android_video_demo.gypi',
- ],
- }],
- ],
}],
],
}
diff --git a/webrtc/webrtc_examples.gyp b/webrtc/webrtc_examples.gyp
new file mode 100644
index 000000000..84ac3ba34
--- /dev/null
+++ b/webrtc/webrtc_examples.gyp
@@ -0,0 +1,85 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+ 'includes': ['build/common.gypi'],
+
+ 'conditions': [
+ ['OS=="android"', {
+ 'targets': [
+ {
+ 'target_name': 'libwebrtcdemo-jni',
+ 'type': 'loadable_module',
+ 'dependencies': [
+ '<(DEPTH)/third_party/icu/icu.gyp:icuuc',
+ '<(webrtc_root)/modules/modules.gyp:*',
+ '<(webrtc_root)/test/test.gyp:channel_transport',
+ '<(webrtc_root)/video_engine/video_engine.gyp:video_engine_core',
+ '<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine',
+ ],
+ 'sources': [
+ 'examples/android/media_demo/jni/jni_helpers.cc',
+ 'examples/android/media_demo/jni/on_load.cc',
+ 'examples/android/media_demo/jni/video_engine_jni.cc',
+ 'examples/android/media_demo/jni/voice_engine_jni.cc',
+ 'examples/android/media_demo/jni/media_codec_video_decoder.cc',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '-llog',
+ '-lGLESv2',
+ '-lOpenSLES',
+ ],
+ }
+ },
+ {
+ 'target_name': 'WebRTCDemo',
+ 'type': 'none',
+ 'dependencies': [
+ 'libwebrtcdemo-jni',
+ '<(modules_java_gyp_path):*',
+ ],
+ 'actions': [
+ {
+ # TODO(yujie.mao): Convert building of the demo to a proper GYP
+ # target so this action is not needed once chromium's
+ # apk-building machinery can be used. (crbug.com/225101)
+ 'action_name': 'build_webrtcdemo_apk',
+ 'variables': {
+ 'android_webrtc_demo_root': '<(webrtc_root)/examples/android/media_demo',
+ },
+ 'inputs' : [
+ '<(PRODUCT_DIR)/lib.java/audio_device_module_java.jar',
+ '<(PRODUCT_DIR)/lib.java/video_capture_module_java.jar',
+ '<(PRODUCT_DIR)/lib.java/video_render_module_java.jar',
+ '<(PRODUCT_DIR)/libwebrtcdemo-jni.so',
+ '