Complete rewrite of demo application.

BUG=2122
R=andrew@webrtc.org, fischman@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/3669004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5273 4adac7df-926f-26a2-2b94-8c16560cd09d
henrike@webrtc.org 2013-12-12 16:55:37 +00:00
parent 88ac63abc6
commit 451745ec05
63 changed files with 4496 additions and 4973 deletions

@@ -18,6 +18,13 @@
         'talk/libjingle_examples.gyp:*',
         'talk/libjingle_tests.gyp:*',
       ],
+      'conditions': [
+        ['OS=="android"', {
+          'dependencies': [
+            'webrtc/webrtc_examples.gyp:*',
+          ],
+        }],
+      ],
     },
   ],
 }

@@ -1,11 +1,12 @@
 <?xml version="1.0" encoding="utf-8"?>
 <manifest xmlns:android="http://schemas.android.com/apk/res/android"
-    android:versionCode="1" package="org.webrtc.videoengineapp" android:versionName="1.07">
+    android:versionCode="1" package="org.webrtc.webrtcdemo" android:versionName="1.07">
   <application android:icon="@drawable/logo"
-               android:label="@string/app_name"
+               android:label="@string/appName"
                android:debuggable="true">
     <activity android:name=".WebRTCDemo"
-              android:label="@string/app_name"
+              android:theme="@android:style/Theme.Holo"
+              android:label="@string/appName"
               android:screenOrientation="landscape"
               >
       <intent-filter>

@@ -0,0 +1,24 @@
This directory contains a sample app for sending and receiving video and audio
on Android. It also lets you enable and disable call quality enhancements such
as echo cancellation and noise suppression.
Prerequisites:
- Make sure gclient is checking out tools necessary to target Android: your
.gclient file should contain a line like:
target_os = ['android']
Make sure to re-run gclient sync after adding this to download the tools.
- Environment variables need to be set up to target Android; the easiest way
  to do this is to run (from the libjingle trunk directory):
. ./build/android/envsetup.sh
Note that this clobbers any previously-set $GYP_DEFINES so it must be done
before the next item.
- Set up webrtc-related GYP variables:
export GYP_DEFINES="$GYP_DEFINES java_home=</path/to/JDK>"
- Finally, run "gclient runhooks" to generate Android-targeting .ninja files.
Example of building the app:
cd <path/to/repository>/trunk
ninja -C out/Debug WebRTCDemo
It can then be installed and run on the device:
adb install -r out/Debug/WebRTCDemo-debug.apk
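
For reference, a complete .gclient that targets Android could look like the
sketch below; the solution name and URL are illustrative, so keep whatever your
existing checkout uses and only add the target_os line (note it sits at the top
level, outside the solutions list):
  solutions = [{
    "name": "trunk",
    "url": "http://webrtc.googlecode.com/svn/trunk",
    "deps_file": "DEPS",
    "managed": True,
  }]
  target_os = ['android']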

@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8"?>
<project name="WebRTCDemo" default="help">
<property file="local.properties" />
<property environment="env" />
<condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
<isset property="env.ANDROID_SDK_ROOT" />
</condition>
<loadproperties srcFile="project.properties" />
<fail
message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_SDK_ROOT environment variable."
unless="sdk.dir"
/>
<import file="custom_rules.xml" optional="true" />
<import file="${sdk.dir}/tools/ant/build.xml" />
</project>
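
If the SDK location is not baked into local.properties, injecting it through
the environment, as the fail message above suggests, is enough to build with
ant; for example (the SDK path here is illustrative):
  export ANDROID_SDK_ROOT=$HOME/android-sdk
  ant debug
'debug' is the standard debug-package target imported from
${sdk.dir}/tools/ant/build.xml.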

@@ -0,0 +1,82 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
#include <limits>
#include "third_party/icu/source/common/unicode/unistr.h"
using icu::UnicodeString;
jmethodID GetMethodID(JNIEnv* jni, jclass c, const std::string& name,
const char* signature) {
jmethodID m = jni->GetMethodID(c, name.c_str(), signature);
CHECK_EXCEPTION(jni, "error during GetMethodID");
return m;
}
jlong jlongFromPointer(void* ptr) {
CHECK(sizeof(intptr_t) <= sizeof(jlong), "Time to rethink the use of jlongs");
// Going through intptr_t to be obvious about the definedness of the
// conversion from pointer to integral type. intptr_t to jlong is a standard
// widening, guaranteed by the size CHECK above.
jlong ret = reinterpret_cast<intptr_t>(ptr);
CHECK(reinterpret_cast<void*>(ret) == ptr,
"jlong does not convert back to pointer");
return ret;
}
// Given a (UTF-16) jstring return a new UTF-8 native string.
std::string JavaToStdString(JNIEnv* jni, const jstring& j_string) {
const jchar* jchars = jni->GetStringChars(j_string, NULL);
CHECK_EXCEPTION(jni, "Error during GetStringChars");
UnicodeString ustr(jchars, jni->GetStringLength(j_string));
CHECK_EXCEPTION(jni, "Error during GetStringLength");
jni->ReleaseStringChars(j_string, jchars);
CHECK_EXCEPTION(jni, "Error during ReleaseStringChars");
std::string ret;
return ustr.toUTF8String(ret);
}
ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni, const char** classes,
int size) {
for (int i = 0; i < size; ++i) {
LoadClass(jni, classes[i]);
}
}
ClassReferenceHolder::~ClassReferenceHolder() {
CHECK(classes_.empty(), "Must call FreeReferences() before dtor!");
}
void ClassReferenceHolder::FreeReferences(JNIEnv* jni) {
for (std::map<std::string, jclass>::const_iterator it = classes_.begin();
it != classes_.end(); ++it) {
jni->DeleteGlobalRef(it->second);
}
classes_.clear();
}
jclass ClassReferenceHolder::GetClass(const std::string& name) {
std::map<std::string, jclass>::iterator it = classes_.find(name);
CHECK(it != classes_.end(), "Could not find class");
return it->second;
}
void ClassReferenceHolder::LoadClass(JNIEnv* jni, const std::string& name) {
jclass localRef = jni->FindClass(name.c_str());
CHECK_EXCEPTION(jni, "Could not load class");
CHECK(localRef, name.c_str());
jclass globalRef = reinterpret_cast<jclass>(jni->NewGlobalRef(localRef));
CHECK_EXCEPTION(jni, "error during NewGlobalRef");
CHECK(globalRef, name.c_str());
bool inserted = classes_.insert(std::make_pair(name, globalRef)).second;
CHECK(inserted, "Duplicate class name");
}

@@ -0,0 +1,81 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_JNI_HELPERS_H_
#define WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_JNI_HELPERS_H_
// TODO(henrike): this file contains duplication with regards to
// talk/app/webrtc/java/jni/peerconnection_jni.cc. When/if code can be shared
// between trunk/talk and trunk/webrtc remove the duplication.
#include <android/log.h>
#include <jni.h>
#include <map>
#include <string>
#define TAG "WEBRTC-NATIVE"
// Abort the process if |x| is false, emitting |msg| to logcat.
#define CHECK(x, msg) \
if (x) { \
} else { \
__android_log_print(ANDROID_LOG_ERROR, TAG, "%s:%d: %s", __FILE__, \
__LINE__, msg); \
abort(); \
}
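// (The inverted if/else form makes CHECK(...) expand to one complete
// statement, so it nests safely inside unbraced if/else chains.)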
// Abort the process if |jni| has a Java exception pending, emitting |msg| to
// logcat.
#define CHECK_EXCEPTION(jni, msg) \
if (0) { \
} else { \
if (jni->ExceptionCheck()) { \
jni->ExceptionDescribe(); \
jni->ExceptionClear(); \
CHECK(0, msg); \
} \
}
#define ARRAYSIZE(instance) \
static_cast<int>(sizeof(instance) / sizeof(instance[0]))
// JNIEnv-helper methods that CHECK success: no Java exception thrown and found
// object/class/method/field is non-null.
jmethodID GetMethodID(JNIEnv* jni, jclass c, const std::string& name,
const char* signature);
// Return a |jlong| that will automatically convert back to |ptr| when assigned
// to a |uint64|
jlong jlongFromPointer(void* ptr);
// Given a (UTF-16) jstring return a new UTF-8 native string.
std::string JavaToStdString(JNIEnv* jni, const jstring& j_string);
// Android's FindClass() is trickier than usual because the app-specific
// ClassLoader is not consulted when there is no app-specific frame on the
// stack. Consequently, we only look up classes once in JNI_OnLoad.
// http://developer.android.com/training/articles/perf-jni.html#faq_FindClass
class ClassReferenceHolder {
public:
ClassReferenceHolder(JNIEnv* jni, const char** classes, int size);
~ClassReferenceHolder();
void FreeReferences(JNIEnv* jni);
jclass GetClass(const std::string& name);
private:
void LoadClass(JNIEnv* jni, const std::string& name);
std::map<std::string, jclass> classes_;
};
#endif // WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_JNI_HELPERS_H_
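
To make the intended usage concrete, here is a minimal sketch of how these
helpers combine; the class and method names ("org/webrtc/webrtcdemo/Foo",
"fooCallback") are hypothetical, but the cache-then-lookup flow mirrors the
engine JNI files in this change:

  #include "webrtc/examples/android/media_demo/jni/jni_helpers.h"

  static const char* kClasses[] = {"org/webrtc/webrtcdemo/Foo"};
  static ClassReferenceHolder* g_classes = NULL;

  // Called from JNI_OnLoad, while the app ClassLoader is still reachable.
  void CacheClasses(JNIEnv* jni) {
    g_classes = new ClassReferenceHolder(jni, kClasses, ARRAYSIZE(kClasses));
  }

  // Any later thread can resolve the cached class and call into Java.
  void CallFooCallback(JNIEnv* jni, jobject j_foo) {
    jclass j_foo_class = g_classes->GetClass("org/webrtc/webrtcdemo/Foo");
    jmethodID m = GetMethodID(jni, j_foo_class, "fooCallback", "()V");
    jni->CallVoidMethod(j_foo, m);
    CHECK_EXCEPTION(jni, "fooCallback threw");
  }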

@@ -0,0 +1,81 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/examples/android/media_demo/jni/media_codec_video_decoder.h"
#include <android/log.h>
#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
#include "webrtc/modules/utility/interface/helpers_android.h"
namespace webrtc {
MediaCodecVideoDecoder::MediaCodecVideoDecoder(JavaVM* vm, jobject decoder)
: vm_(vm), decoder_(NULL), j_start_(NULL), j_push_buffer_(NULL) {
AttachThreadScoped ats(vm_);
JNIEnv* jni = ats.env();
// Make sure that the decoder is not recycled.
decoder_ = jni->NewGlobalRef(decoder);
// Get all function IDs.
jclass decoderClass = jni->GetObjectClass(decoder);
j_push_buffer_ =
jni->GetMethodID(decoderClass, "pushBuffer", "(Ljava/nio/ByteBuffer;J)V");
j_start_ = jni->GetMethodID(decoderClass, "start", "(II)Z");
}
MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
AttachThreadScoped ats(vm_);
JNIEnv* jni = ats.env();
jni->DeleteGlobalRef(decoder_);
}
int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* codecSettings,
int32_t numberOfCores) {
AttachThreadScoped ats(vm_);
JNIEnv* jni = ats.env();
if (!jni->CallBooleanMethod(decoder_, j_start_, codecSettings->width,
codecSettings->height)) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t MediaCodecVideoDecoder::Decode(
const EncodedImage& inputImage, bool missingFrames,
const RTPFragmentationHeader* fragmentation,
const CodecSpecificInfo* codecSpecificInfo, int64_t renderTimeMs) {
AttachThreadScoped ats(vm_);
JNIEnv* jni = ats.env();
jobject byteBuffer =
jni->NewDirectByteBuffer(inputImage._buffer, inputImage._length);
jni->CallVoidMethod(decoder_, j_push_buffer_, byteBuffer, renderTimeMs);
jni->DeleteLocalRef(byteBuffer);
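// No decoded frame is handed back to the engine (the Java decoder takes care
// of rendering), so report NO_OUTPUT rather than OK.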
return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
}
int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
DecodedImageCallback* callback) {
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t MediaCodecVideoDecoder::Release() {
// TODO(hellner): this maps nicely to MediaCodecVideoDecoder::dispose().
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t MediaCodecVideoDecoder::Reset() {
// TODO(hellner): implement. MediaCodec::stop() followed by
// MediaCodec::start()?
return WEBRTC_VIDEO_CODEC_OK;
}
} // namespace webrtc

@@ -0,0 +1,61 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_MEDIA_CODEC_VIDEO_DECODER_H_
#define WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_MEDIA_CODEC_VIDEO_DECODER_H_
#include <jni.h>
#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
namespace webrtc {
class MediaCodecVideoDecoder : public VideoDecoder {
public:
MediaCodecVideoDecoder(JavaVM* vm, jobject decoder);
virtual ~MediaCodecVideoDecoder();
virtual int32_t InitDecode(const VideoCodec* codecSettings,
int32_t numberOfCores);
virtual int32_t Decode(const EncodedImage& inputImage, bool missingFrames,
const RTPFragmentationHeader* fragmentation,
const CodecSpecificInfo* codecSpecificInfo,
int64_t renderTimeMs);
virtual int32_t RegisterDecodeCompleteCallback(
DecodedImageCallback* callback);
virtual int32_t Release();
virtual int32_t Reset();
virtual int32_t SetCodecConfigParameters(const uint8_t* /*buffer*/,
int32_t /*size*/) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
virtual VideoDecoder* Copy() {
CHECK(0, "Not implemented");
return NULL;
}
private:
JavaVM* vm_;
// Global reference to a (Java) MediaCodecVideoDecoder object.
jobject decoder_;
jmethodID j_start_;
jmethodID j_push_buffer_;
};
} // namespace webrtc
#endif // WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_MEDIA_CODEC_VIDEO_DECODER_H_

@@ -0,0 +1,54 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <jni.h>
#include <assert.h>
#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
#include "webrtc/examples/android/media_demo/jni/video_engine_jni.h"
#include "webrtc/examples/android/media_demo/jni/voice_engine_jni.h"
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/voice_engine/include/voe_base.h"
// Macro for native functions that can be found by way of jni-auto discovery.
// Note extern "C" is needed for "discovery" of native methods to work.
#define JOWW(rettype, name) \
extern "C" rettype JNIEXPORT JNICALL Java_org_webrtc_webrtcdemo_##name
static JavaVM* g_vm = NULL;
extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* vm, void* reserved) {
// Only called once.
CHECK(!g_vm, "OnLoad called more than once");
g_vm = vm;
return JNI_VERSION_1_4;
}
JOWW(void, NativeWebRtcContextRegistry_register)(
JNIEnv* jni,
jclass,
jobject context) {
webrtc_examples::SetVoeDeviceObjects(g_vm);
webrtc_examples::SetVieDeviceObjects(g_vm);
CHECK(webrtc::VideoEngine::SetAndroidObjects(g_vm) == 0,
"Failed to register android objects to video engine");
CHECK(webrtc::VoiceEngine::SetAndroidObjects(g_vm, jni, context) == 0,
"Failed to register android objects to voice engine");
}
JOWW(void, NativeWebRtcContextRegistry_unRegister)(
JNIEnv* jni,
jclass) {
CHECK(webrtc::VoiceEngine::SetAndroidObjects(NULL, NULL, NULL) == 0,
"Failed to unregister android objects from voice engine");
webrtc_examples::ClearVieDeviceObjects();
webrtc_examples::ClearVoeDeviceObjects();
}

@@ -0,0 +1,711 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This file contains JNI for the video engine interfaces.
// The native functions are found using jni's auto discovery.
#include "webrtc/examples/android/media_demo/jni/video_engine_jni.h"
#include <map>
#include <string>
#include "webrtc/common_types.h"
#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
#include "webrtc/examples/android/media_demo/jni/media_codec_video_decoder.h"
#include "webrtc/examples/android/media_demo/jni/voice_engine_jni.h"
#include "webrtc/modules/utility/interface/helpers_android.h"
#include "webrtc/test/channel_transport/include/channel_transport.h"
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/include/vie_codec.h"
#include "webrtc/video_engine/include/vie_external_codec.h"
#include "webrtc/video_engine/include/vie_network.h"
#include "webrtc/video_engine/include/vie_render.h"
#include "webrtc/video_engine/include/vie_rtp_rtcp.h"
// Macro for native functions that can be found by way of jni-auto discovery.
// Note extern "C" is needed for "discovery" of native methods to work.
#define JOWW(rettype, name) \
extern "C" rettype JNIEXPORT JNICALL Java_org_webrtc_webrtcdemo_##name
namespace {
static JavaVM* g_vm = NULL;
static ClassReferenceHolder* g_class_reference_holder = NULL;
jclass GetClass(const char* name) {
CHECK(g_class_reference_holder, "Class reference holder NULL");
return g_class_reference_holder->GetClass(name);
}
// C(++) description of a camera. This class is created by Java native calls
// and associated with the CameraDesc Java class. The Java class is used in the
// Java code but it is just a thin wrapper of the C(++) class that contain the
// actual information. The information is stored in C(++) as it is used to
// call video engine APIs.
struct CameraDesc {
// The name and id corresponds to ViECapture's |device_nameUTF8| and
// |unique_idUTF8|.
char name[64];
char unique_id[64];
};
// C++ callback class that can be used to register for callbacks from the
// video engine. It further propagates the callbacks to
// VideoDecodeEncodeObserver.java interface. The memory associated with this
// class is managed globally by the VideoEngineData class when registering and
// unregistering VideoDecodeEncodeObserver.java to receive callbacks.
class VideoDecodeEncodeObserver : public webrtc::ViEDecoderObserver,
public webrtc::ViEEncoderObserver {
public:
explicit VideoDecodeEncodeObserver(jobject j_observer)
: j_observer_(j_observer) {
webrtc::AttachThreadScoped ats(g_vm);
JNIEnv* jni = ats.env();
jclass j_observer_class = jni->GetObjectClass(j_observer_);
incoming_rate_ =
GetMethodID(jni, j_observer_class, "incomingRate", "(III)V");
incoming_codec_changed_ =
GetMethodID(jni, j_observer_class, "incomingCodecChanged",
"(ILorg/webrtc/webrtcdemo/VideoCodecInst;)V");
request_new_keyframe_ =
GetMethodID(jni, j_observer_class, "requestNewKeyFrame", "(I)V");
outgoing_rate_ =
GetMethodID(jni, j_observer_class, "outgoingRate", "(III)V");
j_observer_ = jni->NewGlobalRef(j_observer_);
}
~VideoDecodeEncodeObserver() {
webrtc::AttachThreadScoped ats(g_vm);
JNIEnv* jni = ats.env();
jni->DeleteGlobalRef(j_observer_);
}
virtual void IncomingRate(const int video_channel,
const unsigned int framerate,
const unsigned int bitrate) {
webrtc::AttachThreadScoped ats(g_vm);
JNIEnv* jni = ats.env();
jni->CallVoidMethod(j_observer_, incoming_rate_, video_channel,
static_cast<int>(framerate), static_cast<int>(bitrate));
}
virtual void DecoderTiming(int decode_ms, int max_decode_ms,
int current_delay_ms, int target_delay_ms,
int jitter_buffer_ms, int min_playout_delay_ms,
int render_delay_ms) {
// TODO(fischman): consider plumbing this through to Java.
}
virtual void IncomingCodecChanged(const int video_channel,
const webrtc::VideoCodec& video_codec) {
webrtc::AttachThreadScoped ats(g_vm);
JNIEnv* jni = ats.env();
webrtc::VideoCodec* codec = new webrtc::VideoCodec(video_codec);
jclass j_codec_class =
GetClass("org/webrtc/webrtcdemo/VideoCodecInst");
jmethodID j_codec_ctor = GetMethodID(jni, j_codec_class, "<init>", "(J)V");
jobject j_codec =
jni->NewObject(j_codec_class, j_codec_ctor, jlongFromPointer(codec));
CHECK_EXCEPTION(jni, "error during NewObject");
jni->CallVoidMethod(j_observer_, incoming_codec_changed_, video_channel,
j_codec);
}
virtual void RequestNewKeyFrame(const int video_channel) {
webrtc::AttachThreadScoped ats(g_vm);
JNIEnv* jni = ats.env();
jni->CallVoidMethod(j_observer_, request_new_keyframe_, video_channel);
}
virtual void OutgoingRate(const int video_channel,
const unsigned int framerate,
const unsigned int bitrate) {
webrtc::AttachThreadScoped ats(g_vm);
JNIEnv* jni = ats.env();
jni->CallVoidMethod(j_observer_, outgoing_rate_, video_channel,
static_cast<int>(framerate), static_cast<int>(bitrate));
}
virtual void SuspendChange(int video_channel, bool is_suspended) {}
private:
jobject j_observer_;
jmethodID incoming_rate_;
jmethodID incoming_codec_changed_;
jmethodID request_new_keyframe_;
jmethodID outgoing_rate_;
};
template<typename T>
void ReleaseSubApi(T instance) {
CHECK(instance->Release() == 0, "failed to release instance")
}
class VideoEngineData {
public:
VideoEngineData()
: vie(webrtc::VideoEngine::Create()),
base(webrtc::ViEBase::GetInterface(vie)),
codec(webrtc::ViECodec::GetInterface(vie)),
network(webrtc::ViENetwork::GetInterface(vie)),
rtp(webrtc::ViERTP_RTCP::GetInterface(vie)),
render(webrtc::ViERender::GetInterface(vie)),
capture(webrtc::ViECapture::GetInterface(vie)),
externalCodec(webrtc::ViEExternalCodec::GetInterface(vie)) {
CHECK(vie != NULL, "Video engine instance failed to be created");
CHECK(base != NULL, "Failed to acquire base interface");
CHECK(codec != NULL, "Failed to acquire codec interface");
CHECK(network != NULL, "Failed to acquire network interface");
CHECK(rtp != NULL, "Failed to acquire rtp interface");
CHECK(render != NULL, "Failed to acquire render interface");
CHECK(capture != NULL, "Failed to acquire capture interface");
CHECK(externalCodec != NULL, "Failed to acquire externalCodec interface");
}
~VideoEngineData() {
CHECK(channel_transports_.empty(),
"ViE transports must be deleted before terminating");
CHECK(observers_.empty(),
"ViE observers must be deleted before terminating");
CHECK(external_decoders_.empty(),
"ViE external decoders must be deleted before terminating");
ReleaseSubApi(externalCodec);
ReleaseSubApi(capture);
ReleaseSubApi(render);
ReleaseSubApi(rtp);
ReleaseSubApi(network);
ReleaseSubApi(codec);
ReleaseSubApi(base);
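// Delete() needs a modifiable lvalue, so copy out of the const member first.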
webrtc::VideoEngine* vie_pointer = vie;
CHECK(webrtc::VideoEngine::Delete(vie_pointer), "ViE failed to be deleted");
}
int CreateChannel() {
int channel;
CHECK(base->CreateChannel(channel) == 0, "Failed to create channel");
CreateTransport(channel);
return channel;
}
int DeleteChannel(int channel) {
if (base->DeleteChannel(channel) != 0) {
return -1;
}
DeleteTransport(channel);
return 0;
}
webrtc::test::VideoChannelTransport* GetTransport(int channel) {
ChannelTransports::iterator found = channel_transports_.find(channel);
if (found == channel_transports_.end()) {
return NULL;
}
return found->second;
}
int RegisterObserver(int channel, jobject j_observer) {
CHECK(observers_.find(channel) == observers_.end(),
"Observer already created for channel, inconsistent state");
observers_[channel] = new VideoDecodeEncodeObserver(j_observer);
int ret_val = codec->RegisterDecoderObserver(channel, *observers_[channel]);
ret_val |= codec->RegisterEncoderObserver(channel, *observers_[channel]);
return ret_val;
}
int DeregisterObserver(int channel) {
Observers::iterator found = observers_.find(channel);
if (found == observers_.end()) {
return -1;
}
int ret_val = codec->DeregisterDecoderObserver(channel);
ret_val |= codec->DeregisterEncoderObserver(channel);
delete found->second;
observers_.erase(found);
return ret_val;
}
int RegisterExternalReceiveCodec(jint channel, jint pl_type, jobject decoder,
bool internal_source) {
CHECK(external_decoders_.find(channel) == external_decoders_.end(),
"External decoder already created for channel, inconsistent state");
external_decoders_[channel] =
new webrtc::MediaCodecVideoDecoder(g_vm, decoder);
return externalCodec->RegisterExternalReceiveCodec(
channel, pl_type, external_decoders_[channel], internal_source);
}
int DeRegisterExternalReceiveCodec(jint channel, jint pl_type) {
ExternalDecoders::iterator found = external_decoders_.find(channel);
CHECK(found != external_decoders_.end(),
"ViE channel missing external decoder, inconsistent state");
CHECK(externalCodec->DeRegisterExternalReceiveCodec(channel, pl_type) == 0,
"Failed to register external receive decoder");
delete found->second;
external_decoders_.erase(found);
return 0;
}
webrtc::VideoEngine* const vie;
webrtc::ViEBase* const base;
webrtc::ViECodec* const codec;
webrtc::ViENetwork* const network;
webrtc::ViERTP_RTCP* const rtp;
webrtc::ViERender* const render;
webrtc::ViECapture* const capture;
webrtc::ViEExternalCodec* const externalCodec;
private:
// Video engine no longer provides a socket implementation. There is,
// however, a socket implementation in webrtc::test.
typedef std::map<int, webrtc::test::VideoChannelTransport*>
ChannelTransports;
typedef std::map<int, VideoDecodeEncodeObserver*> Observers;
typedef std::map<int, webrtc::MediaCodecVideoDecoder*> ExternalDecoders;
void CreateTransport(int channel) {
CHECK(GetTransport(channel) == NULL,
"Transport already created for ViE channel, inconsistent state");
channel_transports_[channel] =
new webrtc::test::VideoChannelTransport(network, channel);
}
void DeleteTransport(int channel) {
CHECK(GetTransport(channel) != NULL,
"ViE channel missing transport, inconsistent state");
delete channel_transports_[channel];
channel_transports_.erase(channel);
}
ChannelTransports channel_transports_;
Observers observers_;
ExternalDecoders external_decoders_;
};
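// The Java wrapper objects store their native pointer in a long field; the
// (J) constructors used in this file write that field, and the helpers below
// read it back.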
webrtc::VideoCodec* GetCodecInst(JNIEnv* jni, jobject j_codec) {
jclass j_codec_class = jni->GetObjectClass(j_codec);
jfieldID native_codec_id =
jni->GetFieldID(j_codec_class, "nativeCodecInst", "J");
jlong j_p = jni->GetLongField(j_codec, native_codec_id);
return reinterpret_cast<webrtc::VideoCodec*>(j_p);
}
CameraDesc* GetCameraDesc(JNIEnv* jni, jobject j_camera) {
jclass j_camera_class = jni->GetObjectClass(j_camera);
jfieldID native_camera_id =
jni->GetFieldID(j_camera_class, "nativeCameraDesc", "J");
jlong j_p = jni->GetLongField(j_camera, native_camera_id);
return reinterpret_cast<CameraDesc*>(j_p);
}
VideoEngineData* GetVideoEngineData(JNIEnv* jni, jobject j_vie) {
jclass j_vie_class = jni->GetObjectClass(j_vie);
jfieldID native_vie_id =
jni->GetFieldID(j_vie_class, "nativeVideoEngine", "J");
jlong j_p = jni->GetLongField(j_vie, native_vie_id);
return reinterpret_cast<VideoEngineData*>(j_p);
}
} // namespace
namespace webrtc_examples {
static const char* g_classes[] = {
"org/webrtc/webrtcdemo/CameraDesc",
"org/webrtc/webrtcdemo/RtcpStatistics",
"org/webrtc/webrtcdemo/VideoCodecInst",
"org/webrtc/webrtcdemo/VideoDecodeEncodeObserver",
"org/webrtc/webrtcdemo/MediaCodecVideoDecoder"};
void SetVieDeviceObjects(JavaVM* vm) {
CHECK(vm, "Trying to register NULL vm");
CHECK(!g_vm, "Trying to re-register vm");
g_vm = vm;
webrtc::AttachThreadScoped ats(g_vm);
JNIEnv* jni = ats.env();
g_class_reference_holder = new ClassReferenceHolder(
jni, g_classes, ARRAYSIZE(g_classes));
}
void ClearVieDeviceObjects() {
CHECK(g_vm, "Clearing vm without it being set");
{
webrtc::AttachThreadScoped ats(g_vm);
g_class_reference_holder->FreeReferences(ats.env());
}
g_vm = NULL;
delete g_class_reference_holder;
g_class_reference_holder = NULL;
}
} // namespace webrtc_examples
JOWW(jlong, VideoEngine_create)(JNIEnv* jni, jclass) {
VideoEngineData* vie_data = new VideoEngineData();
return jlongFromPointer(vie_data);
}
JOWW(jint, VideoEngine_init)(JNIEnv* jni, jobject j_vie) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->base->Init();
}
JOWW(jint, VideoEngine_setVoiceEngine)(JNIEnv* jni, jobject j_vie,
jobject j_voe) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
webrtc::VoiceEngine* voe = GetVoiceEngine(jni, j_voe);
return vie_data->base->SetVoiceEngine(voe);
}
JOWW(void, VideoEngine_dispose)(JNIEnv* jni, jobject j_vie) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
delete vie_data;
}
JOWW(jint, VideoEngine_startSend)(JNIEnv* jni, jobject j_vie, jint channel) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->base->StartSend(channel);
}
JOWW(jint, VideoEngine_stopRender)(JNIEnv* jni, jobject j_vie, jint channel) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->render->StopRender(channel);
}
JOWW(jint, VideoEngine_stopSend)(JNIEnv* jni, jobject j_vie, jint channel) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->base->StopSend(channel);
}
JOWW(jint, VideoEngine_startReceive)(JNIEnv* jni, jobject j_vie, jint channel) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->base->StartReceive(channel);
}
JOWW(jint, VideoEngine_stopReceive)(JNIEnv* jni, jobject j_vie, jint channel) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->base->StopReceive(channel);
}
JOWW(jint, VideoEngine_createChannel)(JNIEnv* jni, jobject j_vie) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->CreateChannel();
}
JOWW(jint, VideoEngine_deleteChannel)(JNIEnv* jni, jobject j_vie,
jint channel) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->DeleteChannel(channel);
}
JOWW(jint, VideoEngine_connectAudioChannel)(JNIEnv* jni, jobject j_vie,
jint video_channel, jint audio_channel) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->base->ConnectAudioChannel(video_channel, audio_channel);
}
JOWW(jint, VideoEngine_setLocalReceiver)(JNIEnv* jni, jobject j_vie,
jint channel, jint port) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->GetTransport(channel)->SetLocalReceiver(port);
}
JOWW(jint, VideoEngine_setSendDestination)(JNIEnv* jni, jobject j_vie,
jint channel, jint port,
jstring j_addr) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
std::string addr = JavaToStdString(jni, j_addr);
webrtc::test::VideoChannelTransport* transport =
vie_data->GetTransport(channel);
return transport->SetSendDestination(addr.c_str(), port);
}
JOWW(jint, VideoEngine_setReceiveCodec)(JNIEnv* jni, jobject j_vie,
jint channel, jobject j_codec) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
webrtc::VideoCodec* codec = GetCodecInst(jni, j_codec);
return vie_data->codec->SetReceiveCodec(channel, *codec);
}
JOWW(jint, VideoEngine_setSendCodec)(JNIEnv* jni, jobject j_vie, jint channel,
jobject j_codec) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
webrtc::VideoCodec* codec = GetCodecInst(jni, j_codec);
return vie_data->codec->SetSendCodec(channel, *codec);
}
JOWW(jint, VideoEngine_numberOfCodecs)(JNIEnv* jni, jobject j_vie) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->codec->NumberOfCodecs();
}
JOWW(jobject, VideoEngine_getCodec)(JNIEnv* jni, jobject j_vie, jint index) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
webrtc::VideoCodec* codec = new webrtc::VideoCodec();
CHECK(vie_data->codec->GetCodec(index, *codec) == 0,
"getCodec must be called with valid index");
jclass j_codec_class = GetClass("org/webrtc/webrtcdemo/VideoCodecInst");
jmethodID j_codec_ctor = GetMethodID(jni, j_codec_class, "<init>", "(J)V");
jobject j_codec =
jni->NewObject(j_codec_class, j_codec_ctor, jlongFromPointer(codec));
CHECK_EXCEPTION(jni, "error during NewObject");
return j_codec;
}
JOWW(jint, VideoEngine_addRenderer)(JNIEnv* jni, jobject j_vie, jint channel,
jobject gl_surface, jint z_order,
jfloat left, jfloat top, jfloat right,
jfloat bottom) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->render->AddRenderer(channel, gl_surface, z_order, left, top,
right, bottom);
}
JOWW(jint, VideoEngine_removeRenderer)(JNIEnv* jni, jobject j_vie,
jint channel) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->render->RemoveRenderer(channel);
}
JOWW(jint, VideoEngine_registerExternalReceiveCodec)(JNIEnv* jni, jobject j_vie,
jint channel, jint pl_type,
jobject decoder,
jboolean internal_source) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
// Forward the caller's flag instead of hardcoding it; JNI delivers booleans
// as jboolean, not bool.
return vie_data->RegisterExternalReceiveCodec(channel, pl_type, decoder,
internal_source);
}
JOWW(jint,
VideoEngine_deRegisterExternalReceiveCodec)(JNIEnv* jni, jobject j_vie,
jint channel, jint pl_type) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->DeRegisterExternalReceiveCodec(channel, pl_type);
}
JOWW(jint, VideoEngine_startRender)(JNIEnv* jni, jobject j_vie, jint channel) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->render->StartRender(channel);
}
JOWW(jint, VideoEngine_numberOfCaptureDevices)(JNIEnv* jni, jobject j_vie) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->capture->NumberOfCaptureDevices();
}
JOWW(jobject, VideoEngine_getCaptureDevice)(JNIEnv* jni, jobject j_vie,
jint index) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
CameraDesc* camera_info = new CameraDesc();
if (vie_data->capture->GetCaptureDevice(
index, camera_info->name, sizeof(camera_info->name),
camera_info->unique_id, sizeof(camera_info->unique_id)) != 0) {
delete camera_info;
return NULL;
}
jclass j_camera_class = GetClass("org/webrtc/webrtcdemo/CameraDesc");
jmethodID j_camera_ctor = GetMethodID(jni, j_camera_class, "<init>", "(J)V");
jobject j_camera = jni->NewObject(j_camera_class, j_camera_ctor,
jlongFromPointer(camera_info));
CHECK_EXCEPTION(jni, "error during NewObject");
return j_camera;
}
JOWW(jint, VideoEngine_allocateCaptureDevice)(JNIEnv* jni, jobject j_vie,
jobject j_camera) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
CameraDesc* camera_info = GetCameraDesc(jni, j_camera);
jint capture_id;
if (vie_data->capture->AllocateCaptureDevice(camera_info->unique_id,
sizeof(camera_info->unique_id),
capture_id) != 0) {
return -1;
}
return capture_id;
}
JOWW(jint, VideoEngine_connectCaptureDevice)(JNIEnv* jni, jobject j_vie,
jint camera_num, jint channel) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->capture->ConnectCaptureDevice(camera_num, channel);
}
JOWW(jint, VideoEngine_startCapture)(JNIEnv* jni, jobject j_vie,
jint camera_num) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->capture->StartCapture(camera_num);
}
JOWW(jint, VideoEngine_stopCapture)(JNIEnv* jni, jobject j_vie,
jint camera_id) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->capture->StopCapture(camera_id);
}
JOWW(jint, VideoEngine_releaseCaptureDevice)(JNIEnv* jni, jobject j_vie,
jint camera_id) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->capture->ReleaseCaptureDevice(camera_id);
}
JOWW(jint, VideoEngine_getOrientation)(JNIEnv* jni, jobject j_vie,
jobject j_camera) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
CameraDesc* camera_info = GetCameraDesc(jni, j_camera);
webrtc::RotateCapturedFrame orientation;
if (vie_data->capture->GetOrientation(camera_info->unique_id, orientation) !=
0) {
return -1;
}
return static_cast<jint>(orientation);
}
JOWW(jint, VideoEngine_setRotateCapturedFrames)(JNIEnv* jni, jobject j_vie,
jint capture_id, jint degrees) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->capture->SetRotateCapturedFrames(
capture_id, static_cast<webrtc::RotateCapturedFrame>(degrees));
}
JOWW(jint, VideoEngine_setNackStatus)(JNIEnv* jni, jobject j_vie, jint channel,
jboolean enable) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->rtp->SetNACKStatus(channel, enable);
}
JOWW(jint, VideoEngine_setKeyFrameRequestMethod)(JNIEnv* jni, jobject j_vie,
jint channel,
jint request_method) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->rtp->SetKeyFrameRequestMethod(
channel, static_cast<webrtc::ViEKeyFrameRequestMethod>(request_method));
}
JOWW(jobject, VideoEngine_getReceivedRtcpStatistics)(JNIEnv* jni, jobject j_vie,
jint channel) {
unsigned short fraction_lost; // NOLINT
unsigned int cumulative_lost; // NOLINT
unsigned int extended_max; // NOLINT
unsigned int jitter; // NOLINT
int rtt_ms;
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
if (vie_data->rtp->GetReceivedRTCPStatistics(channel, fraction_lost,
cumulative_lost, extended_max,
jitter, rtt_ms) != 0) {
return NULL;
}
jclass j_rtcp_statistics_class =
GetClass("org/webrtc/webrtcdemo/RtcpStatistics");
jmethodID j_rtcp_statistics_ctor =
GetMethodID(jni, j_rtcp_statistics_class, "<init>", "(IIIII)V");
jobject j_rtcp_statistics =
jni->NewObject(j_rtcp_statistics_class, j_rtcp_statistics_ctor,
fraction_lost, cumulative_lost, extended_max, jitter,
rtt_ms);
CHECK_EXCEPTION(jni, "error during NewObject");
return j_rtcp_statistics;
}
JOWW(jint, VideoEngine_registerObserver)(JNIEnv* jni, jobject j_vie,
jint channel, jobject callback) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->RegisterObserver(channel, callback);
}
JOWW(jint, VideoEngine_deregisterObserver)(JNIEnv* jni, jobject j_vie,
jint channel) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->DeregisterObserver(channel);
}
JOWW(jint, VideoEngine_setTraceFile)(JNIEnv* jni, jobject, jstring j_filename,
jboolean file_counter) {
std::string filename = JavaToStdString(jni, j_filename);
return webrtc::VideoEngine::SetTraceFile(filename.c_str(), file_counter);
}
JOWW(jint, VideoEngine_setTraceFilter)(JNIEnv* jni, jobject, jint filter) {
return webrtc::VideoEngine::SetTraceFilter(filter);
}
JOWW(jint, VideoEngine_startRtpDump)(JNIEnv* jni, jobject j_vie, jint channel,
jstring j_filename, jint direction) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
std::string filename = JavaToStdString(jni, j_filename);
return vie_data->rtp->StartRTPDump(
channel, filename.c_str(), static_cast<webrtc::RTPDirections>(direction));
}
JOWW(jint, VideoEngine_stopRtpDump)(JNIEnv* jni, jobject j_vie, jint channel,
jint direction) {
VideoEngineData* vie_data = GetVideoEngineData(jni, j_vie);
return vie_data->rtp->StopRTPDump(
channel, static_cast<webrtc::RTPDirections>(direction));
}
JOWW(void, VideoCodecInst_dispose)(JNIEnv* jni, jobject j_codec) {
delete GetCodecInst(jni, j_codec);
}
JOWW(jint, VideoCodecInst_plType)(JNIEnv* jni, jobject j_codec) {
return GetCodecInst(jni, j_codec)->plType;
}
JOWW(jstring, VideoCodecInst_name)(JNIEnv* jni, jobject j_codec) {
return jni->NewStringUTF(GetCodecInst(jni, j_codec)->plName);
}
JOWW(jint, VideoCodecInst_width)(JNIEnv* jni, jobject j_codec) {
return GetCodecInst(jni, j_codec)->width;
}
JOWW(void, VideoCodecInst_setWidth)(JNIEnv* jni, jobject j_codec, jint width) {
GetCodecInst(jni, j_codec)->width = width;
}
JOWW(jint, VideoCodecInst_height)(JNIEnv* jni, jobject j_codec) {
return GetCodecInst(jni, j_codec)->height;
}
JOWW(void, VideoCodecInst_setHeight)(JNIEnv* jni, jobject j_codec,
jint height) {
GetCodecInst(jni, j_codec)->height = height;
}
JOWW(jint, VideoCodecInst_startBitRate)(JNIEnv* jni, jobject j_codec) {
return GetCodecInst(jni, j_codec)->startBitrate;
}
JOWW(void, VideoCodecInst_setStartBitRate)(JNIEnv* jni, jobject j_codec,
jint bitrate) {
GetCodecInst(jni, j_codec)->startBitrate = bitrate;
}
JOWW(jint, VideoCodecInst_maxBitRate)(JNIEnv* jni, jobject j_codec) {
return GetCodecInst(jni, j_codec)->maxBitrate;
}
JOWW(void, VideoCodecInst_setMaxBitRate)(JNIEnv* jni, jobject j_codec,
jint bitrate) {
GetCodecInst(jni, j_codec)->maxBitrate = bitrate;
}
JOWW(jint, VideoCodecInst_maxFrameRate)(JNIEnv* jni, jobject j_codec) {
return GetCodecInst(jni, j_codec)->maxFramerate;
}
JOWW(void, VideoCodecInst_setMaxFrameRate)(JNIEnv* jni, jobject j_codec,
jint framerate) {
GetCodecInst(jni, j_codec)->maxFramerate = framerate;
}
JOWW(void, CameraDesc_dispose)(JNIEnv* jni, jobject j_camera) {
delete GetCameraDesc(jni, j_camera);
}

@@ -0,0 +1,23 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VIDEO_ENGINE_H_
#define WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VIDEO_ENGINE_H_
#include <jni.h>
namespace webrtc_examples {
void SetVieDeviceObjects(JavaVM* vm);
void ClearVieDeviceObjects();
} // namespace webrtc_examples
#endif // WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VIDEO_ENGINE_H_

@@ -0,0 +1,444 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This file contains JNI for the voice engine interfaces.
// The native functions are found using jni's auto discovery.
#include "webrtc/examples/android/media_demo/jni/voice_engine_jni.h"
#include <map>
#include <string>
#include "webrtc/examples/android/media_demo/jni/jni_helpers.h"
#include "webrtc/modules/utility/interface/helpers_android.h"
#include "webrtc/test/channel_transport/include/channel_transport.h"
#include "webrtc/voice_engine/include/voe_audio_processing.h"
#include "webrtc/voice_engine/include/voe_base.h"
#include "webrtc/voice_engine/include/voe_codec.h"
#include "webrtc/voice_engine/include/voe_file.h"
#include "webrtc/voice_engine/include/voe_hardware.h"
#include "webrtc/voice_engine/include/voe_network.h"
#include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
#include "webrtc/voice_engine/include/voe_volume_control.h"
// Macro for native functions that can be found by way of jni-auto discovery.
// Note extern "C" is needed for "discovery" of native methods to work.
#define JOWW(rettype, name) \
extern "C" rettype JNIEXPORT JNICALL Java_org_webrtc_webrtcdemo_##name
namespace {
static JavaVM* g_vm = NULL;
static ClassReferenceHolder* g_class_reference_holder = NULL;
jclass GetClass(JNIEnv* jni, const char* name) {
CHECK(g_class_reference_holder, "Class reference holder NULL");
return g_class_reference_holder->GetClass(name);
}
static const char* g_classes[] = {"org/webrtc/webrtcdemo/CodecInst"};
template<typename T>
void ReleaseSubApi(T instance) {
CHECK(instance->Release() >= 0, "failed to release instance")
}
class VoiceEngineData {
public:
VoiceEngineData()
: ve(webrtc::VoiceEngine::Create()),
base(webrtc::VoEBase::GetInterface(ve)),
codec(webrtc::VoECodec::GetInterface(ve)),
file(webrtc::VoEFile::GetInterface(ve)),
netw(webrtc::VoENetwork::GetInterface(ve)),
apm(webrtc::VoEAudioProcessing::GetInterface(ve)),
volume(webrtc::VoEVolumeControl::GetInterface(ve)),
hardware(webrtc::VoEHardware::GetInterface(ve)),
rtp(webrtc::VoERTP_RTCP::GetInterface(ve)) {
CHECK(ve != NULL, "Voice engine instance failed to be created");
CHECK(base != NULL, "Failed to acquire base interface");
CHECK(codec != NULL, "Failed to acquire codec interface");
CHECK(file != NULL, "Failed to acquire file interface");
CHECK(netw != NULL, "Failed to acquire netw interface");
CHECK(apm != NULL, "Failed to acquire apm interface");
CHECK(volume != NULL, "Failed to acquire volume interface");
CHECK(hardware != NULL, "Failed to acquire hardware interface");
CHECK(rtp != NULL, "Failed to acquire rtp interface");
}
~VoiceEngineData() {
CHECK(channel_transports_.empty(),
"VoE transports must be deleted before terminating");
CHECK(base->Terminate() == 0, "VoE failed to terminate");
ReleaseSubApi(base);
ReleaseSubApi(codec);
ReleaseSubApi(file);
ReleaseSubApi(netw);
ReleaseSubApi(apm);
ReleaseSubApi(volume);
ReleaseSubApi(hardware);
ReleaseSubApi(rtp);
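// As with VideoEngine above, Delete() needs a modifiable lvalue, hence the
// local copy of the const member.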
webrtc::VoiceEngine* ve_instance = ve;
CHECK(webrtc::VoiceEngine::Delete(ve_instance), "VoE failed to be deleted");
}
int CreateChannel() {
int channel = base->CreateChannel();
if (channel == -1) {
return -1;
}
CreateTransport(channel);
return channel;
}
int DeleteChannel(int channel) {
if (base->DeleteChannel(channel) != 0) {
return -1;
}
DeleteTransport(channel);
return 0;
}
webrtc::test::VoiceChannelTransport* GetTransport(int channel) {
ChannelTransports::iterator found = channel_transports_.find(channel);
if (found == channel_transports_.end()) {
return NULL;
}
return found->second;
}
webrtc::VoiceEngine* const ve;
webrtc::VoEBase* const base;
webrtc::VoECodec* const codec;
webrtc::VoEFile* const file;
webrtc::VoENetwork* const netw;
webrtc::VoEAudioProcessing* const apm;
webrtc::VoEVolumeControl* const volume;
webrtc::VoEHardware* const hardware;
webrtc::VoERTP_RTCP* const rtp;
private:
// Voice engine no longer provides a socket implementation. There is,
// however, a socket implementation in webrtc::test.
typedef std::map<int, webrtc::test::VoiceChannelTransport*>
ChannelTransports;
void CreateTransport(int channel) {
CHECK(GetTransport(channel) == NULL,
"Transport already created for VoE channel, inconsistent state");
channel_transports_[channel] =
new webrtc::test::VoiceChannelTransport(netw, channel);
}
void DeleteTransport(int channel) {
CHECK(GetTransport(channel) != NULL,
"VoE channel missing transport, inconsistent state");
delete channel_transports_[channel];
channel_transports_.erase(channel);
}
ChannelTransports channel_transports_;
};
webrtc::CodecInst* GetCodecInst(JNIEnv* jni, jobject j_codec) {
jclass j_codec_class = jni->GetObjectClass(j_codec);
jfieldID native_codec_id =
jni->GetFieldID(j_codec_class, "nativeCodecInst", "J");
jlong j_p = jni->GetLongField(j_codec, native_codec_id);
return reinterpret_cast<webrtc::CodecInst*>(j_p);
}
} // namespace
namespace webrtc_examples {
void SetVoeDeviceObjects(JavaVM* vm) {
CHECK(vm, "Trying to register NULL vm");
g_vm = vm;
webrtc::AttachThreadScoped ats(g_vm);
JNIEnv* jni = ats.env();
g_class_reference_holder = new ClassReferenceHolder(
jni, g_classes, ARRAYSIZE(g_classes));
}
void ClearVoeDeviceObjects() {
CHECK(g_vm, "Clearing vm without it being set");
{
webrtc::AttachThreadScoped ats(g_vm);
g_class_reference_holder->FreeReferences(ats.env());
}
g_vm = NULL;
delete g_class_reference_holder;
g_class_reference_holder = NULL;
}
} // namespace webrtc_examples
VoiceEngineData* GetVoiceEngineData(JNIEnv* jni, jobject j_voe) {
jclass j_voe_class = jni->GetObjectClass(j_voe);
jfieldID native_voe_id =
jni->GetFieldID(j_voe_class, "nativeVoiceEngine", "J");
jlong j_p = jni->GetLongField(j_voe, native_voe_id);
return reinterpret_cast<VoiceEngineData*>(j_p);
}
webrtc::VoiceEngine* GetVoiceEngine(JNIEnv* jni, jobject j_voe) {
return GetVoiceEngineData(jni, j_voe)->ve;
}
JOWW(jlong, VoiceEngine_create)(JNIEnv* jni, jclass) {
VoiceEngineData* voe_data = new VoiceEngineData();
return jlongFromPointer(voe_data);
}
JOWW(void, VoiceEngine_dispose)(JNIEnv* jni, jobject j_voe) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
delete voe_data;
}
JOWW(jint, VoiceEngine_init)(JNIEnv* jni, jobject j_voe) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->base->Init();
}
JOWW(jint, VoiceEngine_createChannel)(JNIEnv* jni, jobject j_voe) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->CreateChannel();
}
JOWW(jint, VoiceEngine_deleteChannel)(JNIEnv* jni, jobject j_voe,
jint channel) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->DeleteChannel(channel);
}
JOWW(jint, VoiceEngine_setLocalReceiver)(JNIEnv* jni, jobject j_voe,
jint channel, jint port) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
webrtc::test::VoiceChannelTransport* transport =
voe_data->GetTransport(channel);
return transport->SetLocalReceiver(port);
}
JOWW(jint, VoiceEngine_setSendDestination)(JNIEnv* jni, jobject j_voe,
jint channel, jint port,
jstring j_addr) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
std::string addr = JavaToStdString(jni, j_addr);
webrtc::test::VoiceChannelTransport* transport =
voe_data->GetTransport(channel);
return transport->SetSendDestination(addr.c_str(), port);
}
JOWW(jint, VoiceEngine_startListen)(JNIEnv* jni, jobject j_voe, jint channel) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->base->StartReceive(channel);
}
JOWW(jint, VoiceEngine_startPlayout)(JNIEnv* jni, jobject j_voe, jint channel) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->base->StartPlayout(channel);
}
JOWW(jint, VoiceEngine_startSend)(JNIEnv* jni, jobject j_voe, jint channel) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->base->StartSend(channel);
}
JOWW(jint, VoiceEngine_stopListen)(JNIEnv* jni, jobject j_voe, jint channel) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->base->StopReceive(channel);
}
JOWW(jint, VoiceEngine_stopPlayout)(JNIEnv* jni, jobject j_voe, jint channel) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->base->StopPlayout(channel);
}
JOWW(jint, VoiceEngine_stopSend)(JNIEnv* jni, jobject j_voe, jint channel) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->base->StopSend(channel);
}
JOWW(jint, VoiceEngine_setSpeakerVolume)(JNIEnv* jni, jobject j_voe,
jint level) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->volume->SetSpeakerVolume(level);
}
JOWW(jint, VoiceEngine_setLoudspeakerStatus)(JNIEnv* jni, jobject j_voe,
jboolean enable) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->hardware->SetLoudspeakerStatus(enable);
}
JOWW(jint, VoiceEngine_startPlayingFileLocally)(JNIEnv* jni, jobject j_voe,
jint channel,
jstring j_filename,
jboolean loop) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
std::string filename = JavaToStdString(jni, j_filename);
return voe_data->file->StartPlayingFileLocally(channel,
filename.c_str(),
loop);
}
JOWW(jint, VoiceEngine_stopPlayingFileLocally)(JNIEnv* jni, jobject j_voe,
jint channel) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->file->StopPlayingFileLocally(channel);
}
JOWW(jint, VoiceEngine_startPlayingFileAsMicrophone)(JNIEnv* jni, jobject j_voe,
jint channel,
jstring j_filename,
jboolean loop) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
std::string filename = JavaToStdString(jni, j_filename);
return voe_data->file->StartPlayingFileAsMicrophone(channel,
filename.c_str(),
loop);
}
JOWW(jint, VoiceEngine_stopPlayingFileAsMicrophone)(JNIEnv* jni, jobject j_voe,
jint channel) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->file->StopPlayingFileAsMicrophone(channel);
}
JOWW(jint, VoiceEngine_numOfCodecs)(JNIEnv* jni, jobject j_voe) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->codec->NumOfCodecs();
}
JOWW(jobject, VoiceEngine_getCodec)(JNIEnv* jni, jobject j_voe, jint index) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
webrtc::CodecInst* codec = new webrtc::CodecInst();
CHECK(voe_data->codec->GetCodec(index, *codec) == 0,
"getCodec must be called with valid index");
jclass j_codec_class = GetClass(jni, "org/webrtc/webrtcdemo/CodecInst");
jmethodID j_codec_ctor = GetMethodID(jni, j_codec_class, "<init>", "(J)V");
jobject j_codec =
jni->NewObject(j_codec_class, j_codec_ctor, jlongFromPointer(codec));
CHECK_EXCEPTION(jni, "error during NewObject");
return j_codec;
}
JOWW(jint, VoiceEngine_setSendCodec)(JNIEnv* jni, jobject j_voe, jint channel,
jobject j_codec) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
webrtc::CodecInst* inst = GetCodecInst(jni, j_codec);
return voe_data->codec->SetSendCodec(channel, *inst);
}
JOWW(jint, VoiceEngine_setEcStatus)(JNIEnv* jni, jobject j_voe, jboolean enable,
jint ec_mode) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->apm->SetEcStatus(enable,
static_cast<webrtc::EcModes>(ec_mode));
}
JOWW(jint, VoiceEngine_setAecmMode)(JNIEnv* jni, jobject j_voe, jint aecm_mode,
jboolean cng) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->apm->SetAecmMode(static_cast<webrtc::AecmModes>(aecm_mode),
cng);
}
JOWW(jint, VoiceEngine_setAgcStatus)(JNIEnv* jni, jobject j_voe,
jboolean enable, jint agc_mode) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->apm->SetAgcStatus(enable,
static_cast<webrtc::AgcModes>(agc_mode));
}
// Fills |agc_config| with the values of the Java AgcConfig object
// |j_config|.
void GetNativeAgcConfig(JNIEnv* jni, jobject j_config,
webrtc::AgcConfig* agc_config) {
jclass j_config_class = jni->GetObjectClass(j_config);
jfieldID dBOv_id = jni->GetFieldID(j_config_class, "targetLevelDbOv", "I");
agc_config->targetLeveldBOv = jni->GetIntField(j_config, dBOv_id);
jfieldID gain_id =
jni->GetFieldID(j_config_class, "digitalCompressionGaindB", "I");
agc_config->digitalCompressionGaindB = jni->GetIntField(j_config, gain_id);
jfieldID limiter_id = jni->GetFieldID(j_config_class, "limiterEnable", "Z");
agc_config->limiterEnable = jni->GetBooleanField(j_config, limiter_id);
}
JOWW(jint, VoiceEngine_setAgcConfig)(JNIEnv* jni, jobject j_voe,
jobject j_config) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
webrtc::AgcConfig config;
GetNativeAgcConfig(jni, j_config, &config);
return voe_data->apm->SetAgcConfig(config);
}
JOWW(jint, VoiceEngine_setNsStatus)(JNIEnv* jni, jobject j_voe, jboolean enable,
jint ns_mode) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->apm->SetNsStatus(enable,
static_cast<webrtc::NsModes>(ns_mode));
}
JOWW(jint, VoiceEngine_startDebugRecording)(JNIEnv* jni, jobject j_voe,
jstring j_filename) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
std::string filename = JavaToStdString(jni, j_filename);
return voe_data->apm->StartDebugRecording(filename.c_str());
}
JOWW(jint, VoiceEngine_stopDebugRecording)(JNIEnv* jni, jobject j_voe) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->apm->StopDebugRecording();
}
JOWW(jint, VoiceEngine_startRtpDump)(JNIEnv* jni, jobject j_voe, jint channel,
jstring j_filename, jint direction) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
std::string filename = JavaToStdString(jni, j_filename);
return voe_data->rtp->StartRTPDump(
channel, filename.c_str(),
static_cast<webrtc::RTPDirections>(direction));
}
JOWW(jint, VoiceEngine_stopRtpDump)(JNIEnv* jni, jobject j_voe, jint channel,
jint direction) {
VoiceEngineData* voe_data = GetVoiceEngineData(jni, j_voe);
return voe_data->rtp->StopRTPDump(
channel, static_cast<webrtc::RTPDirections>(direction));
}
JOWW(void, CodecInst_dispose)(JNIEnv* jni, jobject j_codec) {
delete GetCodecInst(jni, j_codec);
}
JOWW(jint, CodecInst_plType)(JNIEnv* jni, jobject j_codec) {
return GetCodecInst(jni, j_codec)->pltype;
}
JOWW(jstring, CodecInst_name)(JNIEnv* jni, jobject j_codec) {
return jni->NewStringUTF(GetCodecInst(jni, j_codec)->plname);
}
JOWW(jint, CodecInst_plFrequency)(JNIEnv* jni, jobject j_codec) {
return GetCodecInst(jni, j_codec)->plfreq;
}
JOWW(jint, CodecInst_pacSize)(JNIEnv* jni, jobject j_codec) {
return GetCodecInst(jni, j_codec)->pacsize;
}
JOWW(jint, CodecInst_channels)(JNIEnv* jni, jobject j_codec) {
return GetCodecInst(jni, j_codec)->channels;
}
JOWW(jint, CodecInst_rate)(JNIEnv* jni, jobject j_codec) {
return GetCodecInst(jni, j_codec)->rate;
}
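
Taken together, the functions above imply a simple loopback call flow. As an
illustration, the equivalent native-side sequence might look like the sketch
below (error handling elided; the port and address are arbitrary, and
VoiceEngineData is the file-local helper class defined in this file):

  VoiceEngineData voe;                          // engine + sub-API interfaces
  voe.base->Init();
  int channel = voe.CreateChannel();            // also creates a transport
  voe.GetTransport(channel)->SetLocalReceiver(11113);
  voe.GetTransport(channel)->SetSendDestination("127.0.0.1", 11113);
  voe.base->StartReceive(channel);
  voe.base->StartPlayout(channel);
  voe.base->StartSend(channel);
  // ... call in progress ...
  voe.base->StopSend(channel);
  voe.base->StopPlayout(channel);
  voe.base->StopReceive(channel);
  voe.DeleteChannel(channel);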

@@ -0,0 +1,31 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VOICE_ENGINE_H_
#define WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VOICE_ENGINE_H_
#include <jni.h>
namespace webrtc {
class VoiceEngine;
} // namespace webrtc
namespace webrtc_examples {
void SetVoeDeviceObjects(JavaVM* vm);
void ClearVoeDeviceObjects();
} // namespace webrtc_examples
webrtc::VoiceEngine* GetVoiceEngine(JNIEnv* jni, jobject j_voe);
#endif // WEBRTC_EXAMPLES_ANDROID_MEDIA_DEMO_JNI_VOICE_ENGINE_H_

Binary file not shown (new drawable image, 3.2 KiB).

@@ -0,0 +1,80 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:textStyle="bold"
android:textSize="24dip"
android:text="Audio Settings">
</TextView>
<TextView android:layout_height="wrap_content"
android:layout_gravity="bottom"
android:layout_width="wrap_content"
android:text="@string/codecType">
</TextView>
<Spinner android:id="@+id/spAudioCodecType"
android:layout_height="wrap_content"
android:layout_width="fill_parent">
</Spinner>
<LinearLayout android:layout_height="wrap_content"
android:layout_width="fill_parent">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/aTxPort">
</TextView>
<EditText android:id="@+id/etATxPort"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:imeOptions="actionDone"
android:inputType="number">
</EditText>
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/aRxPort">
</TextView>
<EditText android:id="@+id/etARxPort"
android:layout_height="wrap_content"
android:layout_width="wrap_content"
android:imeOptions="actionDone"
android:inputType="number">
</EditText>
</LinearLayout>
<LinearLayout android:layout_height="wrap_content"
android:layout_width="fill_parent">
<CheckBox android:id="@+id/cbAecm"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/aecm">
</CheckBox>
<CheckBox android:id="@+id/cbNoiseSuppression"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/noiseSuppression">
</CheckBox>
<CheckBox android:id="@+id/cbAutoGainControl"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/autoGainControl">
</CheckBox>
</LinearLayout>
<LinearLayout android:layout_height="wrap_content"
android:layout_width="fill_parent">
<CheckBox android:id="@+id/cbSpeaker"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/speaker">
</CheckBox>
<CheckBox android:id="@+id/cbDebugRecording"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/debugRecording">
</CheckBox>
<CheckBox android:id="@+id/cbAudioRTPDump"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/rtpDump">
</CheckBox>
</LinearLayout>
</LinearLayout>

@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:orientation="vertical"
android:padding="3dip">
<TextView android:id="@+id/spinner_row"
android:layout_toRightOf="@+id/image"
android:padding="3dip"
android:layout_marginTop="2dip"
android:textColor="#FFF"
android:textStyle="bold"
android:text="description"
android:layout_marginLeft="5dip"
android:layout_width="wrap_content"
android:layout_height="wrap_content"/>
</RelativeLayout>

@@ -0,0 +1,39 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="horizontal"
android:layout_width="fill_parent"
android:layout_height="fill_parent">
<LinearLayout
android:orientation="vertical"
android:layout_width="120dip"
android:layout_height="fill_parent">
<LinearLayout android:id="@+id/llLocalView"
android:layout_width="fill_parent"
android:layout_height="80dip">
</LinearLayout>
<TextView android:id="@+id/tvStats"
android:layout_width="fill_parent"
android:layout_height="60dip"
android:textSize="6sp"
android:text=""/>
<Button android:id="@+id/btStats"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:layout_gravity="bottom"
android:text="@string/stats"/>
<Button android:id="@+id/btSwitchCamera"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:layout_gravity="bottom"/>
<Button android:id="@+id/btStartStopCall"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:layout_gravity="bottom"/>
</LinearLayout>
<LinearLayout android:id="@+id/llRemoteView"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:layout_weight="1">
</LinearLayout>
</LinearLayout>

@@ -0,0 +1,71 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:layout_gravity="right"
android:orientation="vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:textStyle="bold"
android:textSize="24dip"
android:text="@string/gSettings">
</TextView>
<LinearLayout android:orientation="horizontal"
android:layout_height="wrap_content"
android:layout_width="fill_parent">
<CheckBox android:id="@+id/cbVideoReceive"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/enableVideoReceive">
</CheckBox>
<CheckBox android:id="@+id/cbVideoSend"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/enableVideoSend">
</CheckBox>
<CheckBox android:id="@+id/cbAudio"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/enableAudio">
</CheckBox>
<CheckBox android:id="@+id/cbLoopback"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/loopback">
</CheckBox>
</LinearLayout>
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/remoteIp">
</TextView>
<EditText android:id="@+id/etRemoteIp"
android:layout_height="wrap_content"
android:layout_width="fill_parent"
android:imeOptions="actionDone">
</EditText>
<LinearLayout android:orientation="horizontal"
android:layout_height="wrap_content"
android:layout_width="fill_parent">
<RadioGroup android:id="@+id/rRenderMechanism"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:orientation="vertical">
<RadioButton android:id="@+id/rOpenGl"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="OpenGL"
android:checked="true"
android:textColor="#fff"/>
<RadioButton android:id="@+id/rSurfaceView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="SurfaceView"
android:textColor="#fff" />
<RadioButton android:id="@+id/rMediaCodec"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Android:MediaCodec"
android:textColor="#fff" />
</RadioGroup>
</LinearLayout>
</LinearLayout>

@@ -0,0 +1,64 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:textStyle="bold"
android:textSize="24dip"
android:text="@string/vSettings">
</TextView>
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/codecType">
</TextView>
<Spinner android:id="@+id/spCodecType"
android:layout_width="fill_parent"
android:layout_height="wrap_content">
</Spinner>
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/codecSize">
</TextView>
<Spinner android:id="@+id/spCodecSize"
android:layout_height="wrap_content"
android:layout_width="fill_parent">
</Spinner>
<LinearLayout android:layout_height="wrap_content"
android:layout_width="fill_parent">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/vTxPort">
</TextView>
<EditText android:id="@+id/etVTxPort"
android:layout_height="wrap_content"
android:layout_width="wrap_content"
android:imeOptions="actionDone"
android:inputType="number">
</EditText>
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/vRxPort">
</TextView>
<EditText android:id="@+id/etVRxPort"
android:layout_height="wrap_content"
android:layout_width="wrap_content"
android:imeOptions="actionDone"
android:inputType="number">
</EditText>
</LinearLayout>
<LinearLayout android:layout_height="wrap_content"
android:layout_width="fill_parent">
<CheckBox android:id="@+id/cbNack"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/nack">
</CheckBox>
<CheckBox android:id="@+id/cbVideoRTPDump"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/rtpDump">
</CheckBox>
</LinearLayout>
</LinearLayout>

@@ -0,0 +1,5 @@
<menu xmlns:android="http://schemas.android.com/apk/res/android" >
<item android:id="@+id/action_exit"
android:icon="@drawable/logo"
android:title="Exit"/>
</menu>

@@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<bool name="apm_debug_enabled_default">false</bool>
<bool name="audio_enabled_default">true</bool>
<bool name="loopback_enabled_default">true</bool>
<bool name="nack_enabled_default">true</bool>
<bool name="opengl_enabled_default">true</bool>
<bool name="speaker_enabled_default">false</bool>
<bool name="stats_enabled_default">true</bool>
<bool name="trace_enabled_default">true</bool>
<bool name="video_receive_enabled_default">true</bool>
<bool name="video_send_enabled_default">true</bool>
</resources>

@@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
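<!-- Tx and Rx ports match for both audio and video so that the default
     loopback configuration can send to itself. -->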
<integer name="aRxPortDefault">11113</integer>
<integer name="aTxPortDefault">11113</integer>
<integer name="openGl">0</integer>
<integer name="surfaceView">1</integer>
<integer name="mediaCodec">2</integer>
<integer name="defaultView">0</integer>
<integer name="call_restart_periodicity_ms">0</integer>
<integer name="video_codec_default">0</integer>
<integer name="vRxPortDefault">11111</integer>
<integer name="vTxPortDefault">11111</integer>
</resources>

@@ -0,0 +1,41 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="aecm">AECM</string>
<string name="appName">WebRTC Engine Demo</string>
<string name="aRxPort">Audio Rx Port</string>
<string name="aTxPort">Audio Tx Port</string>
<string name="autoGainControl">AGC</string>
<string name="backCamera">SwitchToBack</string>
<string name="codecSize">Codec Size</string>
<string name="codecType">Codec Type</string>
<string name="debugRecording">APMRecord</string>
<string name="demoTitle">Video Engine Android Demo</string>
<string name="enableVideoReceive">Video Receive</string>
<string name="enableVideoSend">Video Send</string>
<string name="enableAudio">Audio</string>
<string name="error">Error</string>
<string name="errorCamera">Camera Error</string>
<string name="exit">Exit</string>
<string name="frontCamera">SwitchToFront</string>
<string name="gSettings">Global Settings</string>
<string name="loopback">Loopback</string>
<string name="loopbackIp">127.0.0.1</string>
<string name="nack">NACK</string>
<string name="noiseSuppression">NS</string>
<string name="remoteIp">Remote IP address</string>
<string name="rtpDump">rtpdump</string>
<string name="speaker">Speaker</string>
<string name="startBoth">Start Both</string>
<string name="startCall">StartCall</string>
<string name="startListen">Start Listen</string>
<string name="startSend">Start Send</string>
<string name="stats">Stats</string>
<string name="statsOn">Stats on</string>
<string name="statsOff">Stats off</string>
<string name="stopCall">StopCall</string>
<string name="surfaceView">SurfaceView</string>
<string name="tag">WEBRTC</string>
<string name="vRxPort">Video Rx Port</string>
<string name="vSettings">Video Settings</string>
<string name="vTxPort">Video Tx Port</string>
</resources>

@@ -0,0 +1,156 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.webrtcdemo;
import android.app.Activity;
import android.app.Fragment;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.Spinner;
import android.widget.TextView;
import java.lang.Integer;
public class AudioMenuFragment extends Fragment {
private String TAG;
private MenuStateProvider stateProvider;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View v = inflater.inflate(R.layout.audiomenu, container, false);
TAG = getResources().getString(R.string.tag);
String[] audioCodecsStrings = getEngine().audioCodecsAsString();
Spinner spAudioCodecType = (Spinner) v.findViewById(R.id.spAudioCodecType);
spAudioCodecType.setAdapter(new SpinnerAdapter(getActivity(),
R.layout.dropdownitems,
audioCodecsStrings,
inflater));
spAudioCodecType.setSelection(getEngine().audioCodecIndex());
spAudioCodecType.setOnItemSelectedListener(new OnItemSelectedListener() {
public void onItemSelected(AdapterView<?> adapterView, View view,
int position, long id) {
getEngine().setAudioCodec(position);
}
public void onNothingSelected(AdapterView<?> arg0) {
Log.d(TAG, "No setting selected");
}
});
EditText etATxPort = (EditText) v.findViewById(R.id.etATxPort);
etATxPort.setText(Integer.toString(getEngine().audioTxPort()));
etATxPort.setOnClickListener(new View.OnClickListener() {
public void onClick(View editText) {
EditText etATxPort = (EditText) editText;
getEngine()
.setAudioTxPort(Integer.parseInt(etATxPort.getText().toString()));
etATxPort.setText(Integer.toString(getEngine().audioTxPort()));
}
});
EditText etARxPort = (EditText) v.findViewById(R.id.etARxPort);
etARxPort.setText(Integer.toString(getEngine().audioRxPort()));
etARxPort.setOnClickListener(new View.OnClickListener() {
public void onClick(View editText) {
EditText etARxPort = (EditText) editText;
getEngine()
.setAudioRxPort(Integer.parseInt(etARxPort.getText().toString()));
etARxPort.setText(Integer.toString(getEngine().audioRxPort()));
}
});
CheckBox cbEnableAecm = (CheckBox) v.findViewById(R.id.cbAecm);
cbEnableAecm.setChecked(getEngine().aecmEnabled());
cbEnableAecm.setOnClickListener(new View.OnClickListener() {
public void onClick(View checkBox) {
CheckBox cbEnableAecm = (CheckBox) checkBox;
getEngine().setEc(cbEnableAecm.isChecked());
cbEnableAecm.setChecked(getEngine().aecmEnabled());
}
});
CheckBox cbEnableNs = (CheckBox) v.findViewById(R.id.cbNoiseSuppression);
cbEnableNs.setChecked(getEngine().nsEnabled());
cbEnableNs.setOnClickListener(new View.OnClickListener() {
public void onClick(View checkBox) {
CheckBox cbEnableNs = (CheckBox) checkBox;
getEngine().setNs(cbEnableNs.isChecked());
cbEnableNs.setChecked(getEngine().nsEnabled());
}
});
CheckBox cbEnableAgc = (CheckBox) v.findViewById(R.id.cbAutoGainControl);
cbEnableAgc.setChecked(getEngine().agcEnabled());
cbEnableAgc.setOnClickListener(new View.OnClickListener() {
public void onClick(View checkBox) {
CheckBox cbEnableAgc = (CheckBox) checkBox;
getEngine().setAgc(cbEnableAgc.isChecked());
cbEnableAgc.setChecked(getEngine().agcEnabled());
}
});
CheckBox cbEnableSpeaker = (CheckBox) v.findViewById(R.id.cbSpeaker);
cbEnableSpeaker.setChecked(getEngine().speakerEnabled());
cbEnableSpeaker.setOnClickListener(new View.OnClickListener() {
public void onClick(View checkBox) {
CheckBox cbEnableSpeaker = (CheckBox) checkBox;
getEngine().setSpeaker(cbEnableSpeaker.isChecked());
cbEnableSpeaker.setChecked(getEngine().speakerEnabled());
}
});
CheckBox cbEnableDebugAPM =
(CheckBox) v.findViewById(R.id.cbDebugRecording);
cbEnableDebugAPM.setChecked(getEngine().apmRecord());
cbEnableDebugAPM.setOnClickListener(new View.OnClickListener() {
public void onClick(View checkBox) {
CheckBox cbEnableDebugAPM = (CheckBox) checkBox;
getEngine().setDebugging(cbEnableDebugAPM.isChecked());
cbEnableDebugAPM.setChecked(getEngine().apmRecord());
}
});
CheckBox cbEnableAudioRTPDump =
(CheckBox) v.findViewById(R.id.cbAudioRTPDump);
cbEnableAudioRTPDump.setChecked(getEngine().audioRtpDump());
cbEnableAudioRTPDump.setOnClickListener(new View.OnClickListener() {
public void onClick(View checkBox) {
CheckBox cbEnableAudioRTPDump = (CheckBox) checkBox;
getEngine().setIncomingVoeRtpDump(cbEnableAudioRTPDump.isChecked());
cbEnableAudioRTPDump.setChecked(getEngine().audioRtpDump());
}
});
return v;
}
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
// This makes sure that the container activity has implemented
// the callback interface. If not, it throws an exception.
try {
stateProvider = (MenuStateProvider) activity;
} catch (ClassCastException e) {
throw new ClassCastException(activity +
" must implement MenuStateProvider");
}
}
private MediaEngine getEngine() {
return stateProvider.getEngine();
}
}

@@ -0,0 +1,24 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.webrtcdemo;
public class CameraDesc {
private final long nativeCameraDesc;
// CameraDesc can only be created from the native layer.
private CameraDesc(long nativeCameraDesc) {
this.nativeCameraDesc = nativeCameraDesc;
}
// Dispose must be called before all references to CameraDesc are lost as it
// will free memory allocated in the native layer.
public native void dispose();
}

@@ -0,0 +1,39 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.webrtcdemo;
public class CodecInst {
private final long nativeCodecInst;
// CodecInst can only be created from the native layer.
private CodecInst(long nativeCodecInst) {
this.nativeCodecInst = nativeCodecInst;
}
public String toString() {
return name() + " " +
"PlType: " + plType() + " " +
"PlFreq: " + plFrequency() + " " +
"Size: " + pacSize() + " " +
"Channels: " + channels() + " " +
"Rate: " + rate();
}
// Dispose must be called before all references to CodecInst are lost as it
// will free memory allocated in the native layer.
public native void dispose();
public native int plType();
public native String name();
public native int plFrequency();
public native int pacSize();
public native int channels();
public native int rate();
}

@@ -0,0 +1,183 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.webrtcdemo;
import android.app.Activity;
import android.app.Fragment;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;
public class MainMenuFragment extends Fragment implements MediaEngineObserver {
private String TAG;
private MenuStateProvider stateProvider;
private Button btStartStopCall;
private TextView tvStats;
// Remote and local stream displays.
private LinearLayout llRemoteSurface;
private LinearLayout llLocalSurface;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View v = inflater.inflate(R.layout.mainmenu, container, false);
TAG = getResources().getString(R.string.tag);
llRemoteSurface = (LinearLayout) v.findViewById(R.id.llRemoteView);
llLocalSurface = (LinearLayout) v.findViewById(R.id.llLocalView);
Button btStats = (Button) v.findViewById(R.id.btStats);
boolean stats = getResources().getBoolean(R.bool.stats_enabled_default);
enableStats(btStats, stats);
btStats.setOnClickListener(new View.OnClickListener() {
public void onClick(View button) {
boolean turnOnStats = ((Button) button).getText().equals(
getResources().getString(R.string.statsOn));
enableStats((Button) button, turnOnStats);
}
});
tvStats = (TextView) v.findViewById(R.id.tvStats);
Button btSwitchCamera = (Button) v.findViewById(R.id.btSwitchCamera);
if (getEngine().hasMultipleCameras()) {
btSwitchCamera.setOnClickListener(new View.OnClickListener() {
public void onClick(View button) {
toggleCamera((Button) button);
}
});
} else {
btSwitchCamera.setEnabled(false);
}
btSwitchCamera.setText(getEngine().frontCameraIsSet() ?
R.string.backCamera :
R.string.frontCamera);
btStartStopCall = (Button) v.findViewById(R.id.btStartStopCall);
btStartStopCall.setText(getEngine().isRunning() ?
R.string.stopCall :
R.string.startCall);
btStartStopCall.setOnClickListener(new View.OnClickListener() {
public void onClick(View button) {
toggleStart();
}
});
return v;
}
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
// This makes sure that the container activity has implemented
// the callback interface. If not, it throws an exception.
try {
stateProvider = (MenuStateProvider) activity;
} catch (ClassCastException e) {
throw new ClassCastException(activity +
" must implement MenuStateProvider");
}
}
// tvStats needs to be updated on the UI thread.
public void newStats(final String stats) {
getActivity().runOnUiThread(new Runnable() {
public void run() {
tvStats.setText(stats);
}
});
}
private MediaEngine getEngine() {
return stateProvider.getEngine();
}
private void setViews() {
SurfaceView remoteSurfaceView = getEngine().getRemoteSurfaceView();
if (remoteSurfaceView != null) {
llRemoteSurface.addView(remoteSurfaceView);
}
SurfaceView svLocal = getEngine().getLocalSurfaceView();
if (svLocal != null) {
llLocalSurface.addView(svLocal);
}
}
private void clearViews() {
SurfaceView remoteSurfaceView = getEngine().getRemoteSurfaceView();
if (remoteSurfaceView != null) {
llRemoteSurface.removeView(remoteSurfaceView);
}
SurfaceView svLocal = getEngine().getLocalSurfaceView();
if (svLocal != null) {
llLocalSurface.removeView(svLocal);
}
}
private void enableStats(Button btStats, boolean enable) {
if (enable) {
getEngine().setObserver(this);
} else {
getEngine().setObserver(null);
// Clear old stats text by posting empty stats.
newStats("");
}
// If stats were just enabled, clicking the button again should disable
// them (and vice versa).
btStats.setText(enable ? R.string.statsOff : R.string.statsOn);
}
private void toggleCamera(Button btSwitchCamera) {
SurfaceView svLocal = getEngine().getLocalSurfaceView();
boolean resetLocalView = svLocal != null;
if (resetLocalView) {
llLocalSurface.removeView(svLocal);
}
getEngine().toggleCamera();
if (resetLocalView) {
svLocal = getEngine().getLocalSurfaceView();
llLocalSurface.addView(svLocal);
}
btSwitchCamera.setText(getEngine().frontCameraIsSet() ?
R.string.backCamera :
R.string.frontCamera);
}
public void toggleStart() {
if (getEngine().isRunning()) {
stopAll();
} else {
startCall();
}
btStartStopCall.setText(getEngine().isRunning() ?
R.string.stopCall :
R.string.startCall);
}
public void stopAll() {
clearViews();
getEngine().stop();
}
private void startCall() {
getEngine().start();
setViews();
}
}

@@ -0,0 +1,338 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.webrtcdemo;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.media.MediaCodec;
import android.media.MediaCrypto;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceView;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.LinkedList;
class MediaCodecVideoDecoder {
public static final int DECODE = 0;
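// VP8 decoder alternatives (see setCodecState below): the platform decoder
// resolved by MIME type, Google's software OMX.google.vpx.decoder and the
// Nexus 10 hardware OMX.Exynos.VP8.Decoder.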
private enum CodecName { ON2_VP8, GOOGLE_VPX, EXYNOS_VP8 }
private void check(boolean value, String message) {
if (value) {
return;
}
Log.e("WEBRTC-CHECK", message);
AlertDialog alertDialog = new AlertDialog.Builder(context).create();
alertDialog.setTitle("WebRTC Error");
alertDialog.setMessage(message);
alertDialog.setButton(DialogInterface.BUTTON_POSITIVE,
"OK",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
return;
}
}
);
alertDialog.show();
}
class Frame {
public ByteBuffer buffer;
public long timestampUs;
Frame(ByteBuffer buffer, long timestampUs) {
this.buffer = buffer;
this.timestampUs = timestampUs;
}
}
// This class enables decoding to run on a separate thread.
class DecodeHandler extends Handler {
@Override
public void handleMessage(Message msg) {
// TODO(dwkang): figure out why exceptions just make this thread finish.
try {
switch (msg.what) {
case DECODE:
decodePendingBuffers();
long delayMillis = 5; // Don't busy wait.
handler.sendMessageDelayed(
handler.obtainMessage(DECODE), delayMillis);
break;
default:
break;
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
private static String TAG;
private Context context;
private SurfaceView surfaceView;
private DecodeHandler handler;
private Thread looperThread;
MediaCodec codec;
MediaFormat format;
// Buffers supplied by MediaCodec for pushing encoded data to and pulling
// decoded data from.
private ByteBuffer[] codecInputBuffers;
private ByteBuffer[] codecOutputBuffers;
// Frames from the native layer.
private LinkedList<Frame> frameQueue;
// Indexes to MediaCodec buffers
private LinkedList<Integer> availableInputBufferIndices;
private LinkedList<Integer> availableOutputBufferIndices;
private LinkedList<MediaCodec.BufferInfo> availableOutputBufferInfos;
// Offset between system time and media time.
private long deltaTimeUs;
public MediaCodecVideoDecoder(Context context) {
TAG = context.getString(R.string.tag);
this.context = context;
surfaceView = new SurfaceView(context);
frameQueue = new LinkedList<Frame>();
availableInputBufferIndices = new LinkedList<Integer>();
availableOutputBufferIndices = new LinkedList<Integer>();
availableOutputBufferInfos = new LinkedList<MediaCodec.BufferInfo>();
}
public void dispose() {
codec.stop();
codec.release();
}
// Return view that is written to by MediaCodec.
public SurfaceView getView() { return surfaceView; }
// Entry point from the native layer. Called when the class should be ready
// to start receiving raw frames.
private boolean start(int width, int height) {
deltaTimeUs = -1;
if (!setCodecState(width, height, CodecName.ON2_VP8)) {
return false;
}
startLooperThread();
// Decoding must happen on the |looperThread| thread.
handler.sendMessage(handler.obtainMessage(DECODE));
return true;
}
private boolean setCodecState(int width, int height, CodecName codecName) {
// TODO(henrike): enable more than ON2_VP8 codec.
format = new MediaFormat();
format.setInteger(MediaFormat.KEY_WIDTH, width);
format.setInteger(MediaFormat.KEY_HEIGHT, height);
try {
switch (codecName) {
case ON2_VP8:
format.setString(MediaFormat.KEY_MIME, "video/x-vnd.on2.vp8");
codec = MediaCodec.createDecoderByType("video/x-vnd.on2.vp8");
break;
case GOOGLE_VPX:
// SW VP8 decoder
codec = MediaCodec.createByCodecName("OMX.google.vpx.decoder");
break;
case EXYNOS_VP8:
// Nexus10 HW VP8 decoder
codec = MediaCodec.createByCodecName("OMX.Exynos.VP8.Decoder");
break;
default:
return false;
}
} catch (Exception e) {
// TODO(dwkang): replace this instanceof/throw with a narrower catch
// clause once the SDK advances.
if (e instanceof IOException) {
Log.e(TAG, "Failed to create MediaCodec for VP8.", e);
return false;
}
throw new RuntimeException(e);
}
Surface surface = surfaceView.getHolder().getSurface();
MediaCrypto crypto = null; // No crypto.
int flags = 0; // 0 configures a decoder; MediaCodec.CONFIGURE_FLAG_ENCODE (1) configures an encoder.
codec.configure(format, surface, crypto, flags);
codec.start();
codecInputBuffers = codec.getInputBuffers();
codecOutputBuffers = codec.getOutputBuffers();
return true;
}
private void startLooperThread() {
looperThread = new Thread() {
@Override
public void run() {
Looper.prepare();
// Handler that is run by this thread.
handler = new DecodeHandler();
// Notify that the thread has created a handler.
synchronized(MediaCodecVideoDecoder.this) {
MediaCodecVideoDecoder.this.notify();
}
Looper.loop();
}
};
looperThread.start();
// Wait for thread to notify that Handler has been set up.
synchronized(this) {
try {
wait();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
// Entry point from the native layer. It pushes the raw buffer to this class.
private void pushBuffer(ByteBuffer buffer, long renderTimeMs) {
// TODO(dwkang): figure out why exceptions just make this thread finish.
try {
final long renderTimeUs = renderTimeMs * 1000;
synchronized(frameQueue) {
frameQueue.add(new Frame(buffer, renderTimeUs));
}
} catch (Exception e) {
e.printStackTrace();
}
}
private boolean hasFrame() {
synchronized(frameQueue) {
return !frameQueue.isEmpty();
}
}
private Frame dequeueFrame() {
synchronized(frameQueue) {
return frameQueue.removeFirst();
}
}
private void flush() {
availableInputBufferIndices.clear();
availableOutputBufferIndices.clear();
availableOutputBufferInfos.clear();
codec.flush();
}
// Maps a media presentation time to system time. The first frame anchors the
// clock: its media timestamp is mapped to the current system time, and later
// frames are scheduled relative to that anchor.
private long mediaTimeToSystemTime(long mediaTimeUs) {
if (deltaTimeUs == -1) {
long nowUs = System.currentTimeMillis() * 1000;
deltaTimeUs = nowUs - mediaTimeUs;
}
return deltaTimeUs + mediaTimeUs;
}
private void decodePendingBuffers() {
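// One non-blocking pass: pick up a free input buffer if any, feed queued
// frames to the codec, then collect and render decoded output that is due.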
int timeoutUs = 0; // Don't block on dequeuing input buffer.
int index = codec.dequeueInputBuffer(timeoutUs);
if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
availableInputBufferIndices.add(index);
}
while (feedInputBuffer()) {}
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
index = codec.dequeueOutputBuffer(info, timeoutUs);
if (index >= 0) {
availableOutputBufferIndices.add(index);
availableOutputBufferInfos.add(info);
}
if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
codecOutputBuffers = codec.getOutputBuffers();
}
while (drainOutputBuffer()) {}
}
// Returns true if MediaCodec is ready for more data and there was data
// available from the native layer.
private boolean feedInputBuffer() {
if (availableInputBufferIndices.isEmpty()) {
return false;
}
if (!hasFrame()) {
return false;
}
Frame frame = dequeueFrame();
ByteBuffer buffer = frame.buffer;
int index = availableInputBufferIndices.pollFirst();
ByteBuffer codecData = codecInputBuffers[index];
check(codecData.capacity() >= buffer.capacity(),
"Buffer is too small to copy a frame.");
buffer.rewind();
codecData.rewind();
codecData.put(buffer);
try {
int offset = 0;
int flags = 0;
codec.queueInputBuffer(index, offset, buffer.capacity(),
frame.timestampUs, flags);
} catch (MediaCodec.CryptoException e) {
check(false, "CryptoException w/ errorCode " + e.getErrorCode() +
", '" + e.getMessage() + "'");
}
return true;
}
// Returns true if more output data could be drained, i.e. MediaCodec had
// more data to deliver.
private boolean drainOutputBuffer() {
if (availableOutputBufferIndices.isEmpty()) {
return false;
}
int index = availableOutputBufferIndices.peekFirst();
MediaCodec.BufferInfo info = availableOutputBufferInfos.peekFirst();
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
// End of stream is unexpected with streamed video.
check(false, "Saw output end of stream.");
return false;
}
long realTimeUs = mediaTimeToSystemTime(info.presentationTimeUs);
long nowUs = System.currentTimeMillis() * 1000;
long lateUs = nowUs - realTimeUs;
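// lateUs > 0 means the frame missed its presentation time. Frames less than
// 10 ms early are rendered immediately rather than delayed.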
if (lateUs < -10000) {
// Frame should not be presented yet.
return false;
}
// TODO(dwkang): For some extreme cases, just not doing rendering is not
// enough. Need to seek to the next key frame.
boolean render = lateUs <= 30000;
if (!render) {
Log.d(TAG, "video late by " + lateUs + " us. Skipping...");
}
// Decode and render to surface if desired.
codec.releaseOutputBuffer(index, render);
availableOutputBufferIndices.removeFirst();
availableOutputBufferInfos.removeFirst();
return true;
}
}

@@ -0,0 +1,739 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.webrtcdemo;
import org.webrtc.videoengine.ViERenderer;
import android.app.AlertDialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.hardware.SensorManager;
import android.os.Environment;
import android.util.Log;
import android.view.OrientationEventListener;
import android.view.SurfaceView;
import java.io.File;
public class MediaEngine implements VideoDecodeEncodeObserver {
// TODO(henrike): Most of these should be moved to xml (since static).
private static final int VCM_VP8_PAYLOAD_TYPE = 100;
private static final int SEND_CODEC_FPS = 30;
// TODO(henrike): increase INIT_BITRATE_KBPS to 2000 and ensure that
// 720p30fps can be achieved (on hardware that can handle it). Note that
// setting 2000 currently leads to failure, so that has to be resolved first.
private static final int INIT_BITRATE_KBPS = 500;
private static final int MAX_BITRATE_KBPS = 3000;
private static final String LOG_DIR = "webrtc";
private static final int WIDTH_IDX = 0;
private static final int HEIGHT_IDX = 1;
private static final int[][] RESOLUTIONS = {
{176,144}, {320,240}, {352,288}, {640,480}, {1280,720}
};
// Arbitrary choice of 4/5 volume (204/256).
private static final int volumeLevel = 204;
public static int numberOfResolutions() { return RESOLUTIONS.length; }
public static String[] resolutionsAsString() {
String[] retVal = new String[numberOfResolutions()];
for (int i = 0; i < numberOfResolutions(); ++i) {
retVal[i] = RESOLUTIONS[i][0] + "x" + RESOLUTIONS[i][1];
}
return retVal;
}
// Checks for and communicates failures to the user (logcat and popup).
private void check(boolean value, String message) {
if (value) {
return;
}
Log.e("WEBRTC-CHECK", message);
AlertDialog alertDialog = new AlertDialog.Builder(context).create();
alertDialog.setTitle("WebRTC Error");
alertDialog.setMessage(message);
alertDialog.setButton(DialogInterface.BUTTON_POSITIVE,
"OK",
new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
return;
}
}
);
alertDialog.show();
}
// This class represents the cameras available on the device.
private class WebrtcCamera {
private final CameraInfo info;
WebrtcCamera(CameraInfo info) {
this.info = info;
}
// Converts device rotation to camera rotation. Rotation depends on whether
// the camera is back facing (it rotates with the device) or front facing
// (it rotates in the opposite direction of the device).
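// Worked example (assumed values): a front camera mounted at 270 degrees on
// a device rotated 100 degrees gives 100 -> 90 (rounded), inverted to 270,
// and (270 + 270) % 360 == 180.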
public int rotationFromRealWorldUp(int deviceRotation) {
int coarseDeviceOrientation = roundRotation(deviceRotation);
if (frontFacing()) {
// The front camera rotates in the opposite direction of the
// device.
int inverseDeviceOrientation = 360 - coarseDeviceOrientation;
return (inverseDeviceOrientation + orientation()) % 360;
}
return (coarseDeviceOrientation + orientation()) % 360;
}
// Rounds rotation to the nearest 90 degree rotation.
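// E.g. 100 -> 90, 140 -> 180 and 350 -> 0 (360 % 360).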
private int roundRotation(int rotation) {
return (int)(Math.round((double)rotation / 90) * 90) % 360;
}
public boolean frontFacing() {
return info.facing == CameraInfo.CAMERA_FACING_FRONT;
}
// Rotation of camera with respect to device up.
private int orientation() {
return info.orientation;
}
}
// Shared Audio/Video members.
private final Context context;
private String remoteIp;
private boolean enableTrace;
// Audio
private VoiceEngine voe;
private int audioChannel;
private boolean audioEnabled;
private boolean voeRunning;
private int audioCodecIndex;
private int audioTxPort;
private int audioRxPort;
private boolean speakerEnabled;
private boolean headsetPluggedIn;
private boolean enableAgc;
private boolean enableNs;
private boolean enableAecm;
private BroadcastReceiver headsetListener;
private boolean audioRtpDump;
private boolean apmRecord;
// Video
private VideoEngine vie;
private int videoChannel;
private boolean receiveVideo;
private boolean sendVideo;
private boolean vieRunning;
private int videoCodecIndex;
private int resolutionIndex;
private int videoTxPort;
private int videoRxPort;
private WebrtcCamera cameras[];
private boolean useFrontCamera;
private int currentCameraHandle;
private boolean enableNack;
// openGl, surfaceView or mediaCodec (integers.xml)
private int viewSelection;
private boolean videoRtpDump;
private SurfaceView svLocal;
private SurfaceView svRemote;
MediaCodecVideoDecoder externalCodec;
private int inFps;
private int inKbps;
private int outFps;
private int outKbps;
private int inWidth;
private int inHeight;
private OrientationEventListener orientationListener;
private int deviceOrientation = OrientationEventListener.ORIENTATION_UNKNOWN;
public MediaEngine(Context context) {
this.context = context;
voe = new VoiceEngine();
check(voe.init() == 0, "Failed voe Init");
audioChannel = voe.createChannel();
check(audioChannel >= 0, "Failed voe CreateChannel");
vie = new VideoEngine();
check(vie.init() == 0, "Failed vie Init");
check(vie.setVoiceEngine(voe) == 0, "Failed setVoiceEngine");
videoChannel = vie.createChannel();
check(videoChannel >= 0, "Failed vie CreateChannel");
check(vie.connectAudioChannel(videoChannel, audioChannel) == 0,
"Failed ConnectAudioChannel");
cameras = new WebrtcCamera[Camera.getNumberOfCameras()];
CameraInfo info = new CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
Camera.getCameraInfo(i, info);
cameras[info.facing] = new WebrtcCamera(info);
}
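// Cameras are indexed by facing direction (CAMERA_FACING_BACK == 0,
// CAMERA_FACING_FRONT == 1), so at most one camera per direction is kept.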
setDefaultCamera();
check(voe.setSpeakerVolume(volumeLevel) == 0,
"Failed setSpeakerVolume");
check(voe.setAecmMode(VoiceEngine.AecmModes.SPEAKERPHONE, false) == 0,
"VoE set Aecm speakerphone mode failed");
check(vie.setKeyFrameRequestMethod(videoChannel,
VideoEngine.VieKeyFrameRequestMethod.
KEY_FRAME_REQUEST_PLI_RTCP) == 0,
"Failed setKeyFrameRequestMethod");
check(vie.registerObserver(videoChannel, this) == 0,
"Failed registerObserver");
// TODO(hellner): SENSOR_DELAY_NORMAL?
// Listen to changes in device orientation.
orientationListener =
new OrientationEventListener(context, SensorManager.SENSOR_DELAY_UI) {
public void onOrientationChanged (int orientation) {
deviceOrientation = orientation;
compensateRotation();
}
};
orientationListener.enable();
// Listen to headset being plugged in/out.
IntentFilter receiverFilter = new IntentFilter(Intent.ACTION_HEADSET_PLUG);
headsetListener = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
if (intent.getAction().compareTo(Intent.ACTION_HEADSET_PLUG) == 0) {
headsetPluggedIn = intent.getIntExtra("state", 0) == 1;
updateAudioOutput();
}
}
};
context.registerReceiver(headsetListener, receiverFilter);
}
public void dispose() {
check(!voeRunning && !vieRunning, "Engines must be stopped before dispose");
context.unregisterReceiver(headsetListener);
orientationListener.disable();
check(vie.deregisterObserver(videoChannel) == 0,
"Failed deregisterObserver");
if (externalCodec != null) {
check(vie.deRegisterExternalReceiveCodec(videoChannel,
VCM_VP8_PAYLOAD_TYPE) == 0,
"Failed to deregister external decoder");
externalCodec = null;
}
check(vie.deleteChannel(videoChannel) == 0, "DeleteChannel");
vie.dispose();
check(voe.deleteChannel(audioChannel) == 0, "VoE delete channel failed");
voe.dispose();
}
public void start() {
if (audioEnabled) {
startVoE();
}
if (receiveVideo || sendVideo) {
startViE();
}
}
public void stop() {
stopVoe();
stopVie();
}
public boolean isRunning() {
return voeRunning || vieRunning;
}
public void setRemoteIp(String remoteIp) { this.remoteIp = remoteIp; }
public String remoteIp() { return remoteIp; }
public void setTrace(boolean enable) {
if (enable) {
vie.setTraceFile("/sdcard/trace.txt", false);
vie.setTraceFilter(VideoEngine.TraceLevel.TRACE_ERROR);
return;
}
vie.setTraceFilter(VideoEngine.TraceLevel.TRACE_NONE);
}
private String getDebugDirectory() {
// Returns the path of the debug directory: /sdcard/|LOG_DIR|.
return Environment.getExternalStorageDirectory().toString() + "/" +
LOG_DIR;
}
private boolean createDebugDirectory() {
File webrtc_dir = new File(getDebugDirectory());
if (!webrtc_dir.exists()) {
return webrtc_dir.mkdir();
}
return webrtc_dir.isDirectory();
}
public void startVoE() {
check(!voeRunning, "VoE already started");
check(voe.startListen(audioChannel) == 0, "Failed StartListen");
check(voe.startPlayout(audioChannel) == 0, "VoE start playout failed");
check(voe.startSend(audioChannel) == 0, "VoE start send failed");
voeRunning = true;
}
private void stopVoe() {
check(voeRunning, "VoE not started");
check(voe.stopSend(audioChannel) == 0, "VoE stop send failed");
check(voe.stopPlayout(audioChannel) == 0, "VoE stop playout failed");
check(voe.stopListen(audioChannel) == 0, "VoE stop listen failed");
voeRunning = false;
}
public void setAudio(boolean audioEnabled) {
this.audioEnabled = audioEnabled;
}
public boolean audioEnabled() { return audioEnabled; }
public int audioCodecIndex() { return audioCodecIndex; }
public void setAudioCodec(int codecNumber) {
audioCodecIndex = codecNumber;
CodecInst codec = voe.getCodec(codecNumber);
check(voe.setSendCodec(audioChannel, codec) == 0, "Failed setSendCodec");
codec.dispose();
}
public String[] audioCodecsAsString() {
String[] retVal = new String[voe.numOfCodecs()];
for (int i = 0; i < voe.numOfCodecs(); ++i) {
CodecInst codec = voe.getCodec(i);
retVal[i] = codec.toString();
codec.dispose();
}
return retVal;
}
private CodecInst[] defaultAudioCodecs() {
CodecInst[] retVal = new CodecInst[voe.numOfCodecs()];
for (int i = 0; i < voe.numOfCodecs(); ++i) {
retVal[i] = voe.getCodec(i);
}
return retVal;
}
public int getIsacIndex() {
CodecInst[] codecs = defaultAudioCodecs();
for (int i = 0; i < codecs.length; ++i) {
if (codecs[i].name().contains("ISAC")) {
return i;
}
}
return 0;
}
public void setAudioTxPort(int audioTxPort) {
this.audioTxPort = audioTxPort;
check(remoteIp != null,
"remoteIP must have been set before setting audio send port");
check(voe.setSendDestination(audioChannel, audioTxPort,
remoteIp) == 0, "VoE set send destination failed");
}
public int audioTxPort() { return audioTxPort; }
public void setAudioRxPort(int audioRxPort) {
check(voe.setLocalReceiver(audioChannel, audioRxPort) == 0,
"Failed setLocalReceiver");
this.audioRxPort = audioRxPort;
}
public int audioRxPort() { return audioRxPort; }
public boolean agcEnabled() { return enableAgc; }
public void setAgc(boolean enable) {
enableAgc = enable;
VoiceEngine.AgcConfig agc_config =
new VoiceEngine.AgcConfig(3, 9, true);
check(voe.setAgcConfig(agc_config) == 0, "VoE set AGC Config failed");
check(voe.setAgcStatus(enableAgc, VoiceEngine.AgcModes.FIXED_DIGITAL) == 0,
"VoE set AGC Status failed");
}
public boolean nsEnabled() { return enableNs; }
public void setNs(boolean enable) {
enableNs = enable;
check(voe.setNsStatus(enableNs,
VoiceEngine.NsModes.MODERATE_SUPPRESSION) == 0,
"VoE set NS Status failed");
}
public boolean aecmEnabled() { return enableAecm; }
public void setEc(boolean enable) {
enableAecm = enable;
check(voe.setEcStatus(enable, VoiceEngine.EcModes.AECM) == 0,
"voe setEcStatus");
}
public boolean speakerEnabled() {
return speakerEnabled;
}
public void setSpeaker(boolean enable) {
speakerEnabled = enable;
updateAudioOutput();
}
// Debug helpers.
public boolean apmRecord() { return apmRecord; }
public boolean audioRtpDump() { return audioRtpDump; }
public void setDebugging(boolean enable) {
apmRecord = enable;
if (!enable) {
check(voe.stopDebugRecording() == 0, "Failed stopping debug");
return;
}
if (!createDebugDirectory()) {
check(false, "Unable to create debug directory.");
return;
}
String debugDirectory = getDebugDirectory();
check(voe.startDebugRecording(debugDirectory + String.format("/apm_%d.dat",
System.currentTimeMillis())) == 0,
"Failed starting debug");
}
public void setIncomingVoeRtpDump(boolean enable) {
audioRtpDump = enable;
if (!enable) {
check(voe.stopRtpDump(audioChannel,
VoiceEngine.RtpDirections.INCOMING) == 0,
"voe stopping rtp dump");
return;
}
String debugDirectory = getDebugDirectory();
check(voe.startRtpDump(audioChannel, debugDirectory +
String.format("/voe_%d.rtp", System.currentTimeMillis()),
VoiceEngine.RtpDirections.INCOMING) == 0,
"voe starting rtp dump");
}
private void updateAudioOutput() {
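// A plugged-in wired headset always takes precedence over the speakerphone
// setting.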
boolean useSpeaker = !headsetPluggedIn && speakerEnabled;
check(voe.setLoudspeakerStatus(useSpeaker) == 0,
"Failed updating loudspeaker");
}
public void startViE() {
check(!vieRunning, "ViE already started");
if (receiveVideo) {
if (viewSelection ==
context.getResources().getInteger(R.integer.openGl)) {
svRemote = ViERenderer.CreateRenderer(context, true);
} else if (viewSelection ==
context.getResources().getInteger(R.integer.surfaceView)) {
svRemote = ViERenderer.CreateRenderer(context, false);
} else {
externalCodec = new MediaCodecVideoDecoder(context);
svRemote = externalCodec.getView();
}
if (externalCodec != null) {
check(vie.registerExternalReceiveCodec(videoChannel,
VCM_VP8_PAYLOAD_TYPE, externalCodec, true) == 0,
"Failed to register external decoder");
} else {
check(vie.addRenderer(videoChannel, svRemote,
0, 0, 0, 1, 1) == 0, "Failed AddRenderer");
check(vie.startRender(videoChannel) == 0, "Failed StartRender");
}
check(vie.startReceive(videoChannel) == 0, "Failed StartReceive");
}
if (sendVideo) {
startCamera();
check(vie.startSend(videoChannel) == 0, "Failed StartSend");
}
vieRunning = true;
}
private void stopVie() {
if (!vieRunning) {
return;
}
check(vie.stopSend(videoChannel) == 0, "StopSend");
stopCamera();
check(vie.stopReceive(videoChannel) == 0, "StopReceive");
if (externalCodec != null) {
check(vie.deRegisterExternalReceiveCodec(videoChannel,
VCM_VP8_PAYLOAD_TYPE) == 0,
"Failed to deregister external decoder");
externalCodec.dispose();
externalCodec = null;
} else {
check(vie.stopRender(videoChannel) == 0, "StopRender");
check(vie.removeRenderer(videoChannel) == 0, "RemoveRenderer");
}
svRemote = null;
vieRunning = false;
}
public void setReceiveVideo(boolean receiveVideo) {
this.receiveVideo = receiveVideo;
}
public boolean receiveVideo() { return receiveVideo; }
public void setSendVideo(boolean sendVideo) { this.sendVideo = sendVideo; }
public boolean sendVideo() { return sendVideo; }
public int videoCodecIndex() { return videoCodecIndex; }
public void setVideoCodec(int codecNumber) {
videoCodecIndex = codecNumber;
updateVideoCodec();
}
public String[] videoCodecsAsString() {
String[] retVal = new String[vie.numberOfCodecs()];
for (int i = 0; i < vie.numberOfCodecs(); ++i) {
VideoCodecInst codec = vie.getCodec(i);
retVal[i] = codec.toString();
codec.dispose();
}
return retVal;
}
public int resolutionIndex() { return resolutionIndex; }
public void setResolutionIndex(int resolution) {
resolutionIndex = resolution;
updateVideoCodec();
}
private void updateVideoCodec() {
VideoCodecInst codec = getVideoCodec(videoCodecIndex, resolutionIndex);
check(vie.setSendCodec(videoChannel, codec) == 0, "Failed setSendCodec");
codec.dispose();
}
private VideoCodecInst getVideoCodec(int codecNumber, int resolution) {
VideoCodecInst retVal = vie.getCodec(codecNumber);
retVal.setStartBitRate(INIT_BITRATE_KBPS);
retVal.setMaxBitRate(MAX_BITRATE_KBPS);
retVal.setWidth(RESOLUTIONS[resolution][WIDTH_IDX]);
retVal.setHeight(RESOLUTIONS[resolution][HEIGHT_IDX]);
retVal.setMaxFrameRate(SEND_CODEC_FPS);
return retVal;
}
public void setVideoRxPort(int videoRxPort) {
this.videoRxPort = videoRxPort;
check(vie.setLocalReceiver(videoChannel, videoRxPort) == 0,
"Failed setLocalReceiver");
}
public int videoRxPort() { return videoRxPort; }
public void setVideoTxPort(int videoTxPort) {
this.videoTxPort = videoTxPort;
check(remoteIp != null,
"remoteIP must have been set before setting audio send port");
check(vie.setSendDestination(videoChannel, videoTxPort, remoteIp) == 0,
"Failed setSendDestination");
}
public int videoTxPort() {
return videoTxPort;
}
public boolean hasMultipleCameras() {
return cameras.length > 1;
}
public boolean frontCameraIsSet() {
return useFrontCamera;
}
// Set camera to front if one exists, otherwise use the back camera.
private void setDefaultCamera() {
useFrontCamera = hasFrontCamera();
}
public void toggleCamera() {
if (vieRunning) {
stopCamera();
}
useFrontCamera = !useFrontCamera;
if (vieRunning) {
startCamera();
}
}
private void startCamera() {
CameraDesc cameraInfo = vie.getCaptureDevice(getCameraId());
currentCameraHandle = vie.allocateCaptureDevice(cameraInfo);
cameraInfo.dispose();
check(vie.connectCaptureDevice(currentCameraHandle, videoChannel) == 0,
"Failed to connect capture device");
// Camera and preview surface. Note, renderer must be created before
// calling StartCapture or |svLocal| won't be able to render.
svLocal = ViERenderer.CreateLocalRenderer(context);
check(vie.startCapture(currentCameraHandle) == 0, "Failed StartCapture");
compensateRotation();
}
private void stopCamera() {
check(vie.stopCapture(currentCameraHandle) == 0, "Failed StopCapture");
svLocal = null;
check(vie.releaseCaptureDevice(currentCameraHandle) == 0,
"Failed ReleaseCaptureDevice");
}
private boolean hasFrontCamera() {
for (int i = 0; i < cameras.length; ++i) {
if (cameras[i].frontFacing()) {
return true;
}
}
return false;
}
public SurfaceView getRemoteSurfaceView() {
return svRemote;
}
public SurfaceView getLocalSurfaceView() {
return svLocal;
}
public void setViewSelection(int viewSelection) {
this.viewSelection = viewSelection;
}
public int viewSelection() { return viewSelection; }
public boolean nackEnabled() { return enableNack; }
public void setNack(boolean enable) {
enableNack = enable;
check(vie.setNackStatus(videoChannel, enableNack) == 0,
"Failed setNackStatus");
}
// Collates current state into a multiline string.
public String sendReceiveState() {
int packetLoss = 0;
if (vieRunning) {
RtcpStatistics stats = vie.getReceivedRtcpStatistics(videoChannel);
if (stats != null) {
// Calculate % lost from fraction lost.
// Definition of fraction lost can be found in RFC3550.
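// E.g. fractionLost == 64 (64/256 == 25%) gives (64 * 100) >> 8 == 25.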
packetLoss = (stats.fractionLost * 100) >> 8;
}
}
String retVal =
"fps in/out: " + inFps + "/" + outFps + "\n" +
"kBps in/out: " + inKbps / 1024 + "/ " + outKbps / 1024 + "\n" +
"resolution: " + inWidth + "x" + inHeight + "\n" +
"loss: " + packetLoss + "%";
return retVal;
}
MediaEngineObserver observer;
public void setObserver(MediaEngineObserver observer) {
this.observer = observer;
}
// Callbacks from the VideoDecodeEncodeObserver interface.
public void incomingRate(int videoChannel, int framerate, int bitrate) {
inFps = framerate;
inKbps = bitrate;
newStats();
}
public void incomingCodecChanged(int videoChannel,
VideoCodecInst videoCodec) {
inWidth = videoCodec.width();
inHeight = videoCodec.height();
videoCodec.dispose();
newStats();
}
public void requestNewKeyFrame(int videoChannel) {}
public void outgoingRate(int videoChannel, int framerate, int bitrate) {
outFps = framerate;
outKbps = bitrate;
newStats();
}
private void newStats() {
if (observer != null) {
observer.newStats(sendReceiveState());
}
}
// Debug helpers.
public boolean videoRtpDump() { return videoRtpDump; }
public void setIncomingVieRtpDump(boolean enable) {
videoRtpDump = enable;
if (!enable) {
check(vie.stopRtpDump(videoChannel,
VideoEngine.RtpDirections.INCOMING) == 0,
"vie StopRTPDump");
return;
}
String debugDirectory = getDebugDirectory();
check(vie.startRtpDump(videoChannel, debugDirectory +
String.format("/vie_%d.rtp", System.currentTimeMillis()),
VideoEngine.RtpDirections.INCOMING) == 0,
"vie StartRtpDump");
}
private int getCameraId() {
return useFrontCamera ? Camera.CameraInfo.CAMERA_FACING_FRONT :
Camera.CameraInfo.CAMERA_FACING_BACK;
}
private void compensateRotation() {
if (svLocal == null) {
// Not rendering (or sending).
return;
}
if (deviceOrientation == OrientationEventListener.ORIENTATION_UNKNOWN) {
return;
}
int cameraRotation =
cameras[getCameraId()].rotationFromRealWorldUp(
deviceOrientation);
// Egress streams should have real world up as up.
check(
vie.setRotateCapturedFrames(currentCameraHandle, cameraRotation) == 0,
"Failed setRotateCapturedFrames: camera " + currentCameraHandle +
"rotation " + cameraRotation);
}
}

@@ -0,0 +1,15 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.webrtcdemo;
public interface MediaEngineObserver {
void newStats(String stats);
}

@@ -0,0 +1,15 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.webrtcdemo;
public interface MenuStateProvider {
public MediaEngine getEngine();
}

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
  *
  * Use of this source code is governed by a BSD-style license
  * that can be found in the LICENSE file in the root of the source
@@ -8,12 +8,15 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
-package org.webrtc.videoengineapp;
+package org.webrtc.webrtcdemo;
-public interface IViEAndroidCallback {
-  public int updateStats(int frameRateI, int bitRateI,
-                         int packetLoss, int frameRateO,
-                         int bitRateO);
+import android.content.Context;
-  public int newIncomingResolution(int width, int height);
+public class NativeWebRtcContextRegistry {
+  static {
+    System.loadLibrary("webrtcdemo-jni");
+  }
+  public native void register(Context context);
+  public native void unRegister();
 }

@@ -0,0 +1,32 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.webrtcdemo;
public class RtcpStatistics {
// Definition of fraction lost can be found in RFC3550.
// It is equivalent to taking the integer part after multiplying the loss
// fraction by 256.
public final int fractionLost;
public final int cumulativeLost;
public final int extendedMax;
public final int jitter;
public final int rttMs;
// Only allowed to be created by the native layer.
private RtcpStatistics(int fractionLost, int cumulativeLost, int extendedMax,
int jitter, int rttMs) {
this.fractionLost = fractionLost;
this.cumulativeLost = cumulativeLost;
this.extendedMax = extendedMax;
this.jitter = jitter;
this.rttMs = rttMs;
}
}

@@ -0,0 +1,173 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.webrtcdemo;
import android.app.Activity;
import android.app.Fragment;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.RadioGroup;
import android.widget.TextView;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.util.Enumeration;
public class SettingsMenuFragment extends Fragment
implements RadioGroup.OnCheckedChangeListener {
private String TAG;
private MenuStateProvider stateProvider;
EditText etRemoteIp;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View v = inflater.inflate(R.layout.settingsmenu, container, false);
TAG = getResources().getString(R.string.tag);
CheckBox cbVideoReceive = (CheckBox) v.findViewById(R.id.cbVideoReceive);
cbVideoReceive.setChecked(getEngine().receiveVideo());
cbVideoReceive.setOnClickListener(new View.OnClickListener() {
public void onClick(View checkBox) {
CheckBox cbVideoReceive = (CheckBox) checkBox;
getEngine().setReceiveVideo(cbVideoReceive.isChecked());
cbVideoReceive.setChecked(getEngine().receiveVideo());
}
});
CheckBox cbVideoSend = (CheckBox) v.findViewById(R.id.cbVideoSend);
cbVideoSend.setChecked(getEngine().sendVideo());
cbVideoSend.setOnClickListener(new View.OnClickListener() {
public void onClick(View checkBox) {
CheckBox cbVideoSend = (CheckBox) checkBox;
getEngine().setSendVideo(cbVideoSend.isChecked());
cbVideoSend.setChecked(getEngine().sendVideo());
}
});
CheckBox cbAudio = (CheckBox) v.findViewById(R.id.cbAudio);
cbAudio.setChecked(getEngine().audioEnabled());
cbAudio.setOnClickListener(new View.OnClickListener() {
public void onClick(View checkBox) {
CheckBox cbAudio = (CheckBox) checkBox;
getEngine().setAudio(cbAudio.isChecked());
cbAudio.setChecked(getEngine().audioEnabled());
}
});
boolean loopback =
getResources().getBoolean(R.bool.loopback_enabled_default);
CheckBox cbLoopback = (CheckBox) v.findViewById(R.id.cbLoopback);
cbLoopback.setChecked(loopback);
cbLoopback.setOnClickListener(new View.OnClickListener() {
public void onClick(View checkBox) {
loopbackChanged((CheckBox) checkBox);
}
});
etRemoteIp = (EditText) v.findViewById(R.id.etRemoteIp);
etRemoteIp.setOnClickListener(new View.OnClickListener() {
public void onClick(View editText) {
getEngine().setRemoteIp(etRemoteIp.getText().toString());
}
});
// Has to be after remote IP as loopback changes it.
loopbackChanged(cbLoopback);
RadioGroup rRenderMechanism =
(RadioGroup) v.findViewById(R.id.rRenderMechanism);
rRenderMechanism.clearCheck();
if (getEngine().viewSelection() ==
getResources().getInteger(R.integer.openGl)) {
rRenderMechanism.check(R.id.rOpenGl);
} else if (getEngine().viewSelection() ==
getResources().getInteger(R.integer.surfaceView)) {
rRenderMechanism.check(R.id.rSurfaceView);
} else {
rRenderMechanism.check(R.id.rMediaCodec);
}
rRenderMechanism.setOnCheckedChangeListener(this);
return v;
}
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
// This makes sure that the container activity has implemented
// the callback interface. If not, it throws an exception.
try {
stateProvider = (MenuStateProvider) activity;
} catch (ClassCastException e) {
throw new ClassCastException(activity +
" must implement MenuStateProvider");
}
}
private void loopbackChanged(CheckBox cbLoopback) {
boolean loopback = cbLoopback.isChecked();
etRemoteIp.setText(loopback ? getLoopbackIPString() : getLocalIpAddress());
getEngine().setRemoteIp(etRemoteIp.getText().toString());
}
private String getLoopbackIPString() {
return getResources().getString(R.string.loopbackIp);
}
private String getLocalIpAddress() {
String localIp = "";
try {
for (Enumeration<NetworkInterface> en = NetworkInterface
.getNetworkInterfaces(); en.hasMoreElements();) {
NetworkInterface intf = en.nextElement();
for (Enumeration<InetAddress> enumIpAddr =
intf.getInetAddresses();
enumIpAddr.hasMoreElements(); ) {
InetAddress inetAddress = enumIpAddr.nextElement();
if (!inetAddress.isLoopbackAddress()) {
// Set the remote ip address the same as
// the local ip address of the last netif
localIp = inetAddress.getHostAddress().toString();
}
}
}
} catch (SocketException e) {
Log.e(TAG, "Unable to get local IP address. Not the end of the world", e);
}
return localIp;
}
private MediaEngine getEngine() {
return stateProvider.getEngine();
}
public void onCheckedChanged(RadioGroup group, int checkedId) {
switch (checkedId) {
case R.id.rOpenGl:
getEngine().setViewSelection(
getResources().getInteger(R.integer.openGl));
break;
case R.id.rSurfaceView:
getEngine().setViewSelection(
getResources().getInteger(R.integer.surfaceView));
break;
case R.id.rMediaCodec:
getEngine().setViewSelection(
getResources().getInteger(R.integer.mediaCodec));
break;
default:
break;
}
}
}

View File

@ -0,0 +1,49 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.webrtcdemo;
import android.widget.ArrayAdapter;
import android.content.Context;
import android.widget.TextView;
import android.view.View;
import android.view.ViewGroup;
import android.view.LayoutInflater;
public class SpinnerAdapter extends ArrayAdapter<String> {
private String[] menuItems;
LayoutInflater inflater;
int textViewResourceId;
public SpinnerAdapter(Context context, int textViewResourceId,
String[] objects, LayoutInflater inflater) {
super(context, textViewResourceId, objects);
menuItems = objects;
this.inflater = inflater;
this.textViewResourceId = textViewResourceId;
}
@Override public View getDropDownView(int position, View convertView,
ViewGroup parent) {
return getCustomView(position, convertView, parent);
}
@Override public View getView(int position, View convertView,
ViewGroup parent) {
return getCustomView(position, convertView, parent);
}
private View getCustomView(int position, View v, ViewGroup parent) {
View row = inflater.inflate(textViewResourceId, parent, false);
TextView label = (TextView) row.findViewById(R.id.spinner_row);
label.setText(menuItems[position]);
return row;
}
}

View File

@ -0,0 +1,45 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.webrtcdemo;
public class VideoCodecInst {
private final long nativeCodecInst;
// VideoCodecInst can only be created from the native layer.
private VideoCodecInst(long nativeCodecInst) {
this.nativeCodecInst = nativeCodecInst;
}
public String toString() {
return name() + " " +
"PlType: " + plType() + " " +
"Width: " + width() + " " +
"Height: " + height() + " " +
"StartBitRate: " + startBitRate() + " " +
"MaxFrameRate: " + maxFrameRate();
}
// Dispose must be called before all references to VideoCodecInst are lost as
// it will free memory allocated in the native layer.
public native void dispose();
public native int plType();
public native String name();
public native int width();
public native void setWidth(int width);
public native int height();
public native void setHeight(int height);
public native int startBitRate();
public native void setStartBitRate(int bitrate);
public native int maxBitRate();
public native void setMaxBitRate(int bitrate);
public native int maxFrameRate();
public native void setMaxFrameRate(int framerate);
}
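
Because each VideoCodecInst owns a native allocation, even read-only enumeration has to end with dispose(). A hedged sketch, assuming an initialized VideoEngine (defined below); the helper name and log tag are illustrative:

public static void logVideoCodecs(VideoEngine engine) {
  // Hypothetical helper, not part of the demo sources.
  for (int i = 0; i < engine.numberOfCodecs(); ++i) {
    VideoCodecInst codec = engine.getCodec(i);
    android.util.Log.d("WebRTCDemo", codec.toString());
    codec.dispose();  // Free the native memory before dropping the reference.
  }
}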

View File

@ -0,0 +1,24 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.webrtcdemo;
public interface VideoDecodeEncodeObserver {
void incomingRate(int videoChannel, int framerate, int bitrate);
// VideoCodecInst.dispose must be called for |videoCodec| before all
// references to it are lost as it will free memory allocated in the native
// layer.
void incomingCodecChanged(int videoChannel, VideoCodecInst videoCodec);
void requestNewKeyFrame(int videoChannel);
void outgoingRate(int videoChannel, int framerate, int bitrate);
}

View File

@ -0,0 +1,119 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.webrtcdemo;
public class VideoEngine {
private final long nativeVideoEngine;
// Keep in sync (including this comment) with webrtc/common_types.h:TraceLevel
public enum TraceLevel {
TRACE_NONE(0x0000),
TRACE_STATE_INFO(0x0001),
TRACE_WARNING(0x0002),
TRACE_ERROR(0x0004),
TRACE_CRITICAL(0x0008),
TRACE_API_CALL(0x0010),
TRACE_DEFAULT(0x00ff),
TRACE_MODULE_CALL(0x0020),
TRACE_MEMORY(0x0100),
TRACE_TIMER(0x0200),
TRACE_STREAM(0x0400),
TRACE_DEBUG(0x0800),
TRACE_INFO(0x1000),
TRACE_TERSE_INFO(0x2000),
TRACE_ALL(0xffff);
public final int level;
TraceLevel(int level) {
this.level = level;
}
};
// Keep in sync (including this comment) with
// webrtc/video_engine/include/vie_rtp_rtcp.h:ViEKeyFrameRequestMethod
public enum VieKeyFrameRequestMethod {
KEY_FRAME_REQUEST_NONE, KEY_FRAME_REQUEST_PLI_RTCP,
KEY_FRAME_REQUEST_FIR_RTP, KEY_FRAME_REQUEST_FIR_RTCP
}
// Keep in sync (including this comment) with
// webrtc/common_types.h:RtpDirections
public enum RtpDirections { INCOMING, OUTGOING }
public VideoEngine() {
nativeVideoEngine = create();
}
// API comments can be found in VideoEngine's native APIs. Not all native
// APIs are available.
private static native long create();
public native int init();
public native int setVoiceEngine(VoiceEngine voe);
public native void dispose();
public native int startSend(int channel);
public native int stopRender(int channel);
public native int stopSend(int channel);
public native int startReceive(int channel);
public native int stopReceive(int channel);
public native int createChannel();
public native int deleteChannel(int channel);
public native int connectAudioChannel(int videoChannel, int voiceChannel);
public native int setLocalReceiver(int channel, int port);
public native int setSendDestination(int channel, int port, String ipAddr);
public native int numberOfCodecs();
public native VideoCodecInst getCodec(int index);
public native int setReceiveCodec(int channel, VideoCodecInst codec);
public native int setSendCodec(int channel, VideoCodecInst codec);
public native int addRenderer(int channel, Object glSurface, int zOrder,
float left, float top,
float right, float bottom);
public native int removeRenderer(int channel);
public native int registerExternalReceiveCodec(int channel, int plType,
MediaCodecVideoDecoder decoder, boolean internal_source);
public native int deRegisterExternalReceiveCodec(int channel, int plType);
public native int startRender(int channel);
public native int numberOfCaptureDevices();
public native CameraDesc getCaptureDevice(int index);
public native int allocateCaptureDevice(CameraDesc camera);
public native int connectCaptureDevice(int cameraId, int channel);
public native int startCapture(int cameraId);
public native int stopCapture(int cameraId);
public native int releaseCaptureDevice(int cameraId);
public native int getOrientation(CameraDesc camera);
public native int setRotateCapturedFrames(int cameraId, int degrees);
public native int setNackStatus(int channel, boolean enable);
public int setKeyFrameRequestMethod(int channel,
VieKeyFrameRequestMethod requestMethod) {
return setKeyFrameRequestMethod(channel, requestMethod.ordinal());
}
private native int setKeyFrameRequestMethod(int channel,
int requestMethod);
public native RtcpStatistics getReceivedRtcpStatistics(int channel);
public native int registerObserver(int channel,
VideoDecodeEncodeObserver callback);
public native int deregisterObserver(int channel);
public native int setTraceFile(String fileName,
boolean fileCounter);
public int setTraceFilter(TraceLevel filter) {
return setTraceFilter(filter.level);
}
private native int setTraceFilter(int filter);
public int startRtpDump(int channel, String file,
RtpDirections direction) {
return startRtpDump(channel, file, direction.ordinal());
}
private native int startRtpDump(int channel, String file,
int direction);
public int stopRtpDump(int channel, RtpDirections direction) {
return stopRtpDump(channel, direction.ordinal());
}
private native int stopRtpDump(int channel, int direction);
}
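
The enum-typed wrappers (setKeyFrameRequestMethod, setTraceFilter, startRtpDump, stopRtpDump) all bridge to private native overloads via ordinals or the level bit mask, so callers never pass raw integers. A minimal send-side sketch under assumed values — the helper name, port, and IP are placeholders, init() and capture/codec setup are presumed done elsewhere, and error codes are ignored:

static int startVideoSend(VideoEngine vie) {
  // Hypothetical usage sketch, not part of the demo sources.
  int channel = vie.createChannel();
  vie.setLocalReceiver(channel, 11111);
  vie.setSendDestination(channel, 11111, "127.0.0.1");
  vie.setNackStatus(channel, true);
  vie.setKeyFrameRequestMethod(channel,
      VideoEngine.VieKeyFrameRequestMethod.KEY_FRAME_REQUEST_PLI_RTCP);
  vie.startSend(channel);
  return channel;
}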

View File

@ -0,0 +1,135 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.webrtcdemo;
import android.app.Activity;
import android.app.Fragment;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.CheckBox;
import android.widget.EditText;
import android.widget.Spinner;
import android.widget.TextView;
import java.lang.Integer;
public class VideoMenuFragment extends Fragment {
private String TAG;
private MenuStateProvider stateProvider;
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View v = inflater.inflate(R.layout.videomenu, container, false);
TAG = getResources().getString(R.string.tag);
String[] videoCodecsString = getEngine().videoCodecsAsString();
Spinner spCodecType = (Spinner) v.findViewById(R.id.spCodecType);
spCodecType.setAdapter(new SpinnerAdapter(getActivity(),
R.layout.dropdownitems,
videoCodecsString,
inflater));
spCodecType.setSelection(getEngine().videoCodecIndex());
spCodecType.setOnItemSelectedListener(new OnItemSelectedListener() {
public void onItemSelected(AdapterView<?> adapterView, View view,
int position, long id) {
getEngine().setVideoCodec(position);
}
public void onNothingSelected(AdapterView<?> arg0) {
Log.d(TAG, "No setting selected");
}
});
Spinner spCodecSize = (Spinner) v.findViewById(R.id.spCodecSize);
spCodecSize.setAdapter(new SpinnerAdapter(getActivity(),
R.layout.dropdownitems,
MediaEngine.resolutionsAsString(),
inflater));
// -2 means selecting the 2nd highest resolution. This maintains legacy
// behavior. Also higher resolutions lead to lower framerate at same
// bit rate.
// TODO(hellner): make configuration in the form [width]x[height] instead of
// an opaque index. Also configuration should happen in a res/values xml
// file rather than inline.
spCodecSize.setSelection(getEngine().resolutionIndex() - 2);
spCodecSize.setOnItemSelectedListener(new OnItemSelectedListener() {
public void onItemSelected(AdapterView<?> adapterView, View view,
int position, long id) {
getEngine().setResolutionIndex(position);
}
public void onNothingSelected(AdapterView<?> arg0) {
Log.d(TAG, "No setting selected");
}
});
EditText etVTxPort = (EditText) v.findViewById(R.id.etVTxPort);
etVTxPort.setText(Integer.toString(getEngine().videoTxPort()));
etVTxPort.setOnClickListener(new View.OnClickListener() {
public void onClick(View editText) {
EditText etVTxPort = (EditText) editText;
getEngine()
.setVideoTxPort(Integer.parseInt(etVTxPort.getText().toString()));
}
});
EditText etVRxPort = (EditText) v.findViewById(R.id.etVRxPort);
etVRxPort.setText(Integer.toString(getEngine().videoRxPort()));
etVRxPort.setOnClickListener(new View.OnClickListener() {
public void onClick(View editText) {
EditText etVRxPort = (EditText) editText;
getEngine()
.setVideoRxPort(Integer.parseInt(etVRxPort.getText().toString()));
}
});
CheckBox cbEnableNack = (CheckBox) v.findViewById(R.id.cbNack);
cbEnableNack.setChecked(getEngine().nackEnabled());
cbEnableNack.setOnClickListener(new View.OnClickListener() {
public void onClick(View checkBox) {
CheckBox cbEnableNack = (CheckBox) checkBox;
getEngine().setNack(cbEnableNack.isChecked());
}
});
CheckBox cbEnableVideoRTPDump =
(CheckBox) v.findViewById(R.id.cbVideoRTPDump);
cbEnableVideoRTPDump.setChecked(getEngine().videoRtpDump());
cbEnableVideoRTPDump.setOnClickListener(new View.OnClickListener() {
public void onClick(View checkBox) {
CheckBox cbEnableVideoRTPDump = (CheckBox) checkBox;
getEngine().setIncomingVieRtpDump(cbEnableVideoRTPDump.isChecked());
}
});
return v;
}
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
// This makes sure that the container activity has implemented
// the callback interface. If not, it throws an exception.
try {
stateProvider = (MenuStateProvider) activity;
} catch (ClassCastException e) {
throw new ClassCastException(activity +
" must implement MenuStateProvider");
}
}
private MediaEngine getEngine() {
return stateProvider.getEngine();
}
}

View File

@ -0,0 +1,117 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.webrtcdemo;
public class VoiceEngine {
private final long nativeVoiceEngine;
// Keep in sync (including this comment) with
// webrtc/common_types.h:NsModes
public enum NsModes {
UNCHANGED, DEFAULT, CONFERENCE, LOW_SUPPRESSION,
MODERATE_SUPPRESSION, HIGH_SUPPRESSION, VERY_HIGH_SUPPRESSION
}
// Keep in sync (including this comment) with
// webrtc/common_types.h:AgcModes
public enum AgcModes {
UNCHANGED, DEFAULT, ADAPTIVE_ANALOG, ADAPTIVE_DIGITAL,
FIXED_DIGITAL
}
// Keep in sync (including this comment) with
// webrtc/common_types.h:AecmModes
public enum AecmModes {
QUIET_EARPIECE_OR_HEADSET, EARPIECE, LOUD_EARPIECE,
SPEAKERPHONE, LOUD_SPEAKERPHONE
}
// Keep in sync (including this comment) with
// webrtc/common_types.h:EcModes
public enum EcModes { UNCHANGED, DEFAULT, CONFERENCE, AEC, AECM }
// Keep in sync (including this comment) with
// webrtc/common_types.h:RtpDirections
public enum RtpDirections { INCOMING, OUTGOING }
public static class AgcConfig {
AgcConfig(int targetLevelDbOv, int digitalCompressionGaindB,
boolean limiterEnable) {
this.targetLevelDbOv = targetLevelDbOv;
this.digitalCompressionGaindB = digitalCompressionGaindB;
this.limiterEnable = limiterEnable;
}
private final int targetLevelDbOv;
private final int digitalCompressionGaindB;
private final boolean limiterEnable;
}
public VoiceEngine() {
nativeVoiceEngine = create();
}
private static native long create();
public native int init();
public native void dispose();
public native int createChannel();
public native int deleteChannel(int channel);
public native int setLocalReceiver(int channel, int port);
public native int setSendDestination(int channel, int port, String ipaddr);
public native int startListen(int channel);
public native int startPlayout(int channel);
public native int startSend(int channel);
public native int stopListen(int channel);
public native int stopPlayout(int channel);
public native int stopSend(int channel);
public native int setSpeakerVolume(int volume);
public native int setLoudspeakerStatus(boolean enable);
public native int startPlayingFileLocally(
int channel,
String fileName,
boolean loop);
public native int stopPlayingFileLocally(int channel);
public native int startPlayingFileAsMicrophone(
int channel,
String fileName,
boolean loop);
public native int stopPlayingFileAsMicrophone(int channel);
public native int numOfCodecs();
public native CodecInst getCodec(int index);
public native int setSendCodec(int channel, CodecInst codec);
public int setEcStatus(boolean enable, EcModes mode) {
return setEcStatus(enable, mode.ordinal());
}
private native int setEcStatus(boolean enable, int ec_mode);
public int setAecmMode(AecmModes aecm_mode, boolean cng) {
return setAecmMode(aecm_mode.ordinal(), cng);
}
private native int setAecmMode(int aecm_mode, boolean cng);
public int setAgcStatus(boolean enable, AgcModes agc_mode) {
return setAgcStatus(enable, agc_mode.ordinal());
}
private native int setAgcStatus(boolean enable, int agc_mode);
public native int setAgcConfig(AgcConfig agc_config);
public int setNsStatus(boolean enable, NsModes ns_mode) {
return setNsStatus(enable, ns_mode.ordinal());
}
private native int setNsStatus(boolean enable, int ns_mode);
public native int startDebugRecording(String file);
public native int stopDebugRecording();
public int startRtpDump(int channel, String file,
RtpDirections direction) {
return startRtpDump(channel, file, direction.ordinal());
}
private native int startRtpDump(int channel, String file,
int direction);
public int stopRtpDump(int channel, RtpDirections direction) {
return stopRtpDump(channel, direction.ordinal());
}
private native int stopRtpDump(int channel, int direction);
}
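
VoiceEngine uses the same ordinal-bridging pattern for its audio processing modes. A hedged sketch of enabling mobile-friendly call-quality processing — the helper name is illustrative and return codes are ignored:

static void enableAudioProcessing(VoiceEngine voe) {
  // Hypothetical sketch, not part of the demo sources.
  voe.setEcStatus(true, VoiceEngine.EcModes.AECM);
  voe.setAecmMode(VoiceEngine.AecmModes.SPEAKERPHONE, false /* CNG off */);
  voe.setNsStatus(true, VoiceEngine.NsModes.MODERATE_SUPPRESSION);
  voe.setAgcStatus(true, VoiceEngine.AgcModes.ADAPTIVE_DIGITAL);
}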

View File

@ -0,0 +1,234 @@
/*
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.webrtcdemo;
import android.app.ActionBar.Tab;
import android.app.ActionBar.TabListener;
import android.app.ActionBar;
import android.app.Activity;
import android.app.Fragment;
import android.app.FragmentTransaction;
import android.content.pm.ActivityInfo;
import android.media.AudioManager;
import android.os.Bundle;
import android.os.Handler;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.WindowManager;
public class WebRTCDemo extends Activity implements MenuStateProvider {
// From http://developer.android.com/guide/topics/ui/actionbar.html
public static class TabListener<T extends Fragment>
implements ActionBar.TabListener {
private Fragment fragment;
private final Activity activity;
private final String tag;
private final Class<T> instance;
private final Bundle args;
public TabListener(Activity activity, String tag, Class<T> clz) {
this(activity, tag, clz, null);
}
public TabListener(Activity activity, String tag, Class<T> clz,
Bundle args) {
this.activity = activity;
this.tag = tag;
this.instance = clz;
this.args = args;
}
public void onTabSelected(Tab tab, FragmentTransaction ft) {
// Check if the fragment is already initialized
if (fragment == null) {
// If not, instantiate and add it to the activity
fragment = Fragment.instantiate(activity, instance.getName(), args);
ft.add(android.R.id.content, fragment, tag);
} else {
// If it exists, simply attach it in order to show it
ft.attach(fragment);
}
}
public void onTabUnselected(Tab tab, FragmentTransaction ft) {
if (fragment != null) {
// Detach the fragment, because another one is being attached
ft.detach(fragment);
}
}
public void onTabReselected(Tab tab, FragmentTransaction ft) {
// User selected the already selected tab. Do nothing.
}
}
private NativeWebRtcContextRegistry contextRegistry = null;
private MediaEngine mediaEngine = null;
private Handler handler;
public MediaEngine getEngine() { return mediaEngine; }
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Global settings.
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
// State.
// Must be instantiated before MediaEngine.
contextRegistry = new NativeWebRtcContextRegistry();
contextRegistry.register(this);
// Load all settings dictated in xml.
mediaEngine = new MediaEngine(this);
mediaEngine.setRemoteIp(getResources().getString(R.string.loopbackIp));
mediaEngine.setTrace(getResources().getBoolean(
R.bool.trace_enabled_default));
mediaEngine.setAudio(getResources().getBoolean(
R.bool.audio_enabled_default));
mediaEngine.setAudioCodec(mediaEngine.getIsacIndex());
mediaEngine.setAudioRxPort(getResources().getInteger(
R.integer.aRxPortDefault));
mediaEngine.setAudioTxPort(getResources().getInteger(
R.integer.aTxPortDefault));
mediaEngine.setSpeaker(getResources().getBoolean(
R.bool.speaker_enabled_default));
mediaEngine.setDebuging(getResources().getBoolean(
R.bool.apm_debug_enabled_default));
mediaEngine.setReceiveVideo(getResources().getBoolean(
R.bool.video_receive_enabled_default));
mediaEngine.setSendVideo(getResources().getBoolean(
R.bool.video_send_enabled_default));
mediaEngine.setVideoCodec(getResources().getInteger(
R.integer.video_codec_default));
// TODO(hellner): resolutions should probably be in the xml as well.
mediaEngine.setResolutionIndex(MediaEngine.numberOfResolutions() - 2);
mediaEngine.setVideoTxPort(getResources().getInteger(
R.integer.vTxPortDefault));
mediaEngine.setVideoRxPort(getResources().getInteger(
R.integer.vRxPortDefault));
mediaEngine.setNack(getResources().getBoolean(R.bool.nack_enabled_default));
mediaEngine.setViewSelection(getResources().getInteger(
R.integer.defaultView));
// Create action bar with all tabs.
ActionBar actionBar = getActionBar();
actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);
actionBar.setDisplayShowTitleEnabled(false);
Tab tab = actionBar.newTab()
.setText("Main")
.setTabListener(new TabListener<MainMenuFragment>(
this, "main", MainMenuFragment.class));
actionBar.addTab(tab);
tab = actionBar.newTab()
.setText("Settings")
.setTabListener(new TabListener<SettingsMenuFragment>(
this, "Settings", SettingsMenuFragment.class));
actionBar.addTab(tab);
tab = actionBar.newTab()
.setText("Video")
.setTabListener(new TabListener<VideoMenuFragment>(
this, "video", VideoMenuFragment.class));
actionBar.addTab(tab);
tab = actionBar.newTab()
.setText("Audio")
.setTabListener(new TabListener<AudioMenuFragment>(
this, "Audio", AudioMenuFragment.class));
actionBar.addTab(tab);
enableTimedStartStop();
// Hint that voice call audio stream should be used for hardware volume
// controls.
setVolumeControlStream(AudioManager.STREAM_VOICE_CALL);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.main_activity_actions, menu);
return super.onCreateOptionsMenu(menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle presses on the action bar items
switch (item.getItemId()) {
case R.id.action_exit:
MainMenuFragment main = (MainMenuFragment)getFragmentManager()
.findFragmentByTag("main");
main.stopAll();
finish();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
@Override
public void onDestroy() {
disableTimedStartStop();
mediaEngine.dispose();
contextRegistry.unRegister();
super.onDestroy();
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
// Prevent app from running in the background.
MainMenuFragment main = (MainMenuFragment)getFragmentManager()
.findFragmentByTag("main");
main.stopAll();
finish();
return true;
}
return super.onKeyDown(keyCode, event);
}
private int getCallRestartPeriodicity() {
return getResources().getInteger(R.integer.call_restart_periodicity_ms);
}
// Thread repeatedly calling start/stop.
void enableTimedStartStop() {
if (getCallRestartPeriodicity() > 0) {
// Periodicity == 0 <-> Disabled.
handler = new Handler();
handler.postDelayed(startOrStopCallback, getCallRestartPeriodicity());
}
}
void disableTimedStartStop() {
if (handler != null) {
handler.removeCallbacks(startOrStopCallback);
}
}
private Runnable startOrStopCallback = new Runnable() {
public void run() {
MainMenuFragment main = (MainMenuFragment)getFragmentManager()
.findFragmentByTag("main");
main.toggleStart();
handler.postDelayed(startOrStopCallback, getCallRestartPeriodicity());
}
};
}

View File

@ -1,72 +0,0 @@
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
{
'targets': [
{
'target_name': 'libwebrtc-video-demo-jni',
'type': 'loadable_module',
'dependencies': [
'<(webrtc_root)/modules/modules.gyp:*',
'<(webrtc_root)/test/test.gyp:channel_transport',
'<(webrtc_root)/video_engine/video_engine.gyp:video_engine_core',
'<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine',
],
'sources': [
'jni/android_media_codec_decoder.cc',
'jni/vie_android_java_api.cc',
],
'link_settings': {
'libraries': [
'-llog',
'-lGLESv2',
'-lOpenSLES',
],
}
},
{
'target_name': 'WebRTCDemo',
'type': 'none',
'dependencies': [
'libwebrtc-video-demo-jni',
'<(modules_java_gyp_path):*',
],
'actions': [
{
# TODO(yujie.mao): Convert building of the demo to a proper GYP target
# so this action is not needed once chromium's apk-building machinery
# can be used. (crbug.com/225101)
'action_name': 'build_webrtcdemo_apk',
'variables': {
'android_webrtc_demo_root': '<(webrtc_root)/video_engine/test/android',
},
'inputs' : [
'<(PRODUCT_DIR)/lib.java/audio_device_module_java.jar',
'<(PRODUCT_DIR)/lib.java/video_capture_module_java.jar',
'<(PRODUCT_DIR)/lib.java/video_render_module_java.jar',
'<(PRODUCT_DIR)/libwebrtc-video-demo-jni.so',
'<!@(find <(android_webrtc_demo_root)/src -name "*.java")',
'<!@(find <(android_webrtc_demo_root)/res -type f)',
],
'outputs': ['<(PRODUCT_DIR)/WebRTCDemo-debug.apk'],
'action': ['bash', '-ec',
'rm -f <(_outputs) && '
'mkdir -p <(android_webrtc_demo_root)/libs/<(android_app_abi) && '
'<(android_strip) -o <(android_webrtc_demo_root)/libs/<(android_app_abi)/libwebrtc-video-demo-jni.so <(PRODUCT_DIR)/libwebrtc-video-demo-jni.so && '
'cp <(PRODUCT_DIR)/lib.java/audio_device_module_java.jar <(android_webrtc_demo_root)/libs/ &&'
'cp <(PRODUCT_DIR)/lib.java/video_capture_module_java.jar <(android_webrtc_demo_root)/libs/ &&'
'cp <(PRODUCT_DIR)/lib.java/video_render_module_java.jar <(android_webrtc_demo_root)/libs/ &&'
'cd <(android_webrtc_demo_root) && '
'ant debug && '
'cd - && '
'cp <(android_webrtc_demo_root)/bin/WebRTCDemo-debug.apk <(_outputs)'
],
},
],
},
],
}

View File

@ -1,92 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project name="WebRTCDemo" default="help">
<!-- The local.properties file is created and updated by the 'android' tool.
It contains the path to the SDK. It should *NOT* be checked into
Version Control Systems. -->
<property file="local.properties" />
<!-- The ant.properties file can be created by you. It is only edited by the
'android' tool to add properties to it.
This is the place to change some Ant specific build properties.
Here are some properties you may want to change/update:
source.dir
The name of the source directory. Default is 'src'.
out.dir
The name of the output directory. Default is 'bin'.
For other overridable properties, look at the beginning of the rules
files in the SDK, at tools/ant/build.xml
Properties related to the SDK location or the project target should
be updated using the 'android' tool with the 'update' action.
This file is an integral part of the build system for your
application and should be checked into Version Control Systems.
-->
<property file="ant.properties" />
<!-- if sdk.dir was not set from one of the property file, then
get it from the ANDROID_SDK_ROOT env var.
This must be done before we load project.properties since
the proguard config can use sdk.dir -->
<property environment="env" />
<condition property="sdk.dir" value="${env.ANDROID_SDK_ROOT}">
<isset property="env.ANDROID_SDK_ROOT" />
</condition>
<!-- The project.properties file is created and updated by the 'android'
tool, as well as ADT.
This contains project specific properties such as project target, and library
dependencies. Lower level build properties are stored in ant.properties
(or in .classpath for Eclipse projects).
This file is an integral part of the build system for your
application and should be checked into Version Control Systems. -->
<loadproperties srcFile="project.properties" />
<!-- quick check on sdk.dir -->
<fail
message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_SDK_ROOT environment variable."
unless="sdk.dir"
/>
<!--
Import per project custom build rules if present at the root of the project.
This is the place to put custom intermediary targets such as:
-pre-build
-pre-compile
-post-compile (This is typically used for code obfuscation.
Compiled code location: ${out.classes.absolute.dir}
If this is not done in place, override ${out.dex.input.absolute.dir})
-post-package
-post-build
-pre-clean
-->
<import file="custom_rules.xml" optional="true" />
<!-- Import the actual build file.
To customize existing targets, there are two options:
- Customize only one target:
- copy/paste the target into this file, *before* the
<import> task.
- customize it to your needs.
- Customize the whole content of build.xml
- copy/paste the content of the rules files (minus the top node)
into this file, replacing the <import> task.
- customize to your needs.
***********************
****** IMPORTANT ******
***********************
In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
in order to avoid having your file be overridden by tools such as "android update project"
-->
<!-- version-tag: 1 -->
<import file="${sdk.dir}/tools/ant/build.xml" />
</project>

View File

@ -1,126 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <android/log.h>
#define LOG_TAG "AndroidMediaCodecDecoder"
#include <jni.h>
#include "webrtc/video_engine/test/android/jni/android_media_codec_decoder.h"
namespace webrtc {
AndroidMediaCodecDecoder::AndroidMediaCodecDecoder(
JavaVM* vm, jobject surface, jclass decoderClass)
: vm_(vm),
surface_(NULL),
mediaCodecDecoder_(NULL),
decoderClass_(NULL),
env_(NULL),
setEncodedImageID_(NULL),
vm_attached_(false) {
Initialize(vm, surface, decoderClass);
}
AndroidMediaCodecDecoder::~AndroidMediaCodecDecoder() {
env_->DeleteGlobalRef(decoderClass_);
env_->DeleteGlobalRef(surface_);
}
void AndroidMediaCodecDecoder::Initialize(
JavaVM* vm, jobject surface, jclass decoderClass) {
int ret = vm->GetEnv(reinterpret_cast<void**>(&env_), JNI_VERSION_1_4);
if ((ret < 0) || !env_) {
__android_log_print(ANDROID_LOG_ERROR, LOG_TAG,
"Could not get JNI env (%d, %p)", ret, env_);
assert(false);
}
surface_ = env_->NewGlobalRef(surface);
decoderClass_ = reinterpret_cast<jclass>(env_->NewGlobalRef(decoderClass));
}
int32_t AndroidMediaCodecDecoder::InitDecode(
const VideoCodec* codecSettings, int32_t numberOfCores) {
__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);
// TODO(dwkang): Detach this thread from VM. => this leads to a crash on
// "StopCall".
int ret = vm_->AttachCurrentThread(&env_, NULL);
// Get the JNI env for this thread
if ((ret < 0) || !env_) {
__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG,
"Could not attach thread to JVM (%d, %p)", ret,
env_);
return WEBRTC_VIDEO_CODEC_ERROR;
} else {
vm_attached_ = true;
}
// Initialize the media codec java decoder class.
jmethodID mid = env_->GetMethodID(decoderClass_, "<init>", "()V");
mediaCodecDecoder_ = env_->NewGlobalRef(env_->NewObject(decoderClass_, mid));
mid = env_->GetMethodID(
decoderClass_, "configure", "(Landroid/view/SurfaceView;II)Z");
bool success = env_->CallBooleanMethod(
mediaCodecDecoder_, mid, surface_, codecSettings->width,
codecSettings->height);
if (!success) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
setEncodedImageID_ = env_->GetMethodID(
decoderClass_, "setEncodedImage", "(Ljava/nio/ByteBuffer;J)V");
// Call start()
jmethodID startID = env_->GetMethodID(decoderClass_, "start", "()V");
env_->CallVoidMethod(mediaCodecDecoder_, startID);
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t AndroidMediaCodecDecoder::Decode(
const EncodedImage& inputImage,
bool missingFrames,
const RTPFragmentationHeader* fragmentation,
const CodecSpecificInfo* codecSpecificInfo,
int64_t renderTimeMs) {
if (!vm_attached_) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
jobject byteBuffer =
env_->NewDirectByteBuffer(inputImage._buffer, inputImage._length);
env_->CallVoidMethod(
mediaCodecDecoder_, setEncodedImageID_, byteBuffer, renderTimeMs);
env_->DeleteLocalRef(byteBuffer);
return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
}
int32_t AndroidMediaCodecDecoder::RegisterDecodeCompleteCallback(
DecodedImageCallback* callback) {
__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t AndroidMediaCodecDecoder::Release() {
__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);
env_->DeleteGlobalRef(mediaCodecDecoder_);
mediaCodecDecoder_ = NULL;
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t AndroidMediaCodecDecoder::Reset() {
__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);
return WEBRTC_VIDEO_CODEC_OK;
}
} // namespace webrtc

View File

@ -1,108 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_ENGINE_TEST_ANDROID_JNI_ANDROID_MEDIA_CODEC_DECODER_H_
#define WEBRTC_VIDEO_ENGINE_TEST_ANDROID_JNI_ANDROID_MEDIA_CODEC_DECODER_H_
#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
namespace webrtc {
class AndroidMediaCodecDecoder : public VideoDecoder {
public:
AndroidMediaCodecDecoder(JavaVM* vm, jobject surface, jclass decoderClass);
virtual ~AndroidMediaCodecDecoder();
// Initialize the decoder with the information from the VideoCodec.
//
// Input:
// - inst : Codec settings
// - numberOfCores : Number of cores available for the decoder
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual int32_t InitDecode(
const VideoCodec* codecSettings, int32_t numberOfCores);
// Decode encoded image (as a part of a video stream). The decoded image
// will be returned to the user through the decode complete callback.
//
// Input:
// - inputImage : Encoded image to be decoded
// - missingFrames : True if one or more frames have been lost
// since the previous decode call.
// - fragmentation : Specifies where the encoded frame can be
// split into separate fragments. The meaning
// of fragment is codec specific, but often
// means that each fragment is decodable by
// itself.
// - codecSpecificInfo : Pointer to codec specific data
// - renderTimeMs : System time to render in milliseconds. Only
// used by decoders with internal rendering.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual int32_t
Decode(const EncodedImage& inputImage,
bool missingFrames,
const RTPFragmentationHeader* fragmentation,
const CodecSpecificInfo* codecSpecificInfo = NULL,
int64_t renderTimeMs = -1);
// Register an decode complete callback object.
//
// Input:
// - callback : Callback object which handles decoded images.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual int32_t RegisterDecodeCompleteCallback(
DecodedImageCallback* callback);
// Free decoder memory.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual int32_t Release();
// Reset decoder state and prepare for a new call.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual int32_t Reset();
// Codec configuration data sent out-of-band, i.e. in SIP call setup
//
// Input/Output:
// - buffer : Buffer pointer to the configuration data
// - size : The size of the configuration data in
// bytes
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual int32_t SetCodecConfigParameters(
const uint8_t* /*buffer*/, int32_t /*size*/) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
// Create a copy of the codec and its internal state.
//
// Return value : A copy of the instance if OK, NULL otherwise.
virtual VideoDecoder* Copy() { return NULL; }
private:
void Initialize(JavaVM* vm, jobject surface, jclass decoderClass);
JavaVM* vm_;
jobject surface_;
jobject mediaCodecDecoder_;
jclass decoderClass_;
JNIEnv* env_;
jmethodID setEncodedImageID_;
bool vm_attached_;
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_ENGINE_TEST_ANDROID_JNI_ANDROID_MEDIA_CODEC_DECODER_H_

View File

@ -1,495 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class org_webrtc_videoengineapp_ViEAndroidJavaAPI */
#ifndef _Included_org_webrtc_videoengineapp_ViEAndroidJavaAPI
#define _Included_org_webrtc_videoengineapp_ViEAndroidJavaAPI
#ifdef __cplusplus
extern "C" {
#endif
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: NativeInit
* Signature: (Landroid/content/Context;)Z
*/
JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_NativeInit
(JNIEnv *, jobject, jobject);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: GetVideoEngine
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetVideoEngine
(JNIEnv *, jobject);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: Init
* Signature: (Z)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Init
(JNIEnv *, jobject, jboolean);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: Terminate
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Terminate
(JNIEnv *, jobject);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: StartSend
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartSend
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: StopRender
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopRender
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: StopSend
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopSend
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: StartReceive
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartReceive
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: StopReceive
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopReceive
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: CreateChannel
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_CreateChannel
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: SetLocalReceiver
* Signature: (II)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetLocalReceiver
(JNIEnv *, jobject, jint, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: SetSendDestination
* Signature: (IILjava/lang/String;)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendDestination
(JNIEnv *, jobject, jint, jint, jstring);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: GetCodecs
* Signature: ()[Ljava/lang/String;
*/
JNIEXPORT jobjectArray JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCodecs
(JNIEnv *, jobject);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: SetReceiveCodec
* Signature: (IIIIII)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetReceiveCodec
(JNIEnv *, jobject, jint, jint, jint, jint, jint, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: SetSendCodec
* Signature: (IIIIII)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendCodec
(JNIEnv *, jobject, jint, jint, jint, jint, jint, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: AddRemoteRenderer
* Signature: (ILjava/lang/Object;)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_AddRemoteRenderer
(JNIEnv *, jobject, jint, jobject);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: RemoveRemoteRenderer
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_RemoveRemoteRenderer
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: StartRender
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartRender
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: StartCamera
* Signature: (II)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartCamera
(JNIEnv *, jobject, jint, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: StopCamera
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopCamera
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: GetCameraOrientation
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCameraOrientation
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: SetRotation
* Signature: (II)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetRotation
(JNIEnv *, jobject, jint, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: SetExternalMediaCodecDecoderRenderer
* Signature: (ILjava/lang/Object;)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetExternalMediaCodecDecoderRenderer
(JNIEnv *, jobject, jint, jobject);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: EnableNACK
* Signature: (IZ)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnableNACK
(JNIEnv *, jobject, jint, jboolean);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: EnablePLI
* Signature: (IZ)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnablePLI
(JNIEnv *, jobject, jint, jboolean);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: SetCallback
* Signature: (ILorg/webrtc/videoengineapp/IViEAndroidCallback;)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetCallback
(JNIEnv *, jobject, jint, jobject);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: StartIncomingRTPDump
* Signature: (ILjava/lang/String;)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartIncomingRTPDump
(JNIEnv *, jobject, jint, jstring);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: StopIncomingRTPDump
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopIncomingRTPDump
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_Create
* Signature: (Landroid/content/Context;)Z
*/
JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Create
(JNIEnv *, jobject, jobject);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_Delete
* Signature: ()Z
*/
JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Delete
(JNIEnv *, jobject);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_Init
* Signature: (Z)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Init
(JNIEnv *, jobject, jboolean);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_Terminate
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Terminate
(JNIEnv *, jobject);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_CreateChannel
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1CreateChannel
(JNIEnv *, jobject);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_DeleteChannel
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1DeleteChannel
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: ViE_DeleteChannel
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_ViE_1DeleteChannel
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_SetLocalReceiver
* Signature: (II)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetLocalReceiver
(JNIEnv *, jobject, jint, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_SetSendDestination
* Signature: (IILjava/lang/String;)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSendDestination
(JNIEnv *, jobject, jint, jint, jstring);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_StartListen
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartListen
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_StartPlayout
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayout
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_StartSend
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartSend
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_StopListen
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopListen
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_StopPlayout
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayout
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_StopSend
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopSend
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_SetSpeakerVolume
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSpeakerVolume
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_SetLoudspeakerStatus
* Signature: (Z)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetLoudspeakerStatus
(JNIEnv *, jobject, jboolean);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_StartPlayingFileLocally
* Signature: (ILjava/lang/String;Z)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayingFileLocally
(JNIEnv *, jobject, jint, jstring, jboolean);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_StopPlayingFileLocally
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayingFileLocally
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_StartPlayingFileAsMicrophone
* Signature: (ILjava/lang/String;Z)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayingFileAsMicrophone
(JNIEnv *, jobject, jint, jstring, jboolean);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_StopPlayingFileAsMicrophone
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayingFileAsMicrophone
(JNIEnv *, jobject, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_NumOfCodecs
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1NumOfCodecs
(JNIEnv *, jobject);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_GetCodecs
* Signature: ()[Ljava/lang/String;
*/
JNIEXPORT jobjectArray JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1GetCodecs
(JNIEnv *, jobject);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_SetSendCodec
* Signature: (II)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSendCodec
(JNIEnv *, jobject, jint, jint);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_SetECStatus
* Signature: (Z)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetECStatus
(JNIEnv *, jobject, jboolean);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_SetAGCStatus
* Signature: (Z)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetAGCStatus
(JNIEnv *, jobject, jboolean);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_SetNSStatus
* Signature: (Z)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetNSStatus
(JNIEnv *, jobject, jboolean);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_StartDebugRecording
* Signature: (Ljava/lang/String;)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartDebugRecording
(JNIEnv *, jobject, jstring);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_StopDebugRecording
* Signature: ()I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopDebugRecording
(JNIEnv *, jobject);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_StartIncomingRTPDump
* Signature: (ILjava/lang/String;)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartIncomingRTPDump
(JNIEnv *, jobject, jint, jstring);
/*
* Class: org_webrtc_videoengineapp_ViEAndroidJavaAPI
* Method: VoE_StopIncomingRTPDump
* Signature: (I)I
*/
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopIncomingRTPDump
(JNIEnv *, jobject, jint);
#ifdef __cplusplus
}
#endif
#endif

File diff suppressed because it is too large.

Binary file not shown (removed image, previous size: 2.5 KiB).

View File

@ -1,87 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout android:layout_width="fill_parent"
android:layout_height="fill_parent" android:orientation="vertical"
xmlns:android="http://schemas.android.com/apk/res/android">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:textStyle="bold"
android:textSize="24dip"
android:text="Audio Settings"></TextView>
<TextView android:id="@+id/TextView03"
android:layout_width="wrap_content"
android:layout_height="wrap_content">
</TextView>
<TextView android:id="@+id/TextView01"
android:layout_height="wrap_content"
android:layout_gravity="bottom"
android:layout_width="wrap_content"
android:text="@string/codecType">
</TextView>
<Spinner android:layout_height="wrap_content"
android:layout_width="fill_parent"
android:id="@+id/spVoiceCodecType">
</Spinner>
<LinearLayout android:id="@+id/LinearLayout02"
android:layout_height="wrap_content"
android:layout_width="fill_parent">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/aTxPort">
</TextView>
<EditText android:layout_height="wrap_content"
android:layout_width="wrap_content"
android:id="@+id/etATxPort">
</EditText>
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/aRxPort">
</TextView>
<EditText android:layout_height="wrap_content"
android:layout_width="wrap_content"
android:id="@+id/etARxPort" >
</EditText>
</LinearLayout>
<LinearLayout android:id="@+id/LinearLayout02"
android:layout_height="wrap_content"
android:layout_width="fill_parent">
<CheckBox android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/cbAECM"
android:text="@string/AECM">
</CheckBox>
<CheckBox android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/cbNoiseSuppression"
android:text="@string/NoiseSuppression">
</CheckBox>
<CheckBox android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/cbAutoGainControl"
android:text="@string/AutoGainControl">
</CheckBox>
</LinearLayout>
<LinearLayout android:id="@+id/LinearLayout02"
android:layout_height="wrap_content"
android:layout_width="fill_parent">
<CheckBox android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/cbSpeaker"
android:text="@string/speaker">
</CheckBox>
<CheckBox android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/cbDebugRecording"
android:text="@string/debugrecording">
</CheckBox>
<CheckBox android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/cbVoiceRTPDump"
android:text="@string/rtpdump">
</CheckBox>
</LinearLayout>
</LinearLayout>
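
Each checkbox above (cbAECM, cbNoiseSuppression, cbAutoGainControl) corresponds to one of the VoE_Set*Status toggles declared in the JNI header earlier in this change. A hedged sketch of that wiring; the listener code is an assumption, not taken from the demo:

// Inside the demo Activity, after setContentView(); 'vieAndroidAPI' is the
// ViEAndroidJavaAPI instance shown later in this change. Assumes
// android.widget.CheckBox and android.widget.CompoundButton are imported.
CheckBox aecm = (CheckBox) findViewById(R.id.cbAECM);
aecm.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
    @Override
    public void onCheckedChanged(CompoundButton button, boolean isChecked) {
        vieAndroidAPI.VoE_SetECStatus(isChecked);
    }
});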

View File

@ -1,43 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="horizontal"
android:layout_width="fill_parent"
android:layout_height="fill_parent">
<LinearLayout
android:orientation="vertical"
android:layout_width="120dip"
android:layout_height="fill_parent">
<LinearLayout android:id="@+id/llLocalView"
android:layout_width="fill_parent"
android:layout_height="80dip">
</LinearLayout>
<TextView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:layout_weight="1"
android:text="" />
<Button android:id="@+id/btSwitchCamera"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:text="@string/backCamera"
android:layout_gravity="bottom"/>
<Button android:id="@+id/btStartStopCall"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:text="@string/startCall"
android:layout_gravity="bottom"/>
<Button android:id="@+id/btExit"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:layout_gravity="bottom"
android:text="@string/exit"/>
</LinearLayout>
<LinearLayout
android:id="@+id/llRemoteView"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:layout_weight="1">
</LinearLayout>
</LinearLayout>

View File

@ -1,94 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout android:layout_width="fill_parent"
android:layout_height="fill_parent" android:layout_gravity="right"
android:orientation="vertical"
xmlns:android="http://schemas.android.com/apk/res/android">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/tvTitle"
android:textStyle="bold"
android:textSize="24dip"
android:text="@string/gSettings">
</TextView>
<TextView android:id="@+id/TextView03"
android:layout_width="wrap_content"
android:layout_height="wrap_content">
</TextView>
<LinearLayout android:orientation="horizontal"
android:id="@+id/LinearLayout02"
android:layout_height="wrap_content"
android:layout_width="fill_parent">
<CheckBox android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/cbVideoReceive"
android:text="@string/enableVideoReceive">
</CheckBox>
<CheckBox android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/cbVideoSend"
android:text="@string/enableVideoSend">
</CheckBox>
<CheckBox android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/cbVoice"
android:text="@string/enableVoice">
</CheckBox>
<CheckBox android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/cbLoopback"
android:text="@string/loopback">
</CheckBox>
<CheckBox android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/cbStats"
android:text="@string/stats">
</CheckBox>
</LinearLayout>
<TextView android:id="@+id/TextView02"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/remoteIp">
</TextView>
<EditText android:layout_height="wrap_content"
android:layout_width="fill_parent"
android:id="@+id/etRemoteIp" >
</EditText>
<LinearLayout android:orientation="horizontal"
android:id="@+id/LinearLayout03"
android:layout_height="wrap_content"
android:layout_width="fill_parent">
<RadioGroup
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:orientation="vertical"
android:id="@+id/radio_group1">
<RadioButton
android:id="@+id/radio_opengl"
android:onClick="onClick"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/opengl"
android:checked="true"
android:textColor="#fff"/>
<RadioButton
android:id="@+id/radio_surface"
android:onClick="onClick"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/surfaceview"
android:textColor="#fff" />
<RadioButton
android:id="@+id/radio_mediacodec"
android:onClick="onClick"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/mediacodec"
android:textColor="#fff" />
</RadioGroup>
</LinearLayout>
</LinearLayout>

View File

@ -1,18 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:orientation="vertical"
android:padding="3dip">
<TextView
android:layout_toRightOf="@+id/image"
android:padding="3dip"
android:layout_marginTop="2dip"
android:textColor="#000"
android:textStyle="bold"
android:id="@+id/spinner_row"
android:text="description"
android:layout_marginLeft="5dip"
android:layout_width="wrap_content"
android:layout_height="wrap_content"/>
</RelativeLayout>

View File

@ -1,17 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent">
<SurfaceView android:id="@+id/svLocal"
android:layout_width="wrap_content"
android:layout_height="wrap_content">
</SurfaceView>
<ImageView android:id="@+id/ivPreview"
android:layout_height="fill_parent"
android:layout_width="fill_parent">
</ImageView>
</LinearLayout>

View File

@ -1,24 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<TabHost xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@android:id/tabhost"
android:layout_width="fill_parent"
android:layout_height="fill_parent">
<LinearLayout
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent">
<TabWidget
android:id="@android:id/tabs"
android:layout_width="fill_parent"
android:layout_height="wrap_content" />
<FrameLayout
android:id="@android:id/tabcontent"
android:layout_width="fill_parent"
android:layout_height="fill_parent">
<include android:id="@+id/tab_video" layout="@layout/both" />
<include android:id="@+id/tab_config" layout="@layout/main" />
<include android:id="@+id/tab_vconfig" layout="@layout/vconfig" />
<include android:id="@+id/tab_aconfig" layout="@layout/aconfig" />
</FrameLayout>
</LinearLayout>
</TabHost>
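
The four includes above are the tab contents. A sketch of how an Activity could attach them to this TabHost; the activity name, layout file name, and tab captions are assumptions:

import android.app.TabActivity;
import android.os.Bundle;
import android.widget.TabHost;

// Hypothetical activity; assumes it lives in the app package so R resolves.
public class DemoTabsActivity extends TabActivity {
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.tabhost);  // assumed file name for this layout
        TabHost host = getTabHost();
        host.addTab(host.newTabSpec("video").setIndicator("Video")
                        .setContent(R.id.tab_video));
        host.addTab(host.newTabSpec("config").setIndicator("Settings")
                        .setContent(R.id.tab_config));
        host.addTab(host.newTabSpec("vconfig").setIndicator("Video config")
                        .setContent(R.id.tab_vconfig));
        host.addTab(host.newTabSpec("aconfig").setIndicator("Audio config")
                        .setContent(R.id.tab_aconfig));
    }
}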

View File

@ -1,73 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout android:layout_width="fill_parent"
android:layout_height="fill_parent" android:orientation="vertical"
xmlns:android="http://schemas.android.com/apk/res/android">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:textStyle="bold"
android:textSize="24dip"
android:text="@string/vSettings">
</TextView>
<TextView android:id="@+id/TextView03"
android:layout_width="wrap_content"
android:layout_height="wrap_content">
</TextView>
<TextView android:id="@+id/TextView01"
android:layout_height="wrap_content"
android:layout_width="wrap_content"
android:text="@string/codecType">
</TextView>
<Spinner android:layout_height="wrap_content"
android:layout_width="fill_parent"
android:id="@+id/spCodecType">
</Spinner>
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/tvCodecSize"
android:text="@string/codecSize">
</TextView>
<Spinner android:layout_height="wrap_content"
android:layout_width="fill_parent"
android:id="@+id/spCodecSize">
</Spinner>
<LinearLayout android:id="@+id/LinearLayout02"
android:layout_height="wrap_content"
android:layout_width="fill_parent">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/vTxPort">
</TextView>
<EditText android:layout_height="wrap_content"
android:layout_width="wrap_content"
android:id="@+id/etVTxPort" >
</EditText>
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/vRxPort">
</TextView>
<EditText android:layout_height="wrap_content"
android:layout_width="wrap_content"
android:id="@+id/etVRxPort" >
</EditText>
</LinearLayout>
<LinearLayout android:id="@+id/LinearLayout03"
android:layout_height="wrap_content"
android:layout_width="fill_parent">
<CheckBox android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/cbNack"
android:text="@string/nack">
</CheckBox>
<CheckBox android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:id="@+id/cbVideoRTPDump"
android:text="@string/rtpdump">
</CheckBox>
</LinearLayout>
</LinearLayout>

View File

@ -1,41 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">WebRTC</string>
<string name="error">Error</string>
<string name="errorCamera">Camera Error</string>
<string name="codectype_prompt">Choose a codec type</string>
<string name="demoTitle">Video Engine Android Demo</string>
<string name="codecType">Codec Type</string>
<string name="codecSize">Codec Size</string>
<string name="remoteIp">Remote IP address</string>
<string name="loopback">Loopback</string>
<string name="stats">Stats</string>
<string name="startListen">Start Listen</string>
<string name="startSend">Start Send</string>
<string name="startBoth">Start Both</string>
<string name="enableVoice">Voice</string>
<string name="enableVideoReceive">Video Receive</string>
<string name="enableVideoSend">Video Send</string>
<string name="gSettings">Global Settings</string>
<string name="vSettings">Video Settings</string>
<string name="vTxPort">Video Tx Port</string>
<string name="vRxPort">Video Rx Port</string>
<string name="aTxPort">Audio Tx Port</string>
<string name="aRxPort">Audio Rx Port</string>
<string name="AutoGainControl">AGC</string>
<string name="VoiceActivityDetection">VAD</string>
<string name="AECM">AECM</string>
<string name="NoiseSuppression">NS</string>
<string name="nack">NACK</string>
<string name="frontCamera">SwitchToFront</string>
<string name="backCamera">SwitchToBack</string>
<string name="startCall">StartCall</string>
<string name="stopCall">StopCall</string>
<string name="exit">Exit</string>
<string name="speaker">Speaker</string>
<string name="debugrecording">APMRecord</string>
<string name="rtpdump">rtpdump</string>
<string name="surfaceview">SurfaceView</string>
<string name="mediacodec">MediaCodec Decoder/Renderer</string>
<string name="opengl">OpenGL</string>
</resources>

View File

@ -1,417 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.videoengine;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceView;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.LinkedList;
class CodecState {
private static final String TAG = "CodecState";
private ViEMediaCodecDecoder mView;
private MediaFormat mFormat;
private boolean mSawInputEOS, mSawOutputEOS;
private MediaCodec mCodec;
private MediaFormat mOutputFormat;
private ByteBuffer[] mCodecInputBuffers;
private ByteBuffer[] mCodecOutputBuffers;
private LinkedList<Integer> mAvailableInputBufferIndices;
private LinkedList<Integer> mAvailableOutputBufferIndices;
private LinkedList<MediaCodec.BufferInfo> mAvailableOutputBufferInfos;
private long mLastMediaTimeUs;
public CodecState(
ViEMediaCodecDecoder view,
MediaFormat format,
MediaCodec codec) {
mView = view;
mFormat = format;
mSawInputEOS = mSawOutputEOS = false;
mCodec = codec;
mCodec.start();
mCodecInputBuffers = mCodec.getInputBuffers();
mCodecOutputBuffers = mCodec.getOutputBuffers();
mAvailableInputBufferIndices = new LinkedList<Integer>();
mAvailableOutputBufferIndices = new LinkedList<Integer>();
mAvailableOutputBufferInfos = new LinkedList<MediaCodec.BufferInfo>();
mLastMediaTimeUs = 0;
}
public void release() {
mCodec.stop();
mCodecInputBuffers = null;
mCodecOutputBuffers = null;
mOutputFormat = null;
mAvailableOutputBufferInfos = null;
mAvailableOutputBufferIndices = null;
mAvailableInputBufferIndices = null;
mCodec.release();
mCodec = null;
}
public void start() {
}
public void pause() {
}
public long getCurrentPositionUs() {
return mLastMediaTimeUs;
}
public void flush() {
mAvailableInputBufferIndices.clear();
mAvailableOutputBufferIndices.clear();
mAvailableOutputBufferInfos.clear();
mSawInputEOS = false;
mSawOutputEOS = false;
mCodec.flush();
}
public void doSomeWork() {
int index = mCodec.dequeueInputBuffer(0 /* timeoutUs */);
if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
mAvailableInputBufferIndices.add(index);
}
while (feedInputBuffer()) {}
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
index = mCodec.dequeueOutputBuffer(info, 0 /* timeoutUs */);
if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
mOutputFormat = mCodec.getOutputFormat();
} else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
mCodecOutputBuffers = mCodec.getOutputBuffers();
} else if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
mAvailableOutputBufferIndices.add(index);
mAvailableOutputBufferInfos.add(info);
}
while (drainOutputBuffer()) {}
}
/** returns true if more input data could be fed */
private boolean feedInputBuffer() {
if (mSawInputEOS || mAvailableInputBufferIndices.isEmpty()) {
return false;
}
int index = mAvailableInputBufferIndices.peekFirst().intValue();
ByteBuffer codecData = mCodecInputBuffers[index];
if (mView.hasFrame()) {
Frame frame = mView.dequeueFrame();
ByteBuffer buffer = frame.mBuffer;
if (buffer == null) {
return false;
}
if (codecData.capacity() < buffer.capacity()) {
Log.e(TAG, "Buffer is too small to copy a frame.");
// TODO(dwkang): split the frame into multiple buffers.
}
buffer.rewind();
codecData.rewind();
codecData.put(buffer);
codecData.rewind();
try {
mCodec.queueInputBuffer(
index, 0 /* offset */, buffer.capacity(), frame.mTimeStampUs,
0 /* flags */);
mAvailableInputBufferIndices.removeFirst();
} catch (MediaCodec.CryptoException e) {
Log.d(TAG, "CryptoException w/ errorCode "
+ e.getErrorCode() + ", '" + e.getMessage() + "'");
}
return true;
}
return false;
}
/** returns true if more output data could be drained */
private boolean drainOutputBuffer() {
if (mSawOutputEOS || mAvailableOutputBufferIndices.isEmpty()) {
return false;
}
int index = mAvailableOutputBufferIndices.peekFirst().intValue();
MediaCodec.BufferInfo info = mAvailableOutputBufferInfos.peekFirst();
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d(TAG, "saw output EOS.");
mSawOutputEOS = true;
return false;
}
long realTimeUs =
mView.getRealTimeUsForMediaTime(info.presentationTimeUs);
long nowUs = System.currentTimeMillis() * 1000;
long lateUs = nowUs - realTimeUs;
// video
boolean render;
// TODO(dwkang): For some extreme cases, just not doing rendering is not enough.
// Need to seek to the next key frame.
if (lateUs < -10000) {
// too early;
return false;
} else if (lateUs > 30000) {
Log.d(TAG, "video late by " + lateUs + " us. Skipping...");
render = false;
} else {
render = true;
mLastMediaTimeUs = info.presentationTimeUs;
}
MediaFormat format = mCodec.getOutputFormat();
Log.d(TAG, "Video output format: " + format.getInteger(MediaFormat.KEY_COLOR_FORMAT));
mCodec.releaseOutputBuffer(index, render);
mAvailableOutputBufferIndices.removeFirst();
mAvailableOutputBufferInfos.removeFirst();
return true;
}
}
class Frame {
public ByteBuffer mBuffer;
public long mTimeStampUs;
Frame(ByteBuffer buffer, long timeStampUs) {
mBuffer = buffer;
mTimeStampUs = timeStampUs;
}
}
class ViEMediaCodecDecoder {
private static final String TAG = "ViEMediaCodecDecoder";
private MediaExtractor mExtractor;
private CodecState mCodecState;
private int mState;
private static final int STATE_IDLE = 1;
private static final int STATE_PREPARING = 2;
private static final int STATE_PLAYING = 3;
private static final int STATE_PAUSED = 4;
private Handler mHandler;
private static final int EVENT_PREPARE = 1;
private static final int EVENT_DO_SOME_WORK = 2;
private long mDeltaTimeUs;
private long mDurationUs;
private SurfaceView mSurfaceView;
private LinkedList<Frame> mFrameQueue = new LinkedList<Frame>();
private Thread mLooperThread;
public boolean configure(SurfaceView surfaceView, int width, int height) {
mSurfaceView = surfaceView;
Log.d(TAG, "configure " + "width" + width + "height" + height + mSurfaceView.toString());
MediaFormat format = new MediaFormat();
format.setString(MediaFormat.KEY_MIME, "video/x-vnd.on2.vp8");
format.setInteger(MediaFormat.KEY_WIDTH, width);
format.setInteger(MediaFormat.KEY_HEIGHT, height);
Surface surface = mSurfaceView.getHolder().getSurface();
Log.d(TAG, "Surface " + surface.isValid());
MediaCodec codec;
try {
codec = MediaCodec.createDecoderByType("video/x-vnd.on2.vp8");
// SW VP8 decoder
// codec = MediaCodec.createByCodecName("OMX.google.vpx.decoder");
// Nexus10 HW VP8 decoder
// codec = MediaCodec.createByCodecName("OMX.Exynos.VP8.Decoder");
} catch (Exception e) {
// TODO(dwkang): replace this instanceof/throw with a narrower catch clause
// once the SDK advances.
if (e instanceof IOException) {
Log.e(TAG, "Failed to create MediaCodec for VP8.", e);
return false;
}
throw new RuntimeException(e);
}
codec.configure(format, surface, null, 0);
mCodecState = new CodecState(this, format, codec);
initMediaCodecView();
return true;
}
public void setEncodedImage(ByteBuffer buffer, long renderTimeMs) {
// TODO(dwkang): figure out why exceptions just make this thread finish.
try {
final long renderTimeUs = renderTimeMs * 1000;
ByteBuffer buf = ByteBuffer.allocate(buffer.capacity());
buf.put(buffer);
buf.rewind();
synchronized(mFrameQueue) {
mFrameQueue.add(new Frame(buf, renderTimeUs));
}
} catch (Exception e) {
e.printStackTrace();
}
}
public boolean hasFrame() {
synchronized(mFrameQueue) {
return !mFrameQueue.isEmpty();
}
}
public Frame dequeueFrame() {
synchronized(mFrameQueue) {
return mFrameQueue.removeFirst();
}
}
private void initMediaCodecView() {
Log.d(TAG, "initMediaCodecView");
mState = STATE_IDLE;
mLooperThread = new Thread()
{
@Override
public void run() {
Log.d(TAG, "Looper prepare");
Looper.prepare();
mHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
// TODO(dwkang): figure out why exceptions just make this thread finish.
try {
switch (msg.what) {
case EVENT_PREPARE:
{
mState = STATE_PAUSED;
ViEMediaCodecDecoder.this.start();
break;
}
case EVENT_DO_SOME_WORK:
{
ViEMediaCodecDecoder.this.doSomeWork();
mHandler.sendMessageDelayed(
mHandler.obtainMessage(EVENT_DO_SOME_WORK), 5);
break;
}
default:
break;
}
} catch (Exception e) {
e.printStackTrace();
}
}
};
Log.d(TAG, "Looper loop");
synchronized(ViEMediaCodecDecoder.this) {
ViEMediaCodecDecoder.this.notify();
}
Looper.loop();
}
};
mLooperThread.start();
// Wait until handler is set up.
synchronized(ViEMediaCodecDecoder.this) {
try {
ViEMediaCodecDecoder.this.wait(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
Log.d(TAG, "initMediaCodecView end");
}
public void start() {
Log.d(TAG, "start");
if (mState == STATE_PLAYING || mState == STATE_PREPARING) {
return;
} else if (mState == STATE_IDLE) {
mState = STATE_PREPARING;
Log.d(TAG, "Sending EVENT_PREPARE");
mHandler.sendMessage(mHandler.obtainMessage(EVENT_PREPARE));
return;
} else if (mState != STATE_PAUSED) {
throw new IllegalStateException();
}
mCodecState.start();
mHandler.sendMessage(mHandler.obtainMessage(EVENT_DO_SOME_WORK));
mDeltaTimeUs = -1;
mState = STATE_PLAYING;
Log.d(TAG, "start end");
}
public void reset() {
if (mState == STATE_PLAYING) {
mCodecState.pause();
}
mCodecState.release();
mDurationUs = -1;
mState = STATE_IDLE;
}
private void doSomeWork() {
mCodecState.doSomeWork();
}
public long getRealTimeUsForMediaTime(long mediaTimeUs) {
if (mDeltaTimeUs == -1) {
long nowUs = System.currentTimeMillis() * 1000;
mDeltaTimeUs = nowUs - mediaTimeUs;
}
return mDeltaTimeUs + mediaTimeUs;
}
}
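
Stripped of the frame queue and the render-time bookkeeping, CodecState.doSomeWork() above is the standard pre-API-21 MediaCodec pull loop. A condensed sketch, assuming a hypothetical helper class that is not part of the demo:

import java.nio.ByteBuffer;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.view.Surface;

final class Vp8DecodeLoop {  // hypothetical helper, for illustration only
    private final MediaCodec codec;
    private final ByteBuffer[] inputs;

    Vp8DecodeLoop(Surface surface, int width, int height) {
        MediaFormat format =
            MediaFormat.createVideoFormat("video/x-vnd.on2.vp8", width, height);
        codec = MediaCodec.createDecoderByType("video/x-vnd.on2.vp8");
        codec.configure(format, surface, null /* crypto */, 0 /* flags */);
        codec.start();
        inputs = codec.getInputBuffers();
    }

    // Feed one encoded frame, then drain whatever output is ready.
    void decode(ByteBuffer encoded, long presentationTimeUs) {
        int in = codec.dequeueInputBuffer(0 /* timeoutUs */);
        if (in >= 0) {  // INFO_TRY_AGAIN_LATER (-1) means no buffer free yet.
            ByteBuffer dst = inputs[in];
            encoded.rewind();
            dst.clear();
            dst.put(encoded);
            codec.queueInputBuffer(in, 0 /* offset */, encoded.capacity(),
                                   presentationTimeUs, 0 /* flags */);
        }
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int out = codec.dequeueOutputBuffer(info, 0 /* timeoutUs */);
        if (out >= 0) {
            // render=true pushes the frame straight to the configured Surface,
            // so the output buffer contents never need to be copied out.
            codec.releaseOutputBuffer(out, true /* render */);
        }
        // INFO_OUTPUT_FORMAT_CHANGED / INFO_OUTPUT_BUFFERS_CHANGED are ignored
        // here because nothing reads the output buffers directly.
    }
}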

View File

@ -1,151 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
package org.webrtc.videoengineapp;
import android.app.Activity;
import android.content.Context;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public class ViEAndroidJavaAPI {
public ViEAndroidJavaAPI(Context context) {
Log.d("*WEBRTCJ*", "Loading ViEAndroidJavaAPI...");
System.loadLibrary("webrtc-video-demo-jni");
Log.d("*WEBRTCJ*", "Calling native init...");
if (!NativeInit(context)) {
Log.e("*WEBRTCJ*", "Native init failed");
throw new RuntimeException("Native init failed");
}
else {
Log.d("*WEBRTCJ*", "Native init successful");
}
}
// Native API
private native boolean NativeInit(Context context);
// Video Engine API
// Initialization and Termination functions
public native int GetVideoEngine();
public native int Init(boolean enableTrace);
public native int Terminate();
public native int StartSend(int channel);
public native int StopRender(int channel);
public native int StopSend(int channel);
public native int StartReceive(int channel);
public native int StopReceive(int channel);
// Channel functions
public native int CreateChannel(int voiceChannel);
// Receiver & Destination functions
public native int SetLocalReceiver(int channel, int port);
public native int SetSendDestination(int channel, int port, String ipaddr);
// Codec
public native String[] GetCodecs();
public native int SetReceiveCodec(int channel, int codecNum,
int intbitRate, int width,
int height, int frameRate);
public native int SetSendCodec(int channel, int codecNum,
int intbitRate, int width,
int height, int frameRate);
// Rendering
public native int AddRemoteRenderer(int channel, Object glSurface);
public native int RemoveRemoteRenderer(int channel);
public native int StartRender(int channel);
// Capture
public native int StartCamera(int channel, int cameraNum);
public native int StopCamera(int cameraId);
public native int GetCameraOrientation(int cameraNum);
public native int SetRotation(int cameraId,int degrees);
// External Codec
public native int SetExternalMediaCodecDecoderRenderer(
int channel, Object glSurface);
// NACK
public native int EnableNACK(int channel, boolean enable);
// PLI
public native int EnablePLI(int channel, boolean enable);
// Enable stats callback
public native int SetCallback(int channel, IViEAndroidCallback callback);
public native int StartIncomingRTPDump(int channel, String file);
public native int StopIncomingRTPDump(int channel);
// Voice Engine API
// Create and Delete functions
public native boolean VoE_Create(Context context);
public native boolean VoE_Delete();
// Initialization and Termination functions
public native int VoE_Init(boolean enableTrace);
public native int VoE_Terminate();
// Channel functions
public native int VoE_CreateChannel();
public native int VoE_DeleteChannel(int channel);
public native int ViE_DeleteChannel(int channel);
// Receiver & Destination functions
public native int VoE_SetLocalReceiver(int channel, int port);
public native int VoE_SetSendDestination(int channel, int port,
String ipaddr);
// Media functions
public native int VoE_StartListen(int channel);
public native int VoE_StartPlayout(int channel);
public native int VoE_StartSend(int channel);
public native int VoE_StopListen(int channel);
public native int VoE_StopPlayout(int channel);
public native int VoE_StopSend(int channel);
// Volume
public native int VoE_SetSpeakerVolume(int volume);
// Hardware
public native int VoE_SetLoudspeakerStatus(boolean enable);
// Playout file locally
public native int VoE_StartPlayingFileLocally(
int channel,
String fileName,
boolean loop);
public native int VoE_StopPlayingFileLocally(int channel);
// Play file as microphone
public native int VoE_StartPlayingFileAsMicrophone(
int channel,
String fileName,
boolean loop);
public native int VoE_StopPlayingFileAsMicrophone(int channel);
// Codec-setting functions
public native int VoE_NumOfCodecs();
public native String[] VoE_GetCodecs();
public native int VoE_SetSendCodec(int channel, int index);
// VoiceEngine functions
public native int VoE_SetECStatus(boolean enable);
public native int VoE_SetAGCStatus(boolean enable);
public native int VoE_SetNSStatus(boolean enable);
public native int VoE_StartDebugRecording(String file);
public native int VoE_StopDebugRecording();
public native int VoE_StartIncomingRTPDump(int channel, String file);
public native int VoE_StopIncomingRTPDump(int channel);
}
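
For reference, a loopback voice call through this wrapper touches the methods above in the following order. The port, address, and wrapper class are illustrative assumptions:

import android.content.Context;

final class VoiceLoopbackSketch {  // hypothetical, for illustration only
    static void run(Context context) {
        ViEAndroidJavaAPI api = new ViEAndroidJavaAPI(context);
        api.VoE_Create(context);
        api.VoE_Init(false /* enableTrace */);
        int channel = api.VoE_CreateChannel();
        api.VoE_SetLocalReceiver(channel, 11111);                  // assumed port
        api.VoE_SetSendDestination(channel, 11111, "127.0.0.1");   // loopback
        api.VoE_StartListen(channel);
        api.VoE_StartPlayout(channel);
        api.VoE_StartSend(channel);
        // ... call runs; tear down in reverse order:
        api.VoE_StopSend(channel);
        api.VoE_StopPlayout(channel);
        api.VoE_StopListen(channel);
        api.VoE_DeleteChannel(channel);
        api.VoE_Terminate();
        api.VoE_Delete();
    }
}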

View File

@ -18,13 +18,6 @@
'test/libvietest/libvietest.gypi',
'test/auto_test/vie_auto_test.gypi',
],
'conditions': [
['OS=="android"', {
'includes': [
'test/android/android_video_demo.gypi',
],
}],
],
}],
],
}

View File

@ -0,0 +1,85 @@
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
{
'includes': ['build/common.gypi'],
'conditions': [
['OS=="android"', {
'targets': [
{
'target_name': 'libwebrtcdemo-jni',
'type': 'loadable_module',
'dependencies': [
'<(DEPTH)/third_party/icu/icu.gyp:icuuc',
'<(webrtc_root)/modules/modules.gyp:*',
'<(webrtc_root)/test/test.gyp:channel_transport',
'<(webrtc_root)/video_engine/video_engine.gyp:video_engine_core',
'<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine',
],
'sources': [
'examples/android/media_demo/jni/jni_helpers.cc',
'examples/android/media_demo/jni/on_load.cc',
'examples/android/media_demo/jni/video_engine_jni.cc',
'examples/android/media_demo/jni/voice_engine_jni.cc',
'examples/android/media_demo/jni/media_codec_video_decoder.cc',
],
'link_settings': {
'libraries': [
'-llog',
'-lGLESv2',
'-lOpenSLES',
],
}
},
{
'target_name': 'WebRTCDemo',
'type': 'none',
'dependencies': [
'libwebrtcdemo-jni',
'<(modules_java_gyp_path):*',
],
'actions': [
{
# TODO(yujie.mao): Convert building of the demo to a proper GYP
# target so this action is not needed once chromium's
# apk-building machinery can be used. (crbug.com/225101)
'action_name': 'build_webrtcdemo_apk',
'variables': {
'android_webrtc_demo_root': '<(webrtc_root)/examples/android/media_demo',
},
'inputs' : [
'<(PRODUCT_DIR)/lib.java/audio_device_module_java.jar',
'<(PRODUCT_DIR)/lib.java/video_capture_module_java.jar',
'<(PRODUCT_DIR)/lib.java/video_render_module_java.jar',
'<(PRODUCT_DIR)/libwebrtcdemo-jni.so',
'<!@(find <(android_webrtc_demo_root)/src -name "*.java")',
'<!@(find <(android_webrtc_demo_root)/res -type f)',
'<(android_webrtc_demo_root)/AndroidManifest.xml',
'<(android_webrtc_demo_root)/build.xml',
'<(android_webrtc_demo_root)/project.properties',
],
'outputs': ['<(PRODUCT_DIR)/WebRTCDemo-debug.apk'],
'action': ['bash', '-ec',
'rm -fr <(_outputs) <(android_webrtc_demo_root)/{bin,libs} && '
'mkdir -p <(android_webrtc_demo_root)/libs/<(android_app_abi) && '
'cp <(PRODUCT_DIR)/lib.java/audio_device_module_java.jar <(android_webrtc_demo_root)/libs/ &&'
'cp <(PRODUCT_DIR)/lib.java/video_capture_module_java.jar <(android_webrtc_demo_root)/libs/ &&'
'cp <(PRODUCT_DIR)/lib.java/video_render_module_java.jar <(android_webrtc_demo_root)/libs/ &&'
'<(android_strip) -o <(android_webrtc_demo_root)/libs/<(android_app_abi)/libwebrtcdemo-jni.so <(PRODUCT_DIR)/libwebrtcdemo-jni.so && '
'cd <(android_webrtc_demo_root) && '
'ant debug && '
'cd - && '
'cp <(android_webrtc_demo_root)/bin/WebRTCDemo-debug.apk <(_outputs)'
],
},
],
},
],
}],
],
}