First pass of MediaCodecDecoder, which uses the Android MediaCodec API.

Background: As of now, the MediaCodec API is the only public interface that lets us access low-level HW decoding resources on Android. ViEMediaCodecDecoder will be used for further experiments/exploration.

TODO: Fix known issues (detaching the thread from the VM and frequent GC).

Review URL: https://webrtc-codereview.appspot.com/933033
git-svn-id: http://webrtc.googlecode.com/svn/trunk@3233 4adac7df-926f-26a2-2b94-8c16560cd09d
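For background on what the new Java class does, the sketch below shows the generic MediaCodec decode loop that ViEMediaCodecDecoder builds on: dequeue an input buffer, copy one encoded VP8 frame into it, queue it, then drain output buffers and let MediaCodec render straight to the Surface. This is an illustrative sketch only; the class and method names (Vp8SurfaceDecoderSketch, queueEncodedFrame, drainOutput) are placeholders and not part of this change, while the MIME type and MediaCodec calls are the same API-16-era ones the committed class uses.

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.view.Surface;

import java.io.IOException;
import java.nio.ByteBuffer;

class Vp8SurfaceDecoderSketch {
    private MediaCodec codec;
    private ByteBuffer[] inputBuffers;

    // Configure a VP8 decoder that renders directly into the given Surface.
    void configure(Surface surface, int width, int height) throws IOException {
        MediaFormat format =
                MediaFormat.createVideoFormat("video/x-vnd.on2.vp8", width, height);
        codec = MediaCodec.createDecoderByType("video/x-vnd.on2.vp8");
        codec.configure(format, surface, null /* crypto */, 0 /* flags */);
        codec.start();
        inputBuffers = codec.getInputBuffers();
    }

    // Queue one encoded frame; returns false if no input buffer is free right now.
    boolean queueEncodedFrame(ByteBuffer encoded, long presentationTimeUs) {
        int index = codec.dequeueInputBuffer(0 /* timeoutUs */);
        if (index < 0) {
            return false;  // Try again on the next loop iteration.
        }
        ByteBuffer input = inputBuffers[index];
        input.clear();
        input.put(encoded);
        codec.queueInputBuffer(index, 0 /* offset */, input.position(),
                presentationTimeUs, 0 /* flags */);
        return true;
    }

    // Drain any ready output; rendering happens inside releaseOutputBuffer(..., true).
    void drainOutput() {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int index = codec.dequeueOutputBuffer(info, 0 /* timeoutUs */);
        while (index >= 0) {
            codec.releaseOutputBuffer(index, true /* render to Surface */);
            index = codec.dequeueOutputBuffer(info, 0 /* timeoutUs */);
        }
    }

    void release() {
        codec.stop();
        codec.release();
    }
}

Because the codec is configured with a Surface, releaseOutputBuffer(index, true) performs the rendering itself; that is why the native Decode() below only hands the encoded buffer to Java and returns WEBRTC_VIDEO_CODEC_NO_OUTPUT.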
@@ -112,7 +112,7 @@
#define WEBRTC_VIDEO_ENGINE_NETWORK_API
#define WEBRTC_VIDEO_ENGINE_RENDER_API
#define WEBRTC_VIDEO_ENGINE_RTP_RTCP_API
// #define WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
#define WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API

// Now handled by gyp:
// WEBRTC_VIDEO_ENGINE_FILE_API
@@ -221,7 +221,7 @@ Notice for all the files in this folder.
<property name="sdk.dir" location="${env.ANDROID_SDK_ROOT}"/>
<property name="sdk.version" value="${env.ANDROID_SDK_VERSION}"/>

<property name="target" value="android-10"/>
<property name="target" value="android-16"/>

<!-- ******************************************************* -->
<!-- **************** Overridable Properties *************** -->
@@ -288,7 +288,9 @@ include $(CLEAR_VARS)
LOCAL_MODULE_TAGS := tests
LOCAL_MODULE := libwebrtc-video-demo-jni
LOCAL_CPP_EXTENSION := .cc
LOCAL_SRC_FILES := vie_android_java_api.cc
LOCAL_SRC_FILES := \
    vie_android_java_api.cc \
    android_media_codec_decoder.cc
LOCAL_CFLAGS := \
    '-DWEBRTC_TARGET_PC' \
    '-DWEBRTC_ANDROID'
@@ -296,6 +298,7 @@ LOCAL_CFLAGS := \
LOCAL_C_INCLUDES := \
    external/gtest/include \
    $(LOCAL_PATH)/../../../.. \
    $(LOCAL_PATH)/../../../../.. \
    $(LOCAL_PATH)/../../../include \
    $(LOCAL_PATH)/../../../../voice_engine/include
@@ -0,0 +1,105 @@
/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <android/log.h>
#define LOG_TAG "AndroidMediaCodecDecoder"

#include <jni.h>

#include "android_media_codec_decoder.h"

namespace webrtc {

AndroidMediaCodecDecoder::AndroidMediaCodecDecoder(
    JavaVM* vm, jobject surface, jclass decoderClass)
    : decode_complete_callback_(NULL),
      vm_(vm),
      surface_(surface),
      mediaCodecDecoder_(NULL),
      decoderClass_(decoderClass),
      env_(NULL),
      setEncodedImageID_(NULL),
      vm_attached_(false) {
}

WebRtc_Word32 AndroidMediaCodecDecoder::InitDecode(
    const VideoCodec* codecSettings, WebRtc_Word32 numberOfCores) {
  __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);

  // TODO(dwkang): Detach this thread from VM. => this leads to a crash on
  //               "StopCall".
  int ret = vm_->AttachCurrentThread(&env_, NULL);
  // Get the JNI env for this thread
  if ((ret < 0) || !env_) {
    __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG,
                        "Could not attach thread to JVM (%d, %p)", ret,
                        env_);
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  } else {
    vm_attached_ = true;
  }

  // Initialize the media codec java decoder class.
  jmethodID mid = env_->GetMethodID(decoderClass_, "<init>", "()V");
  mediaCodecDecoder_ = env_->NewGlobalRef(env_->NewObject(decoderClass_, mid));

  mid = env_->GetMethodID(
      decoderClass_, "configure", "(Landroid/view/SurfaceView;II)V");
  env_->CallVoidMethod(mediaCodecDecoder_, mid, surface_,
                       codecSettings->width, codecSettings->height);

  setEncodedImageID_ = env_->GetMethodID(
      decoderClass_, "setEncodedImage", "(Ljava/nio/ByteBuffer;J)V");

  // Call start()
  jmethodID startID = env_->GetMethodID(decoderClass_, "start", "()V");
  env_->CallVoidMethod(mediaCodecDecoder_, startID);
  return WEBRTC_VIDEO_CODEC_OK;
}

WebRtc_Word32 AndroidMediaCodecDecoder::Decode(
    const EncodedImage& inputImage,
    bool missingFrames,
    const RTPFragmentationHeader* fragmentation,
    const CodecSpecificInfo* codecSpecificInfo,
    WebRtc_Word64 renderTimeMs) {
  if (!vm_attached_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  jobject byteBuffer =
      env_->NewDirectByteBuffer(inputImage._buffer, inputImage._length);
  env_->CallVoidMethod(
      mediaCodecDecoder_, setEncodedImageID_, byteBuffer, renderTimeMs);
  env_->DeleteLocalRef(byteBuffer);

  return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
}

WebRtc_Word32 AndroidMediaCodecDecoder::RegisterDecodeCompleteCallback(
    DecodedImageCallback* callback) {
  __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);
  return WEBRTC_VIDEO_CODEC_OK;
}

WebRtc_Word32 AndroidMediaCodecDecoder::Release() {
  __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);
  env_->DeleteGlobalRef(mediaCodecDecoder_);
  mediaCodecDecoder_ = NULL;

  return WEBRTC_VIDEO_CODEC_OK;
}

WebRtc_Word32 AndroidMediaCodecDecoder::Reset() {
  __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, "%s", __func__);
  return WEBRTC_VIDEO_CODEC_OK;
}

}  // namespace webrtc
@@ -0,0 +1,107 @@
/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef WEBRTC_VIDEO_ENGINE_TEST_ANDROID_JNI_ANDROID_MEDIA_CODEC_DECODER_H_
#define WEBRTC_VIDEO_ENGINE_TEST_ANDROID_JNI_ANDROID_MEDIA_CODEC_DECODER_H_

#include "modules/video_coding/codecs/interface/video_codec_interface.h"

namespace webrtc {

class AndroidMediaCodecDecoder : public VideoDecoder {
 public:
  AndroidMediaCodecDecoder(JavaVM* vm, jobject surface, jclass decoderClass);
  virtual ~AndroidMediaCodecDecoder() { }

  // Initialize the decoder with the information from the VideoCodec.
  //
  // Input:
  //          - inst              : Codec settings
  //          - numberOfCores     : Number of cores available for the decoder
  //
  // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
  virtual WebRtc_Word32 InitDecode(
      const VideoCodec* codecSettings, WebRtc_Word32 numberOfCores);

  // Decode encoded image (as a part of a video stream). The decoded image
  // will be returned to the user through the decode complete callback.
  //
  // Input:
  //          - inputImage        : Encoded image to be decoded
  //          - missingFrames     : True if one or more frames have been lost
  //                                since the previous decode call.
  //          - fragmentation     : Specifies where the encoded frame can be
  //                                split into separate fragments. The meaning
  //                                of fragment is codec specific, but often
  //                                means that each fragment is decodable by
  //                                itself.
  //          - codecSpecificInfo : Pointer to codec specific data
  //          - renderTimeMs      : System time to render in milliseconds. Only
  //                                used by decoders with internal rendering.
  //
  // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
  virtual WebRtc_Word32
  Decode(const EncodedImage& inputImage,
         bool missingFrames,
         const RTPFragmentationHeader* fragmentation,
         const CodecSpecificInfo* codecSpecificInfo = NULL,
         WebRtc_Word64 renderTimeMs = -1);

  // Register a decode complete callback object.
  //
  // Input:
  //          - callback          : Callback object which handles decoded images.
  //
  // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
  virtual WebRtc_Word32 RegisterDecodeCompleteCallback(
      DecodedImageCallback* callback);

  // Free decoder memory.
  //
  // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
  virtual WebRtc_Word32 Release();

  // Reset decoder state and prepare for a new call.
  //
  // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
  virtual WebRtc_Word32 Reset();

  // Codec configuration data sent out-of-band, i.e. in SIP call setup
  //
  // Input/Output:
  //          - buffer            : Buffer pointer to the configuration data
  //          - size              : The size of the configuration data in
  //                                bytes
  //
  // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
  virtual WebRtc_Word32 SetCodecConfigParameters(
      const WebRtc_UWord8* /*buffer*/, WebRtc_Word32 /*size*/) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // Create a copy of the codec and its internal state.
  //
  // Return value                 : A copy of the instance if OK, NULL otherwise.
  virtual VideoDecoder* Copy() { return NULL; }

 private:
  DecodedImageCallback* decode_complete_callback_;
  JavaVM* vm_;
  jobject surface_;
  jobject mediaCodecDecoder_;
  jclass decoderClass_;
  JNIEnv* env_;
  jmethodID setEncodedImageID_;
  bool vm_attached_;
};

}  // namespace webrtc

#endif  // WEBRTC_VIDEO_ENGINE_TEST_ANDROID_JNI_ANDROID_MEDIA_CODEC_DECODER_H_
@@ -17,7 +17,6 @@
#ifdef __cplusplus
extern "C" {
#endif

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    NativeInit
@@ -109,19 +108,18 @@ JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetLocal
/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    SetSendDestination
 * Signature: (IILjava/lang/String)I
 * Signature: (IILjava/lang/String;)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendDestination
  (JNIEnv *, jobject, jint, jint, jstring);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    GetCodecs(
 * Signature: ()I
 * Method:    GetCodecs
 * Signature: ()[Ljava/lang/String;
 */
JNIEXPORT jobjectArray JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCodecs(
    JNIEnv *env,
    jobject);
JNIEXPORT jobjectArray JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCodecs
  (JNIEnv *, jobject);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
@@ -195,6 +193,14 @@ JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCamer
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetRotation
  (JNIEnv *, jobject, jint, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    SetExternalMediaCodecDecoderRenderer
 * Signature: (ILjava/lang/Object;)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetExternalMediaCodecDecoderRenderer
  (JNIEnv *, jobject, jint, jobject);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    EnableNACK
@@ -238,7 +244,7 @@ JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopInco
/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_Create
 * Signature: (Landroid/content/Context)Z
 * Signature: (Landroid/content/Context;)Z
 */
JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Create
  (JNIEnv *, jobject, jobject);
@@ -355,8 +361,6 @@ JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Sto
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSpeakerVolume
  (JNIEnv *, jobject, jint);

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_SetLoudspeakerStatus
@@ -407,8 +411,8 @@ JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Num
/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_NumOfCodecs
 * Signature: ()Z
 * Method:    VoE_GetCodecs
 * Signature: ()[Ljava/lang/String;
 */
JNIEXPORT jobjectArray JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1GetCodecs
  (JNIEnv *, jobject);
@@ -440,7 +444,7 @@ JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Set
/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_SetNSStatus
 * Signature: (ZI)I
 * Signature: (Z)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetNSStatus
  (JNIEnv *, jobject, jboolean);
@@ -448,7 +452,7 @@ JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Set
/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    VoE_StartDebugRecording
 * Signature: (Ljava/lang/String)I
 * Signature: (Ljava/lang/String;)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartDebugRecording
  (JNIEnv *, jobject, jstring);
@@ -26,11 +26,13 @@
#include "vie_base.h"
#include "vie_codec.h"
#include "vie_capture.h"
#include "vie_external_codec.h"
#include "vie_network.h"
#include "vie_render.h"
#include "vie_rtp_rtcp.h"

#include "common_types.h"
#include "android_media_codec_decoder.h"

#define WEBRTC_LOG_TAG "*WEBRTCN*"
#define VALIDATE_BASE_POINTER \
@@ -118,6 +120,7 @@ typedef struct
  ViERTP_RTCP* rtp;
  ViERender* render;
  ViECapture* capture;
  ViEExternalCodec* externalCodec;
  VideoCallbackAndroid* callback;

} VideoEngineData;
@@ -335,6 +338,13 @@ JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetVideo
    return -1;
  }

  vieData.externalCodec = ViEExternalCodec::GetInterface(vieData.vie);
  if (!vieData.capture) {
    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
                        "Get External Codec sub-API failed");
    return -1;
  }

  return 0;
}
@@ -440,6 +450,11 @@ JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Terminat
                        "Failed to release Base sub-API");
  }

  if (!vieData.externalCodec || vieData.externalCodec->Release()) {
    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
                        "Failed to release External Codec sub-API");
  }

  // Delete Vie
  if (!VideoEngine::Delete(vieData.vie)) {
    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
@@ -960,6 +975,32 @@ JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetRotat
  return ret;
}

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    SetExternalMediaCodecDecoderRenderer
 * Signature: (ILjava/lang/Object;)I
 */
JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetExternalMediaCodecDecoderRenderer(
    JNIEnv *env,
    jobject,
    jint channel,
    jobject glSurface)
{
  __android_log_write(
      ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetExternalMediaCodecDecoder");

  jclass cls = env->FindClass("org/webrtc/videoengine/ViEMediaCodecDecoder");
  env->NewGlobalRef(cls);

  AndroidMediaCodecDecoder* mediaCodecDecoder =
      new AndroidMediaCodecDecoder(webrtcGlobalVM, glSurface, cls);

  // TODO(dwkang): Check the ownership of decoder object and release it
  // if needed.
  return vieData.externalCodec->RegisterExternalReceiveCodec(
      channel, 120, mediaCodecDecoder, true);
}

/*
 * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
 * Method:    EnableNACK
@@ -81,6 +81,13 @@
            android:layout_height="wrap_content"
            android:text="@string/surfaceview"
            android:textColor="#fff" />
        <RadioButton
            android:id="@+id/radio_mediacodec"
            android:onClick="onClick"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:text="@string/mediacodec"
            android:textColor="#fff" />
    </RadioGroup>
</LinearLayout>
@@ -36,5 +36,6 @@
    <string name="debugrecording">APMRecord</string>
    <string name="rtpdump">rtpdump</string>
    <string name="surfaceview">SurfaceView</string>
    <string name="mediacodec">MediaCodec Decoder/Renderer</string>
    <string name="opengl">OpenGL</string>
</resources>
@@ -0,0 +1,404 @@
/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

package org.webrtc.videoengine;

import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceView;

import java.nio.ByteBuffer;
import java.util.LinkedList;

class CodecState {
    private static final String TAG = "CodecState";

    private ViEMediaCodecDecoder mView;
    private MediaFormat mFormat;
    private boolean mSawInputEOS, mSawOutputEOS;

    private MediaCodec mCodec;
    private MediaFormat mOutputFormat;
    private ByteBuffer[] mCodecInputBuffers;
    private ByteBuffer[] mCodecOutputBuffers;

    private LinkedList<Integer> mAvailableInputBufferIndices;
    private LinkedList<Integer> mAvailableOutputBufferIndices;
    private LinkedList<MediaCodec.BufferInfo> mAvailableOutputBufferInfos;

    private long mLastMediaTimeUs;

    public CodecState(
            ViEMediaCodecDecoder view,
            MediaFormat format,
            MediaCodec codec) {
        mView = view;
        mFormat = format;
        mSawInputEOS = mSawOutputEOS = false;

        mCodec = codec;

        mCodec.start();
        mCodecInputBuffers = mCodec.getInputBuffers();
        mCodecOutputBuffers = mCodec.getOutputBuffers();

        mAvailableInputBufferIndices = new LinkedList<Integer>();
        mAvailableOutputBufferIndices = new LinkedList<Integer>();
        mAvailableOutputBufferInfos = new LinkedList<MediaCodec.BufferInfo>();

        mLastMediaTimeUs = 0;
    }

    public void release() {
        mCodec.stop();
        mCodecInputBuffers = null;
        mCodecOutputBuffers = null;
        mOutputFormat = null;

        mAvailableOutputBufferInfos = null;
        mAvailableOutputBufferIndices = null;
        mAvailableInputBufferIndices = null;

        mCodec.release();
        mCodec = null;
    }

    public void start() {
    }

    public void pause() {
    }

    public long getCurrentPositionUs() {
        return mLastMediaTimeUs;
    }

    public void flush() {
        mAvailableInputBufferIndices.clear();
        mAvailableOutputBufferIndices.clear();
        mAvailableOutputBufferInfos.clear();

        mSawInputEOS = false;
        mSawOutputEOS = false;

        mCodec.flush();
    }

    public void doSomeWork() {
        int index = mCodec.dequeueInputBuffer(0 /* timeoutUs */);

        if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
            mAvailableInputBufferIndices.add(new Integer(index));
        }

        while (feedInputBuffer()) {}

        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        index = mCodec.dequeueOutputBuffer(info, 0 /* timeoutUs */);

        if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            mOutputFormat = mCodec.getOutputFormat();
        } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            mCodecOutputBuffers = mCodec.getOutputBuffers();
        } else if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
            mAvailableOutputBufferIndices.add(new Integer(index));
            mAvailableOutputBufferInfos.add(info);
        }

        while (drainOutputBuffer()) {}
    }

    /** returns true if more input data could be fed */
    private boolean feedInputBuffer() {
        if (mSawInputEOS || mAvailableInputBufferIndices.isEmpty()) {
            return false;
        }

        int index = mAvailableInputBufferIndices.peekFirst().intValue();

        ByteBuffer codecData = mCodecInputBuffers[index];

        if (mView.hasFrame()) {
            Frame frame = mView.dequeueFrame();
            ByteBuffer buffer = frame.mBuffer;
            if (buffer == null) {
                return false;
            }
            if (codecData.capacity() < buffer.capacity()) {
                Log.e(TAG, "Buffer is too small to copy a frame.");
                // TODO(dwkang): split the frame into the multiple buffer.
            }
            buffer.rewind();
            codecData.rewind();
            codecData.put(buffer);
            codecData.rewind();

            try {
                mCodec.queueInputBuffer(
                        index, 0 /* offset */, buffer.capacity(), frame.mTimeStampUs,
                        0 /* flags */);

                mAvailableInputBufferIndices.removeFirst();
            } catch (MediaCodec.CryptoException e) {
                Log.d(TAG, "CryptoException w/ errorCode "
                        + e.getErrorCode() + ", '" + e.getMessage() + "'");
            }

            return true;
        }
        return false;
    }

    /** returns true if more output data could be drained */
    private boolean drainOutputBuffer() {
        if (mSawOutputEOS || mAvailableOutputBufferIndices.isEmpty()) {
            return false;
        }

        int index = mAvailableOutputBufferIndices.peekFirst().intValue();
        MediaCodec.BufferInfo info = mAvailableOutputBufferInfos.peekFirst();

        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            Log.d(TAG, "saw output EOS.");

            mSawOutputEOS = true;
            return false;
        }

        long realTimeUs =
                mView.getRealTimeUsForMediaTime(info.presentationTimeUs);
        long nowUs = System.currentTimeMillis() * 1000;
        long lateUs = nowUs - realTimeUs;

        // video
        boolean render;

        // TODO(dwkang): For some extreme cases, just not doing rendering is not enough.
        //               Need to seek to the next key frame.
        if (lateUs < -10000) {
            // too early;
            return false;
        } else if (lateUs > 30000) {
            Log.d(TAG, "video late by " + lateUs + " us. Skipping...");
            render = false;
        } else {
            render = true;
            mLastMediaTimeUs = info.presentationTimeUs;
        }

        MediaFormat format = mCodec.getOutputFormat();
        Log.d(TAG, "Video output format :" + format.getInteger(MediaFormat.KEY_COLOR_FORMAT));
        mCodec.releaseOutputBuffer(index, render);

        mAvailableOutputBufferIndices.removeFirst();
        mAvailableOutputBufferInfos.removeFirst();
        return true;
    }
}

class Frame {
    public ByteBuffer mBuffer;
    public long mTimeStampUs;

    Frame(ByteBuffer buffer, long timeStampUs) {
        mBuffer = buffer;
        mTimeStampUs = timeStampUs;
    }
}

class ViEMediaCodecDecoder {
    private static final String TAG = "ViEMediaCodecDecoder";

    private MediaExtractor mExtractor;

    private CodecState mCodecState;

    private int mState;
    private static final int STATE_IDLE = 1;
    private static final int STATE_PREPARING = 2;
    private static final int STATE_PLAYING = 3;
    private static final int STATE_PAUSED = 4;

    private Handler mHandler;
    private static final int EVENT_PREPARE = 1;
    private static final int EVENT_DO_SOME_WORK = 2;

    private long mDeltaTimeUs;
    private long mDurationUs;

    private SurfaceView mSurfaceView;
    private LinkedList<Frame> mFrameQueue = new LinkedList<Frame>();

    private Thread mLooperThread;

    public void configure(SurfaceView surfaceView, int width, int height) {
        mSurfaceView = surfaceView;
        Log.d(TAG, "configure " + "width" + width + "height" + height + mSurfaceView.toString());

        MediaFormat format = new MediaFormat();
        format.setString(MediaFormat.KEY_MIME, "video/x-vnd.on2.vp8");
        format.setInteger(MediaFormat.KEY_WIDTH, width);
        format.setInteger(MediaFormat.KEY_HEIGHT, height);
        MediaCodec codec = MediaCodec.createDecoderByType("video/x-vnd.on2.vp8");
        // SW VP8 decoder
        // MediaCodec codec = MediaCodec.createByCodecName("OMX.google.vpx.decoder");
        // Nexus10 HW VP8 decoder
        // MediaCodec codec = MediaCodec.createByCodecName("OMX.Exynos.VP8.Decoder");
        Surface surface = mSurfaceView.getHolder().getSurface();
        Log.d(TAG, "Surface " + surface.isValid());
        codec.configure(
                format, surface, null, 0);
        mCodecState = new CodecState(this, format, codec);

        initMediaCodecView();
    }

    public void setEncodedImage(ByteBuffer buffer, long renderTimeMs) {
        // TODO(dwkang): figure out why exceptions just make this thread finish.
        try {
            final long renderTimeUs = renderTimeMs * 1000;
            ByteBuffer buf = ByteBuffer.allocate(buffer.capacity());
            buf.put(buffer);
            buf.rewind();
            synchronized(mFrameQueue) {
                mFrameQueue.add(new Frame(buf, renderTimeUs));
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public boolean hasFrame() {
        synchronized(mFrameQueue) {
            return !mFrameQueue.isEmpty();
        }
    }

    public Frame dequeueFrame() {
        synchronized(mFrameQueue) {
            return mFrameQueue.removeFirst();
        }
    }

    private void initMediaCodecView() {
        Log.d(TAG, "initMediaCodecView");
        mState = STATE_IDLE;

        mLooperThread = new Thread()
        {
            @Override
            public void run() {
                Log.d(TAG, "Looper prepare");
                Looper.prepare();
                mHandler = new Handler() {
                    @Override
                    public void handleMessage(Message msg) {
                        // TODO(dwkang): figure out exceptions just make this thread finish.
                        try {
                            switch (msg.what) {
                                case EVENT_PREPARE:
                                {
                                    mState = STATE_PAUSED;
                                    ViEMediaCodecDecoder.this.start();
                                    break;
                                }

                                case EVENT_DO_SOME_WORK:
                                {
                                    ViEMediaCodecDecoder.this.doSomeWork();

                                    mHandler.sendMessageDelayed(
                                            mHandler.obtainMessage(EVENT_DO_SOME_WORK), 5);
                                    break;
                                }

                                default:
                                    break;
                            }
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                };
                Log.d(TAG, "Looper loop");
                synchronized(ViEMediaCodecDecoder.this) {
                    ViEMediaCodecDecoder.this.notify();
                }
                Looper.loop();
            }
        };
        mLooperThread.start();

        // Wait until handler is set up.
        synchronized(ViEMediaCodecDecoder.this) {
            try {
                ViEMediaCodecDecoder.this.wait(1000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        Log.d(TAG, "initMediaCodecView end");
    }

    public void start() {
        Log.d(TAG, "start");

        if (mState == STATE_PLAYING || mState == STATE_PREPARING) {
            return;
        } else if (mState == STATE_IDLE) {
            mState = STATE_PREPARING;
            Log.d(TAG, "Sending EVENT_PREPARE");
            mHandler.sendMessage(mHandler.obtainMessage(EVENT_PREPARE));
            return;
        } else if (mState != STATE_PAUSED) {
            throw new IllegalStateException();
        }

        mCodecState.start();

        mHandler.sendMessage(mHandler.obtainMessage(EVENT_DO_SOME_WORK));

        mDeltaTimeUs = -1;
        mState = STATE_PLAYING;

        Log.d(TAG, "start end");
    }

    public void reset() {
        if (mState == STATE_PLAYING) {
            mCodecState.pause();
        }

        mCodecState.release();

        mDurationUs = -1;
        mState = STATE_IDLE;
    }

    private void doSomeWork() {
        mCodecState.doSomeWork();
    }

    public long getRealTimeUsForMediaTime(long mediaTimeUs) {
        if (mDeltaTimeUs == -1) {
            long nowUs = System.currentTimeMillis() * 1000;
            mDeltaTimeUs = nowUs - mediaTimeUs;
        }

        return mDeltaTimeUs + mediaTimeUs;
    }
}
@@ -72,6 +72,10 @@ public class ViEAndroidJavaAPI {
    public native int GetCameraOrientation(int cameraNum);
    public native int SetRotation(int cameraId,int degrees);

    // External Codec
    public native int SetExternalMediaCodecDecoderRenderer(
            int channel, Object glSurface);

    // NACK
    public native int EnableNACK(int channel, boolean enable);
@@ -122,7 +122,12 @@ public class WebRTCDemo extends TabActivity implements IViEAndroidCallback,
    private boolean loopbackMode = true;
    private CheckBox cbStats;
    private boolean isStatsOn = true;
    private boolean useOpenGLRender = true;
    public enum RenderType {
        OPENGL,
        SURFACE,
        MEDIACODEC
    }
    RenderType renderType = RenderType.OPENGL;

    // Video settings
    private Spinner spCodecType;
@@ -499,10 +504,12 @@ public class WebRTCDemo extends TabActivity implements IViEAndroidCallback,
        RadioGroup radioGroup = (RadioGroup) findViewById(R.id.radio_group1);
        radioGroup.clearCheck();
        if (useOpenGLRender == true) {
        if (renderType == RenderType.OPENGL) {
            radioGroup.check(R.id.radio_opengl);
        } else {
        } else if (renderType == RenderType.SURFACE) {
            radioGroup.check(R.id.radio_surface);
        } else if (renderType == RenderType.MEDIACODEC) {
            radioGroup.check(R.id.radio_mediacodec);
        }

        etRemoteIp = (EditText) findViewById(R.id.etRemoteIp);
@@ -604,13 +611,25 @@ public class WebRTCDemo extends TabActivity implements IViEAndroidCallback,
                getRemoteIPString());

        if (enableVideoReceive) {
            if (useOpenGLRender) {
            if (renderType == RenderType.OPENGL) {
                Log.v(TAG, "Create OpenGL Render");
                remoteSurfaceView = ViERenderer.CreateRenderer(this, true);
                ret = vieAndroidAPI.AddRemoteRenderer(channel, remoteSurfaceView);
            } else {
            } else if (renderType == RenderType.SURFACE) {
                Log.v(TAG, "Create SurfaceView Render");
                remoteSurfaceView = ViERenderer.CreateRenderer(this, false);
            } else if (renderType == RenderType.MEDIACODEC) {
                Log.v(TAG, "Create MediaCodec Decoder/Renderer");
                remoteSurfaceView = new SurfaceView(this);
            }

            if (mLlRemoteSurface != null) {
                mLlRemoteSurface.addView(remoteSurfaceView);
            }

            if (renderType == RenderType.MEDIACODEC) {
                ret = vieAndroidAPI.SetExternalMediaCodecDecoderRenderer(
                        channel, remoteSurfaceView);
            } else {
                ret = vieAndroidAPI.AddRemoteRenderer(channel, remoteSurfaceView);
            }
@@ -653,12 +672,6 @@ public class WebRTCDemo extends TabActivity implements IViEAndroidCallback,
            }
        }

        if (enableVideoReceive) {
            if (mLlRemoteSurface != null) {
                mLlRemoteSurface.addView(remoteSurfaceView);
            }
        }

        isStatsOn = cbStats.isChecked();
        if (isStatsOn) {
            addStatusView();
@@ -841,10 +854,13 @@ public class WebRTCDemo extends TabActivity implements IViEAndroidCallback,
                }
                break;
            case R.id.radio_surface:
                useOpenGLRender = false;
                renderType = RenderType.SURFACE;
                break;
            case R.id.radio_opengl:
                useOpenGLRender = true;
                renderType = RenderType.OPENGL;
                break;
            case R.id.radio_mediacodec:
                renderType = RenderType.MEDIACODEC;
                break;
            case R.id.cbNack:
                enableNack = cbEnableNack.isChecked();