Add VP8 video decoding hw acceleration support to Java Peerconnection library.
For now only the NVidia decoder is supported; Qualcomm support will be added once b/16353967 is fixed.

TODO:
- Support queuing 2-3 decoder input buffers.
- Add average decoding time statistics.
- Add Qualcomm hw decoder support.

BUG=3030
R=tkchin@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/20969004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@6758 4adac7df-926f-26a2-2b94-8c16560cd09d
Parent: 6f48f1bf68
Commit: efe4b9af49
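For context before the diff: the decoder is split into a thin Java shim over android.media.MediaCodec (new file MediaCodecVideoDecoder.java below) and a C++ webrtc::VideoDecoder implementation that drives it over JNI. The shim follows the standard synchronous MediaCodec pattern: dequeue an input buffer, copy the encoded VP8 frame into it and queue it, then dequeue an output buffer, read the decoded YUV data, and release the buffer back to the codec. The sketch below only illustrates that loop and is not code from this change; the class name Vp8DecodeLoopSketch and its methods are hypothetical, while the MediaCodec calls are the standard Android API (buffer-array variant).

import android.media.MediaCodec;
import android.media.MediaFormat;
import java.nio.ByteBuffer;

// Minimal synchronous MediaCodec VP8 decode loop (illustrative sketch only).
class Vp8DecodeLoopSketch {
  private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
  private static final long DEQUEUE_TIMEOUT_US = 1000000;  // 1 sec, as in the new class.

  private final MediaCodec codec;
  private ByteBuffer[] inputBuffers;
  private ByteBuffer[] outputBuffers;

  Vp8DecodeLoopSketch(String codecName, int width, int height, int colorFormat)
      throws java.io.IOException {
    codec = MediaCodec.createByCodecName(codecName);
    MediaFormat format = MediaFormat.createVideoFormat(VP8_MIME_TYPE, width, height);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
    codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
    codec.start();
    inputBuffers = codec.getInputBuffers();
    outputBuffers = codec.getOutputBuffers();
  }

  // Feeds one encoded VP8 frame and returns the decoded output buffer index,
  // or a negative value if no output is available yet. Output format changes
  // are ignored in this sketch.
  int decodeOneFrame(byte[] encodedFrame, long timestampUs) {
    int inIndex = codec.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
    if (inIndex < 0) {
      return -1;  // No input buffer available.
    }
    ByteBuffer in = inputBuffers[inIndex];
    in.clear();
    in.put(encodedFrame);
    codec.queueInputBuffer(inIndex, 0, encodedFrame.length, timestampUs, 0);

    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int outIndex = codec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT_US);
    if (outIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
      outputBuffers = codec.getOutputBuffers();
      outIndex = codec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT_US);
    }
    if (outIndex >= 0) {
      // Caller copies/converts YUV data from outputBuffers[outIndex], then:
      codec.releaseOutputBuffer(outIndex, false /* render */);
    }
    return outIndex;
  }

  void release() {
    codec.stop();
    codec.release();
  }
}

The real MediaCodecVideoDecoder.java in this change exposes each of these steps as a separate package-private method (dequeueInputBuffer, queueInputBuffer, dequeueOutputBuffer, releaseOutputBuffer) so that peerconnection_jni.cc can run the loop from its codec thread and do the frame copy and NV12-to-I420 conversion on the C++ side.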
@@ -74,8 +74,10 @@
 #include "talk/media/base/videorenderer.h"
 #include "talk/media/devices/videorendererfactory.h"
 #include "talk/media/webrtc/webrtcvideocapturer.h"
+#include "talk/media/webrtc/webrtcvideodecoderfactory.h"
 #include "talk/media/webrtc/webrtcvideoencoderfactory.h"
 #include "third_party/icu/source/common/unicode/unistr.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
 #include "third_party/libyuv/include/libyuv/convert_from.h"
 #include "third_party/libyuv/include/libyuv/video_common.h"
 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
@@ -86,7 +88,13 @@
 
 #ifdef ANDROID
 #include "webrtc/system_wrappers/interface/logcat_trace_context.h"
+using webrtc::CodecSpecificInfo;
+using webrtc::DecodedImageCallback;
+using webrtc::EncodedImage;
+using webrtc::I420VideoFrame;
 using webrtc::LogcatTraceContext;
+using webrtc::RTPFragmentationHeader;
+using webrtc::VideoCodec;
 #endif
 
 using icu::UnicodeString;
@@ -264,6 +272,7 @@ class ClassReferenceHolder {
 #ifdef ANDROID
   LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
   LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
 #endif
   LoadClass(jni, "org/webrtc/MediaSource$State");
   LoadClass(jni, "org/webrtc/MediaStream");
@@ -1134,14 +1143,16 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
 // into its own .h/.cc pair, if/when the JNI helper stuff above is extracted
 // from this file.
 
-//#define TRACK_BUFFER_TIMING
-#ifdef TRACK_BUFFER_TIMING
 #include <android/log.h>
-#define TAG "MediaCodecVideoEncoder"
+//#define TRACK_BUFFER_TIMING
+#define TAG "MediaCodecVideo"
+#ifdef TRACK_BUFFER_TIMING
 #define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
 #else
 #define ALOGV(...)
 #endif
+#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
+#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
 
 // Color formats supported by encoder - should mirror supportedColorList
 // from MediaCodecVideoEncoder.java
@@ -1254,6 +1265,7 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
   // Touched only on codec_thread_ so no explicit synchronization necessary.
   int width_;   // Frame width in pixels.
   int height_;  // Frame height in pixels.
+  bool inited_;
   enum libyuv::FourCC encoder_fourcc_;  // Encoder color space format.
   int last_set_bitrate_kbps_;  // Last-requested bitrate in kbps.
   int last_set_fps_;  // Last-requested frame rate.
@@ -1412,7 +1424,7 @@ void MediaCodecVideoEncoder::CheckOnCodecThread() {
 }
 
 void MediaCodecVideoEncoder::ResetCodec() {
-  LOG(LS_ERROR) << "ResetCodec";
+  ALOGE("ResetCodec");
   if (Release() != WEBRTC_VIDEO_CODEC_OK ||
       codec_thread_->Invoke<int32_t>(Bind(
           &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, 0, 0, 0, 0))
@@ -1428,7 +1440,7 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
   CheckOnCodecThread();
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
-  LOG(LS_INFO) << "InitEncodeOnCodecThread " << width << " x " << height;
+  ALOGD("InitEncodeOnCodecThread %d x %d", width, height);
 
   if (width == 0) {
     width = width_;
@@ -1459,6 +1471,7 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
   if (IsNull(jni, input_buffers))
     return WEBRTC_VIDEO_CODEC_ERROR;
 
+  inited_ = true;
   switch (GetIntField(jni, *j_media_codec_video_encoder_,
                       j_color_format_field_)) {
     case COLOR_FormatYUV420Planar:
@@ -1542,7 +1555,7 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
 
-  ALOGV("Frame # %d. Buffer # %d. TS: %lld.",
+  ALOGV("Encode frame # %d. Buffer # %d. TS: %lld.",
       frames_received_, j_input_buffer_index, frame.render_time_ms());
 
   jobject j_input_buffer = input_buffers_[j_input_buffer_index];
@@ -1586,10 +1599,12 @@ int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
 }
 
 int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
+  if (!inited_)
+    return WEBRTC_VIDEO_CODEC_OK;
   CheckOnCodecThread();
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
-  LOG(LS_INFO) << "Frames received: " << frames_received_ <<
-      ". Frames dropped: " << frames_dropped_;
+  ALOGD("EncoderRelease: Frames received: %d. Frames dropped: %d.",
+      frames_received_,frames_dropped_);
   ScopedLocalRefFrame local_ref_frame(jni);
   for (size_t i = 0; i < input_buffers_.size(); ++i)
     jni->DeleteGlobalRef(input_buffers_[i]);
@@ -1629,6 +1644,7 @@ void MediaCodecVideoEncoder::ResetParameters(JNIEnv* jni) {
   height_ = 0;
   yuv_size_ = 0;
   drop_next_input_frame_ = false;
+  inited_ = false;
  CHECK(input_buffers_.empty(),
        "ResetParameters called while holding input_buffers_!");
 }
@@ -1678,7 +1694,7 @@ bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
         1000;
     last_output_timestamp_ms_ = capture_time_ms;
     frames_in_queue_--;
-    ALOGV("Got output buffer # %d. TS: %lld. Latency: %lld",
+    ALOGV("Encoder got output buffer # %d. TS: %lld. Latency: %lld",
         output_buffer_index, last_output_timestamp_ms_,
         last_input_timestamp_ms_ - last_output_timestamp_ms_);
 
@@ -1774,7 +1790,7 @@ MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
   // encoder?  Sure would be.  Too bad it doesn't.  So we hard-code some
   // reasonable defaults.
   supported_codecs_.push_back(
-      VideoCodec(kVideoCodecVP8, "VP8", 1920, 1088, 30));
+      VideoCodec(kVideoCodecVP8, "VP8", 1280, 1280, 30));
 }
 
 MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {}
@@ -1801,6 +1817,429 @@ void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
   delete encoder;
 }
 
+class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
+                               public talk_base::MessageHandler {
+ public:
+  explicit MediaCodecVideoDecoder(JNIEnv* jni);
+  virtual ~MediaCodecVideoDecoder();
+
+  virtual int32_t InitDecode(const VideoCodec* codecSettings,
+                             int32_t numberOfCores) OVERRIDE;
+
+  virtual int32_t
+  Decode(const EncodedImage& inputImage, bool missingFrames,
+         const RTPFragmentationHeader* fragmentation,
+         const CodecSpecificInfo* codecSpecificInfo = NULL,
+         int64_t renderTimeMs = -1) OVERRIDE;
+
+  virtual int32_t RegisterDecodeCompleteCallback(
+      DecodedImageCallback* callback) OVERRIDE;
+
+  virtual int32_t Release() OVERRIDE;
+
+  virtual int32_t Reset() OVERRIDE;
+  // talk_base::MessageHandler implementation.
+  virtual void OnMessage(talk_base::Message* msg) OVERRIDE;
+
+ private:
+  // CHECK-fail if not running on |codec_thread_|.
+  void CheckOnCodecThread();
+
+  int32_t InitDecodeOnCodecThread();
+  int32_t ReleaseOnCodecThread();
+  int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
+
+  bool key_frame_required_;
+  bool inited_;
+  VideoCodec codec_;
+  I420VideoFrame decoded_image_;
+  DecodedImageCallback* callback_;
+  int frames_received_;  // Number of frames received by decoder.
+
+  // State that is constant for the lifetime of this object once the ctor
+  // returns.
+  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
+  ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_;
+  ScopedGlobalRef<jobject> j_media_codec_video_decoder_;
+  jmethodID j_init_decode_method_;
+  jmethodID j_release_method_;
+  jmethodID j_dequeue_input_buffer_method_;
+  jmethodID j_queue_input_buffer_method_;
+  jmethodID j_dequeue_output_buffer_method_;
+  jmethodID j_release_output_buffer_method_;
+  jfieldID j_input_buffers_field_;
+  jfieldID j_output_buffers_field_;
+  jfieldID j_color_format_field_;
+  jfieldID j_width_field_;
+  jfieldID j_height_field_;
+  jfieldID j_stride_field_;
+  jfieldID j_slice_height_field_;
+
+  // Global references; must be deleted in Release().
+  std::vector<jobject> input_buffers_;
+};
+
+MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni) :
+  key_frame_required_(true),
+  inited_(false),
+  codec_thread_(new Thread()),
+  j_media_codec_video_decoder_class_(
+      jni,
+      FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")),
+  j_media_codec_video_decoder_(
+      jni,
+      jni->NewObject(*j_media_codec_video_decoder_class_,
+                     GetMethodID(jni,
+                                 *j_media_codec_video_decoder_class_,
+                                 "<init>",
+                                 "()V"))) {
+  ScopedLocalRefFrame local_ref_frame(jni);
+  codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
+  CHECK(codec_thread_->Start(), "Failed to start MediaCodecVideoDecoder");
+
+  j_init_decode_method_ = GetMethodID(jni,
+                                      *j_media_codec_video_decoder_class_,
+                                      "initDecode", "(II)Z");
+  j_release_method_ =
+      GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
+  j_dequeue_input_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
+  j_queue_input_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
+  j_dequeue_output_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer", "()I");
+  j_release_output_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_decoder_class_, "releaseOutputBuffer", "(I)Z");
+
+  j_input_buffers_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_,
+      "inputBuffers", "[Ljava/nio/ByteBuffer;");
+  j_output_buffers_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_,
+      "outputBuffers", "[Ljava/nio/ByteBuffer;");
+  j_color_format_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "colorFormat", "I");
+  j_width_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "width", "I");
+  j_height_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "height", "I");
+  j_stride_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "stride", "I");
+  j_slice_height_field_ = GetFieldID(
+      jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
+
+  CHECK_EXCEPTION(jni, "MediaCodecVideoDecoder ctor failed");
+  memset(&codec_, 0, sizeof(codec_));
+}
+
+MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
+  Release();
+}
+
+int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
+                                           int32_t numberOfCores) {
+  if (inst == NULL) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  int ret_val = Release();
+  if (ret_val < 0) {
+    return ret_val;
+  }
+  // Save VideoCodec instance for later.
+  if (&codec_ != inst) {
+    codec_ = *inst;
+  }
+  codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 1;
+
+  // Always start with a complete key frame.
+  key_frame_required_ = true;
+  frames_received_ = 0;
+
+  // Call Java init.
+  return codec_thread_->Invoke<int32_t>(
+      Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this));
+}
+
+int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
+  CheckOnCodecThread();
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  ALOGD("InitDecodeOnCodecThread: %d x %d. FPS: %d",
+      codec_.width, codec_.height, codec_.maxFramerate);
+
+  bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
+                                        j_init_decode_method_,
+                                        codec_.width,
+                                        codec_.height);
+  CHECK_EXCEPTION(jni, "");
+  if (!success)
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  inited_ = true;
+
+  jobjectArray input_buffers = (jobjectArray)GetObjectField(
+      jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
+  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
+
+  input_buffers_.resize(num_input_buffers);
+  for (size_t i = 0; i < num_input_buffers; ++i) {
+    input_buffers_[i] =
+        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
+    CHECK_EXCEPTION(jni, "");
+  }
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoDecoder::Release() {
+  return codec_thread_->Invoke<int32_t>(
+      Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
+}
+
+int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
+  if (!inited_)
+    return WEBRTC_VIDEO_CODEC_OK;
+  CheckOnCodecThread();
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ALOGD("DecoderRelease: Frames received: %d.", frames_received_);
+  ScopedLocalRefFrame local_ref_frame(jni);
+  for (size_t i = 0; i < input_buffers_.size(); ++i)
+    jni->DeleteGlobalRef(input_buffers_[i]);
+  input_buffers_.clear();
+  jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
+  CHECK_EXCEPTION(jni, "");
+  inited_ = false;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+
+void MediaCodecVideoDecoder::CheckOnCodecThread() {
+  CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread(),
+        "Running on wrong thread!");
+}
+
+int32_t MediaCodecVideoDecoder::Decode(
+    const EncodedImage& inputImage,
+    bool missingFrames,
+    const RTPFragmentationHeader* fragmentation,
+    const CodecSpecificInfo* codecSpecificInfo,
+    int64_t renderTimeMs) {
+  if (!inited_) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (callback_ == NULL) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (inputImage._buffer == NULL && inputImage._length > 0) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  // Check if encoded frame dimension has changed.
+  if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) &&
+      (inputImage._encodedWidth != codec_.width ||
+      inputImage._encodedHeight != codec_.height)) {
+    codec_.width = inputImage._encodedWidth;
+    codec_.height = inputImage._encodedHeight;
+    InitDecode(&codec_, 1);
+  }
+
+  // Always start with a complete key frame.
+  if (key_frame_required_) {
+    if (inputImage._frameType != webrtc::kKeyFrame) {
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    if (!inputImage._completeFrame) {
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    key_frame_required_ = false;
+  }
+  if (inputImage._length == 0) {
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  return codec_thread_->Invoke<int32_t>(Bind(
+      &MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage));
+}
+
+int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
+    const EncodedImage& inputImage) {
+  static uint8_t yVal_ = 0x7f;
+
+  CheckOnCodecThread();
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+
+  // Get input buffer.
+  int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_,
+                                                j_dequeue_input_buffer_method_);
+  CHECK_EXCEPTION(jni, "");
+  if (j_input_buffer_index < 0) {
+    ALOGE("dequeueInputBuffer error");
+    Reset();
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  // Copy encoded data to Java ByteBuffer.
+  jobject j_input_buffer = input_buffers_[j_input_buffer_index];
+  uint8* buffer =
+      reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
+  CHECK(buffer, "Indirect buffer??");
+  int64 buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
+  CHECK_EXCEPTION(jni, "");
+  if (buffer_capacity < inputImage._length) {
+    ALOGE("Input frame size %d is bigger than buffer size %d.",
+        inputImage._length, buffer_capacity);
+    Reset();
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  ALOGV("Decode frame # %d. Buffer # %d. Size: %d",
+      frames_received_, j_input_buffer_index, inputImage._length);
+  memcpy(buffer, inputImage._buffer, inputImage._length);
+
+  // Feed input to decoder.
+  jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
+  bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
+                                        j_queue_input_buffer_method_,
+                                        j_input_buffer_index,
+                                        inputImage._length,
+                                        timestamp_us);
+  CHECK_EXCEPTION(jni, "");
+  if (!success) {
+    ALOGE("queueInputBuffer error");
+    Reset();
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  // Get output index.
+  int j_output_buffer_index =
+      jni->CallIntMethod(*j_media_codec_video_decoder_,
+                         j_dequeue_output_buffer_method_);
+  CHECK_EXCEPTION(jni, "");
+  if (j_output_buffer_index < 0) {
+    ALOGE("dequeueOutputBuffer error");
+    Reset();
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  // Extract data from Java ByteBuffer.
+  jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
+      jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
+  jobject output_buffer =
+      jni->GetObjectArrayElement(output_buffers, j_output_buffer_index);
+  buffer_capacity = jni->GetDirectBufferCapacity(output_buffer);
+  uint8_t* payload =
+      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(output_buffer));
+  CHECK_EXCEPTION(jni, "");
+  int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
+      j_color_format_field_);
+  int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
+  int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
+  int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
+  int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
+      j_slice_height_field_);
+  if (buffer_capacity < width * height * 3 / 2) {
+    ALOGE("Insufficient output buffer capacity: %d", buffer_capacity);
+    Reset();
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  ALOGV("Decoder got output buffer %d x %d. %d x %d. Color: 0x%x. Size: %d",
+      width, height, stride, slice_height, color_format, buffer_capacity);
+
+  if (color_format == COLOR_FormatYUV420Planar) {
+    decoded_image_.CreateFrame(
+        stride * slice_height, payload,
+        (stride * slice_height) / 4, payload + (stride * slice_height),
+        (stride * slice_height) / 4, payload + (5 * stride * slice_height / 4),
+        width, height,
+        stride, stride / 2, stride / 2);
+  } else {
+    // All other supported formats are nv12.
+    decoded_image_.CreateEmptyFrame(width, height, width, width / 2, width / 2);
+    libyuv::NV12ToI420(
+        payload, stride,
+        payload + stride * slice_height, stride,
+        decoded_image_.buffer(webrtc::kYPlane),
+        decoded_image_.stride(webrtc::kYPlane),
+        decoded_image_.buffer(webrtc::kUPlane),
+        decoded_image_.stride(webrtc::kUPlane),
+        decoded_image_.buffer(webrtc::kVPlane),
+        decoded_image_.stride(webrtc::kVPlane),
+        width, height);
+  }
+
+  // Return output buffer back to codec.
+  success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
+                                   j_release_output_buffer_method_,
+                                   j_output_buffer_index);
+  CHECK_EXCEPTION(jni, "");
+  if (!success) {
+    ALOGE("releaseOutputBuffer error");
+    Reset();
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  // Callback.
+  decoded_image_.set_timestamp(inputImage._timeStamp);
+  decoded_image_.set_ntp_time_ms(inputImage.ntp_time_ms_);
+  frames_received_++;
+  return callback_->Decoded(decoded_image_);
+}
+
+int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
+    DecodedImageCallback* callback) {
+  callback_ = callback;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t MediaCodecVideoDecoder::Reset() {
+  ALOGD("DecoderReset");
+  if (!inited_) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  return InitDecode(&codec_, 1);
+}
+
+void MediaCodecVideoDecoder::OnMessage(talk_base::Message* msg) {
+}
+
+class MediaCodecVideoDecoderFactory
+    : public cricket::WebRtcVideoDecoderFactory {
+ public:
+  MediaCodecVideoDecoderFactory();
+  virtual ~MediaCodecVideoDecoderFactory();
+  // WebRtcVideoDecoderFactory implementation.
+  virtual webrtc::VideoDecoder* CreateVideoDecoder(
+      webrtc::VideoCodecType type) OVERRIDE;
+
+  virtual void DestroyVideoDecoder(webrtc::VideoDecoder* decoder) OVERRIDE;
+
+ private:
+  bool is_platform_supported_;
+};
+
+MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() {
+  JNIEnv* jni = AttachCurrentThreadIfNeeded();
+  ScopedLocalRefFrame local_ref_frame(jni);
+  jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
+  is_platform_supported_ = jni->CallStaticBooleanMethod(
+      j_decoder_class,
+      GetStaticMethodID(jni, j_decoder_class, "isPlatformSupported", "()Z"));
+  CHECK_EXCEPTION(jni, "");
+}
+
+MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {}
+
+webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
+    webrtc::VideoCodecType type) {
+  if (type != kVideoCodecVP8 || !is_platform_supported_) {
+    return NULL;
+  }
+  return new MediaCodecVideoDecoder(AttachCurrentThreadIfNeeded());
+}
+
+
+void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
+    webrtc::VideoDecoder* decoder) {
+  delete decoder;
+}
+
 #endif  // ANDROID
 
 }  // anonymous namespace
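A note on the buffer arithmetic in DecodeOnCodecThread above: for a COLOR_FormatYUV420Planar output buffer the codec lays out the full Y plane (stride x slice_height bytes) followed by the quarter-size U and V planes, so the U and V plane offsets are stride * slice_height and 5 * stride * slice_height / 4. A small worked example follows (illustrative numbers only, not part of this change):

// Worked example: plane offsets inside a single I420 output buffer,
// matching the expressions used in MediaCodecVideoDecoder::DecodeOnCodecThread.
public class I420OffsetsExample {
  public static void main(String[] args) {
    int stride = 640;       // Row stride reported by the codec.
    int sliceHeight = 480;  // Luma rows allocated for the Y plane.

    int ySize = stride * sliceHeight;              // 307200 bytes of Y.
    int uOffset = ySize;                           // U plane starts right after Y.
    int uSize = (stride / 2) * (sliceHeight / 2);  // 76800 bytes = ySize / 4.
    int vOffset = uOffset + uSize;                 // Equals 5 * stride * sliceHeight / 4.

    System.out.println("Y offset: 0, U offset: " + uOffset + ", V offset: " + vOffset);
    // Prints: Y offset: 0, U offset: 307200, V offset: 384000
    // and 5 * 640 * 480 / 4 == 384000, matching the C++ expression.
  }
}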
@@ -2030,15 +2469,17 @@ JOW(jlong, PeerConnectionFactory_nativeCreatePeerConnectionFactory)(
   CHECK(worker_thread->Start() && signaling_thread->Start(),
         "Failed to start threads");
   scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory;
+  scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory;
 #ifdef ANDROID
   encoder_factory.reset(new MediaCodecVideoEncoderFactory());
+  decoder_factory.reset(new MediaCodecVideoDecoderFactory());
 #endif
   talk_base::scoped_refptr<PeerConnectionFactoryInterface> factory(
       webrtc::CreatePeerConnectionFactory(worker_thread,
                                           signaling_thread,
                                           NULL,
                                           encoder_factory.release(),
-                                          NULL));
+                                          decoder_factory.release()));
   OwnedFactoryAndThreads* owned_factory = new OwnedFactoryAndThreads(
       worker_thread, signaling_thread, factory.release());
   return jlongFromPointer(owned_factory);
talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java (new file, 291 lines)
@@ -0,0 +1,291 @@
+/*
+ * libjingle
+ * Copyright 2014, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ *    this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ *    this list of conditions and the following disclaimer in the documentation
+ *    and/or other materials provided with the distribution.
+ * 3. The name of the author may not be used to endorse or promote products
+ *    derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.webrtc;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.media.MediaCodecList;
+import android.media.MediaFormat;
+import android.os.Build;
+import android.os.Bundle;
+import android.util.Log;
+import java.nio.ByteBuffer;
+
+// Java-side of peerconnection_jni.cc:MediaCodecVideoDecoder.
+// This class is an implementation detail of the Java PeerConnection API.
+// MediaCodec is thread-hostile so this class must be operated on a single
+// thread.
+class MediaCodecVideoDecoder {
+  // This class is constructed, operated, and destroyed by its C++ incarnation,
+  // so the class and its methods have non-public visibility.  The API this
+  // class exposes aims to mimic the webrtc::VideoDecoder API as closely as
+  // possibly to minimize the amount of translation work necessary.
+
+  private static final String TAG = "MediaCodecVideoDecoder";
+
+  private static final int DEQUEUE_TIMEOUT = 1000000;  // 1 sec timeout.
+  private Thread mediaCodecThread;
+  private MediaCodec mediaCodec;
+  private ByteBuffer[] inputBuffers;
+  private ByteBuffer[] outputBuffers;
+  private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
+  // List of supported HW VP8 decoders.
+  private static final String[] supportedHwCodecPrefixes =
+    {"OMX.Nvidia."};
+  // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
+  // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
+  private static final int
+    COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
+  // Allowable color formats supported by codec - in order of preference.
+  private static final int[] supportedColorList = {
+    CodecCapabilities.COLOR_FormatYUV420Planar,
+    CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
+    CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
+    COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
+  };
+  private int colorFormat;
+  private int width;
+  private int height;
+  private int stride;
+  private int sliceHeight;
+
+  private MediaCodecVideoDecoder() { }
+
+  // Helper struct for findVp8HwDecoder() below.
+  private static class DecoderProperties {
+    DecoderProperties(String codecName, int colorFormat) {
+      this.codecName = codecName;
+      this.colorFormat = colorFormat;
+    }
+    public final String codecName; // OpenMax component name for VP8 codec.
+    public final int colorFormat;  // Color format supported by codec.
+  }
+
+  private static DecoderProperties findVp8HwDecoder() {
+    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT)
+      return null; // MediaCodec.setParameters is missing.
+
+    for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
+      MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
+      if (info.isEncoder()) {
+        continue;
+      }
+      String name = null;
+      for (String mimeType : info.getSupportedTypes()) {
+        if (mimeType.equals(VP8_MIME_TYPE)) {
+          name = info.getName();
+          break;
+        }
+      }
+      if (name == null) {
+        continue;  // No VP8 support in this codec; try the next one.
+      }
+      Log.d(TAG, "Found candidate decoder " + name);
+      CodecCapabilities capabilities =
+          info.getCapabilitiesForType(VP8_MIME_TYPE);
+      for (int colorFormat : capabilities.colorFormats) {
+        Log.d(TAG, "   Color: 0x" + Integer.toHexString(colorFormat));
+      }
+
+      // Check if this is supported HW decoder
+      for (String hwCodecPrefix : supportedHwCodecPrefixes) {
+        if (!name.startsWith(hwCodecPrefix)) {
+          continue;
+        }
+        // Check if codec supports either yuv420 or nv12
+        for (int supportedColorFormat : supportedColorList) {
+          for (int codecColorFormat : capabilities.colorFormats) {
+            if (codecColorFormat == supportedColorFormat) {
+              // Found supported HW VP8 decoder
+              Log.d(TAG, "Found target decoder " + name +
+                  ". Color: 0x" + Integer.toHexString(codecColorFormat));
+              return new DecoderProperties(name, codecColorFormat);
+            }
+          }
+        }
+      }
+    }
+    return null;  // No HW VP8 decoder.
+  }
+
+  private static boolean isPlatformSupported() {
+    return findVp8HwDecoder() != null;
+  }
+
+  private void checkOnMediaCodecThread() {
+    if (mediaCodecThread.getId() != Thread.currentThread().getId()) {
+      throw new RuntimeException(
+          "MediaCodecVideoDecoder previously operated on " + mediaCodecThread +
+          " but is now called on " + Thread.currentThread());
+    }
+  }
+
+  private boolean initDecode(int width, int height) {
+    if (mediaCodecThread != null) {
+      throw new RuntimeException("Forgot to release()?");
+    }
+    DecoderProperties properties = findVp8HwDecoder();
+    if (properties == null) {
+      throw new RuntimeException("Cannot find HW VP8 decoder");
+    }
+    Log.d(TAG, "Java initDecode: " + width + " x " + height +
+        ". Color: 0x" + Integer.toHexString(properties.colorFormat));
+    mediaCodecThread = Thread.currentThread();
+    try {
+      this.width = width;
+      this.height = height;
+      stride = width;
+      sliceHeight = height;
+      MediaFormat format =
+          MediaFormat.createVideoFormat(VP8_MIME_TYPE, width, height);
+      format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
+      Log.d(TAG, "  Format: " + format);
+      mediaCodec = MediaCodec.createByCodecName(properties.codecName);
+      if (mediaCodec == null) {
+        return false;
+      }
+      mediaCodec.configure(format, null, null, 0);
+      mediaCodec.start();
+      colorFormat = properties.colorFormat;
+      outputBuffers = mediaCodec.getOutputBuffers();
+      inputBuffers = mediaCodec.getInputBuffers();
+      Log.d(TAG, "Input buffers: " + inputBuffers.length +
+          ". Output buffers: " + outputBuffers.length);
+      return true;
+    } catch (IllegalStateException e) {
+      Log.e(TAG, "initDecode failed", e);
+      return false;
+    }
+  }
+
+  private void release() {
+    Log.d(TAG, "Java releaseDecoder");
+    checkOnMediaCodecThread();
+    try {
+      mediaCodec.stop();
+      mediaCodec.release();
+    } catch (IllegalStateException e) {
+      Log.e(TAG, "release failed", e);
+    }
+    mediaCodec = null;
+    mediaCodecThread = null;
+  }
+
+  // Dequeue an input buffer and return its index, -1 if no input buffer is
+  // available, or -2 if the codec is no longer operative.
+  private int dequeueInputBuffer() {
+    checkOnMediaCodecThread();
+    try {
+      return mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
+    } catch (IllegalStateException e) {
+      Log.e(TAG, "dequeueIntputBuffer failed", e);
+      return -2;
+    }
+  }
+
+  private boolean queueInputBuffer(
+      int inputBufferIndex, int size, long timestampUs) {
+    checkOnMediaCodecThread();
+    try {
+      inputBuffers[inputBufferIndex].position(0);
+      inputBuffers[inputBufferIndex].limit(size);
+      mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
+      return true;
+    }
+    catch (IllegalStateException e) {
+      Log.e(TAG, "decode failed", e);
+      return false;
+    }
+  }
+
+  // Dequeue and return an output buffer index, -1 if no output
+  // buffer available or -2 if error happened.
+  private int dequeueOutputBuffer() {
+    checkOnMediaCodecThread();
+    try {
+      MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
+      int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
+      while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ||
+          result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+        if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+          outputBuffers = mediaCodec.getOutputBuffers();
+        } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+          MediaFormat format = mediaCodec.getOutputFormat();
+          Log.d(TAG, "Format changed: " + format.toString());
+          width = format.getInteger(MediaFormat.KEY_WIDTH);
+          height = format.getInteger(MediaFormat.KEY_HEIGHT);
+          if (format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
+            colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
+            Log.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
+            // Check if new color space is supported.
+            boolean validColorFormat = false;
+            for (int supportedColorFormat : supportedColorList) {
+              if (colorFormat == supportedColorFormat) {
+                validColorFormat = true;
+                break;
+              }
+            }
+            if (!validColorFormat) {
+              Log.e(TAG, "Non supported color format");
+              return -2;
+            }
+          }
+          if (format.containsKey("stride")) {
+            stride = format.getInteger("stride");
+          }
+          if (format.containsKey("slice-height")) {
+            sliceHeight = format.getInteger("slice-height");
+          }
+          Log.d(TAG, "Frame stride and slice height: "
+              + stride + " x " + sliceHeight);
+          stride = Math.max(width, stride);
+          sliceHeight = Math.max(height, sliceHeight);
+        }
+        result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
+      }
+      return result;
+    } catch (IllegalStateException e) {
+      Log.e(TAG, "dequeueOutputBuffer failed", e);
+      return -2;
+    }
+  }
+
+  // Release a dequeued output buffer back to the codec for re-use.  Return
+  // false if the codec is no longer operable.
+  private boolean releaseOutputBuffer(int index) {
+    checkOnMediaCodecThread();
+    try {
+      mediaCodec.releaseOutputBuffer(index, false);
+      return true;
+    } catch (IllegalStateException e) {
+      Log.e(TAG, "releaseOutputBuffer failed", e);
+      return false;
+    }
+  }
+}
@@ -156,7 +156,7 @@ class MediaCodecVideoEncoder {
 
   // Return the array of input buffers, or null on failure.
  private ByteBuffer[] initEncode(int width, int height, int kbps, int fps) {
-    Log.d(TAG, "initEncode: " + width + " x " + height +
+    Log.d(TAG, "Java initEncode: " + width + " x " + height +
        ". @ " + kbps + " kbps. Fps: " + fps +
        ". Color: 0x" + Integer.toHexString(colorFormat));
    if (mediaCodecThread != null) {
@@ -222,7 +222,7 @@ class MediaCodecVideoEncoder {
   }
 
   private void release() {
-    Log.d(TAG, "release");
+    Log.d(TAG, "Java releaseEncoder");
     checkOnMediaCodecThread();
     try {
       mediaCodec.stop();
@@ -110,6 +110,7 @@
     'android_java_files': [
       'app/webrtc/java/android/org/webrtc/VideoRendererGui.java',
       'app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java',
+      'app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java',
       '<(webrtc_modules_dir)/audio_device/android/java/src/org/webrtc/voiceengine/AudioManagerAndroid.java',
       '<(webrtc_modules_dir)/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java',
       '<(webrtc_modules_dir)/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java',