Move Android MediaCodec encoder and decoder factories to separate files.
Move Android media encoder and media decoder factories from peerconnection_jni.cc to androidmediaencoder_jni.cc and androidmediadecoder_jni.cc.

R=perkj@webrtc.org
Review URL: https://webrtc-codereview.appspot.com/36139004

Cr-Commit-Position: refs/heads/master@{#8417}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8417 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
parent
88828e77d9
commit
18c92472df
83
talk/app/webrtc/java/jni/androidmediacodeccommon.h
Normal file
83
talk/app/webrtc/java/jni/androidmediacodeccommon.h
Normal file
@ -0,0 +1,83 @@
|
||||
/*
|
||||
* libjingle
|
||||
* Copyright 2015 Google Inc.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice,
|
||||
* this list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the documentation
|
||||
* and/or other materials provided with the distribution.
|
||||
* 3. The name of the author may not be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
|
||||
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
|
||||
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
|
||||
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||||
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
|
||||
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
|
||||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
*/
|
||||
|
||||
#ifndef TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
#define TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_

#include <android/log.h>
#include "webrtc/base/thread.h"
#include "webrtc/system_wrappers/interface/tick_util.h"

namespace webrtc_jni {

// Uncomment this define to enable verbose logging for every encoded/decoded
// video frame.
// #define TRACK_BUFFER_TIMING

// Logcat tag shared by the Android MediaCodec encoder/decoder JNI glue.
#define TAG "MediaCodecVideo"
#ifdef TRACK_BUFFER_TIMING
#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__)
#else
// Per-frame verbose logging compiles to nothing unless TRACK_BUFFER_TIMING
// is defined above.
#define ALOGV(...)
#endif
#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)

// Color formats supported by encoder - should mirror supportedColorList
// from MediaCodecVideoEncoder.java
enum COLOR_FORMATTYPE {
  COLOR_FormatYUV420Planar = 0x13,
  COLOR_FormatYUV420SemiPlanar = 0x15,
  COLOR_QCOM_FormatYUV420SemiPlanar = 0x7FA30C00,
  // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
  // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
  // This format is presumably similar to COLOR_FormatYUV420SemiPlanar,
  // but requires some (16, 32?) byte alignment.
  COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04
};

// Arbitrary interval to poll the codec for new outputs.
enum { kMediaCodecPollMs = 10 };
// Media codec maximum output buffer ready timeout.
enum { kMediaCodecTimeoutMs = 500 };
// Interval to print codec statistics (bitrate, fps, encoding/decoding time).
enum { kMediaCodecStatisticsIntervalMs = 3000 };

// Returns the current time in milliseconds.  TickTime ticks are presumably
// nanoseconds, given the division by 1e6 -- TODO confirm against tick_util.h.
static inline int64_t GetCurrentTimeMs() {
  return webrtc::TickTime::Now().Ticks() / 1000000LL;
}

// Marks the current rtc::Thread (if any) as allowed to make blocking calls,
// so that synchronous Invoke()s onto the codec thread do not CHECK-fail.
static inline void AllowBlockingCalls() {
  rtc::Thread* current_thread = rtc::Thread::Current();
  if (current_thread != NULL)
    current_thread->SetAllowBlockingCalls(true);
}

}  // namespace webrtc_jni

#endif  // TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
|
717
talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
Normal file
717
talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
Normal file
@ -0,0 +1,717 @@
|
||||
/*
|
||||
* libjingle
|
||||
* Copyright 2015 Google Inc.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice,
|
||||
* this list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the documentation
|
||||
* and/or other materials provided with the distribution.
|
||||
* 3. The name of the author may not be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
|
||||
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
|
||||
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
|
||||
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||||
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
|
||||
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
|
||||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
*/
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include "talk/app/webrtc/java/jni/androidmediadecoder_jni.h"
|
||||
#include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
|
||||
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
|
||||
#include "talk/app/webrtc/java/jni/native_handle_impl.h"
|
||||
#include "webrtc/base/bind.h"
|
||||
#include "webrtc/base/checks.h"
|
||||
#include "webrtc/base/logging.h"
|
||||
#include "webrtc/base/thread.h"
|
||||
#include "webrtc/common_video/interface/texture_video_frame.h"
|
||||
#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
|
||||
#include "webrtc/system_wrappers/interface/logcat_trace_context.h"
|
||||
#include "webrtc/system_wrappers/interface/tick_util.h"
|
||||
#include "third_party/libyuv/include/libyuv/convert.h"
|
||||
#include "third_party/libyuv/include/libyuv/convert_from.h"
|
||||
#include "third_party/libyuv/include/libyuv/video_common.h"
|
||||
|
||||
using rtc::Bind;
|
||||
using rtc::Thread;
|
||||
using rtc::ThreadManager;
|
||||
using rtc::scoped_ptr;
|
||||
|
||||
using webrtc::CodecSpecificInfo;
|
||||
using webrtc::DecodedImageCallback;
|
||||
using webrtc::EncodedImage;
|
||||
using webrtc::I420VideoFrame;
|
||||
using webrtc::RTPFragmentationHeader;
|
||||
using webrtc::TextureVideoFrame;
|
||||
using webrtc::TickTime;
|
||||
using webrtc::VideoCodec;
|
||||
using webrtc::kVideoCodecVP8;
|
||||
|
||||
namespace webrtc_jni {
|
||||
|
||||
// Shared EGL context used for surface (texture) decoding; set via
// MediaCodecVideoDecoderFactory::SetAndroidObjects().  NULL disables the
// surface decoding path.
jobject MediaCodecVideoDecoderFactory::render_egl_context_ = NULL;

// Hardware video decoder backed by the Java org.webrtc.MediaCodecVideoDecoder
// class.  All MediaCodec interaction runs on |codec_thread_|; the public
// entry points synchronously Invoke() onto that thread.
class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
                               public rtc::MessageHandler {
 public:
  explicit MediaCodecVideoDecoder(JNIEnv* jni);
  virtual ~MediaCodecVideoDecoder();

  static int SetAndroidObjects(JNIEnv* jni, jobject render_egl_context);

  int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores)
      override;

  int32_t Decode(
      const EncodedImage& inputImage, bool missingFrames,
      const RTPFragmentationHeader* fragmentation,
      const CodecSpecificInfo* codecSpecificInfo = NULL,
      int64_t renderTimeMs = -1) override;

  int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback)
      override;

  int32_t Release() override;

  int32_t Reset() override;
  // rtc::MessageHandler implementation.  Used for the periodic output poll.
  void OnMessage(rtc::Message* msg) override;

 private:
  // CHECK-fail if not running on |codec_thread_|.
  void CheckOnCodecThread();

  int32_t InitDecodeOnCodecThread();
  int32_t ReleaseOnCodecThread();
  int32_t DecodeOnCodecThread(const EncodedImage& inputImage);
  // Deliver any outputs pending in the MediaCodec to our |callback_| and return
  // true on success.
  bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us);

  bool key_frame_required_;  // True until a complete key frame is received.
  bool inited_;              // True between a successful init and Release().
  bool use_surface_;         // Decode to SurfaceTexture instead of ByteBuffers.
  int error_count_;          // Errors seen; more than one forces SW fallback.
  VideoCodec codec_;
  I420VideoFrame decoded_image_;
  NativeHandleImpl native_handle_;
  DecodedImageCallback* callback_;
  int frames_received_;  // Number of frames received by decoder.
  int frames_decoded_;  // Number of frames decoded by decoder
  int64_t start_time_ms_;  // Start time for statistics.
  int current_frames_;  // Number of frames in the current statistics interval.
  int current_bytes_;  // Encoded bytes in the current statistics interval.
  int current_decoding_time_ms_;  // Overall decoding time in the current second
  uint32_t max_pending_frames_;  // Maximum number of pending input frames
  // Per-frame bookkeeping, pushed on input and popped on matching output.
  std::vector<int32_t> timestamps_;
  std::vector<int64_t> ntp_times_ms_;
  std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to
                                             // decoder input.

  // State that is constant for the lifetime of this object once the ctor
  // returns.
  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
  ScopedGlobalRef<jclass> j_media_codec_video_decoder_class_;
  ScopedGlobalRef<jobject> j_media_codec_video_decoder_;
  jmethodID j_init_decode_method_;
  jmethodID j_release_method_;
  jmethodID j_dequeue_input_buffer_method_;
  jmethodID j_queue_input_buffer_method_;
  jmethodID j_dequeue_output_buffer_method_;
  jmethodID j_release_output_buffer_method_;
  // MediaCodecVideoDecoder fields.
  jfieldID j_input_buffers_field_;
  jfieldID j_output_buffers_field_;
  jfieldID j_color_format_field_;
  jfieldID j_width_field_;
  jfieldID j_height_field_;
  jfieldID j_stride_field_;
  jfieldID j_slice_height_field_;
  jfieldID j_surface_texture_field_;
  jfieldID j_textureID_field_;
  // MediaCodecVideoDecoder.DecoderOutputBufferInfo fields.
  jfieldID j_info_index_field_;
  jfieldID j_info_offset_field_;
  jfieldID j_info_size_field_;
  jfieldID j_info_presentation_timestamp_us_field_;

  // Global references; must be deleted in Release().
  std::vector<jobject> input_buffers_;
  jobject surface_texture_;
  jobject previous_surface_texture_;
};
|
||||
|
||||
// Constructs the decoder wrapper: creates the Java MediaCodecVideoDecoder
// peer, starts the dedicated codec thread and caches all JNI method/field
// IDs so they do not have to be looked up per frame.
//
// Fix: |callback_| was never initialized, but Decode() compares it against
// NULL before RegisterDecodeCompleteCallback() is guaranteed to have run --
// reading an indeterminate pointer.  Initialize it to NULL here.
MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni)
    : key_frame_required_(true),
      inited_(false),
      error_count_(0),
      callback_(NULL),  // Set later via RegisterDecodeCompleteCallback().
      surface_texture_(NULL),
      previous_surface_texture_(NULL),
      codec_thread_(new Thread()),
      j_media_codec_video_decoder_class_(
          jni,
          FindClass(jni, "org/webrtc/MediaCodecVideoDecoder")),
      j_media_codec_video_decoder_(
          jni,
          jni->NewObject(*j_media_codec_video_decoder_class_,
                         GetMethodID(jni,
                                     *j_media_codec_video_decoder_class_,
                                     "<init>",
                                     "()V"))) {
  ScopedLocalRefFrame local_ref_frame(jni);
  codec_thread_->SetName("MediaCodecVideoDecoder", NULL);
  CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoDecoder";

  // Cache Java method IDs; these stay valid as long as the class is loaded.
  j_init_decode_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "initDecode",
      "(IIZZLandroid/opengl/EGLContext;)Z");
  j_release_method_ =
      GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
  j_dequeue_input_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "dequeueInputBuffer", "()I");
  j_queue_input_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "queueInputBuffer", "(IIJ)Z");
  j_dequeue_output_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "dequeueOutputBuffer",
      "(I)Lorg/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo;");
  j_release_output_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_decoder_class_, "releaseOutputBuffer", "(IZ)Z");

  // Cache field IDs of the Java decoder object.
  j_input_buffers_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_,
      "inputBuffers", "[Ljava/nio/ByteBuffer;");
  j_output_buffers_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_,
      "outputBuffers", "[Ljava/nio/ByteBuffer;");
  j_color_format_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "colorFormat", "I");
  j_width_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "width", "I");
  j_height_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "height", "I");
  j_stride_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "stride", "I");
  j_slice_height_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "sliceHeight", "I");
  j_textureID_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "textureID", "I");
  j_surface_texture_field_ = GetFieldID(
      jni, *j_media_codec_video_decoder_class_, "surfaceTexture",
      "Landroid/graphics/SurfaceTexture;");

  // Cache field IDs of the DecoderOutputBufferInfo helper class.
  jclass j_decoder_output_buffer_info_class = FindClass(jni,
      "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
  j_info_index_field_ = GetFieldID(
      jni, j_decoder_output_buffer_info_class, "index", "I");
  j_info_offset_field_ = GetFieldID(
      jni, j_decoder_output_buffer_info_class, "offset", "I");
  j_info_size_field_ = GetFieldID(
      jni, j_decoder_output_buffer_info_class, "size", "I");
  j_info_presentation_timestamp_us_field_ = GetFieldID(
      jni, j_decoder_output_buffer_info_class, "presentationTimestampUs", "J");

  CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
  // Surface decoding requires a shared EGL render context.
  use_surface_ = true;
  if (MediaCodecVideoDecoderFactory::render_egl_context_ == NULL)
    use_surface_ = false;
  memset(&codec_, 0, sizeof(codec_));
  AllowBlockingCalls();
}
|
||||
|
||||
// Destructor: tears down the codec and drops the SurfaceTexture global refs
// (the input-buffer global refs are deleted inside Release()).
MediaCodecVideoDecoder::~MediaCodecVideoDecoder() {
  // Call Release() to ensure no more callbacks to us after we are deleted.
  Release();
  // Delete global references.
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  if (previous_surface_texture_ != NULL)
    jni->DeleteGlobalRef(previous_surface_texture_);
  if (surface_texture_ != NULL)
    jni->DeleteGlobalRef(surface_texture_);
}
|
||||
|
||||
// webrtc::VideoDecoder entry point.  Releases any previous codec instance,
// stores the codec settings and re-initializes the Java codec on
// |codec_thread_|.  Returns a WEBRTC_VIDEO_CODEC_* status.
int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
                                           int32_t numberOfCores) {
  if (inst == NULL) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  int ret_val = Release();
  if (ret_val < 0) {
    return ret_val;
  }
  // Save VideoCodec instance for later.  The self-copy guard matters because
  // Reset() and Decode() re-enter this function with &codec_ itself.
  if (&codec_ != inst) {
    codec_ = *inst;
  }
  // Clamp the frame rate to at least 1 fps; DecodeOnCodecThread divides by
  // maxFramerate when computing input timestamps.
  codec_.maxFramerate = (codec_.maxFramerate >= 1) ? codec_.maxFramerate : 1;

  // Always start with a complete key frame.
  key_frame_required_ = true;
  frames_received_ = 0;
  frames_decoded_ = 0;

  // Call Java init.
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoDecoder::InitDecodeOnCodecThread, this));
}
|
||||
|
||||
// Runs on |codec_thread_|.  Calls Java initDecode(), resets the statistics
// counters, grabs global refs to the codec's input ByteBuffers and (in
// surface mode) the SurfaceTexture, then kicks off the periodic output poll.
int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  ALOGD("InitDecodeOnCodecThread: %d x %d. Fps: %d. Errors: %d",
      codec_.width, codec_.height, codec_.maxFramerate, error_count_);
  bool use_sw_codec = false;
  if (error_count_ > 1) {
    // If more than one critical errors happen for HW codec, switch to SW codec.
    use_sw_codec = true;
  }

  bool success = jni->CallBooleanMethod(
      *j_media_codec_video_decoder_,
      j_init_decode_method_,
      codec_.width,
      codec_.height,
      use_sw_codec,
      use_surface_,
      MediaCodecVideoDecoderFactory::render_egl_context_);
  CHECK_EXCEPTION(jni);
  if (!success) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  inited_ = true;

  // In surface mode allow one in-flight frame; in buffer mode the decoder is
  // drained fully before each new input.
  max_pending_frames_ = 0;
  if (use_surface_) {
    max_pending_frames_ = 1;
  }
  // Reset per-interval decoding statistics.
  start_time_ms_ = GetCurrentTimeMs();
  current_frames_ = 0;
  current_bytes_ = 0;
  current_decoding_time_ms_ = 0;
  timestamps_.clear();
  ntp_times_ms_.clear();
  frame_rtc_times_ms_.clear();

  // Pin the Java-side direct input buffers with global refs so they stay
  // valid across JNI calls; released in ReleaseOnCodecThread().
  jobjectArray input_buffers = (jobjectArray)GetObjectField(
      jni, *j_media_codec_video_decoder_, j_input_buffers_field_);
  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
  input_buffers_.resize(num_input_buffers);
  for (size_t i = 0; i < num_input_buffers; ++i) {
    input_buffers_[i] =
        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
    CHECK_EXCEPTION(jni);
  }

  if (use_surface_) {
    jobject surface_texture = GetObjectField(
        jni, *j_media_codec_video_decoder_, j_surface_texture_field_);
    // Keep the previous SurfaceTexture alive one generation; a renderer may
    // still hold a frame that references it.
    if (previous_surface_texture_ != NULL) {
      jni->DeleteGlobalRef(previous_surface_texture_);
    }
    previous_surface_texture_ = surface_texture_;
    surface_texture_ = jni->NewGlobalRef(surface_texture);
  }
  // Start the self-rescheduling poll; see OnMessage().
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);

  return WEBRTC_VIDEO_CODEC_OK;
}
|
||||
|
||||
// webrtc::VideoDecoder entry point; marshals the release onto |codec_thread_|.
int32_t MediaCodecVideoDecoder::Release() {
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
}
|
||||
|
||||
// Runs on |codec_thread_|.  Drops input-buffer global refs, releases the Java
// codec and cancels the pending poll messages.  No-op when not initialized.
int32_t MediaCodecVideoDecoder::ReleaseOnCodecThread() {
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ALOGD("DecoderRelease: Frames received: %d.", frames_received_);
  ScopedLocalRefFrame local_ref_frame(jni);
  for (size_t i = 0; i < input_buffers_.size(); i++) {
    jni->DeleteGlobalRef(input_buffers_[i]);
  }
  input_buffers_.clear();
  jni->CallVoidMethod(*j_media_codec_video_decoder_, j_release_method_);
  CHECK_EXCEPTION(jni);
  // Remove any queued OnMessage() polls so they don't fire after release.
  rtc::MessageQueueManager::Clear(this);
  inited_ = false;
  return WEBRTC_VIDEO_CODEC_OK;
}
|
||||
|
||||
// CHECK-fails unless the caller is executing on |codec_thread_|.
void MediaCodecVideoDecoder::CheckOnCodecThread() {
  CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
      << "Running on wrong thread!";
}
|
||||
|
||||
// webrtc::VideoDecoder entry point.  Validates the encoded frame, handles
// mid-stream resolution changes and key-frame gating, then hands the frame
// to |codec_thread_| for the actual MediaCodec work.
int32_t MediaCodecVideoDecoder::Decode(
    const EncodedImage& inputImage,
    bool missingFrames,
    const RTPFragmentationHeader* fragmentation,
    const CodecSpecificInfo* codecSpecificInfo,
    int64_t renderTimeMs) {
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (callback_ == NULL) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (inputImage._buffer == NULL && inputImage._length > 0) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  // Check if encoded frame dimension has changed.  If so, re-initialize the
  // codec with the new dimensions before feeding the frame.
  if ((inputImage._encodedWidth * inputImage._encodedHeight > 0) &&
      (inputImage._encodedWidth != codec_.width ||
      inputImage._encodedHeight != codec_.height)) {
    codec_.width = inputImage._encodedWidth;
    codec_.height = inputImage._encodedHeight;
    InitDecode(&codec_, 1);
  }

  // Always start with a complete key frame.
  if (key_frame_required_) {
    if (inputImage._frameType != webrtc::kKeyFrame) {
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    if (!inputImage._completeFrame) {
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    key_frame_required_ = false;
  }
  if (inputImage._length == 0) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  return codec_thread_->Invoke<int32_t>(Bind(
      &MediaCodecVideoDecoder::DecodeOnCodecThread, this, inputImage));
}
|
||||
|
||||
int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
|
||||
const EncodedImage& inputImage) {
|
||||
static uint8_t yVal_ = 0x7f;
|
||||
|
||||
CheckOnCodecThread();
|
||||
JNIEnv* jni = AttachCurrentThreadIfNeeded();
|
||||
ScopedLocalRefFrame local_ref_frame(jni);
|
||||
|
||||
// Try to drain the decoder and wait until output is not too
|
||||
// much behind the input.
|
||||
if (frames_received_ > frames_decoded_ + max_pending_frames_) {
|
||||
ALOGV("Wait for output...");
|
||||
if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
|
||||
error_count_++;
|
||||
Reset();
|
||||
return WEBRTC_VIDEO_CODEC_ERROR;
|
||||
}
|
||||
if (frames_received_ > frames_decoded_ + max_pending_frames_) {
|
||||
ALOGE("Output buffer dequeue timeout");
|
||||
error_count_++;
|
||||
Reset();
|
||||
return WEBRTC_VIDEO_CODEC_ERROR;
|
||||
}
|
||||
}
|
||||
|
||||
// Get input buffer.
|
||||
int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_decoder_,
|
||||
j_dequeue_input_buffer_method_);
|
||||
CHECK_EXCEPTION(jni);
|
||||
if (j_input_buffer_index < 0) {
|
||||
ALOGE("dequeueInputBuffer error");
|
||||
error_count_++;
|
||||
Reset();
|
||||
return WEBRTC_VIDEO_CODEC_ERROR;
|
||||
}
|
||||
|
||||
// Copy encoded data to Java ByteBuffer.
|
||||
jobject j_input_buffer = input_buffers_[j_input_buffer_index];
|
||||
uint8* buffer =
|
||||
reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
|
||||
CHECK(buffer) << "Indirect buffer??";
|
||||
int64 buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
|
||||
CHECK_EXCEPTION(jni);
|
||||
if (buffer_capacity < inputImage._length) {
|
||||
ALOGE("Input frame size %d is bigger than buffer size %d.",
|
||||
inputImage._length, buffer_capacity);
|
||||
error_count_++;
|
||||
Reset();
|
||||
return WEBRTC_VIDEO_CODEC_ERROR;
|
||||
}
|
||||
ALOGV("Decoder frame in # %d. Buffer # %d. Size: %d",
|
||||
frames_received_, j_input_buffer_index, inputImage._length);
|
||||
memcpy(buffer, inputImage._buffer, inputImage._length);
|
||||
|
||||
// Save input image timestamps for later output.
|
||||
frames_received_++;
|
||||
current_bytes_ += inputImage._length;
|
||||
timestamps_.push_back(inputImage._timeStamp);
|
||||
ntp_times_ms_.push_back(inputImage.ntp_time_ms_);
|
||||
frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
|
||||
|
||||
// Feed input to decoder.
|
||||
jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
|
||||
bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
|
||||
j_queue_input_buffer_method_,
|
||||
j_input_buffer_index,
|
||||
inputImage._length,
|
||||
timestamp_us);
|
||||
CHECK_EXCEPTION(jni);
|
||||
if (!success) {
|
||||
ALOGE("queueInputBuffer error");
|
||||
error_count_++;
|
||||
Reset();
|
||||
return WEBRTC_VIDEO_CODEC_ERROR;
|
||||
}
|
||||
|
||||
// Try to drain the decoder
|
||||
if (!DeliverPendingOutputs(jni, 0)) {
|
||||
ALOGE("DeliverPendingOutputs error");
|
||||
error_count_++;
|
||||
Reset();
|
||||
return WEBRTC_VIDEO_CODEC_ERROR;
|
||||
}
|
||||
|
||||
return WEBRTC_VIDEO_CODEC_OK;
|
||||
}
|
||||
|
||||
// Runs on |codec_thread_|.  Dequeues at most one decoded output buffer from
// the Java codec (waiting up to |dequeue_timeout_us|), converts it to an
// I420VideoFrame (buffer mode) or TextureVideoFrame (surface mode), returns
// the buffer to the codec and delivers the frame to |callback_|.  Returns
// false on codec errors; a NULL/empty dequeue result is not an error.
bool MediaCodecVideoDecoder::DeliverPendingOutputs(
    JNIEnv* jni, int dequeue_timeout_us) {
  if (frames_received_ <= frames_decoded_) {
    // No need to query for output buffers - decoder is drained.
    return true;
  }
  // Get decoder output.
  jobject j_decoder_output_buffer_info = jni->CallObjectMethod(
      *j_media_codec_video_decoder_,
      j_dequeue_output_buffer_method_,
      dequeue_timeout_us);

  CHECK_EXCEPTION(jni);
  if (IsNull(jni, j_decoder_output_buffer_info)) {
    // Nothing ready yet.
    return true;
  }

  // Extract output buffer info from Java DecoderOutputBufferInfo.
  int output_buffer_index =
      GetIntField(jni, j_decoder_output_buffer_info, j_info_index_field_);
  if (output_buffer_index < 0) {
    ALOGE("dequeueOutputBuffer error : %d", output_buffer_index);
    return false;
  }
  int output_buffer_offset =
      GetIntField(jni, j_decoder_output_buffer_info, j_info_offset_field_);
  int output_buffer_size =
      GetIntField(jni, j_decoder_output_buffer_info, j_info_size_field_);
  CHECK_EXCEPTION(jni);

  // Get decoded video frame properties.
  int color_format = GetIntField(jni, *j_media_codec_video_decoder_,
      j_color_format_field_);
  int width = GetIntField(jni, *j_media_codec_video_decoder_, j_width_field_);
  int height = GetIntField(jni, *j_media_codec_video_decoder_, j_height_field_);
  int stride = GetIntField(jni, *j_media_codec_video_decoder_, j_stride_field_);
  int slice_height = GetIntField(jni, *j_media_codec_video_decoder_,
      j_slice_height_field_);
  int texture_id = GetIntField(jni, *j_media_codec_video_decoder_,
      j_textureID_field_);

  // Extract data from Java ByteBuffer and create output yuv420 frame -
  // for non surface decoding only.
  if (!use_surface_) {
    if (output_buffer_size < width * height * 3 / 2) {
      ALOGE("Insufficient output buffer size: %d", output_buffer_size);
      return false;
    }
    jobjectArray output_buffers = reinterpret_cast<jobjectArray>(GetObjectField(
        jni, *j_media_codec_video_decoder_, j_output_buffers_field_));
    jobject output_buffer =
        jni->GetObjectArrayElement(output_buffers, output_buffer_index);
    uint8_t* payload = reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(
        output_buffer));
    CHECK_EXCEPTION(jni);
    payload += output_buffer_offset;

    // Create yuv420 frame.
    if (color_format == COLOR_FormatYUV420Planar) {
      // Planar I420: wrap the three planes directly using codec strides.
      decoded_image_.CreateFrame(
          stride * slice_height, payload,
          (stride * slice_height) / 4, payload + (stride * slice_height),
          (stride * slice_height) / 4,
          payload + (5 * stride * slice_height / 4),
          width, height,
          stride, stride / 2, stride / 2);
    } else {
      // All other supported formats are nv12.
      decoded_image_.CreateEmptyFrame(width, height, width,
          width / 2, width / 2);
      libyuv::NV12ToI420(
          payload, stride,
          payload + stride * slice_height, stride,
          decoded_image_.buffer(webrtc::kYPlane),
          decoded_image_.stride(webrtc::kYPlane),
          decoded_image_.buffer(webrtc::kUPlane),
          decoded_image_.stride(webrtc::kUPlane),
          decoded_image_.buffer(webrtc::kVPlane),
          decoded_image_.stride(webrtc::kVPlane),
          width, height);
    }
  }

  // Get frame timestamps from a queue.  Entries were pushed in
  // DecodeOnCodecThread in input order; outputs are assumed in-order.
  int32_t timestamp = timestamps_.front();
  timestamps_.erase(timestamps_.begin());
  int64_t ntp_time_ms = ntp_times_ms_.front();
  ntp_times_ms_.erase(ntp_times_ms_.begin());
  int64_t frame_decoding_time_ms = GetCurrentTimeMs() -
      frame_rtc_times_ms_.front();
  frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());

  ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. Size: %d."
      " DecTime: %lld", frames_decoded_, width, height, stride, slice_height,
      color_format, output_buffer_size, frame_decoding_time_ms);

  // Return output buffer back to codec.  In surface mode this also renders
  // the buffer to the SurfaceTexture.
  bool success = jni->CallBooleanMethod(
      *j_media_codec_video_decoder_,
      j_release_output_buffer_method_,
      output_buffer_index,
      use_surface_);
  CHECK_EXCEPTION(jni);
  if (!success) {
    ALOGE("releaseOutputBuffer error");
    return false;
  }

  // Calculate and print decoding statistics - every 3 seconds.
  frames_decoded_++;
  current_frames_++;
  current_decoding_time_ms_ += frame_decoding_time_ms;
  int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
  if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
      current_frames_ > 0) {
    ALOGD("Decoder bitrate: %d kbps, fps: %d, decTime: %d for last %d ms",
        current_bytes_ * 8 / statistic_time_ms,
        (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
        current_decoding_time_ms_ / current_frames_, statistic_time_ms);
    start_time_ms_ = GetCurrentTimeMs();
    current_frames_ = 0;
    current_bytes_ = 0;
    current_decoding_time_ms_ = 0;
  }

  // Callback - output decoded frame.
  int32_t callback_status = WEBRTC_VIDEO_CODEC_OK;
  if (use_surface_) {
    native_handle_.SetTextureObject(surface_texture_, texture_id);
    TextureVideoFrame texture_image(
        &native_handle_, width, height, timestamp, 0);
    texture_image.set_ntp_time_ms(ntp_time_ms);
    callback_status = callback_->Decoded(texture_image);
  } else {
    decoded_image_.set_timestamp(timestamp);
    decoded_image_.set_ntp_time_ms(ntp_time_ms);
    callback_status = callback_->Decoded(decoded_image_);
  }
  if (callback_status > 0) {
    // Callback errors are logged but do not fail delivery.
    ALOGE("callback error");
  }

  return true;
}
|
||||
|
||||
// Stores the sink that will receive decoded frames from
// DeliverPendingOutputs().
int32_t MediaCodecVideoDecoder::RegisterDecodeCompleteCallback(
    DecodedImageCallback* callback) {
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}
|
||||
|
||||
int32_t MediaCodecVideoDecoder::Reset() {
|
||||
ALOGD("DecoderReset");
|
||||
if (!inited_) {
|
||||
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
|
||||
}
|
||||
return InitDecode(&codec_, 1);
|
||||
}
|
||||
|
||||
// Periodic poll on |codec_thread_|: drains any ready outputs and reschedules
// itself every kMediaCodecPollMs while the codec is initialized.
void MediaCodecVideoDecoder::OnMessage(rtc::Message* msg) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  if (!inited_) {
    // Released since the message was posted; stop polling.
    return;
  }
  // We only ever send one message to |this| directly (not through a Bind()'d
  // functor), so expect no ID/data.
  CHECK(!msg->message_id) << "Unexpected message!";
  CHECK(!msg->pdata) << "Unexpected message!";
  CheckOnCodecThread();

  if (!DeliverPendingOutputs(jni, 0)) {
    error_count_++;
    Reset();
  }
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
}
|
||||
|
||||
// Installs (or clears) the EGL context used for HW surface decoding.
// Takes a new JNI global ref on |render_egl_context|; any previously stored
// context ref is released first. A null or non-EGLContext argument disables
// surface decoding. Always returns 0.
int MediaCodecVideoDecoderFactory::SetAndroidObjects(JNIEnv* jni,
    jobject render_egl_context) {
  ALOGD("SetAndroidObjects for surface decoding.");
  // Release the previously held global ref, if any, before replacing it.
  if (render_egl_context_) {
    jni->DeleteGlobalRef(render_egl_context_);
  }
  if (IsNull(jni, render_egl_context)) {
    render_egl_context_ = NULL;
  } else {
    render_egl_context_ = jni->NewGlobalRef(render_egl_context);
    CHECK_EXCEPTION(jni) << "error calling NewGlobalRef for EGL Context.";
    // Validate the caller actually passed an android.opengl.EGLContext;
    // anything else is rejected and surface decoding stays disabled.
    jclass j_egl_context_class = FindClass(jni, "android/opengl/EGLContext");
    if (!jni->IsInstanceOf(render_egl_context_, j_egl_context_class)) {
      ALOGE("Wrong EGL Context.");
      jni->DeleteGlobalRef(render_egl_context_);
      render_egl_context_ = NULL;
    }
  }
  if (render_egl_context_ == NULL) {
    ALOGD("NULL VideoDecoder EGL context - HW surface decoding is disabled.");
  }
  return 0;
}
|
||||
|
||||
MediaCodecVideoDecoderFactory::MediaCodecVideoDecoderFactory() {
  // Ask org.webrtc.MediaCodecVideoDecoder.isPlatformSupported() once, up
  // front, and cache the answer for CreateVideoDecoder().
  JNIEnv* env = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_frame(env);
  jclass decoder_class = FindClass(env, "org/webrtc/MediaCodecVideoDecoder");
  jmethodID is_supported_id =
      GetStaticMethodID(env, decoder_class, "isPlatformSupported", "()Z");
  is_platform_supported_ =
      env->CallStaticBooleanMethod(decoder_class, is_supported_id);
  CHECK_EXCEPTION(env);
}
|
||||
|
||||
// Nothing to release: the only instance state is a plain bool, and decoders
// handed out by the factory are destroyed via DestroyVideoDecoder().
MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {}
|
||||
|
||||
webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
    webrtc::VideoCodecType type) {
  // HW decode is offered only for VP8 and only when the platform check in the
  // ctor succeeded; everything else gets NULL so callers fall back elsewhere.
  if (type == kVideoCodecVP8 && is_platform_supported_) {
    return new MediaCodecVideoDecoder(AttachCurrentThreadIfNeeded());
  }
  return NULL;
}
|
||||
|
||||
void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
    webrtc::VideoDecoder* decoder) {
  // Decoders returned by CreateVideoDecoder() are heap-allocated; reclaim
  // them here.
  delete decoder;
}
|
||||
|
||||
} // namespace webrtc_jni
|
||||
|
59
talk/app/webrtc/java/jni/androidmediadecoder_jni.h
Normal file
59
talk/app/webrtc/java/jni/androidmediadecoder_jni.h
Normal file
@ -0,0 +1,59 @@
|
||||
/*
|
||||
* libjingle
|
||||
* Copyright 2015 Google Inc.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice,
|
||||
* this list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the documentation
|
||||
* and/or other materials provided with the distribution.
|
||||
* 3. The name of the author may not be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
|
||||
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
|
||||
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
|
||||
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||||
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
|
||||
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
|
||||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
*/
|
||||
|
||||
#ifndef TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIADECODER_JNI_H_
|
||||
#define TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIADECODER_JNI_H_
|
||||
|
||||
#include "talk/app/webrtc/java/jni/jni_helpers.h"
|
||||
#include "talk/media/webrtc/webrtcvideodecoderfactory.h"
|
||||
|
||||
namespace webrtc_jni {
|
||||
|
||||
// Implementation of Android MediaCodec based decoder factory.
class MediaCodecVideoDecoderFactory
    : public cricket::WebRtcVideoDecoderFactory {
 public:
  MediaCodecVideoDecoderFactory();
  virtual ~MediaCodecVideoDecoderFactory();
  // Stores (as a JNI global ref) the EGL context used for HW surface
  // decoding. A null or non-EGLContext object disables surface decoding.
  static int SetAndroidObjects(JNIEnv* jni, jobject render_egl_context);

  // WebRtcVideoDecoderFactory implementation.
  // Returns NULL for codec types other than VP8 or when the platform lacks
  // MediaCodec support.
  webrtc::VideoDecoder* CreateVideoDecoder(webrtc::VideoCodecType type)
      override;

  void DestroyVideoDecoder(webrtc::VideoDecoder* decoder) override;
  // Render EGL context - owned global ref, set via SetAndroidObjects().
  static jobject render_egl_context_;

 private:
  // Cached result of MediaCodecVideoDecoder.isPlatformSupported().
  bool is_platform_supported_;
};
|
||||
|
||||
} // namespace webrtc_jni
|
||||
|
||||
#endif // TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIADECODER_JNI_H_
|
758
talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
Normal file
758
talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
Normal file
@ -0,0 +1,758 @@
|
||||
/*
|
||||
* libjingle
|
||||
* Copyright 2015 Google Inc.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice,
|
||||
* this list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the documentation
|
||||
* and/or other materials provided with the distribution.
|
||||
* 3. The name of the author may not be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
|
||||
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
|
||||
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
|
||||
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||||
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
|
||||
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
|
||||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
*/
|
||||
|
||||
#include "talk/app/webrtc/java/jni/androidmediaencoder_jni.h"
|
||||
#include "talk/app/webrtc/java/jni/classreferenceholder.h"
|
||||
#include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
|
||||
#include "webrtc/base/bind.h"
|
||||
#include "webrtc/base/checks.h"
|
||||
#include "webrtc/base/logging.h"
|
||||
#include "webrtc/base/thread.h"
|
||||
#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
|
||||
#include "webrtc/system_wrappers/interface/logcat_trace_context.h"
|
||||
#include "third_party/libyuv/include/libyuv/convert.h"
|
||||
#include "third_party/libyuv/include/libyuv/convert_from.h"
|
||||
#include "third_party/libyuv/include/libyuv/video_common.h"
|
||||
|
||||
using rtc::Bind;
|
||||
using rtc::Thread;
|
||||
using rtc::ThreadManager;
|
||||
using rtc::scoped_ptr;
|
||||
|
||||
using webrtc::CodecSpecificInfo;
|
||||
using webrtc::EncodedImage;
|
||||
using webrtc::I420VideoFrame;
|
||||
using webrtc::RTPFragmentationHeader;
|
||||
using webrtc::VideoCodec;
|
||||
using webrtc::kVideoCodecVP8;
|
||||
|
||||
namespace webrtc_jni {
|
||||
|
||||
// MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
// Android's MediaCodec SDK API behind the scenes to implement (hopefully)
// HW-backed video encode. This C++ class is implemented as a very thin shim,
// delegating all of the interesting work to org.webrtc.MediaCodecVideoEncoder.
// MediaCodecVideoEncoder is created, operated, and destroyed on a single
// thread, currently the libjingle Worker thread.
class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
                               public rtc::MessageHandler {
 public:
  virtual ~MediaCodecVideoEncoder();
  explicit MediaCodecVideoEncoder(JNIEnv* jni);

  // webrtc::VideoEncoder implementation.  Everything trampolines to
  // |codec_thread_| for execution.
  int32_t InitEncode(const webrtc::VideoCodec* codec_settings,
                     int32_t /* number_of_cores */,
                     size_t /* max_payload_size */) override;
  int32_t Encode(
      const webrtc::I420VideoFrame& input_image,
      const webrtc::CodecSpecificInfo* /* codec_specific_info */,
      const std::vector<webrtc::VideoFrameType>* frame_types) override;
  int32_t RegisterEncodeCompleteCallback(
      webrtc::EncodedImageCallback* callback) override;
  int32_t Release() override;
  int32_t SetChannelParameters(uint32_t /* packet_loss */,
                               int64_t /* rtt */) override;
  int32_t SetRates(uint32_t new_bit_rate, uint32_t frame_rate) override;

  // rtc::MessageHandler implementation. Used as a periodic output-poll
  // timer (re-posted with kMediaCodecPollMs).
  void OnMessage(rtc::Message* msg) override;

 private:
  // CHECK-fail if not running on |codec_thread_|.
  void CheckOnCodecThread();

  // Release() and InitEncode() in an attempt to restore the codec to an
  // operable state. Necessary after all manner of OMX-layer errors.
  void ResetCodec();

  // Implementation of webrtc::VideoEncoder methods above, all running on the
  // codec thread exclusively.
  //
  // If width==0 then this is assumed to be a re-initialization and the
  // previously-current values are reused instead of the passed parameters
  // (makes it easier to reason about thread-safety).
  int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps);
  int32_t EncodeOnCodecThread(
      const webrtc::I420VideoFrame& input_image,
      const std::vector<webrtc::VideoFrameType>* frame_types);
  int32_t RegisterEncodeCompleteCallbackOnCodecThread(
      webrtc::EncodedImageCallback* callback);
  int32_t ReleaseOnCodecThread();
  int32_t SetRatesOnCodecThread(uint32_t new_bit_rate, uint32_t frame_rate);

  // Helper accessors for MediaCodecVideoEncoder$OutputBufferInfo members.
  int GetOutputBufferInfoIndex(JNIEnv* jni, jobject j_output_buffer_info);
  jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
  bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
  jlong GetOutputBufferInfoPresentationTimestampUs(
      JNIEnv* jni,
      jobject j_output_buffer_info);

  // Deliver any outputs pending in the MediaCodec to our |callback_| and return
  // true on success.
  bool DeliverPendingOutputs(JNIEnv* jni);

  // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
  // |codec_thread_| synchronously.
  webrtc::EncodedImageCallback* callback_;

  // State that is constant for the lifetime of this object once the ctor
  // returns.
  scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
  ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
  ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
  // JNI method/field IDs cached in the ctor to avoid per-frame lookups.
  jmethodID j_init_encode_method_;
  jmethodID j_dequeue_input_buffer_method_;
  jmethodID j_encode_method_;
  jmethodID j_release_method_;
  jmethodID j_set_rates_method_;
  jmethodID j_dequeue_output_buffer_method_;
  jmethodID j_release_output_buffer_method_;
  jfieldID j_color_format_field_;
  jfieldID j_info_index_field_;
  jfieldID j_info_buffer_field_;
  jfieldID j_info_is_key_frame_field_;
  jfieldID j_info_presentation_timestamp_us_field_;

  // State that is valid only between InitEncode() and the next Release().
  // Touched only on codec_thread_ so no explicit synchronization necessary.
  int width_;   // Frame width in pixels.
  int height_;  // Frame height in pixels.
  bool inited_;
  uint16_t picture_id_;
  enum libyuv::FourCC encoder_fourcc_;  // Encoder color space format.
  int last_set_bitrate_kbps_;  // Last-requested bitrate in kbps.
  int last_set_fps_;  // Last-requested frame rate.
  int64_t current_timestamp_us_;  // Current frame timestamps in us.
  int frames_received_;  // Number of frames received by encoder.
  int frames_dropped_;  // Number of frames dropped by encoder.
  int frames_resolution_update_;  // Number of frames with new codec resolution.
  int frames_in_queue_;  // Number of frames in encoder queue.
  int64_t start_time_ms_;  // Start time for statistics.
  int current_frames_;  // Number of frames in the current statistics interval.
  int current_bytes_;  // Encoded bytes in the current statistics interval.
  int current_encoding_time_ms_;  // Encoding time in the current statistics
                                  // interval (ms).
  int64_t last_input_timestamp_ms_;  // Timestamp of last received yuv frame.
  int64_t last_output_timestamp_ms_;  // Timestamp of last encoded frame.
  std::vector<int32_t> timestamps_;  // Video frames timestamp queue.
  std::vector<int64_t> render_times_ms_;  // Video frames render time queue.
  std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to
                                             // encoder input.
  // Frame size in bytes fed to MediaCodec.
  int yuv_size_;
  // True only when between a callback_->Encoded() call return a positive value
  // and the next Encode() call being ignored.
  bool drop_next_input_frame_;
  // Global references; must be deleted in Release().
  std::vector<jobject> input_buffers_;
};
|
||||
|
||||
MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
  // Call Release() to ensure no more callbacks to us after we are deleted.
  Release();
}
|
||||
|
||||
// Constructs the shim: spins up the dedicated codec thread, instantiates the
// Java org.webrtc.MediaCodecVideoEncoder peer, and caches every JNI
// method/field ID used on the per-frame paths.
MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni)
    : callback_(NULL),
      inited_(false),
      picture_id_(0),
      codec_thread_(new Thread()),
      j_media_codec_video_encoder_class_(
          jni,
          FindClass(jni, "org/webrtc/MediaCodecVideoEncoder")),
      j_media_codec_video_encoder_(
          jni,
          jni->NewObject(*j_media_codec_video_encoder_class_,
                         GetMethodID(jni,
                                     *j_media_codec_video_encoder_class_,
                                     "<init>",
                                     "()V"))) {
  ScopedLocalRefFrame local_ref_frame(jni);
  // It would be nice to avoid spinning up a new thread per MediaCodec, and
  // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
  // 2732 means that deadlocks abound. This class synchronously trampolines
  // to |codec_thread_|, so if anything else can be coming to _us_ from
  // |codec_thread_|, or from any thread holding the |_sendCritSect| described
  // in the bug, we have a problem. For now work around that with a dedicated
  // thread.
  codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
  CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";

  jclass j_output_buffer_info_class =
      FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
  // Cache the Java method IDs once so frame-by-frame calls skip the lookup.
  j_init_encode_method_ = GetMethodID(jni,
                                      *j_media_codec_video_encoder_class_,
                                      "initEncode",
                                      "(IIII)[Ljava/nio/ByteBuffer;");
  j_dequeue_input_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
  j_encode_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "encode", "(ZIIJ)Z");
  j_release_method_ =
      GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
  j_set_rates_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z");
  j_dequeue_output_buffer_method_ =
      GetMethodID(jni,
                  *j_media_codec_video_encoder_class_,
                  "dequeueOutputBuffer",
                  "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;");
  j_release_output_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z");

  // Field IDs for MediaCodecVideoEncoder.colorFormat and the members of the
  // OutputBufferInfo helper class.
  j_color_format_field_ =
      GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I");
  j_info_index_field_ =
      GetFieldID(jni, j_output_buffer_info_class, "index", "I");
  j_info_buffer_field_ = GetFieldID(
      jni, j_output_buffer_info_class, "buffer", "Ljava/nio/ByteBuffer;");
  j_info_is_key_frame_field_ =
      GetFieldID(jni, j_output_buffer_info_class, "isKeyFrame", "Z");
  j_info_presentation_timestamp_us_field_ = GetFieldID(
      jni, j_output_buffer_info_class, "presentationTimestampUs", "J");
  CHECK_EXCEPTION(jni) << "MediaCodecVideoEncoder ctor failed";
  // NOTE(review): bare helper call — presumably a shared helper (see
  // androidmediacodeccommon.h) that permits blocking Invoke()s; confirm.
  AllowBlockingCalls();
}
|
||||
|
||||
int32_t MediaCodecVideoEncoder::InitEncode(
    const webrtc::VideoCodec* codec_settings,
    int32_t /* number_of_cores */,
    size_t /* max_payload_size */) {
  // Factory should guard against other codecs being used with us.
  CHECK(codec_settings->codecType == kVideoCodecVP8) << "Unsupported codec";

  // Unpack the relevant settings and trampoline to the codec thread.
  const int width = codec_settings->width;
  const int height = codec_settings->height;
  const int start_bitrate_kbps = codec_settings->startBitrate;
  const int max_framerate = codec_settings->maxFramerate;
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread, this,
           width, height, start_bitrate_kbps, max_framerate));
}
|
||||
|
||||
int32_t MediaCodecVideoEncoder::Encode(
    const webrtc::I420VideoFrame& frame,
    const webrtc::CodecSpecificInfo* /* codec_specific_info */,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  // All real work happens on |codec_thread_|; block until it finishes.
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoEncoder::EncodeOnCodecThread, this, frame,
           frame_types));
}
|
||||
|
||||
int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* callback) {
  // Store the callback from the codec thread so |callback_| needs no locking.
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread,
           this, callback));
}
|
||||
|
||||
int32_t MediaCodecVideoEncoder::Release() {
  // Synchronous teardown on the codec thread.
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
}
|
||||
|
||||
int32_t MediaCodecVideoEncoder::SetChannelParameters(uint32_t /* packet_loss */,
                                                     int64_t /* rtt */) {
  // Packet-loss/RTT feedback is ignored by this encoder.
  return WEBRTC_VIDEO_CODEC_OK;
}
|
||||
|
||||
int32_t MediaCodecVideoEncoder::SetRates(uint32_t new_bit_rate,
                                         uint32_t frame_rate) {
  // Rate updates, like everything else, run on |codec_thread_|.
  return codec_thread_->Invoke<int32_t>(
      Bind(&MediaCodecVideoEncoder::SetRatesOnCodecThread, this, new_bit_rate,
           frame_rate));
}
|
||||
|
||||
// Handler for the self-posted poll message: drains encoder output and
// re-arms the delayed poll. Runs on |codec_thread_|.
void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  // We only ever send one message to |this| directly (not through a Bind()'d
  // functor), so expect no ID/data.
  CHECK(!msg->message_id) << "Unexpected message!";
  CHECK(!msg->pdata) << "Unexpected message!";
  CheckOnCodecThread();
  // Stop polling once the encoder has been released.
  if (!inited_) {
    return;
  }

  // It would be nice to recover from a failure here if one happened, but it's
  // unclear how to signal such a failure to the app, so instead we stay silent
  // about it and let the next app-called API method reveal the borkedness.
  DeliverPendingOutputs(jni);
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
}
|
||||
|
||||
void MediaCodecVideoEncoder::CheckOnCodecThread() {
|
||||
CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
|
||||
<< "Running on wrong thread!";
|
||||
}
|
||||
|
||||
void MediaCodecVideoEncoder::ResetCodec() {
|
||||
ALOGE("ResetCodec");
|
||||
if (Release() != WEBRTC_VIDEO_CODEC_OK ||
|
||||
codec_thread_->Invoke<int32_t>(Bind(
|
||||
&MediaCodecVideoEncoder::InitEncodeOnCodecThread, this,
|
||||
width_, height_, 0, 0)) != WEBRTC_VIDEO_CODEC_OK) {
|
||||
// TODO(fischman): wouldn't it be nice if there was a way to gracefully
|
||||
// degrade to a SW encoder at this point? There isn't one AFAICT :(
|
||||
// https://code.google.com/p/webrtc/issues/detail?id=2920
|
||||
}
|
||||
}
|
||||
|
||||
// (Re)configures the Java encoder on |codec_thread_|. A |kbps| or |fps| of 0
// means "reuse the last-set value" (used by ResetCodec()). Resets all
// per-session statistics and queues, calls initEncode() on the Java peer,
// grabs global refs to its direct input ByteBuffers, and starts the
// output-poll loop.
int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
    int width, int height, int kbps, int fps) {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  ALOGD("InitEncodeOnCodecThread %d x %d. Bitrate: %d kbps. Fps: %d",
      width, height, kbps, fps);
  // 0 means "keep previous": substitute the last-requested rates.
  if (kbps == 0) {
    kbps = last_set_bitrate_kbps_;
  }
  if (fps == 0) {
    fps = last_set_fps_;
  }

  // Reset per-session state, statistics, and timestamp queues.
  width_ = width;
  height_ = height;
  last_set_bitrate_kbps_ = kbps;
  last_set_fps_ = fps;
  yuv_size_ = width_ * height_ * 3 / 2;  // I420: one luma + two quarter planes.
  frames_received_ = 0;
  frames_dropped_ = 0;
  frames_resolution_update_ = 0;
  frames_in_queue_ = 0;
  current_timestamp_us_ = 0;
  start_time_ms_ = GetCurrentTimeMs();
  current_frames_ = 0;
  current_bytes_ = 0;
  current_encoding_time_ms_ = 0;
  last_input_timestamp_ms_ = -1;
  last_output_timestamp_ms_ = -1;
  timestamps_.clear();
  render_times_ms_.clear();
  frame_rtc_times_ms_.clear();
  drop_next_input_frame_ = false;
  // Random 15-bit starting picture ID (top bit masked off).
  picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
  // We enforce no extra stride/padding in the format creation step.
  jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
      jni->CallObjectMethod(*j_media_codec_video_encoder_,
                            j_init_encode_method_,
                            width_,
                            height_,
                            kbps,
                            fps));
  CHECK_EXCEPTION(jni);
  // A null buffer array signals that the Java-side initEncode() failed.
  if (IsNull(jni, input_buffers))
    return WEBRTC_VIDEO_CODEC_ERROR;

  inited_ = true;
  // Map the color format chosen by the Java encoder to a libyuv FourCC used
  // for the I420 conversion in EncodeOnCodecThread().
  switch (GetIntField(jni, *j_media_codec_video_encoder_,
      j_color_format_field_)) {
    case COLOR_FormatYUV420Planar:
      encoder_fourcc_ = libyuv::FOURCC_YU12;
      break;
    case COLOR_FormatYUV420SemiPlanar:
    case COLOR_QCOM_FormatYUV420SemiPlanar:
    case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
      encoder_fourcc_ = libyuv::FOURCC_NV12;
      break;
    default:
      LOG(LS_ERROR) << "Wrong color format.";
      return WEBRTC_VIDEO_CODEC_ERROR;
  }
  // Pin the Java input buffers with global refs (released in
  // ReleaseOnCodecThread()) and sanity-check their capacity.
  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
  CHECK(input_buffers_.empty())
      << "Unexpected double InitEncode without Release";
  input_buffers_.resize(num_input_buffers);
  for (size_t i = 0; i < num_input_buffers; ++i) {
    input_buffers_[i] =
        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
    int64 yuv_buffer_capacity =
        jni->GetDirectBufferCapacity(input_buffers_[i]);
    CHECK_EXCEPTION(jni);
    CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
  }
  CHECK_EXCEPTION(jni);

  // Kick off the periodic output poll (see OnMessage()).
  codec_thread_->PostDelayed(kMediaCodecPollMs, this);
  return WEBRTC_VIDEO_CODEC_OK;
}
|
||||
|
||||
// Encodes one frame on |codec_thread_|: drains any pending outputs, applies
// the frame-drop policies (failed callback, resolution change, encoder
// falling behind), converts the I420 frame into a MediaCodec input buffer,
// and queues it for encoding.
int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
    const webrtc::I420VideoFrame& frame,
    const std::vector<webrtc::VideoFrameType>* frame_types) {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  frames_received_++;
  if (!DeliverPendingOutputs(jni)) {
    ResetCodec();
    // Continue as if everything's fine.
  }

  // Set when a previous callback_->Encoded() returned non-zero; skip exactly
  // one input frame in response.
  if (drop_next_input_frame_) {
    ALOGV("Encoder drop frame - failed callback.");
    drop_next_input_frame_ = false;
    return WEBRTC_VIDEO_CODEC_OK;
  }

  CHECK(frame_types->size() == 1) << "Unexpected stream count";
  // Tolerate a short burst of frames at a new resolution; only reconfigure
  // the codec once the new size persists.
  if (frame.width() != width_ || frame.height() != height_) {
    frames_resolution_update_++;
    ALOGD("Unexpected frame resolution change from %d x %d to %d x %d",
        width_, height_, frame.width(), frame.height());
    if (frames_resolution_update_ > 3) {
      // Reset codec if we received more than 3 frames with new resolution.
      width_ = frame.width();
      height_ = frame.height();
      frames_resolution_update_ = 0;
      ResetCodec();
    }
    return WEBRTC_VIDEO_CODEC_OK;
  }
  frames_resolution_update_ = 0;

  bool key_frame = frame_types->front() != webrtc::kDeltaFrame;

  // Check if we accumulated too many frames in encoder input buffers
  // or the encoder latency exceeds 70 ms and drop frame if so.
  if (frames_in_queue_ > 0 && last_input_timestamp_ms_ >= 0) {
    int encoder_latency_ms = last_input_timestamp_ms_ -
        last_output_timestamp_ms_;
    if (frames_in_queue_ > 2 || encoder_latency_ms > 70) {
      ALOGD("Drop frame - encoder is behind by %d ms. Q size: %d",
          encoder_latency_ms, frames_in_queue_);
      frames_dropped_++;
      return WEBRTC_VIDEO_CODEC_OK;
    }
  }

  int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
                                                j_dequeue_input_buffer_method_);
  CHECK_EXCEPTION(jni);
  if (j_input_buffer_index == -1) {
    // Video codec falls behind - no input buffer available.
    ALOGV("Encoder drop frame - no input buffers available");
    frames_dropped_++;
    return WEBRTC_VIDEO_CODEC_OK;  // TODO(fischman): see webrtc bug 2887.
  }
  // NOTE(review): -2 appears to be the Java helper's "codec error" sentinel
  // (vs. -1 for "no buffer"); confirm in MediaCodecVideoEncoder.java.
  if (j_input_buffer_index == -2) {
    ResetCodec();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  ALOGV("Encode frame # %d. Buffer # %d. TS: %lld.",
      frames_received_, j_input_buffer_index, current_timestamp_us_ / 1000);

  // Copy/convert the I420 frame into the codec's direct ByteBuffer in the
  // color layout negotiated at init (|encoder_fourcc_|).
  jobject j_input_buffer = input_buffers_[j_input_buffer_index];
  uint8* yuv_buffer =
      reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
  CHECK_EXCEPTION(jni);
  CHECK(yuv_buffer) << "Indirect buffer??";
  CHECK(!libyuv::ConvertFromI420(
      frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
      frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
      frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
      yuv_buffer, width_,
      width_, height_,
      encoder_fourcc_))
      << "ConvertFromI420 failed";
  last_input_timestamp_ms_ = current_timestamp_us_ / 1000;
  frames_in_queue_++;

  // Save input image timestamps for later output
  timestamps_.push_back(frame.timestamp());
  render_times_ms_.push_back(frame.render_time_ms());
  frame_rtc_times_ms_.push_back(GetCurrentTimeMs());

  bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                              j_encode_method_,
                                              key_frame,
                                              j_input_buffer_index,
                                              yuv_size_,
                                              current_timestamp_us_);
  CHECK_EXCEPTION(jni);
  // Synthesize the next presentation timestamp from the requested frame rate.
  current_timestamp_us_ += 1000000 / last_set_fps_;

  if (!encode_status || !DeliverPendingOutputs(jni)) {
    ResetCodec();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  return WEBRTC_VIDEO_CODEC_OK;
}
|
||||
|
||||
int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
    webrtc::EncodedImageCallback* callback) {
  CheckOnCodecThread();
  JNIEnv* env = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_frame(env);
  // |callback_| is only touched on the codec thread, so a plain store is
  // sufficient.
  callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}
|
||||
|
||||
// Tears the encoder down on |codec_thread_|: releases the pinned Java input
// buffers, calls release() on the Java peer, and cancels the pending poll
// messages. Idempotent - returns OK immediately when not inited.
int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
  if (!inited_) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ALOGD("EncoderRelease: Frames received: %d. Frames dropped: %d.",
      frames_received_, frames_dropped_);
  ScopedLocalRefFrame local_ref_frame(jni);
  // Drop the global refs taken in InitEncodeOnCodecThread() before releasing
  // the codec itself.
  for (size_t i = 0; i < input_buffers_.size(); ++i)
    jni->DeleteGlobalRef(input_buffers_[i]);
  input_buffers_.clear();
  jni->CallVoidMethod(*j_media_codec_video_encoder_, j_release_method_);
  CHECK_EXCEPTION(jni);
  // Remove any queued poll messages targeting |this|.
  rtc::MessageQueueManager::Clear(this);
  inited_ = false;
  return WEBRTC_VIDEO_CODEC_OK;
}
|
||||
|
||||
// Forwards a bitrate/frame-rate update to the Java encoder. A value of 0
// leaves the corresponding last-set value unchanged; a no-op update (both
// values already current) skips the JNI call entirely.
int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
                                                      uint32_t frame_rate) {
  CheckOnCodecThread();
  // |last_set_bitrate_kbps_|/|last_set_fps_| are ints; compare via explicit
  // casts to avoid implicit signed/unsigned comparison.
  if (last_set_bitrate_kbps_ == static_cast<int>(new_bit_rate) &&
      last_set_fps_ == static_cast<int>(frame_rate)) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  // 0 means "keep the previous value".
  if (new_bit_rate > 0) {
    last_set_bitrate_kbps_ = new_bit_rate;
  }
  if (frame_rate > 0) {
    last_set_fps_ = frame_rate;
  }
  bool ret = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                    j_set_rates_method_,
                                    last_set_bitrate_kbps_,
                                    last_set_fps_);
  CHECK_EXCEPTION(jni);
  // A false return from the Java side means the codec rejected the update;
  // attempt recovery.
  if (!ret) {
    ResetCodec();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}
|
||||
|
||||
int MediaCodecVideoEncoder::GetOutputBufferInfoIndex(
    JNIEnv* jni, jobject j_output_buffer_info) {
  // Reads OutputBufferInfo.index (the MediaCodec output buffer index).
  return GetIntField(jni, j_output_buffer_info, j_info_index_field_);
}
|
||||
|
||||
jobject MediaCodecVideoEncoder::GetOutputBufferInfoBuffer(
    JNIEnv* jni, jobject j_output_buffer_info) {
  // Reads OutputBufferInfo.buffer (the ByteBuffer holding the payload).
  return GetObjectField(jni, j_output_buffer_info, j_info_buffer_field_);
}
|
||||
|
||||
bool MediaCodecVideoEncoder::GetOutputBufferInfoIsKeyFrame(
    JNIEnv* jni, jobject j_output_buffer_info) {
  // Reads OutputBufferInfo.isKeyFrame.
  return GetBooleanField(jni, j_output_buffer_info, j_info_is_key_frame_field_);
}
|
||||
|
||||
jlong MediaCodecVideoEncoder::GetOutputBufferInfoPresentationTimestampUs(
    JNIEnv* jni, jobject j_output_buffer_info) {
  // Reads OutputBufferInfo.presentationTimestampUs.
  return GetLongField(
      jni, j_output_buffer_info, j_info_presentation_timestamp_us_field_);
}
|
||||
|
||||
// Drains every completed output buffer from the Java MediaCodec encoder:
// pops the matching timestamps from the bookkeeping queues, logs/accumulates
// encode statistics, delivers the encoded frame to |callback_| (if set), and
// returns the buffer to the codec. Returns false (after ResetCodec()) on a
// dequeue or release failure; true once no more output is pending.
bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
  while (true) {
    jobject j_output_buffer_info = jni->CallObjectMethod(
        *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
    CHECK_EXCEPTION(jni);
    if (IsNull(jni, j_output_buffer_info)) {
      // No more encoded frames ready - done for now.
      break;
    }

    int output_buffer_index =
        GetOutputBufferInfoIndex(jni, j_output_buffer_info);
    if (output_buffer_index == -1) {
      // Java side signals a codec error with index -1.
      ResetCodec();
      return false;
    }

    // Get frame timestamps from a queue.
    last_output_timestamp_ms_ =
        GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
        1000;
    int32_t timestamp = timestamps_.front();
    timestamps_.erase(timestamps_.begin());
    int64_t render_time_ms = render_times_ms_.front();
    render_times_ms_.erase(render_times_ms_.begin());
    int64_t frame_encoding_time_ms = GetCurrentTimeMs() -
        frame_rtc_times_ms_.front();
    frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
    frames_in_queue_--;

    // Extract payload and key frame flag.
    int32_t callback_status = 0;
    jobject j_output_buffer =
        GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
    bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);
    size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
    // Fix: use uint8_t consistently - the original declared the pointer as
    // the legacy |uint8| typedef while casting to uint8_t* on the same line.
    uint8_t* payload = reinterpret_cast<uint8_t*>(
        jni->GetDirectBufferAddress(j_output_buffer));
    CHECK_EXCEPTION(jni);

    // Fix: cast payload_size for the %d specifier - passing a size_t through
    // varargs where an int is expected is undefined behavior on platforms
    // where the two types differ in size.
    ALOGV("Encoder got output buffer # %d. Size: %d. TS: %lld. Latency: %lld."
        " EncTime: %lld",
        output_buffer_index, static_cast<int>(payload_size),
        last_output_timestamp_ms_,
        last_input_timestamp_ms_ - last_output_timestamp_ms_,
        frame_encoding_time_ms);

    // Calculate and print encoding statistics - every 3 seconds.
    current_frames_++;
    current_bytes_ += payload_size;
    current_encoding_time_ms_ += frame_encoding_time_ms;
    int statistic_time_ms = GetCurrentTimeMs() - start_time_ms_;
    if (statistic_time_ms >= kMediaCodecStatisticsIntervalMs &&
        current_frames_ > 0) {
      ALOGD("Encoder bitrate: %d, target: %d kbps, fps: %d,"
          " encTime: %d for last %d ms",
          current_bytes_ * 8 / statistic_time_ms,
          last_set_bitrate_kbps_,
          (current_frames_ * 1000 + statistic_time_ms / 2) / statistic_time_ms,
          current_encoding_time_ms_ / current_frames_, statistic_time_ms);
      start_time_ms_ = GetCurrentTimeMs();
      current_frames_ = 0;
      current_bytes_ = 0;
      current_encoding_time_ms_ = 0;
    }

    // Callback - return encoded frame.
    if (callback_) {
      scoped_ptr<webrtc::EncodedImage> image(
          new webrtc::EncodedImage(payload, payload_size, payload_size));
      image->_encodedWidth = width_;
      image->_encodedHeight = height_;
      image->_timeStamp = timestamp;
      image->capture_time_ms_ = render_time_ms;
      image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
      image->_completeFrame = true;

      webrtc::CodecSpecificInfo info;
      memset(&info, 0, sizeof(info));
      info.codecType = kVideoCodecVP8;
      info.codecSpecific.VP8.pictureId = picture_id_;
      info.codecSpecific.VP8.nonReference = false;
      info.codecSpecific.VP8.simulcastIdx = 0;
      info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
      info.codecSpecific.VP8.layerSync = false;
      info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
      info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
      // VP8 picture id is 15 bits wide - wrap around.
      picture_id_ = (picture_id_ + 1) & 0x7FFF;

      // Generate a header describing a single fragment.
      webrtc::RTPFragmentationHeader header;
      memset(&header, 0, sizeof(header));
      header.VerifyAndAllocateFragmentationHeader(1);
      header.fragmentationOffset[0] = 0;
      header.fragmentationLength[0] = image->_length;
      header.fragmentationPlType[0] = 0;
      header.fragmentationTimeDiff[0] = 0;

      callback_status = callback_->Encoded(*image, &info, &header);
    }

    // Return output buffer back to the encoder.
    bool success = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                          j_release_output_buffer_method_,
                                          output_buffer_index);
    CHECK_EXCEPTION(jni);
    if (!success) {
      ResetCodec();
      return false;
    }

    if (callback_status > 0) {
      // Positive callback status asks us to drop the next input frame.
      drop_next_input_frame_ = true;
      // Theoretically could handle callback_status<0 here, but unclear what
      // that would mean for us.
    }
  }

  return true;
}
|
||||
|
||||
// Probes org.webrtc.MediaCodecVideoEncoder for platform support; when the
// device qualifies, registers the codecs this hardware encoder can produce.
MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
  jmethodID j_is_supported_method =
      GetStaticMethodID(jni, j_encoder_class, "isPlatformSupported", "()Z");
  bool is_platform_supported =
      jni->CallStaticBooleanMethod(j_encoder_class, j_is_supported_method);
  CHECK_EXCEPTION(jni);
  if (!is_platform_supported) {
    // Leave |supported_codecs_| empty; CreateVideoEncoder() will return NULL.
    return;
  }

  // Wouldn't it be nice if MediaCodec exposed the maximum capabilities of the
  // encoder? Sure would be. Too bad it doesn't. So we hard-code some
  // reasonable defaults.
  supported_codecs_.push_back(
      VideoCodec(kVideoCodecVP8, "VP8", 1280, 1280, 30));
}
|
||||
|
||||
// Nothing to release explicitly: |supported_codecs_| cleans itself up, and
// encoders handed out are destroyed via DestroyVideoEncoder().
MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {}
|
||||
|
||||
// Hands out a new hardware VP8 encoder attached to the current thread's JNI
// env, or NULL when |type| is unsupported or platform support was not
// detected at construction time (supported_codecs_ empty).
webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
    webrtc::VideoCodecType type) {
  const bool can_encode =
      (type == kVideoCodecVP8) && !supported_codecs_.empty();
  if (!can_encode) {
    return NULL;
  }
  return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded());
}
|
||||
|
||||
// Returns the codecs this factory can create encoders for; empty when the
// constructor found no MediaCodec platform support.
const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>&
MediaCodecVideoEncoderFactory::codecs() const {
  return supported_codecs_;
}
|
||||
|
||||
// Factory-paired deletion: encoders obtained from CreateVideoEncoder() must
// be released here rather than deleted directly by the caller.
void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
    webrtc::VideoEncoder* encoder) {
  delete encoder;
}
|
||||
|
||||
} // namespace webrtc_jni
|
||||
|
59
talk/app/webrtc/java/jni/androidmediaencoder_jni.h
Normal file
59
talk/app/webrtc/java/jni/androidmediaencoder_jni.h
Normal file
@ -0,0 +1,59 @@
|
||||
/*
|
||||
* libjingle
|
||||
* Copyright 2015 Google Inc.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice,
|
||||
* this list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the documentation
|
||||
* and/or other materials provided with the distribution.
|
||||
* 3. The name of the author may not be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
|
||||
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
|
||||
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
|
||||
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||||
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
|
||||
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
|
||||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
*/
|
||||
|
||||
#ifndef TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIAENCODER_JNI_H_
|
||||
#define TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIAENCODER_JNI_H_
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include "talk/app/webrtc/java/jni/jni_helpers.h"
|
||||
#include "talk/media/webrtc/webrtcvideoencoderfactory.h"
|
||||
|
||||
namespace webrtc_jni {
|
||||
|
||||
// Implementation of Android MediaCodec based encoder factory.
class MediaCodecVideoEncoderFactory
    : public cricket::WebRtcVideoEncoderFactory {
 public:
  // Probes org.webrtc.MediaCodecVideoEncoder for platform support and, when
  // available, populates |supported_codecs_|.
  MediaCodecVideoEncoderFactory();
  virtual ~MediaCodecVideoEncoderFactory();

  // WebRtcVideoEncoderFactory implementation.
  // Returns NULL when |type| is not listed in codecs().
  webrtc::VideoEncoder* CreateVideoEncoder(webrtc::VideoCodecType type)
      override;
  const std::vector<VideoCodec>& codecs() const override;
  // Encoders returned by CreateVideoEncoder() must be released here.
  void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) override;

 private:
  // Empty if platform support is lacking, const after ctor returns.
  std::vector<VideoCodec> supported_codecs_;
};
|
||||
|
||||
} // namespace webrtc_jni
|
||||
|
||||
#endif // TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIAENCODER_JNI_H_
|
70
talk/app/webrtc/java/jni/native_handle_impl.h
Normal file
70
talk/app/webrtc/java/jni/native_handle_impl.h
Normal file
@ -0,0 +1,70 @@
|
||||
/*
|
||||
* libjingle
|
||||
* Copyright 2015 Google Inc.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without
|
||||
* modification, are permitted provided that the following conditions are met:
|
||||
*
|
||||
* 1. Redistributions of source code must retain the above copyright notice,
|
||||
* this list of conditions and the following disclaimer.
|
||||
* 2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
* this list of conditions and the following disclaimer in the documentation
|
||||
* and/or other materials provided with the distribution.
|
||||
* 3. The name of the author may not be used to endorse or promote products
|
||||
* derived from this software without specific prior written permission.
|
||||
*
|
||||
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
|
||||
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
|
||||
* EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
|
||||
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
|
||||
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
|
||||
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
|
||||
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
*/
|
||||
|
||||
#ifndef TALK_APP_WEBRTC_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
|
||||
#define TALK_APP_WEBRTC_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
|
||||
|
||||
#include "webrtc/common_video/interface/texture_video_frame.h"
|
||||
|
||||
namespace webrtc_jni {
|
||||
|
||||
// Wrapper for texture object in TextureVideoFrame.
// Implements webrtc::NativeHandle over a Java texture object (a jobject) plus
// its OpenGL texture id.
// NOTE(review): the jobject type relies on <jni.h> being pulled in
// transitively by the includes above - confirm.
class NativeHandleImpl : public webrtc::NativeHandle {
 public:
  NativeHandleImpl() :
    ref_count_(0), texture_object_(NULL), texture_id_(-1) {}
  virtual ~NativeHandleImpl() {}
  // NOTE(review): ref counting is a plain int32_t increment/decrement, not
  // atomic, and Release() never deletes |this| - the owner manages lifetime.
  // Presumably all ref counting happens on a single thread; verify callers.
  virtual int32_t AddRef() {
    return ++ref_count_;
  }
  virtual int32_t Release() {
    return --ref_count_;
  }
  // Returns the raw Java texture object as an opaque handle.
  virtual void* GetHandle() {
    return texture_object_;
  }
  // OpenGL texture id paired with the texture object; -1 until set.
  int GetTextureId() {
    return texture_id_;
  }
  // Stores the Java texture object (passed as void*) and its texture id.
  void SetTextureObject(void *texture_object, int texture_id) {
    texture_object_ = reinterpret_cast<jobject>(texture_object);
    texture_id_ = texture_id;
  }
  // Current reference count (observational; see AddRef/Release note above).
  int32_t ref_count() {
    return ref_count_;
  }

 private:
  int32_t ref_count_;
  jobject texture_object_;
  int32_t texture_id_;
};
|
||||
|
||||
} // namespace webrtc_jni
|
||||
|
||||
#endif // TALK_APP_WEBRTC_JAVA_JNI_NATIVE_HANDLE_IMPL_H_
|
File diff suppressed because it is too large
Load Diff
@ -54,6 +54,7 @@
|
||||
'app/webrtc/java/jni/classreferenceholder.h',
|
||||
'app/webrtc/java/jni/jni_helpers.cc',
|
||||
'app/webrtc/java/jni/jni_helpers.h',
|
||||
'app/webrtc/java/jni/native_handle_impl.h',
|
||||
'app/webrtc/java/jni/peerconnection_jni.cc',
|
||||
],
|
||||
'include_dirs': [
|
||||
@ -81,6 +82,15 @@
|
||||
'app/webrtc/java/jni/androidvideocapturer_jni.h',
|
||||
]
|
||||
}],
|
||||
['OS=="android" and build_with_chromium==0', {
|
||||
'sources': [
|
||||
'app/webrtc/java/jni/androidmediacodeccommon.h',
|
||||
'app/webrtc/java/jni/androidmediadecoder_jni.cc',
|
||||
'app/webrtc/java/jni/androidmediadecoder_jni.h',
|
||||
'app/webrtc/java/jni/androidmediaencoder_jni.cc',
|
||||
'app/webrtc/java/jni/androidmediaencoder_jni.h',
|
||||
]
|
||||
}],
|
||||
],
|
||||
},
|
||||
{
|
||||
|
Loading…
x
Reference in New Issue
Block a user