Fix H.264 HW decoding for Qualcomm KK devices.

- The Qualcomm H.264 HW decoder on KK and older requires
a few video frames before it can generate output. Increase
the maximum number of allowed pending frames for the H.264
decoder to 30. Also change the logging to track decoder
buffer timestamps.

R=wzh@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/36319004

Cr-Commit-Position: refs/heads/master@{#8490}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8490 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
glaznev@webrtc.org 2015-02-25 00:02:50 +00:00
parent 49096de442
commit a4623d26d7
3 changed files with 27 additions and 14 deletions

View File

@@ -65,9 +65,13 @@ enum COLOR_FORMATTYPE {
// Arbitrary interval to poll the codec for new outputs.
enum { kMediaCodecPollMs = 10 };
// Media codec maximum output buffer ready timeout.
enum { kMediaCodecTimeoutMs = 500 };
enum { kMediaCodecTimeoutMs = 1000 };
// Interval to print codec statistics (bitrate, fps, encoding/decoding time).
enum { kMediaCodecStatisticsIntervalMs = 3000 };
// Maximum amount of pending frames for VP8 decoder.
enum { kMaxPendingFramesVp8 = 1 };
// Maximum amount of pending frames for H.264 decoder.
enum { kMaxPendingFramesH264 = 30 };
static inline int64_t GetCurrentTimeMs() {
return webrtc::TickTime::Now().Ticks() / 1000000LL;

View File

@@ -315,9 +315,15 @@ int32_t MediaCodecVideoDecoder::InitDecodeOnCodecThread() {
}
inited_ = true;
switch (codecType_) {
case kVideoCodecVP8:
max_pending_frames_ = kMaxPendingFramesVp8;
break;
case kVideoCodecH264:
max_pending_frames_ = kMaxPendingFramesH264;
break;
default:
max_pending_frames_ = 0;
if (use_surface_) {
max_pending_frames_ = 1;
}
start_time_ms_ = GetCurrentTimeMs();
current_frames_ = 0;
@@ -436,7 +442,8 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
// Try to drain the decoder and wait until output is not too
// much behind the input.
if (frames_received_ > frames_decoded_ + max_pending_frames_) {
ALOGV("Wait for output...");
ALOGV("Received: %d. Decoded: %d. Wait for output...",
frames_received_, frames_decoded_);
if (!DeliverPendingOutputs(jni, kMediaCodecTimeoutMs * 1000)) {
error_count_++;
Reset();
@@ -475,9 +482,10 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
Reset();
return WEBRTC_VIDEO_CODEC_ERROR;
}
ALOGV("Decoder frame in # %d. Type: %d. Buffer # %d. Size: %d",
frames_received_, inputImage._frameType,
j_input_buffer_index, inputImage._length);
jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
ALOGV("Decoder frame in # %d. Type: %d. Buffer # %d. TS: %lld. Size: %d",
frames_received_, inputImage._frameType, j_input_buffer_index,
timestamp_us / 1000, inputImage._length);
memcpy(buffer, inputImage._buffer, inputImage._length);
// Save input image timestamps for later output.
@@ -488,7 +496,6 @@ int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
// Feed input to decoder.
jlong timestamp_us = (frames_received_ * 1000000) / codec_.maxFramerate;
bool success = jni->CallBooleanMethod(*j_media_codec_video_decoder_,
j_queue_input_buffer_method_,
j_input_buffer_index,
@@ -541,6 +548,9 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
GetIntField(jni, j_decoder_output_buffer_info, j_info_offset_field_);
int output_buffer_size =
GetIntField(jni, j_decoder_output_buffer_info, j_info_size_field_);
long output_timestamps_ms = GetLongField(jni, j_decoder_output_buffer_info,
j_info_presentation_timestamp_us_field_) / 1000;
CHECK_EXCEPTION(jni);
// Get decoded video frame properties.
@@ -610,9 +620,9 @@ bool MediaCodecVideoDecoder::DeliverPendingOutputs(
frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
}
ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. Size: %d."
ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. TS: %ld."
" DecTime: %lld", frames_decoded_, width, height, stride, slice_height,
color_format, output_buffer_size, frame_decoding_time_ms);
color_format, output_timestamps_ms, frame_decoding_time_ms);
// Return output buffer back to codec.
bool success = jni->CallBooleanMethod(

View File

@@ -41,7 +41,6 @@ import android.opengl.EGLSurface;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
@@ -465,10 +464,10 @@ class MediaCodecVideoDecoder {
result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = mediaCodec.getOutputBuffers();
Log.d(TAG, "Output buffers changed: " + outputBuffers.length);
Log.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
} else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat format = mediaCodec.getOutputFormat();
Log.d(TAG, "Format changed: " + format.toString());
Log.d(TAG, "Decoder format changed: " + format.toString());
width = format.getInteger(MediaFormat.KEY_WIDTH);
height = format.getInteger(MediaFormat.KEY_HEIGHT);
if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {