Add support for NVidia VP8 HW encoder.
- Change the HW VP8 encoder search logic to detect a HW codec with a supported color space format.
- Support yuv420 and nv12 formats for encoder input.
- Add extra logging and encoder frame-drop statistics.

BUG=3176
R=fischman@webrtc.org, tkchin@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/12719004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@6389 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
parent fd59c39caa
commit a40210aee2
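At a high level, the CL replaces the hard-coded Nexus 5 / QCOM assumption with a probe of MediaCodecList: find a hardware VP8 component whose advertised color formats intersect a small allow-list, create it by component name, and pass the chosen color format both to the MediaFormat and (via JNI) to the native encoder, which picks the matching libyuv conversion for its I420 input. A condensed Java sketch of that flow, using names from the diff below (error handling and threading omitted; not the literal code):

    // Condensed sketch of the new selection flow (names from the diff below).
    EncoderProperties properties = findVp8HwEncoder();  // Probes MediaCodecList.
    if (properties == null) {
      throw new RuntimeException("Can not find HW VP8 encoder");
    }
    MediaFormat format =
        MediaFormat.createVideoFormat(VP8_MIME_TYPE, width, height);
    format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate(kbps));
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
    MediaCodec mediaCodec = MediaCodec.createByCodecName(properties.codecName);
    mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mediaCodec.start();
    // The JNI side reads colorFormat back and maps it to libyuv FOURCC_YU12
    // (planar) or FOURCC_NV12 (semi-planar) before converting input frames.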
@@ -76,7 +76,8 @@
#include "talk/media/webrtc/webrtcvideocapturer.h"
#include "talk/media/webrtc/webrtcvideoencoderfactory.h"
#include "third_party/icu/source/common/unicode/unistr.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"
#include "third_party/libyuv/include/libyuv/video_common.h"
#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
#include "webrtc/system_wrappers/interface/compile_assert.h"
#include "webrtc/system_wrappers/interface/trace.h"
@@ -1133,6 +1134,19 @@ class JavaVideoRendererWrapper : public VideoRendererInterface {
// into its own .h/.cc pair, if/when the JNI helper stuff above is extracted
// from this file.

// Color formats supported by encoder - should mirror supportedColorList
// from MediaCodecVideoEncoder.java
enum COLOR_FORMATTYPE {
  COLOR_FormatYUV420Planar = 0x13,
  COLOR_FormatYUV420SemiPlanar = 0x15,
  COLOR_QCOM_FormatYUV420SemiPlanar = 0x7FA30C00,
  // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
  // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
  // This format is presumably similar to COLOR_FormatYUV420SemiPlanar,
  // but requires some (16, 32?) byte alignment.
  COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04
};

// Arbitrary interval to poll the codec for new outputs.
enum { kMediaCodecPollMs = 10 };
@@ -1221,6 +1235,7 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
  jmethodID j_set_rates_method_;
  jmethodID j_dequeue_output_buffer_method_;
  jmethodID j_release_output_buffer_method_;
  jfieldID j_color_format_field_;
  jfieldID j_info_index_field_;
  jfieldID j_info_buffer_field_;
  jfieldID j_info_is_key_frame_field_;
@@ -1230,9 +1245,12 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
  // Touched only on codec_thread_ so no explicit synchronization necessary.
  int width_;   // Frame width in pixels.
  int height_;  // Frame height in pixels.
  enum libyuv::FourCC encoder_fourcc_;  // Encoder color space format.
  int last_set_bitrate_kbps_;  // Last-requested bitrate in kbps.
  // Frame size in bytes fed to MediaCodec (stride==width, sliceHeight==height).
  int nv12_size_;
  int frames_received_;  // Number of frames received by encoder.
  int frames_dropped_;   // Number of frames dropped by encoder.
  // Frame size in bytes fed to MediaCodec.
  int yuv_size_;
  // True only when between a callback_->Encoded() call return a positive value
  // and the next Encode() call being ignored.
  bool drop_next_input_frame_;
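The new frames_received_ / frames_dropped_ counters only feed the log line added to ReleaseOnCodecThread() further down. If a drop ratio is ever wanted, it is a one-liner on top of them; a hypothetical helper (not part of this CL):

    // Hypothetical helper built on the new counters; not part of this CL.
    static String dropStatistics(int framesReceived, int framesDropped) {
      double percent =
          framesReceived > 0 ? 100.0 * framesDropped / framesReceived : 0.0;
      return "Frames received: " + framesReceived + ". Frames dropped: "
          + framesDropped + String.format(" (%.1f%%)", percent);
    }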
@@ -1240,8 +1258,6 @@ class MediaCodecVideoEncoder : public webrtc::VideoEncoder,
  std::vector<jobject> input_buffers_;
};

enum { MSG_SET_RATES, MSG_POLL_FOR_READY_OUTPUTS, };

MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
  // We depend on ResetParameters() to ensure no more callbacks to us after we
  // are deleted, so assert it here.
@@ -1296,6 +1312,8 @@ MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni)
  j_release_output_buffer_method_ = GetMethodID(
      jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z");

  j_color_format_field_ =
      GetFieldID(jni, *j_media_codec_video_encoder_class_, "colorFormat", "I");
  j_info_index_field_ =
      GetFieldID(jni, j_output_buffer_info_class, "index", "I");
  j_info_buffer_field_ = GetFieldID(
@@ -1380,6 +1398,7 @@ void MediaCodecVideoEncoder::CheckOnCodecThread() {
}

void MediaCodecVideoEncoder::ResetCodec() {
  LOG(LS_ERROR) << "ResetCodec";
  if (Release() != WEBRTC_VIDEO_CODEC_OK ||
      codec_thread_->Invoke<int32_t>(Bind(
          &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this, 0, 0, 0)) !=
@@ -1395,6 +1414,7 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  LOG(LS_INFO) << "InitEncodeOnCodecThread " << width << " x " << height;

  if (width == 0) {
    width = width_;
@@ -1405,7 +1425,9 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
  width_ = width;
  height_ = height;
  last_set_bitrate_kbps_ = kbps;
  nv12_size_ = width_ * height_ * 3 / 2;
  yuv_size_ = width_ * height_ * 3 / 2;
  frames_received_ = 0;
  frames_dropped_ = 0;
  // We enforce no extra stride/padding in the format creation step.
  jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
      jni->CallObjectMethod(*j_media_codec_video_encoder_,
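Renaming nv12_size_ to yuv_size_ keeps the same value because both accepted input layouts are 4:2:0: a full-resolution Y plane plus chroma at quarter resolution, i.e. 1.5 bytes per pixel when there is no extra stride or padding (which the comment above enforces). As a sketch:

    // 4:2:0 frame size shared by the I420 and NV12 input layouts
    // (assumes stride == width and sliceHeight == height, per the comment above).
    static int yuv420FrameSize(int width, int height) {
      return width * height * 3 / 2;
    }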
@@ -1417,16 +1439,30 @@ int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
  if (IsNull(jni, input_buffers))
    return WEBRTC_VIDEO_CODEC_ERROR;

  switch (GetIntField(jni, *j_media_codec_video_encoder_,
      j_color_format_field_)) {
    case COLOR_FormatYUV420Planar:
      encoder_fourcc_ = libyuv::FOURCC_YU12;
      break;
    case COLOR_FormatYUV420SemiPlanar:
    case COLOR_QCOM_FormatYUV420SemiPlanar:
    case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
      encoder_fourcc_ = libyuv::FOURCC_NV12;
      break;
    default:
      LOG(LS_ERROR) << "Wrong color format.";
      return WEBRTC_VIDEO_CODEC_ERROR;
  }
  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
  CHECK(input_buffers_.empty(), "Unexpected double InitEncode without Release");
  input_buffers_.resize(num_input_buffers);
  for (size_t i = 0; i < num_input_buffers; ++i) {
    input_buffers_[i] =
        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
    int64 nv12_buffer_capacity =
    int64 yuv_buffer_capacity =
        jni->GetDirectBufferCapacity(input_buffers_[i]);
    CHECK_EXCEPTION(jni, "");
    CHECK(nv12_buffer_capacity >= nv12_size_, "Insufficient capacity");
    CHECK(yuv_buffer_capacity >= yuv_size_, "Insufficient capacity");
  }
  CHECK_EXCEPTION(jni, "");
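The switch above reduces to "planar means I420 (FOURCC_YU12), every semi-planar variant means NV12". For reference, a hypothetical Java-side mirror of that classification, using the constants from MediaCodecVideoEncoder.java (sketch only, not part of this CL):

    // Hypothetical Java mirror of the native colorFormat -> layout decision.
    private static boolean isSemiPlanar(int colorFormat) {
      switch (colorFormat) {
        case CodecCapabilities.COLOR_FormatYUV420Planar:
          return false;  // Maps to libyuv FOURCC_YU12 on the native side.
        case CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
        case CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
        case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
          return true;   // Maps to libyuv FOURCC_NV12 on the native side.
        default:
          throw new IllegalArgumentException(
              "Unsupported color format: 0x" + Integer.toHexString(colorFormat));
      }
    }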
@@ -1441,6 +1477,7 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);

  frames_received_++;
  if (!DeliverPendingOutputs(jni)) {
    ResetCodec();
    // Continue as if everything's fine.
@@ -1460,39 +1497,36 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
  int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
                                                j_dequeue_input_buffer_method_);
  CHECK_EXCEPTION(jni, "");
  if (j_input_buffer_index == -1)
  if (j_input_buffer_index == -1) {
    // Video codec falls behind - no input buffer available.
    frames_dropped_++;
    return WEBRTC_VIDEO_CODEC_OK;  // TODO(fischman): see webrtc bug 2887.
  }
  if (j_input_buffer_index == -2) {
    ResetCodec();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  jobject j_input_buffer = input_buffers_[j_input_buffer_index];
  uint8* nv12_buffer =
  uint8* yuv_buffer =
      reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
  CHECK_EXCEPTION(jni, "");
  CHECK(nv12_buffer, "Indirect buffer??");
  CHECK(!libyuv::I420ToNV12(
      frame.buffer(webrtc::kYPlane),
      frame.stride(webrtc::kYPlane),
      frame.buffer(webrtc::kUPlane),
      frame.stride(webrtc::kUPlane),
      frame.buffer(webrtc::kVPlane),
      frame.stride(webrtc::kVPlane),
      nv12_buffer,
      frame.width(),
      nv12_buffer + frame.stride(webrtc::kYPlane) * frame.height(),
      frame.width(),
      frame.width(),
      frame.height()),
      "I420ToNV12 failed");
  CHECK(yuv_buffer, "Indirect buffer??");
  CHECK(!libyuv::ConvertFromI420(
      frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
      frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
      frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
      yuv_buffer, width_,
      width_, height_,
      encoder_fourcc_),
      "ConvertFromI420 failed");
  jlong timestamp_us = frame.render_time_ms() * 1000;
  int64_t start = talk_base::Time();
  bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
                                              j_encode_method_,
                                              key_frame,
                                              j_input_buffer_index,
                                              nv12_size_,
                                              yuv_size_,
                                              timestamp_us);
  CHECK_EXCEPTION(jni, "");
  if (!encode_status || !DeliverPendingOutputs(jni)) {
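The -1 / -2 return values interpreted above come from the Java-side dequeue helper bound to j_dequeue_input_buffer_method_. Its assumed shape, using the DEQUEUE_TIMEOUT constant from MediaCodecVideoEncoder.java (a sketch; the helper itself is not shown in this diff):

    // Assumed shape of the Java helper behind j_dequeue_input_buffer_method_:
    // non-blocking dequeue; -1 means "no free input buffer" (the caller above
    // now counts that as a dropped frame), -2 signals a codec error.
    private int dequeueInputBuffer() {
      checkOnMediaCodecThread();
      try {
        return mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
      } catch (IllegalStateException e) {
        Log.e(TAG, "dequeueInputBuffer failed", e);
        return -2;
      }
    }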
@@ -1515,6 +1549,8 @@ int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
  CheckOnCodecThread();
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  LOG(LS_INFO) << "Frames received: " << frames_received_ <<
      ". Frames dropped: " << frames_dropped_;
  ScopedLocalRefFrame local_ref_frame(jni);
  for (size_t i = 0; i < input_buffers_.size(); ++i)
    jni->DeleteGlobalRef(input_buffers_[i]);
@@ -1528,6 +1564,9 @@ int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
                                                       uint32_t frame_rate) {
  CheckOnCodecThread();
  if (last_set_bitrate_kbps_ == new_bit_rate) {
    return WEBRTC_VIDEO_CODEC_OK;
  }
  JNIEnv* jni = AttachCurrentThreadIfNeeded();
  ScopedLocalRefFrame local_ref_frame(jni);
  last_set_bitrate_kbps_ = new_bit_rate;
@@ -1547,7 +1586,7 @@ void MediaCodecVideoEncoder::ResetParameters(JNIEnv* jni) {
  talk_base::MessageQueueManager::Clear(this);
  width_ = 0;
  height_ = 0;
  nv12_size_ = 0;
  yuv_size_ = 0;
  drop_next_input_frame_ = false;
  CHECK(input_buffers_.empty(),
        "ResetParameters called while holding input_buffers_!");
@@ -32,6 +32,7 @@ import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
@@ -51,28 +52,49 @@ class MediaCodecVideoEncoder {
  private static final String TAG = "MediaCodecVideoEncoder";

  private static final int DEQUEUE_TIMEOUT = 0;  // Non-blocking, no wait.
  private Thread mediaCodecThread;
  private MediaCodec mediaCodec;
  private ByteBuffer[] outputBuffers;
  private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
  private Thread mediaCodecThread;
  // List of supported HW VP8 codecs.
  private static final String[] supportedHwCodecPrefixes =
      {"OMX.qcom.", "OMX.Nvidia." };
  // Bitrate mode
  private static final int VIDEO_ControlRateConstant = 2;
  // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
  // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
  private static final int
      COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
  // Allowable color formats supported by codec - in order of preference.
  private static final int[] supportedColorList = {
      CodecCapabilities.COLOR_FormatYUV420Planar,
      CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
      CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
      COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
  };
  private int colorFormat;

  private MediaCodecVideoEncoder() {}

  private static boolean isPlatformSupported() {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT)
      return false; // MediaCodec.setParameters is missing.

    if (!Build.MODEL.equals("Nexus 5")) {
      // TODO(fischman): Currently the N5 is the only >=KK device containing a
      // HW VP8 encoder, so don't bother with any others. When this list grows,
      // update the KEY_COLOR_FORMAT logic below.
      return false;
  // Helper struct for findVp8HwEncoder() below.
  private static class EncoderProperties {
    EncoderProperties(String codecName, int colorFormat) {
      this.codecName = codecName;
      this.colorFormat = colorFormat;
    }
    public final String codecName;  // OpenMax component name for VP8 codec.
    public final int colorFormat;   // Color format supported by codec.
  }

  private static EncoderProperties findVp8HwEncoder() {
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT)
      return null; // MediaCodec.setParameters is missing.

    for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
      MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
      if (!info.isEncoder())
      if (!info.isEncoder()) {
        continue;
      }
      String name = null;
      for (String mimeType : info.getSupportedTypes()) {
        if (mimeType.equals(VP8_MIME_TYPE)) {
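The color-format matching that continues in the next hunk amounts to "return the first entry of supportedColorList that the codec advertises". A compact equivalent, assuming the supportedColorList field declared above (sketch only, not part of this CL):

    // Sketch: highest-preference color format the codec supports, or null.
    // Equivalent to the nested loops in findVp8HwEncoder().
    private static Integer selectColorFormat(CodecCapabilities capabilities) {
      for (int supportedColorFormat : supportedColorList) {
        for (int codecColorFormat : capabilities.colorFormats) {
          if (codecColorFormat == supportedColorFormat) {
            return codecColorFormat;
          }
        }
      }
      return null;  // The codec exposes no mutually supported color format.
    }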
@@ -80,18 +102,39 @@ class MediaCodecVideoEncoder {
          break;
        }
      }
      if (name == null)
      if (name == null) {
        continue;  // No VP8 support in this codec; try the next one.
      if (name.startsWith("OMX.google.") || name.startsWith("OMX.SEC.")) {
        // SW encoder is highest priority VP8 codec; unlikely we can get HW.
        // "OMX.google." is known-software, while "OMX.SEC." is sometimes SW &
        // sometimes HW, although not VP8 HW in any known device, so treat as SW
        // here (b/9735008 #20).
        return false;
      }
      return true;  // Yay, passed the gauntlet of pre-requisites!
      Log.d(TAG, "Found candidate encoder " + name);
      CodecCapabilities capabilities =
          info.getCapabilitiesForType(VP8_MIME_TYPE);
      for (int colorFormat : capabilities.colorFormats) {
        Log.d(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
      }

      // Check if this is supported HW encoder
      for (String hwCodecPrefix : supportedHwCodecPrefixes) {
        if (!name.startsWith(hwCodecPrefix)) {
          continue;
        }
        // Check if codec supports either yuv420 or nv12
        for (int supportedColorFormat : supportedColorList) {
          for (int codecColorFormat : capabilities.colorFormats) {
            if (codecColorFormat == supportedColorFormat) {
              // Found supported HW VP8 encoder
              Log.d(TAG, "Found target encoder " + name +
                  ". Color: 0x" + Integer.toHexString(codecColorFormat));
              return new EncoderProperties(name, codecColorFormat);
            }
          }
        }
      }
    }
    return false;  // No VP8 encoder.
    return null;   // No HW VP8 encoder.
  }

  private static boolean isPlatformSupported() {
    return findVp8HwEncoder() != null;
  }

  private static int bitRate(int kbps) {
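The hunk cuts off at bitRate()'s signature; its body is not part of this diff, but it presumably just scales WebRTC's kbps to the bits-per-second value that MediaFormat.KEY_BIT_RATE and PARAMETER_KEY_VIDEO_BITRATE expect, i.e. something like:

    // Assumed behavior of bitRate(); the real body is outside this diff.
    private static int bitRate(int kbps) {
      return kbps * 1000;
    }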
@@ -113,32 +156,39 @@ class MediaCodecVideoEncoder {

  // Return the array of input buffers, or null on failure.
  private ByteBuffer[] initEncode(int width, int height, int kbps) {
    Log.d(TAG, "initEncode: " + width + " x " + height +
        ". @ " + kbps + " kbps. Color: 0x" + Integer.toHexString(colorFormat));
    if (mediaCodecThread != null) {
      throw new RuntimeException("Forgot to release()?");
    }
    EncoderProperties properties = findVp8HwEncoder();
    if (properties == null) {
      throw new RuntimeException("Can not find HW VP8 encoder");
    }
    mediaCodecThread = Thread.currentThread();
    try {
      MediaFormat format =
          MediaFormat.createVideoFormat(VP8_MIME_TYPE, width, height);
      format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate(kbps));
      // Arbitrary choices.
      format.setInteger("bitrate-mode", VIDEO_ControlRateConstant);
      format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
      // Default WebRTC settings
      format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
      format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 450);
      // TODO(fischman): when there is more than just the N5 with a VP8 HW
      // encoder, negotiate input colorformats with the codec. For now
      // hard-code qcom's supported value. See isPlatformSupported above.
      format.setInteger(
          MediaFormat.KEY_COLOR_FORMAT,
          MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
      mediaCodec = MediaCodec.createEncoderByType(VP8_MIME_TYPE);
      format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 100);
      Log.d(TAG, " Format: " + format);
      mediaCodec = MediaCodec.createByCodecName(properties.codecName);
      if (mediaCodec == null) {
        return null;
      }
      mediaCodec.configure(
          format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
      mediaCodec.start();
      colorFormat = properties.colorFormat;
      outputBuffers = mediaCodec.getOutputBuffers();
      return mediaCodec.getInputBuffers();
      ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
      Log.d(TAG, "Input buffers: " + inputBuffers.length +
          ". Output buffers: " + outputBuffers.length);
      return inputBuffers;
    } catch (IllegalStateException e) {
      Log.e(TAG, "initEncode failed", e);
      return null;
@@ -155,6 +205,7 @@ class MediaCodecVideoEncoder {
    // indicate this in queueInputBuffer() below and guarantee _this_ frame
    // be encoded as a key frame, but sadly that flag is ignored. Instead,
    // we request a key frame "soon".
    Log.d(TAG, "Sync frame request");
    Bundle b = new Bundle();
    b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
    mediaCodec.setParameters(b);
@@ -170,6 +221,7 @@ class MediaCodecVideoEncoder {
  }

  private void release() {
    Log.d(TAG, "release");
    checkOnMediaCodecThread();
    try {
      mediaCodec.stop();
@@ -182,13 +234,14 @@ class MediaCodecVideoEncoder {
  }

  private boolean setRates(int kbps, int frameRateIgnored) {
    // frameRate argument is ignored - HW encoder is supposed to use
    // video frame timestamps for bit allocation.
    checkOnMediaCodecThread();
    Log.v(TAG, "setRates: " + kbps + " kbps");
    try {
      Bundle params = new Bundle();
      params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, bitRate(kbps));
      mediaCodec.setParameters(params);
      // Sure would be nice to honor the frameRate argument to this function,
      // but MediaCodec doesn't expose that particular knob. b/12977358
      return true;
    } catch (IllegalStateException e) {
      Log.e(TAG, "setRates failed", e);