Add VP9 codec to VCM and vie_auto_test.

Include VP9 tests in videoprocessor_integrationtests.
Include end-to-end send/receive VP9 test.
Passes trybots.

R=kjellander@webrtc.org, mflodman@webrtc.org, stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/29449004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@7422 4adac7df-926f-26a2-2b94-8c16560cd09d
marpan@webrtc.org 2014-10-10 16:44:47 +00:00
parent 3cefbc99f4
commit 573c78e31c
28 changed files with 1146 additions and 160 deletions
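
For orientation, here is a minimal sketch (not part of the patch) of how the VP9 encoder/decoder factories added below might be driven, based on the VP9Encoder::Create()/VP9Decoder::Create() interface and the VideoCodingModule::Codec() defaults introduced in this change; the function name, the callback parameters, and the concrete settings values are illustrative assumptions.

#include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
#include "webrtc/modules/video_coding/main/interface/video_coding.h"

// Illustrative sketch: set up the new VP9 software encoder/decoder pair the
// same way the existing VP8 pair is used elsewhere in this patch.
void UseVp9CodecSketch(webrtc::EncodedImageCallback* encoded_cb,
                       webrtc::DecodedImageCallback* decoded_cb) {
  webrtc::VideoCodec settings;
  // Fills in the VP9 defaults added by this change (payload type 101,
  // GetDefaultVp9Settings(), qpMax 56, ...).
  webrtc::VideoCodingModule::Codec(webrtc::kVideoCodecVP9, &settings);
  settings.width = 352;         // CIF, as in the integration tests below.
  settings.height = 288;
  settings.startBitrate = 500;  // kbps (assumed value).

  webrtc::VP9Encoder* encoder = webrtc::VP9Encoder::Create();
  webrtc::VP9Decoder* decoder = webrtc::VP9Decoder::Create();
  encoder->InitEncode(&settings, 1 /* cores */, 1440 /* max payload bytes */);
  encoder->RegisterEncodeCompleteCallback(encoded_cb);
  decoder->InitDecode(&settings, 1 /* cores */);
  decoder->RegisterDecodeCompleteCallback(decoded_cb);

  // ... feed I420VideoFrames to encoder->Encode() and the resulting
  // EncodedImages to decoder->Decode(), then tear down.
  encoder->Release();
  decoder->Release();
  delete encoder;
  delete decoder;
}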

View File

@ -42,6 +42,7 @@
'modules_java_gyp_path%': '<(modules_java_gyp_path)',
'gen_core_neon_offsets_gyp%': '<(gen_core_neon_offsets_gyp)',
'webrtc_vp8_dir%': '<(webrtc_root)/modules/video_coding/codecs/vp8',
'webrtc_vp9_dir%': '<(webrtc_root)/modules/video_coding/codecs/vp9',
'rbe_components_path%': '<(webrtc_root)/modules/remote_bitrate_estimator',
'include_opus%': 1,
},
@ -52,6 +53,7 @@
'modules_java_gyp_path%': '<(modules_java_gyp_path)',
'gen_core_neon_offsets_gyp%': '<(gen_core_neon_offsets_gyp)',
'webrtc_vp8_dir%': '<(webrtc_vp8_dir)',
'webrtc_vp9_dir%': '<(webrtc_vp9_dir)',
'include_opus%': '<(include_opus)',
'rtc_relative_path%': 1,
'rbe_components_path%': '<(rbe_components_path)',

View File

@ -27,6 +27,7 @@ char kTSanDefaultSuppressions[] =
"race:rtc::MessageQueue::Quit\n"
"race:FileVideoCapturerTest::VideoCapturerListener::OnFrameCaptured\n"
"race:vp8cx_remove_encoder_threads\n"
"race:third_party/libvpx/source/libvpx/vp9/common/vp9_scan.h\n"
// Usage of trace callback and trace level is racy in libjingle_media_unittests.
// https://code.google.com/p/webrtc/issues/detail?id=3372

View File

@ -597,35 +597,45 @@ struct VideoCodecVP8 {
}
};
// VP9 specific
struct VideoCodecVP9 {
VideoCodecComplexity complexity;
int resilience;
unsigned char numberOfTemporalLayers;
bool denoisingOn;
bool frameDroppingOn;
int keyFrameInterval;
bool adaptiveQpMode;
};
// H264 specific.
struct VideoCodecH264
{
VideoCodecProfile profile;
bool frameDroppingOn;
int keyFrameInterval;
// These are NULL/0 if not externally negotiated.
const uint8_t* spsData;
size_t spsLen;
const uint8_t* ppsData;
size_t ppsLen;
struct VideoCodecH264 {
VideoCodecProfile profile;
bool frameDroppingOn;
int keyFrameInterval;
// These are NULL/0 if not externally negotiated.
const uint8_t* spsData;
size_t spsLen;
const uint8_t* ppsData;
size_t ppsLen;
};
// Video codec types
enum VideoCodecType
{
kVideoCodecVP8,
kVideoCodecH264,
kVideoCodecI420,
kVideoCodecRED,
kVideoCodecULPFEC,
kVideoCodecGeneric,
kVideoCodecUnknown
enum VideoCodecType {
kVideoCodecVP8,
kVideoCodecVP9,
kVideoCodecH264,
kVideoCodecI420,
kVideoCodecRED,
kVideoCodecULPFEC,
kVideoCodecGeneric,
kVideoCodecUnknown
};
union VideoCodecUnion
{
VideoCodecVP8 VP8;
VideoCodecH264 H264;
union VideoCodecUnion {
VideoCodecVP8 VP8;
VideoCodecVP9 VP9;
VideoCodecH264 H264;
};

View File

@ -51,6 +51,7 @@
#define VIDEOCODEC_I420
#define VIDEOCODEC_VP8
#define VIDEOCODEC_VP9
#define VIDEOCODEC_H264
// ============================================================================

View File

@ -96,6 +96,7 @@
'<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
'<(webrtc_root)/common_audio/common_audio.gyp:common_audio',
'<(webrtc_root)/modules/video_coding/codecs/vp8/vp8.gyp:webrtc_vp8',
'<(webrtc_root)/modules/video_coding/codecs/vp9/vp9.gyp:webrtc_vp9',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
'<(webrtc_root)/test/test.gyp:test_support_main',
'<(webrtc_root)/test/test.gyp:frame_generator',
@ -323,6 +324,7 @@
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(webrtc_root)/common_video/common_video.gyp:common_video',
'<(webrtc_root)/modules/video_coding/codecs/vp8/vp8.gyp:webrtc_vp8',
'<(webrtc_root)/modules/video_coding/codecs/vp9/vp9.gyp:webrtc_vp9',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
'<(webrtc_root)/test/metrics.gyp:metrics',
'<(webrtc_root)/test/test.gyp:test_support',

View File

@ -77,6 +77,7 @@ source_set("video_coding") {
":video_coding_utility",
":webrtc_i420",
":webrtc_vp8",
":webrtc_vp9",
"../../common_video",
"../../system_wrappers",
]
@ -159,3 +160,35 @@ source_set("webrtc_vp8") {
]
}
}
source_set("webrtc_vp9") {
sources = [
"codecs/vp9/include/vp9.h",
"codecs/vp9/vp9_impl.cc",
"codecs/vp9/vp9_impl.h",
]
configs += [ "../..:common_config" ]
public_configs = [ "../..:common_inherited_config" ]
if (is_clang) {
# Suppress warnings from Chrome's Clang plugins.
# See http://code.google.com/p/webrtc/issues/detail?id=163 for details.
configs -= [ "//build/config/clang:find_bad_constructs" ]
}
# TODO(kjellander): Remove once libvpx has changed its libvpx_config to be
# in direct_dependent_configs.
configs += [ "//third_party/libvpx:libvpx_config" ]
deps = [
":video_coding_utility",
"../../common_video",
"../../system_wrappers",
]
if (rtc_build_libvpx) {
deps += [
"//third_party/libvpx",
]
}
}

View File

@ -41,6 +41,19 @@ struct CodecSpecificInfoVP8 {
int8_t keyIdx; // Negative value to skip keyIdx.
};
struct CodecSpecificInfoVP9 {
bool hasReceivedSLI;
uint8_t pictureIdSLI;
bool hasReceivedRPSI;
uint64_t pictureIdRPSI;
int16_t pictureId; // Negative value to skip pictureId.
bool nonReference;
uint8_t temporalIdx;
bool layerSync;
int tl0PicIdx; // Negative value to skip tl0PicIdx.
int8_t keyIdx; // Negative value to skip keyIdx.
};
struct CodecSpecificInfoGeneric {
uint8_t simulcast_idx;
};
@ -50,6 +63,7 @@ struct CodecSpecificInfoH264 {};
union CodecSpecificInfoUnion {
CodecSpecificInfoGeneric generic;
CodecSpecificInfoVP8 VP8;
CodecSpecificInfoVP9 VP9;
CodecSpecificInfoH264 H264;
};

View File

@ -16,6 +16,7 @@
#include "webrtc/modules/video_coding/codecs/test/packet_manipulator.h"
#include "webrtc/modules/video_coding/codecs/test/videoprocessor.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8_common_types.h"
#include "webrtc/modules/video_coding/main/interface/video_coding.h"
#include "webrtc/test/testsupport/fileutils.h"
@ -37,6 +38,7 @@ const int kBaseKeyFrameInterval = 3000;
// Codec and network settings.
struct CodecConfigPars {
VideoCodecType codec_type;
float packet_loss;
int num_temporal_layers;
int key_frame_interval;
@ -136,6 +138,7 @@ class VideoProcessorIntegrationTest: public testing::Test {
float start_bitrate_;
// Codec and network settings.
VideoCodecType codec_type_;
float packet_loss_;
int num_temporal_layers_;
int key_frame_interval_;
@ -149,8 +152,15 @@ class VideoProcessorIntegrationTest: public testing::Test {
virtual ~VideoProcessorIntegrationTest() {}
void SetUpCodecConfig() {
encoder_ = VP8Encoder::Create();
decoder_ = VP8Decoder::Create();
if (codec_type_ == kVideoCodecVP8) {
encoder_ = VP8Encoder::Create();
decoder_ = VP8Decoder::Create();
VideoCodingModule::Codec(kVideoCodecVP8, &codec_settings_);
} else if (codec_type_ == kVideoCodecVP9) {
encoder_ = VP9Encoder::Create();
decoder_ = VP9Decoder::Create();
VideoCodingModule::Codec(kVideoCodecVP9, &codec_settings_);
}
// CIF is currently used for all tests below.
// Setup the TestConfig struct for processing of a clip in CIF resolution.
@ -169,26 +179,42 @@ class VideoProcessorIntegrationTest: public testing::Test {
config_.keyframe_interval = key_frame_interval_;
config_.networking_config.packet_loss_probability = packet_loss_;
// Get a codec configuration struct and configure it.
VideoCodingModule::Codec(kVideoCodecVP8, &codec_settings_);
// Configure codec settings.
config_.codec_settings = &codec_settings_;
config_.codec_settings->startBitrate = start_bitrate_;
config_.codec_settings->width = kCIFWidth;
config_.codec_settings->height = kCIFHeight;
// These features may be set depending on the test.
config_.codec_settings->codecSpecific.VP8.errorConcealmentOn =
error_concealment_on_;
config_.codec_settings->codecSpecific.VP8.denoisingOn =
denoising_on_;
config_.codec_settings->codecSpecific.VP8.numberOfTemporalLayers =
num_temporal_layers_;
config_.codec_settings->codecSpecific.VP8.frameDroppingOn =
frame_dropper_on_;
config_.codec_settings->codecSpecific.VP8.automaticResizeOn =
spatial_resize_on_;
config_.codec_settings->codecSpecific.VP8.keyFrameInterval =
kBaseKeyFrameInterval;
// These features may be set depending on the test.
switch (config_.codec_settings->codecType) {
case kVideoCodecVP8:
config_.codec_settings->codecSpecific.VP8.errorConcealmentOn =
error_concealment_on_;
config_.codec_settings->codecSpecific.VP8.denoisingOn =
denoising_on_;
config_.codec_settings->codecSpecific.VP8.numberOfTemporalLayers =
num_temporal_layers_;
config_.codec_settings->codecSpecific.VP8.frameDroppingOn =
frame_dropper_on_;
config_.codec_settings->codecSpecific.VP8.automaticResizeOn =
spatial_resize_on_;
config_.codec_settings->codecSpecific.VP8.keyFrameInterval =
kBaseKeyFrameInterval;
break;
case kVideoCodecVP9:
config_.codec_settings->codecSpecific.VP9.denoisingOn =
denoising_on_;
config_.codec_settings->codecSpecific.VP9.numberOfTemporalLayers =
num_temporal_layers_;
config_.codec_settings->codecSpecific.VP9.frameDroppingOn =
frame_dropper_on_;
config_.codec_settings->codecSpecific.VP9.keyFrameInterval =
kBaseKeyFrameInterval;
break;
default:
assert(false);
break;
}
frame_reader_ =
new webrtc::test::FrameReaderImpl(config_.input_filename,
config_.frame_length_in_bytes);
@ -405,6 +431,7 @@ class VideoProcessorIntegrationTest: public testing::Test {
CodecConfigPars process,
RateControlMetrics* rc_metrics) {
// Codec/config settings.
codec_type_ = process.codec_type;
start_bitrate_ = rate_profile.target_bit_rate[0];
packet_loss_ = process.packet_loss;
key_frame_interval_ = process.key_frame_interval;
@ -514,6 +541,7 @@ void SetRateProfilePars(RateProfile* rate_profile,
}
void SetCodecParameters(CodecConfigPars* process_settings,
VideoCodecType codec_type,
float packet_loss,
int key_frame_interval,
int num_temporal_layers,
@ -521,6 +549,7 @@ void SetCodecParameters(CodecConfigPars* process_settings,
bool denoising_on,
bool frame_dropper_on,
bool spatial_resize_on) {
process_settings->codec_type = codec_type;
process_settings->packet_loss = packet_loss;
process_settings->key_frame_interval = key_frame_interval;
process_settings->num_temporal_layers = num_temporal_layers,
@ -560,7 +589,126 @@ void SetRateControlMetrics(RateControlMetrics* rc_metrics,
rc_metrics[update_index].num_spatial_resizes = num_spatial_resizes;
}
// Run with no packet loss and fixed bitrate. Quality should be very high.
// VP9: Run with no packet loss and fixed bitrate. Quality should be very high.
// One key frame (first frame only) in sequence. Setting |key_frame_interval|
// to -1 below means no periodic key frames in test.
TEST_F(VideoProcessorIntegrationTest, Process0PercentPacketLossVP9) {
// Bitrate and frame rate profile.
RateProfile rate_profile;
SetRateProfilePars(&rate_profile, 0, 500, 30, 0);
rate_profile.frame_index_rate_update[1] = kNbrFramesShort + 1;
rate_profile.num_frames = kNbrFramesShort;
// Codec/network settings.
CodecConfigPars process_settings;
SetCodecParameters(&process_settings, kVideoCodecVP9, 0.0f, -1, 1, false,
false, true, false);
// Metrics for expected quality.
QualityMetrics quality_metrics;
SetQualityMetrics(&quality_metrics, 37.5, 36.0, 0.94, 0.93);
// Metrics for rate control.
RateControlMetrics rc_metrics[1];
SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 15, 0);
ProcessFramesAndVerify(quality_metrics,
rate_profile,
process_settings,
rc_metrics);
}
// VP9: Run with 5% packet loss and fixed bitrate. Quality should be a bit
// lower. One key frame (first frame only) in sequence.
TEST_F(VideoProcessorIntegrationTest, Process5PercentPacketLossVP9) {
// Bitrate and frame rate profile.
RateProfile rate_profile;
SetRateProfilePars(&rate_profile, 0, 500, 30, 0);
rate_profile.frame_index_rate_update[1] = kNbrFramesShort + 1;
rate_profile.num_frames = kNbrFramesShort;
// Codec/network settings.
CodecConfigPars process_settings;
SetCodecParameters(&process_settings, kVideoCodecVP9, 0.05f, -1, 1, false,
false, true, false);
// Metrics for expected quality.
QualityMetrics quality_metrics;
SetQualityMetrics(&quality_metrics, 17.0, 15.0, 0.45, 0.38);
// Metrics for rate control.
RateControlMetrics rc_metrics[1];
SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 15, 0);
ProcessFramesAndVerify(quality_metrics,
rate_profile,
process_settings,
rc_metrics);
}
// VP9: Run with no packet loss, with varying bitrate (3 rate updates):
// low to high to medium. Check that quality and encoder response to the new
// target rate/per-frame bandwidth (for each rate update) is within limits.
// One key frame (first frame only) in sequence.
TEST_F(VideoProcessorIntegrationTest, ProcessNoLossChangeBitRateVP9) {
// Bitrate and frame rate profile.
RateProfile rate_profile;
SetRateProfilePars(&rate_profile, 0, 200, 30, 0);
SetRateProfilePars(&rate_profile, 1, 800, 30, 100);
SetRateProfilePars(&rate_profile, 2, 500, 30, 200);
rate_profile.frame_index_rate_update[3] = kNbrFramesLong + 1;
rate_profile.num_frames = kNbrFramesLong;
// Codec/network settings.
CodecConfigPars process_settings;
SetCodecParameters(&process_settings, kVideoCodecVP9, 0.0f, -1, 1, false,
false, true, false);
// Metrics for expected quality.
QualityMetrics quality_metrics;
SetQualityMetrics(&quality_metrics, 36.0, 32.0, 0.90, 0.85);
// Metrics for rate control.
RateControlMetrics rc_metrics[3];
SetRateControlMetrics(rc_metrics, 0, 0, 30, 20, 20, 20, 0);
SetRateControlMetrics(rc_metrics, 1, 2, 0, 20, 20, 60, 0);
SetRateControlMetrics(rc_metrics, 2, 0, 0, 20, 20, 30, 0);
ProcessFramesAndVerify(quality_metrics,
rate_profile,
process_settings,
rc_metrics);
}
// VP9: Run with no packet loss, with an update (decrease) in frame rate.
// Lower frame rate means higher per-frame-bandwidth, so easier to encode.
// At the low bitrate in this test, this means better rate control after the
// update(s) to lower frame rate. So expect less frame drops, and max values
// for the rate control metrics can be lower. One key frame (first frame only).
// Note: quality after update should be higher but we currently compute quality
// metrics averaged over whole sequence run.
TEST_F(VideoProcessorIntegrationTest,
ProcessNoLossChangeFrameRateFrameDropVP9) {
config_.networking_config.packet_loss_probability = 0;
// Bitrate and frame rate profile.
RateProfile rate_profile;
SetRateProfilePars(&rate_profile, 0, 50, 24, 0);
SetRateProfilePars(&rate_profile, 1, 50, 15, 100);
SetRateProfilePars(&rate_profile, 2, 50, 10, 200);
rate_profile.frame_index_rate_update[3] = kNbrFramesLong + 1;
rate_profile.num_frames = kNbrFramesLong;
// Codec/network settings.
CodecConfigPars process_settings;
SetCodecParameters(&process_settings, kVideoCodecVP9, 0.0f, -1, 1, false,
false, true, false);
// Metrics for expected quality.
QualityMetrics quality_metrics;
SetQualityMetrics(&quality_metrics, 30.0, 18.0, 0.80, 0.40);
// Metrics for rate control.
RateControlMetrics rc_metrics[3];
SetRateControlMetrics(rc_metrics, 0, 30, 30, 60, 15, 40, 0);
SetRateControlMetrics(rc_metrics, 1, 15, 0, 50, 10, 30, 0);
SetRateControlMetrics(rc_metrics, 2, 5, 0, 38, 10, 30, 0);
ProcessFramesAndVerify(quality_metrics,
rate_profile,
process_settings,
rc_metrics);
}
// TODO(marpan): Add temporal layer test for VP9, once changes are in
// vp9 wrapper for this.
// VP8: Run with no packet loss and fixed bitrate. Quality should be very high.
// One key frame (first frame only) in sequence. Setting |key_frame_interval|
// to -1 below means no periodic key frames in test.
TEST_F(VideoProcessorIntegrationTest, ProcessZeroPacketLoss) {
@ -571,7 +719,8 @@ TEST_F(VideoProcessorIntegrationTest, ProcessZeroPacketLoss) {
rate_profile.num_frames = kNbrFramesShort;
// Codec/network settings.
CodecConfigPars process_settings;
SetCodecParameters(&process_settings, 0.0f, -1, 1, false, true, true, false);
SetCodecParameters(&process_settings, kVideoCodecVP8, 0.0f, -1, 1, false,
true, true, false);
// Metrics for expected quality.
QualityMetrics quality_metrics;
SetQualityMetrics(&quality_metrics, 34.95, 33.0, 0.90, 0.89);
@ -584,8 +733,8 @@ TEST_F(VideoProcessorIntegrationTest, ProcessZeroPacketLoss) {
rc_metrics);
}
// Run with 5% packet loss and fixed bitrate. Quality should be a bit lower.
// One key frame (first frame only) in sequence.
// VP8: Run with 5% packet loss and fixed bitrate. Quality should be a bit
// lower. One key frame (first frame only) in sequence.
TEST_F(VideoProcessorIntegrationTest, Process5PercentPacketLoss) {
// Bitrate and frame rate profile.
RateProfile rate_profile;
@ -594,7 +743,8 @@ TEST_F(VideoProcessorIntegrationTest, Process5PercentPacketLoss) {
rate_profile.num_frames = kNbrFramesShort;
// Codec/network settings.
CodecConfigPars process_settings;
SetCodecParameters(&process_settings, 0.05f, -1, 1, false, true, true, false);
SetCodecParameters(&process_settings, kVideoCodecVP8, 0.05f, -1, 1, false,
true, true, false);
// Metrics for expected quality.
QualityMetrics quality_metrics;
SetQualityMetrics(&quality_metrics, 20.0, 16.0, 0.60, 0.40);
@ -607,7 +757,7 @@ TEST_F(VideoProcessorIntegrationTest, Process5PercentPacketLoss) {
rc_metrics);
}
// Run with 10% packet loss and fixed bitrate. Quality should be even lower.
// VP8: Run with 10% packet loss and fixed bitrate. Quality should be lower.
// One key frame (first frame only) in sequence.
TEST_F(VideoProcessorIntegrationTest, Process10PercentPacketLoss) {
// Bitrate and frame rate profile.
@ -617,7 +767,8 @@ TEST_F(VideoProcessorIntegrationTest, Process10PercentPacketLoss) {
rate_profile.num_frames = kNbrFramesShort;
// Codec/network settings.
CodecConfigPars process_settings;
SetCodecParameters(&process_settings, 0.1f, -1, 1, false, true, true, false);
SetCodecParameters(&process_settings, kVideoCodecVP8, 0.1f, -1, 1, false,
true, true, false);
// Metrics for expected quality.
QualityMetrics quality_metrics;
SetQualityMetrics(&quality_metrics, 19.0, 16.0, 0.50, 0.35);
@ -639,12 +790,12 @@ TEST_F(VideoProcessorIntegrationTest, Process10PercentPacketLoss) {
// disabled on Android. Some quality parameter in the above test has been
// adjusted to also pass for |cpu_speed| <= 12.
// Run with no packet loss, with varying bitrate (3 rate updates):
// VP8: Run with no packet loss, with varying bitrate (3 rate updates):
// low to high to medium. Check that quality and encoder response to the new
// target rate/per-frame bandwidth (for each rate update) is within limits.
// One key frame (first frame only) in sequence.
TEST_F(VideoProcessorIntegrationTest,
DISABLED_ON_ANDROID(ProcessNoLossChangeBitRate)) {
DISABLED_ON_ANDROID(ProcessNoLossChangeBitRateVP8)) {
// Bitrate and frame rate profile.
RateProfile rate_profile;
SetRateProfilePars(&rate_profile, 0, 200, 30, 0);
@ -654,7 +805,8 @@ TEST_F(VideoProcessorIntegrationTest,
rate_profile.num_frames = kNbrFramesLong;
// Codec/network settings.
CodecConfigPars process_settings;
SetCodecParameters(&process_settings, 0.0f, -1, 1, false, true, true, false);
SetCodecParameters(&process_settings, kVideoCodecVP8, 0.0f, -1, 1, false,
true, true, false);
// Metrics for expected quality.
QualityMetrics quality_metrics;
SetQualityMetrics(&quality_metrics, 34.0, 32.0, 0.85, 0.80);
@ -669,15 +821,15 @@ TEST_F(VideoProcessorIntegrationTest,
rc_metrics);
}
// Run with no packet loss, with an update (decrease) in frame rate.
// VP8: Run with no packet loss, with an update (decrease) in frame rate.
// Lower frame rate means higher per-frame-bandwidth, so easier to encode.
// At the bitrate in this test, this means better rate control after the
// update(s) to lower frame rate. So expect less frame drops, and max values
// for the rate control metrics can be lower. One key frame (first frame only).
// Note: quality after update should be higher but we currently compute quality
// metrics avergaed over whole sequence run.
// metrics averaged over whole sequence run.
TEST_F(VideoProcessorIntegrationTest,
DISABLED_ON_ANDROID(ProcessNoLossChangeFrameRateFrameDrop)) {
DISABLED_ON_ANDROID(ProcessNoLossChangeFrameRateFrameDropVP8)) {
config_.networking_config.packet_loss_probability = 0;
// Bitrate and frame rate profile.
RateProfile rate_profile;
@ -688,7 +840,8 @@ TEST_F(VideoProcessorIntegrationTest,
rate_profile.num_frames = kNbrFramesLong;
// Codec/network settings.
CodecConfigPars process_settings;
SetCodecParameters(&process_settings, 0.0f, -1, 1, false, true, true, false);
SetCodecParameters(&process_settings, kVideoCodecVP8, 0.0f, -1, 1, false,
true, true, false);
// Metrics for expected quality.
QualityMetrics quality_metrics;
SetQualityMetrics(&quality_metrics, 31.0, 22.0, 0.80, 0.65);
@ -706,7 +859,7 @@ TEST_F(VideoProcessorIntegrationTest,
// Run with no packet loss, at low bitrate. During this time we should've
// resized once.
TEST_F(VideoProcessorIntegrationTest,
DISABLED_ON_ANDROID(ProcessNoLossSpatialResizeFrameDrop)) {
DISABLED_ON_ANDROID(ProcessNoLossSpatialResizeFrameDropVP8)) {
config_.networking_config.packet_loss_probability = 0;
// Bitrate and frame rate profile.
RateProfile rate_profile;
@ -715,8 +868,8 @@ TEST_F(VideoProcessorIntegrationTest,
rate_profile.num_frames = kNbrFramesLong;
// Codec/network settings.
CodecConfigPars process_settings;
SetCodecParameters(
&process_settings, 0.0f, kNbrFramesLong, 1, false, true, true, true);
SetCodecParameters(&process_settings, kVideoCodecVP8, 0.0f, kNbrFramesLong,
1, false, true, true, true);
// Metrics for expected quality.
QualityMetrics quality_metrics;
SetQualityMetrics(&quality_metrics, 25.0, 15.0, 0.70, 0.40);
@ -729,13 +882,13 @@ TEST_F(VideoProcessorIntegrationTest,
rc_metrics);
}
// Run with no packet loss, with 3 temporal layers, with a rate update in the
// middle of the sequence. The max values for the frame size mismatch and
// VP8: Run with no packet loss, with 3 temporal layers, with a rate update in
// the middle of the sequence. The max values for the frame size mismatch and
// encoding rate mismatch are applied to each layer.
// No dropped frames in this test, and internal spatial resizer is off.
// One key frame (first frame only) in sequence, so no spatial resizing.
TEST_F(VideoProcessorIntegrationTest,
DISABLED_ON_ANDROID(ProcessNoLossTemporalLayers)) {
DISABLED_ON_ANDROID(ProcessNoLossTemporalLayersVP8)) {
config_.networking_config.packet_loss_probability = 0;
// Bitrate and frame rate profile.
RateProfile rate_profile;
@ -745,7 +898,8 @@ TEST_F(VideoProcessorIntegrationTest,
rate_profile.num_frames = kNbrFramesLong;
// Codec/network settings.
CodecConfigPars process_settings;
SetCodecParameters(&process_settings, 0.0f, -1, 3, false, true, true, false);
SetCodecParameters(&process_settings, kVideoCodecVP8, 0.0f, -1, 3, false,
true, true, false);
// Metrics for expected quality.
QualityMetrics quality_metrics;
SetQualityMetrics(&quality_metrics, 32.5, 30.0, 0.85, 0.80);

View File

@ -35,73 +35,20 @@ class VP8EncoderImpl : public VP8Encoder {
virtual ~VP8EncoderImpl();
// Free encoder memory.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual int Release();
// Initialize the encoder with the information from the codecSettings
//
// Input:
// - codec_settings : Codec settings
// - number_of_cores : Number of cores available for the encoder
// - max_payload_size : The maximum size each payload is allowed
// to have. Usually MTU - overhead.
//
// Return value : Set bit rate if OK
// <0 - Errors:
// WEBRTC_VIDEO_CODEC_ERR_PARAMETER
// WEBRTC_VIDEO_CODEC_ERR_SIZE
// WEBRTC_VIDEO_CODEC_LEVEL_EXCEEDED
// WEBRTC_VIDEO_CODEC_MEMORY
// WEBRTC_VIDEO_CODEC_ERROR
virtual int InitEncode(const VideoCodec* codec_settings,
int number_of_cores,
uint32_t max_payload_size);
// Encode an I420 image (as a part of a video stream). The encoded image
// will be returned to the user through the encode complete callback.
//
// Input:
// - input_image : Image to be encoded
// - frame_types : Frame type to be generated by the encoder.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK
// <0 - Errors:
// WEBRTC_VIDEO_CODEC_ERR_PARAMETER
// WEBRTC_VIDEO_CODEC_MEMORY
// WEBRTC_VIDEO_CODEC_ERROR
// WEBRTC_VIDEO_CODEC_TIMEOUT
virtual int Encode(const I420VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const std::vector<VideoFrameType>* frame_types);
// Register an encode complete callback object.
//
// Input:
// - callback : Callback object which handles encoded images.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual int RegisterEncodeCompleteCallback(EncodedImageCallback* callback);
// Inform the encoder of the new packet loss rate and the round-trip time of
// the network.
//
// - packet_loss : Fraction lost
// (loss rate in percent = 100 * packetLoss / 255)
// - rtt : Round-trip time in milliseconds
// Return value : WEBRTC_VIDEO_CODEC_OK if OK
// <0 - Errors: WEBRTC_VIDEO_CODEC_ERROR
//
virtual int SetChannelParameters(uint32_t packet_loss, int rtt);
// Inform the encoder about the new target bit rate.
//
// - new_bitrate_kbit : New target bit rate
// - frame_rate : The target frame rate
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual int SetRates(uint32_t new_bitrate_kbit, uint32_t frame_rate);
private:
@ -150,61 +97,20 @@ class VP8DecoderImpl : public VP8Decoder {
virtual ~VP8DecoderImpl();
// Initialize the decoder.
//
// Return value : WEBRTC_VIDEO_CODEC_OK.
// <0 - Errors:
// WEBRTC_VIDEO_CODEC_ERROR
virtual int InitDecode(const VideoCodec* inst, int number_of_cores);
// Decode encoded image (as a part of a video stream). The decoded image
// will be returned to the user through the decode complete callback.
//
// Input:
// - input_image : Encoded image to be decoded
// - missing_frames : True if one or more frames have been lost
// since the previous decode call.
// - fragmentation : Specifies the start and length of each VP8
// partition.
// - codec_specific_info : pointer to specific codec data
// - render_time_ms : Render time in Ms
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK
// <0 - Errors:
// WEBRTC_VIDEO_CODEC_ERROR
// WEBRTC_VIDEO_CODEC_ERR_PARAMETER
virtual int Decode(const EncodedImage& input_image,
bool missing_frames,
const RTPFragmentationHeader* fragmentation,
const CodecSpecificInfo* codec_specific_info,
int64_t /*render_time_ms*/);
// Register a decode complete callback object.
//
// Input:
// - callback : Callback object which handles decoded images.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual int RegisterDecodeCompleteCallback(DecodedImageCallback* callback);
// Free decoder memory.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK
// <0 - Errors:
// WEBRTC_VIDEO_CODEC_ERROR
virtual int Release();
// Reset decoder state and prepare for a new call.
//
// Return value : WEBRTC_VIDEO_CODEC_OK.
// <0 - Errors:
// WEBRTC_VIDEO_CODEC_UNINITIALIZED
// WEBRTC_VIDEO_CODEC_ERROR
virtual int Reset();
// Create a copy of the codec and its internal state.
//
// Return value : A copy of the instance if OK, NULL otherwise.
virtual VideoDecoder* Copy();
private:

View File

@ -0,0 +1,35 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_INCLUDE_VP9_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_INCLUDE_VP9_H_
#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
namespace webrtc {
class VP9Encoder : public VideoEncoder {
public:
static VP9Encoder* Create();
virtual ~VP9Encoder() {}
};
class VP9Decoder : public VideoDecoder {
public:
static VP9Decoder* Create();
virtual ~VP9Decoder() {}
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_INCLUDE_VP9_H_

View File

@ -0,0 +1,36 @@
# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
{
'includes': [
'../../../../build/common.gypi',
],
'targets': [
{
'target_name': 'webrtc_vp9',
'type': 'static_library',
'dependencies': [
'<(webrtc_root)/common_video/common_video.gyp:common_video',
'<(webrtc_root)/modules/video_coding/utility/video_coding_utility.gyp:video_coding_utility',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
],
'conditions': [
['build_libvpx==1', {
'dependencies': [
'<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx',
],
}],
],
'sources': [
'include/vp9.h',
'vp9_impl.cc',
'vp9_impl.h',
],
},
],
}

View File

@ -0,0 +1,487 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*
*/
#include "webrtc/modules/video_coding/codecs/vp9/vp9_impl.h"
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <vector>
#include "vpx/vpx_encoder.h"
#include "vpx/vpx_decoder.h"
#include "vpx/vp8cx.h"
#include "vpx/vp8dx.h"
#include "webrtc/common.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/system_wrappers/interface/tick_util.h"
#include "webrtc/system_wrappers/interface/trace_event.h"
namespace webrtc {
VP9Encoder* VP9Encoder::Create() {
return new VP9EncoderImpl();
}
VP9EncoderImpl::VP9EncoderImpl()
: encoded_image_(),
encoded_complete_callback_(NULL),
inited_(false),
timestamp_(0),
picture_id_(0),
cpu_speed_(3),
rc_max_intra_target_(0),
encoder_(NULL),
config_(NULL),
raw_(NULL) {
memset(&codec_, 0, sizeof(codec_));
uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp());
srand(seed);
}
VP9EncoderImpl::~VP9EncoderImpl() {
Release();
}
int VP9EncoderImpl::Release() {
if (encoded_image_._buffer != NULL) {
delete [] encoded_image_._buffer;
encoded_image_._buffer = NULL;
}
if (encoder_ != NULL) {
if (vpx_codec_destroy(encoder_)) {
return WEBRTC_VIDEO_CODEC_MEMORY;
}
delete encoder_;
encoder_ = NULL;
}
if (config_ != NULL) {
delete config_;
config_ = NULL;
}
if (raw_ != NULL) {
vpx_img_free(raw_);
raw_ = NULL;
}
inited_ = false;
return WEBRTC_VIDEO_CODEC_OK;
}
int VP9EncoderImpl::SetRates(uint32_t new_bitrate_kbit,
uint32_t new_framerate) {
if (!inited_) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
if (encoder_->err) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
if (new_framerate < 1) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
// Update bit rate
if (codec_.maxBitrate > 0 && new_bitrate_kbit > codec_.maxBitrate) {
new_bitrate_kbit = codec_.maxBitrate;
}
config_->rc_target_bitrate = new_bitrate_kbit;
codec_.maxFramerate = new_framerate;
// Update encoder context
if (vpx_codec_enc_config_set(encoder_, config_)) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
return WEBRTC_VIDEO_CODEC_OK;
}
int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
int number_of_cores,
uint32_t /*max_payload_size*/) {
if (inst == NULL) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
if (inst->maxFramerate < 1) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
// Allow zero to represent an unspecified maxBitRate
if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
if (inst->width < 1 || inst->height < 1) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
if (number_of_cores < 1) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
int retVal = Release();
if (retVal < 0) {
return retVal;
}
if (encoder_ == NULL) {
encoder_ = new vpx_codec_ctx_t;
}
if (config_ == NULL) {
config_ = new vpx_codec_enc_cfg_t;
}
timestamp_ = 0;
if (&codec_ != inst) {
codec_ = *inst;
}
// Random start 16 bits is enough.
picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
// Allocate memory for encoded image
if (encoded_image_._buffer != NULL) {
delete [] encoded_image_._buffer;
}
encoded_image_._size = CalcBufferSize(kI420, codec_.width, codec_.height);
encoded_image_._buffer = new uint8_t[encoded_image_._size];
encoded_image_._completeFrame = true;
// Creating a wrapper to the image - setting image data to NULL. Actual
// pointer will be set in encode. Setting align to 1, as it is meaningless
// (actual memory is not allocated).
raw_ = vpx_img_wrap(NULL, IMG_FMT_I420, codec_.width, codec_.height,
1, NULL);
// Populate encoder configuration with default values.
if (vpx_codec_enc_config_default(vpx_codec_vp9_cx(), config_, 0)) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
config_->g_w = codec_.width;
config_->g_h = codec_.height;
config_->rc_target_bitrate = inst->startBitrate; // in kbit/s
config_->g_error_resilient = 1;
// Setting the time base of the codec.
config_->g_timebase.num = 1;
config_->g_timebase.den = 90000;
config_->g_lag_in_frames = 0; // 0- no frame lagging
config_->g_threads = 1;
// Rate control settings.
config_->rc_dropframe_thresh = inst->codecSpecific.VP9.frameDroppingOn ?
30 : 0;
config_->rc_end_usage = VPX_CBR;
config_->g_pass = VPX_RC_ONE_PASS;
config_->rc_min_quantizer = 2;
config_->rc_max_quantizer = 56;
config_->rc_undershoot_pct = 50;
config_->rc_overshoot_pct = 50;
config_->rc_buf_initial_sz = 500;
config_->rc_buf_optimal_sz = 600;
config_->rc_buf_sz = 1000;
// Set the maximum target size of any key-frame.
rc_max_intra_target_ = MaxIntraTarget(config_->rc_buf_optimal_sz);
if (inst->codecSpecific.VP9.keyFrameInterval > 0) {
config_->kf_mode = VPX_KF_AUTO;
config_->kf_max_dist = inst->codecSpecific.VP9.keyFrameInterval;
} else {
config_->kf_mode = VPX_KF_DISABLED;
}
return InitAndSetControlSettings(inst);
}
int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) {
if (vpx_codec_enc_init(encoder_, vpx_codec_vp9_cx(), config_, 0)) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
// Only positive speeds, currently: 0 - 7.
// 0 means slowest/best quality, 7 means fastest/lowest quality.
// TODO(marpan): Speeds 5-7 are speed settings for real-time mode, on desktop.
// Currently set to 5, update to 6 (for faster encoding) after some subjective
// quality tests.
cpu_speed_ = 5;
// Note: some of these codec controls still use "VP8" in the control name.
// TODO(marpan): Update this in the next/future libvpx version.
vpx_codec_control(encoder_, VP8E_SET_CPUUSED, cpu_speed_);
vpx_codec_control(encoder_, VP8E_SET_MAX_INTRA_BITRATE_PCT,
rc_max_intra_target_);
vpx_codec_control(encoder_, VP9E_SET_AQ_MODE,
inst->codecSpecific.VP9.adaptiveQpMode ? 3 : 0);
// TODO(marpan): Enable in future libvpx roll: waiting for SSE2 optimization.
// #if !defined(WEBRTC_ARCH_ARM)
// vpx_codec_control(encoder_, VP9E_SET_NOISE_SENSITIVITY,
// inst->codecSpecific.VP9.denoisingOn ? 1 : 0);
// #endif
inited_ = true;
return WEBRTC_VIDEO_CODEC_OK;
}
uint32_t VP9EncoderImpl::MaxIntraTarget(uint32_t optimal_buffer_size) {
// Set max to the optimal buffer level (normalized by target BR),
// and scaled by a scale_par.
// Max target size = scale_par * optimal_buffer_size * targetBR[Kbps].
// This value is presented in percentage of perFrameBw:
// perFrameBw = targetBR[Kbps] * 1000 / framerate.
// The target in % is as follows:
float scale_par = 0.5;
uint32_t target_pct =
optimal_buffer_size * scale_par * codec_.maxFramerate / 10;
// Don't go below 3 times the per frame bandwidth.
const uint32_t min_intra_size = 300;
return (target_pct < min_intra_size) ? min_intra_size: target_pct;
}
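// Illustrative arithmetic (editorial example, not part of this patch): with
// rc_buf_optimal_sz = 600 from InitEncode() above, scale_par = 0.5 and a
// typical maxFramerate of 30, this gives
// target_pct = 600 * 0.5 * 30 / 10 = 900, i.e. a key frame may use up to
// roughly 9x the average per-frame bandwidth, and never less than the
// 300% (3x) floor set by min_intra_size.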
int VP9EncoderImpl::Encode(const I420VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const std::vector<VideoFrameType>* frame_types) {
if (!inited_) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
if (input_image.IsZeroSize()) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
if (encoded_complete_callback_ == NULL) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
VideoFrameType frame_type = kDeltaFrame;
// We only support one stream at the moment.
if (frame_types && frame_types->size() > 0) {
frame_type = (*frame_types)[0];
}
// Image in vpx_image_t format.
// Input image is const. VPX's raw image is not defined as const.
raw_->planes[PLANE_Y] = const_cast<uint8_t*>(input_image.buffer(kYPlane));
raw_->planes[PLANE_U] = const_cast<uint8_t*>(input_image.buffer(kUPlane));
raw_->planes[PLANE_V] = const_cast<uint8_t*>(input_image.buffer(kVPlane));
raw_->stride[VPX_PLANE_Y] = input_image.stride(kYPlane);
raw_->stride[VPX_PLANE_U] = input_image.stride(kUPlane);
raw_->stride[VPX_PLANE_V] = input_image.stride(kVPlane);
int flags = 0;
bool send_keyframe = (frame_type == kKeyFrame);
if (send_keyframe) {
// Key frame request from caller.
flags = VPX_EFLAG_FORCE_KF;
}
assert(codec_.maxFramerate > 0);
uint32_t duration = 90000 / codec_.maxFramerate;
if (vpx_codec_encode(encoder_, raw_, timestamp_, duration, flags,
VPX_DL_REALTIME)) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
timestamp_ += duration;
return GetEncodedPartitions(input_image);
}
void VP9EncoderImpl::PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
const vpx_codec_cx_pkt& pkt,
uint32_t timestamp) {
assert(codec_specific != NULL);
codec_specific->codecType = kVideoCodecVP9;
CodecSpecificInfoVP9 *vp9_info = &(codec_specific->codecSpecific.VP9);
vp9_info->pictureId = picture_id_;
vp9_info->keyIdx = kNoKeyIdx;
vp9_info->nonReference = (pkt.data.frame.flags & VPX_FRAME_IS_DROPPABLE) != 0;
// TODO(marpan): Temporal layers are supported in the current VP9 version,
// but for now use 1 temporal layer encoding. Will update this when temporal
// layer support for VP9 is added in webrtc.
vp9_info->temporalIdx = kNoTemporalIdx;
vp9_info->layerSync = false;
vp9_info->tl0PicIdx = kNoTl0PicIdx;
picture_id_ = (picture_id_ + 1) & 0x7FFF;
}
int VP9EncoderImpl::GetEncodedPartitions(const I420VideoFrame& input_image) {
vpx_codec_iter_t iter = NULL;
encoded_image_._length = 0;
encoded_image_._frameType = kDeltaFrame;
RTPFragmentationHeader frag_info;
// Note: no data partitioning in VP9, so 1 partition only. We keep this
// fragmentation data for now, until VP9 packetizer is implemented.
frag_info.VerifyAndAllocateFragmentationHeader(1);
int part_idx = 0;
CodecSpecificInfo codec_specific;
const vpx_codec_cx_pkt_t *pkt = NULL;
while ((pkt = vpx_codec_get_cx_data(encoder_, &iter)) != NULL) {
switch (pkt->kind) {
case VPX_CODEC_CX_FRAME_PKT: {
memcpy(&encoded_image_._buffer[encoded_image_._length],
pkt->data.frame.buf,
pkt->data.frame.sz);
frag_info.fragmentationOffset[part_idx] = encoded_image_._length;
frag_info.fragmentationLength[part_idx] =
static_cast<uint32_t>(pkt->data.frame.sz);
frag_info.fragmentationPlType[part_idx] = 0;
frag_info.fragmentationTimeDiff[part_idx] = 0;
encoded_image_._length += static_cast<uint32_t>(pkt->data.frame.sz);
assert(encoded_image_._length <= encoded_image_._size);
break;
}
default: {
break;
}
}
// End of frame.
if ((pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT) == 0) {
// Check if encoded frame is a key frame.
if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) {
encoded_image_._frameType = kKeyFrame;
}
PopulateCodecSpecific(&codec_specific, *pkt, input_image.timestamp());
break;
}
}
if (encoded_image_._length > 0) {
TRACE_COUNTER1("webrtc", "EncodedFrameSize", encoded_image_._length);
encoded_image_._timeStamp = input_image.timestamp();
encoded_image_.capture_time_ms_ = input_image.render_time_ms();
encoded_image_._encodedHeight = raw_->d_h;
encoded_image_._encodedWidth = raw_->d_w;
encoded_complete_callback_->Encoded(encoded_image_, &codec_specific,
&frag_info);
}
return WEBRTC_VIDEO_CODEC_OK;
}
int VP9EncoderImpl::SetChannelParameters(uint32_t packet_loss, int rtt) {
return WEBRTC_VIDEO_CODEC_OK;
}
int VP9EncoderImpl::RegisterEncodeCompleteCallback(
EncodedImageCallback* callback) {
encoded_complete_callback_ = callback;
return WEBRTC_VIDEO_CODEC_OK;
}
VP9Decoder* VP9Decoder::Create() {
return new VP9DecoderImpl();
}
VP9DecoderImpl::VP9DecoderImpl()
: decode_complete_callback_(NULL),
inited_(false),
decoder_(NULL),
key_frame_required_(true) {
memset(&codec_, 0, sizeof(codec_));
}
VP9DecoderImpl::~VP9DecoderImpl() {
inited_ = true; // in order to do the actual release
Release();
}
int VP9DecoderImpl::Reset() {
if (!inited_) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
InitDecode(&codec_, 1);
return WEBRTC_VIDEO_CODEC_OK;
}
int VP9DecoderImpl::InitDecode(const VideoCodec* inst, int number_of_cores) {
if (inst == NULL) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
int ret_val = Release();
if (ret_val < 0) {
return ret_val;
}
if (decoder_ == NULL) {
decoder_ = new vpx_dec_ctx_t;
}
vpx_codec_dec_cfg_t cfg;
// Setting number of threads to a constant value (1)
cfg.threads = 1;
cfg.h = cfg.w = 0; // set after decode
vpx_codec_flags_t flags = 0;
if (vpx_codec_dec_init(decoder_, vpx_codec_vp9_dx(), &cfg, flags)) {
return WEBRTC_VIDEO_CODEC_MEMORY;
}
if (&codec_ != inst) {
// Save VideoCodec instance for later; mainly for duplicating the decoder.
codec_ = *inst;
}
inited_ = true;
// Always start with a complete key frame.
key_frame_required_ = true;
return WEBRTC_VIDEO_CODEC_OK;
}
int VP9DecoderImpl::Decode(const EncodedImage& input_image,
bool missing_frames,
const RTPFragmentationHeader* fragmentation,
const CodecSpecificInfo* codec_specific_info,
int64_t /*render_time_ms*/) {
if (!inited_) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
if (decode_complete_callback_ == NULL) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
// Always start with a complete key frame.
if (key_frame_required_) {
if (input_image._frameType != kKeyFrame)
return WEBRTC_VIDEO_CODEC_ERROR;
// We have a key frame - is it complete?
if (input_image._completeFrame) {
key_frame_required_ = false;
} else {
return WEBRTC_VIDEO_CODEC_ERROR;
}
}
vpx_codec_iter_t iter = NULL;
vpx_image_t* img;
uint8_t* buffer = input_image._buffer;
if (input_image._length == 0) {
buffer = NULL; // Triggers full frame concealment.
}
if (vpx_codec_decode(decoder_,
buffer,
input_image._length,
0,
VPX_DL_REALTIME)) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
img = vpx_codec_get_frame(decoder_, &iter);
int ret = ReturnFrame(img, input_image._timeStamp);
if (ret != 0) {
return ret;
}
return WEBRTC_VIDEO_CODEC_OK;
}
int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
if (img == NULL) {
// Decoder OK and NULL image => No show frame.
return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
}
int half_height = (img->d_h + 1) / 2;
int size_y = img->stride[VPX_PLANE_Y] * img->d_h;
int size_u = img->stride[VPX_PLANE_U] * half_height;
int size_v = img->stride[VPX_PLANE_V] * half_height;
decoded_image_.CreateFrame(size_y, img->planes[VPX_PLANE_Y],
size_u, img->planes[VPX_PLANE_U],
size_v, img->planes[VPX_PLANE_V],
img->d_w, img->d_h,
img->stride[VPX_PLANE_Y],
img->stride[VPX_PLANE_U],
img->stride[VPX_PLANE_V]);
decoded_image_.set_timestamp(timestamp);
int ret = decode_complete_callback_->Decoded(decoded_image_);
if (ret != 0)
return ret;
return WEBRTC_VIDEO_CODEC_OK;
}
int VP9DecoderImpl::RegisterDecodeCompleteCallback(
DecodedImageCallback* callback) {
decode_complete_callback_ = callback;
return WEBRTC_VIDEO_CODEC_OK;
}
int VP9DecoderImpl::Release() {
if (decoder_ != NULL) {
if (vpx_codec_destroy(decoder_)) {
return WEBRTC_VIDEO_CODEC_MEMORY;
}
delete decoder_;
decoder_ = NULL;
}
inited_ = false;
return WEBRTC_VIDEO_CODEC_OK;
}
} // namespace webrtc

View File

@ -0,0 +1,115 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*
*/
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_IMPL_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_IMPL_H_
#include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
// VPX forward declaration
typedef struct vpx_codec_ctx vpx_codec_ctx_t;
typedef struct vpx_codec_ctx vpx_dec_ctx_t;
typedef struct vpx_codec_enc_cfg vpx_codec_enc_cfg_t;
typedef struct vpx_image vpx_image_t;
typedef struct vpx_ref_frame vpx_ref_frame_t;
struct vpx_codec_cx_pkt;
namespace webrtc {
class VP9EncoderImpl : public VP9Encoder {
public:
VP9EncoderImpl();
virtual ~VP9EncoderImpl();
virtual int Release() OVERRIDE;
virtual int InitEncode(const VideoCodec* codec_settings,
int number_of_cores,
uint32_t max_payload_size) OVERRIDE;
virtual int Encode(const I420VideoFrame& input_image,
const CodecSpecificInfo* codec_specific_info,
const std::vector<VideoFrameType>* frame_types) OVERRIDE;
virtual int RegisterEncodeCompleteCallback(EncodedImageCallback* callback)
OVERRIDE;
virtual int SetChannelParameters(uint32_t packet_loss, int rtt) OVERRIDE;
virtual int SetRates(uint32_t new_bitrate_kbit, uint32_t frame_rate) OVERRIDE;
private:
// Call encoder initialize function and set control settings.
int InitAndSetControlSettings(const VideoCodec* inst);
void PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
const vpx_codec_cx_pkt& pkt,
uint32_t timestamp);
int GetEncodedPartitions(const I420VideoFrame& input_image);
// Determine maximum target for Intra frames
//
// Input:
// - optimal_buffer_size : Optimal buffer size
// Return Value : Max target size for Intra frames represented as
// percentage of the per frame bandwidth
uint32_t MaxIntraTarget(uint32_t optimal_buffer_size);
EncodedImage encoded_image_;
EncodedImageCallback* encoded_complete_callback_;
VideoCodec codec_;
bool inited_;
int64_t timestamp_;
uint16_t picture_id_;
int cpu_speed_;
uint32_t rc_max_intra_target_;
vpx_codec_ctx_t* encoder_;
vpx_codec_enc_cfg_t* config_;
vpx_image_t* raw_;
};
class VP9DecoderImpl : public VP9Decoder {
public:
VP9DecoderImpl();
virtual ~VP9DecoderImpl();
virtual int InitDecode(const VideoCodec* inst, int number_of_cores) OVERRIDE;
virtual int Decode(const EncodedImage& input_image,
bool missing_frames,
const RTPFragmentationHeader* fragmentation,
const CodecSpecificInfo* codec_specific_info,
int64_t /*render_time_ms*/) OVERRIDE;
virtual int RegisterDecodeCompleteCallback(DecodedImageCallback* callback)
OVERRIDE;
virtual int Release() OVERRIDE;
virtual int Reset() OVERRIDE;
private:
int ReturnFrame(const vpx_image_t* img, uint32_t timeStamp);
I420VideoFrame decoded_image_;
DecodedImageCallback* decode_complete_callback_;
bool inited_;
vpx_dec_ctx_t* decoder_;
VideoCodec codec_;
bool key_frame_required_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP9_IMPL_H_

View File

@ -39,6 +39,7 @@ namespace webrtc {
#define VCM_RED_PAYLOAD_TYPE 96
#define VCM_ULPFEC_PAYLOAD_TYPE 97
#define VCM_VP8_PAYLOAD_TYPE 100
#define VCM_VP9_PAYLOAD_TYPE 101
#define VCM_I420_PAYLOAD_TYPE 124
#define VCM_H264_PAYLOAD_TYPE 127

View File

@ -19,6 +19,9 @@
#ifdef VIDEOCODEC_VP8
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#endif
#ifdef VIDEOCODEC_VP9
#include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
#endif
#include "webrtc/modules/video_coding/main/source/internal_defines.h"
#include "webrtc/system_wrappers/interface/logging.h"
@ -39,6 +42,20 @@ VideoCodecVP8 VideoEncoder::GetDefaultVp8Settings() {
return vp8_settings;
}
VideoCodecVP9 VideoEncoder::GetDefaultVp9Settings() {
VideoCodecVP9 vp9_settings;
memset(&vp9_settings, 0, sizeof(vp9_settings));
vp9_settings.resilience = 1;
vp9_settings.numberOfTemporalLayers = 1;
vp9_settings.denoisingOn = false;
vp9_settings.frameDroppingOn = true;
vp9_settings.keyFrameInterval = 3000;
vp9_settings.adaptiveQpMode = true;
return vp9_settings;
}
VideoCodecH264 VideoEncoder::GetDefaultH264Settings() {
VideoCodecH264 h264_settings;
memset(&h264_settings, 0, sizeof(h264_settings));
@ -126,6 +143,24 @@ bool VCMCodecDataBase::Codec(int list_id,
return true;
}
#endif
#ifdef VIDEOCODEC_VP9
case VCM_VP9_IDX: {
strncpy(settings->plName, "VP9", 4);
settings->codecType = kVideoCodecVP9;
// 96 to 127 dynamic payload types for video codecs.
settings->plType = VCM_VP9_PAYLOAD_TYPE;
settings->startBitrate = 100;
settings->minBitrate = VCM_MIN_BITRATE;
settings->maxBitrate = 0;
settings->maxFramerate = VCM_DEFAULT_FRAME_RATE;
settings->width = VCM_DEFAULT_CODEC_WIDTH;
settings->height = VCM_DEFAULT_CODEC_HEIGHT;
settings->numberOfSimulcastStreams = 0;
settings->qpMax = 56;
settings->codecSpecific.VP9 = VideoEncoder::GetDefaultVp9Settings();
return true;
}
#endif
#ifdef VIDEOCODEC_H264
case VCM_H264_IDX: {
strncpy(settings->plName, "H264", 5);
@ -362,6 +397,13 @@ bool VCMCodecDataBase::RequiresEncoderReset(const VideoCodec& new_send_codec) {
return true;
}
break;
case kVideoCodecVP9:
if (memcmp(&new_send_codec.codecSpecific.VP9,
&send_codec_.codecSpecific.VP9,
sizeof(new_send_codec.codecSpecific.VP9)) != 0) {
return true;
}
break;
case kVideoCodecH264:
if (memcmp(&new_send_codec.codecSpecific.H264,
&send_codec_.codecSpecific.H264,
@ -635,6 +677,10 @@ VCMGenericEncoder* VCMCodecDataBase::CreateEncoder(
case kVideoCodecVP8:
return new VCMGenericEncoder(*(VP8Encoder::Create()));
#endif
#ifdef VIDEOCODEC_VP9
case kVideoCodecVP9:
return new VCMGenericEncoder(*(VP9Encoder::Create()));
#endif
#ifdef VIDEOCODEC_I420
case kVideoCodecI420:
return new VCMGenericEncoder(*(new I420Encoder));
@ -662,6 +708,10 @@ VCMGenericDecoder* VCMCodecDataBase::CreateDecoder(VideoCodecType type) const {
case kVideoCodecVP8:
return new VCMGenericDecoder(*(VP8Decoder::Create()));
#endif
#ifdef VIDEOCODEC_VP9
case kVideoCodecVP9:
return new VCMGenericDecoder(*(VP9Decoder::Create()));
#endif
#ifdef VIDEOCODEC_I420
case kVideoCodecI420:
return new VCMGenericDecoder(*(new I420Decoder));

View File

@ -39,10 +39,15 @@ inline uint32_t MaskWord64ToUWord32(int64_t w64)
#else
#define VCM_VP8_IDX VCM_NO_CODEC_IDX
#endif
#ifdef VIDEOCODEC_H264
#define VCM_H264_IDX (VCM_VP8_IDX + 1)
#ifdef VIDEOCODEC_VP9
#define VCM_VP9_IDX (VCM_VP8_IDX + 1)
#else
#define VCM_H264_IDX VCM_VP8_IDX
#define VCM_VP9_IDX VCM_VP8_IDX
#endif
#ifdef VIDEOCODEC_H264
#define VCM_H264_IDX (VCM_VP9_IDX + 1)
#else
#define VCM_H264_IDX VCM_VP9_IDX
#endif
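/* Editorial illustration (not part of this patch): the chained defines keep
 * the enabled codec indices contiguous. With VP8, VP9 and H264 all compiled
 * in, VCM_VP9_IDX == VCM_VP8_IDX + 1 and VCM_H264_IDX == VCM_VP9_IDX + 1;
 * if VIDEOCODEC_VP9 is not defined, VCM_VP9_IDX collapses to VCM_VP8_IDX and
 * VCM_H264_IDX resolves to VCM_VP8_IDX + 1, so no index slot is wasted. The
 * I420 index below continues the same chain. */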
#ifdef VIDEOCODEC_I420
#define VCM_I420_IDX (VCM_H264_IDX + 1)

View File

@ -17,6 +17,7 @@
'<(webrtc_root)/modules/video_coding/utility/video_coding_utility.gyp:video_coding_utility',
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
'<(webrtc_vp8_dir)/vp8.gyp:webrtc_vp8',
'<(webrtc_vp9_dir)/vp9.gyp:webrtc_vp9',
],
'sources': [
# interfaces

View File

@ -101,6 +101,9 @@ VCMNTEncodeCompleteCallback::SendData(
rtpInfo.type.Video.codecHeader.VP8.pictureId =
videoHdr->codecHeader.VP8.pictureId;
break;
case kVideoCodecVP9:
// Leave for now, until we add kRtpVideoVp9 to RTP.
break;
default:
assert(false);
return -1;

View File

@ -82,6 +82,9 @@ VCMEncodeCompleteCallback::SendData(
rtpInfo.type.Video.codecHeader.VP8.pictureId =
videoHdr->codecHeader.VP8.pictureId;
break;
case webrtc::kRtpVideoGeneric:
// Leave for now, until we add kRtpVideoVp9 to RTP.
break;
default:
assert(false);
return -1;

View File

@ -151,6 +151,7 @@ webrtc::RtpVideoCodecTypes ConvertCodecType(const char* plname) {
if (strncmp(plname,"VP8" , 3) == 0) {
return webrtc::kRtpVideoVp8;
} else {
return webrtc::kRtpVideoNone; // Default value
// Default value.
return webrtc::kRtpVideoGeneric;
}
}

View File

@ -63,6 +63,8 @@ int ParseArguments(CmdArgs& args) {
args.codecName = FLAGS_codec;
if (args.codecName == "VP8") {
args.codecType = kVideoCodecVP8;
} else if (args.codecName == "VP9") {
args.codecType = kVideoCodecVP9;
} else if (args.codecName == "I420") {
args.codecType = kVideoCodecI420;
} else {

View File

@ -20,6 +20,7 @@
#include "webrtc/config.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_header_parser.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@ -46,6 +47,8 @@ VideoEncoder* VideoEncoder::Create(VideoEncoder::EncoderType codec_type) {
switch (codec_type) {
case kVp8:
return VP8Encoder::Create();
case kVp9:
return VP9Encoder::Create();
}
assert(false);
return NULL;

View File

@ -19,6 +19,9 @@
#include "webrtc/call.h"
#include "webrtc/frame_callback.h"
#include "webrtc/modules/rtp_rtcp/source/rtcp_utility.h"
#include "webrtc/modules/video_coding/codecs/vp8/include/vp8.h"
#include "webrtc/modules/video_coding/codecs/vp9/include/vp9.h"
#include "webrtc/modules/video_coding/main/interface/video_coding_defines.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
@ -222,6 +225,58 @@ TEST_F(EndToEndTest, TransmitsFirstFrame) {
DestroyStreams();
}
TEST_F(EndToEndTest, SendsAndReceivesVP9) {
class VP9Observer : public test::EndToEndTest, public VideoRenderer {
public:
VP9Observer()
: EndToEndTest(2 * kDefaultTimeoutMs),
encoder_(VideoEncoder::Create(VideoEncoder::kVp9)),
decoder_(VP9Decoder::Create()),
frame_counter_(0) {}
virtual void PerformTest() OVERRIDE {
EXPECT_EQ(kEventSignaled, Wait())
<< "Timed out while waiting for enough frames to be decoded.";
}
virtual void ModifyConfigs(
VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) OVERRIDE {
send_config->encoder_settings.encoder = encoder_.get();
send_config->encoder_settings.payload_name = "VP9";
send_config->encoder_settings.payload_type = VCM_VP9_PAYLOAD_TYPE;
encoder_config->streams[0].min_bitrate_bps = 50000;
encoder_config->streams[0].target_bitrate_bps =
encoder_config->streams[0].max_bitrate_bps = 2000000;
(*receive_configs)[0].renderer = this;
VideoCodec codec =
test::CreateDecoderVideoCodec(send_config->encoder_settings);
(*receive_configs)[0].codecs.resize(1);
(*receive_configs)[0].codecs[0] = codec;
(*receive_configs)[0].external_decoders.resize(1);
(*receive_configs)[0].external_decoders[0].payload_type =
send_config->encoder_settings.payload_type;
(*receive_configs)[0].external_decoders[0].decoder = decoder_.get();
}
virtual void RenderFrame(const I420VideoFrame& video_frame,
int time_to_render_ms) OVERRIDE {
const int kRequiredFrames = 500;
if (++frame_counter_ == kRequiredFrames)
observation_complete_->Set();
}
private:
scoped_ptr<webrtc::VideoEncoder> encoder_;
scoped_ptr<webrtc::VideoDecoder> decoder_;
int frame_counter_;
} test;
RunBaseTest(&test);
}
TEST_F(EndToEndTest, SendsAndReceivesH264) {
class H264Observer : public test::EndToEndTest, public VideoRenderer {
public:

View File

@ -305,6 +305,8 @@ bool VideoSendStream::ReconfigureVideoEncoder(
memset(&video_codec, 0, sizeof(video_codec));
if (config_.encoder_settings.payload_name == "VP8") {
video_codec.codecType = kVideoCodecVP8;
} else if (config_.encoder_settings.payload_name == "VP9") {
video_codec.codecType = kVideoCodecVP9;
} else if (config_.encoder_settings.payload_name == "H264") {
video_codec.codecType = kVideoCodecH264;
} else {
@ -321,6 +323,8 @@ bool VideoSendStream::ReconfigureVideoEncoder(
if (video_codec.codecType == kVideoCodecVP8) {
video_codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings();
} else if (video_codec.codecType == kVideoCodecVP9) {
video_codec.codecSpecific.VP9 = VideoEncoder::GetDefaultVp9Settings();
} else if (video_codec.codecType == kVideoCodecH264) {
video_codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings();
}

View File

@ -40,28 +40,84 @@ class VideoEncoder {
public:
enum EncoderType {
kVp8,
kVp9,
};
static VideoEncoder* Create(EncoderType codec_type);
static VideoCodecVP8 GetDefaultVp8Settings();
static VideoCodecVP9 GetDefaultVp9Settings();
static VideoCodecH264 GetDefaultH264Settings();
virtual ~VideoEncoder() {}
// Initialize the encoder with the information from the codecSettings
//
// Input:
// - codec_settings : Codec settings
// - number_of_cores : Number of cores available for the encoder
// - max_payload_size : The maximum size each payload is allowed
// to have. Usually MTU - overhead.
//
// Return value : Set bit rate if OK
// <0 - Errors:
// WEBRTC_VIDEO_CODEC_ERR_PARAMETER
// WEBRTC_VIDEO_CODEC_ERR_SIZE
// WEBRTC_VIDEO_CODEC_LEVEL_EXCEEDED
// WEBRTC_VIDEO_CODEC_MEMORY
// WEBRTC_VIDEO_CODEC_ERROR
virtual int32_t InitEncode(const VideoCodec* codec_settings,
int32_t number_of_cores,
uint32_t max_payload_size) = 0;
// Register an encode complete callback object.
//
// Input:
// - callback : Callback object which handles encoded images.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual int32_t RegisterEncodeCompleteCallback(
EncodedImageCallback* callback) = 0;
// Free encoder memory.
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual int32_t Release() = 0;
// Encode an I420 image (as a part of a video stream). The encoded image
// will be returned to the user through the encode complete callback.
//
// Input:
// - frame : Image to be encoded
// - frame_types : Frame type to be generated by the encoder.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK
// <0 - Errors:
// WEBRTC_VIDEO_CODEC_ERR_PARAMETER
// WEBRTC_VIDEO_CODEC_MEMORY
// WEBRTC_VIDEO_CODEC_ERROR
// WEBRTC_VIDEO_CODEC_TIMEOUT
virtual int32_t Encode(const I420VideoFrame& frame,
const CodecSpecificInfo* codec_specific_info,
const std::vector<VideoFrameType>* frame_types) = 0;
// Inform the encoder of the new packet loss rate and the round-trip time of
// the network.
//
// Input:
// - packet_loss : Fraction lost
// (loss rate in percent = 100 * packetLoss / 255)
// - rtt : Round-trip time in milliseconds
// Return value : WEBRTC_VIDEO_CODEC_OK if OK
// <0 - Errors: WEBRTC_VIDEO_CODEC_ERROR
virtual int32_t SetChannelParameters(uint32_t packet_loss, int rtt) = 0;
// Inform the encoder about the new target bit rate.
//
// Input:
// - bitrate : New target bit rate
// - framerate : The target frame rate
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual int32_t SetRates(uint32_t bitrate, uint32_t framerate) = 0;
virtual int32_t SetPeriodicKeyFrames(bool enable) { return -1; }

View File

@ -98,6 +98,9 @@ void ViEAutoTest::PrintVideoCodec(const webrtc::VideoCodec videoCodec)
case webrtc::kVideoCodecVP8:
ViETest::Log("\tcodecType: VP8");
break;
case webrtc::kVideoCodecVP9:
ViETest::Log("\tcodecType: VP9");
break;
case webrtc::kVideoCodecI420:
ViETest::Log("\tcodecType: I420");
break;

View File

@ -640,6 +640,8 @@ bool ViECodecImpl::CodecValid(const VideoCodec& video_codec) {
return false;
} else if ((video_codec.codecType == kVideoCodecVP8 &&
strncmp(video_codec.plName, "VP8", 4) == 0) ||
(video_codec.codecType == kVideoCodecVP9 &&
strncmp(video_codec.plName, "VP9", 4) == 0) ||
(video_codec.codecType == kVideoCodecI420 &&
strncmp(video_codec.plName, "I420", 4) == 0) ||
(video_codec.codecType == kVideoCodecH264 &&

View File

@ -32,6 +32,7 @@
'video_engine/video_engine.gyp:*',
'voice_engine/voice_engine.gyp:*',
'<(webrtc_vp8_dir)/vp8.gyp:*',
'<(webrtc_vp9_dir)/vp9.gyp:*',
],
},
'targets': [