Use external VideoDecoders in VideoReceiveStream.

Removes direct VideoCodec use from the new API, exposes VideoDecoders
through webrtc/video_decoder.h similar to VideoEncoders.

Also includes some preparation for wiring up external decoders in
WebRtcVideoEngine2 by adding AllocatedDecoders that specify whether they
were allocated internally or externally.

Additionally addresses a data race in VideoReceiver that was exposed with this change.

R=mflodman@webrtc.org, stefan@webrtc.org
TBR=pthatcher@webrtc.org
BUG=2854,1667

Review URL: https://webrtc-codereview.appspot.com/27829004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@7560 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
pbos@webrtc.org 2014-10-29 15:28:39 +00:00
parent 2dd3134e50
commit 776e6f289c
21 changed files with 261 additions and 234 deletions

View File

@ -1377,6 +1377,9 @@ class VideoMediaChannelTest : public testing::Test,
EXPECT_EQ(0, renderer_.num_rendered_frames());
EXPECT_TRUE(SendFrame());
EXPECT_FRAME_WAIT(1, 640, 400, kTimeout);
// Wait for one frame so they don't get dropped because we send frames too
// tightly.
rtc::Thread::Current()->ProcessMessages(30);
// Remove the capturer.
EXPECT_TRUE(channel_->SetCapturer(kSsrc, NULL));
// Wait for one black frame for removing the capturer.

View File

@ -42,6 +42,7 @@
#include "webrtc/base/logging.h"
#include "webrtc/base/stringutils.h"
#include "webrtc/call.h"
#include "webrtc/video_decoder.h"
#include "webrtc/video_encoder.h"
#define UNIMPLEMENTED \
@ -1008,8 +1009,8 @@ bool WebRtcVideoChannel2::AddRecvStream(const StreamParams& sp) {
webrtc::VideoReceiveStream::Config config;
ConfigureReceiverRtp(&config, sp);
receive_streams_[ssrc] =
new WebRtcVideoReceiveStream(call_.get(), config, recv_codecs_);
receive_streams_[ssrc] = new WebRtcVideoReceiveStream(
call_.get(), external_decoder_factory_, config, recv_codecs_);
return true;
}
@ -1854,11 +1855,13 @@ void WebRtcVideoChannel2::WebRtcVideoSendStream::RecreateWebRtcStream() {
WebRtcVideoChannel2::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream(
webrtc::Call* call,
WebRtcVideoDecoderFactory* external_decoder_factory,
const webrtc::VideoReceiveStream::Config& config,
const std::vector<VideoCodecSettings>& recv_codecs)
: call_(call),
stream_(NULL),
config_(config),
external_decoder_factory_(external_decoder_factory),
renderer_(NULL),
last_width_(-1),
last_height_(-1) {
@ -1869,6 +1872,7 @@ WebRtcVideoChannel2::WebRtcVideoReceiveStream::WebRtcVideoReceiveStream(
WebRtcVideoChannel2::WebRtcVideoReceiveStream::~WebRtcVideoReceiveStream() {
call_->DestroyVideoReceiveStream(stream_);
ClearDecoders();
}
void WebRtcVideoChannel2::WebRtcVideoReceiveStream::SetRecvCodecs(
@ -1877,24 +1881,18 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::SetRecvCodecs(
// TODO(pbos): Base receive codecs off recv_codecs_ and set up using a
// DecoderFactory similar to send side. Pending webrtc:2854.
// Also set up default codecs if there's nothing in recv_codecs_.
webrtc::VideoCodec codec;
memset(&codec, 0, sizeof(codec));
ClearDecoders();
codec.plType = kDefaultVideoCodecPref.payload_type;
strcpy(codec.plName, kDefaultVideoCodecPref.name);
codec.codecType = webrtc::kVideoCodecVP8;
codec.codecSpecific.VP8.resilience = webrtc::kResilientStream;
codec.codecSpecific.VP8.numberOfTemporalLayers = 1;
codec.codecSpecific.VP8.denoisingOn = true;
codec.codecSpecific.VP8.errorConcealmentOn = false;
codec.codecSpecific.VP8.automaticResizeOn = false;
codec.codecSpecific.VP8.frameDroppingOn = true;
codec.codecSpecific.VP8.keyFrameInterval = 3000;
// Bitrates don't matter and are ignored for the receiver. This is put in to
// have the current underlying implementation accept the VideoCodec.
codec.minBitrate = codec.startBitrate = codec.maxBitrate = 300;
config_.codecs.clear();
config_.codecs.push_back(codec);
AllocatedDecoder allocated_decoder(
webrtc::VideoDecoder::Create(webrtc::VideoDecoder::kVp8), false);
allocated_decoders_.push_back(allocated_decoder);
webrtc::VideoReceiveStream::Decoder decoder;
decoder.decoder = allocated_decoder.decoder;
decoder.payload_type = kDefaultVideoCodecPref.payload_type;
decoder.payload_name = "VP8";
config_.decoders.push_back(decoder);
config_.rtp.fec = recv_codecs.front().fec;
@ -1919,6 +1917,18 @@ void WebRtcVideoChannel2::WebRtcVideoReceiveStream::RecreateWebRtcStream() {
stream_->Start();
}
// Releases every decoder allocated for this receive stream. Decoders that
// came from the external factory are handed back to it for disposal;
// internally allocated decoders are owned here and deleted directly.
void WebRtcVideoChannel2::WebRtcVideoReceiveStream::ClearDecoders() {
for (size_t i = 0; i < allocated_decoders_.size(); ++i) {
if (allocated_decoders_[i].external) {
// Factory-created decoders must be destroyed by the same factory.
external_decoder_factory_->DestroyVideoDecoder(
allocated_decoders_[i].decoder);
} else {
delete allocated_decoders_[i].decoder;
}
}
allocated_decoders_.clear();
}
void WebRtcVideoChannel2::WebRtcVideoReceiveStream::RenderFrame(
const webrtc::I420VideoFrame& frame,
int time_to_render_ms) {

View File

@ -389,6 +389,7 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
public:
WebRtcVideoReceiveStream(
webrtc::Call*,
WebRtcVideoDecoderFactory* external_decoder_factory,
const webrtc::VideoReceiveStream::Config& config,
const std::vector<VideoCodecSettings>& recv_codecs);
~WebRtcVideoReceiveStream();
@ -405,14 +406,26 @@ class WebRtcVideoChannel2 : public rtc::MessageHandler,
VideoReceiverInfo GetVideoReceiverInfo();
private:
// Pairs a VideoDecoder with how it was allocated, so ClearDecoders() knows
// whether to delete it or return it to the external decoder factory.
struct AllocatedDecoder {
AllocatedDecoder(webrtc::VideoDecoder* decoder, bool external)
: decoder(decoder), external(external) {}
webrtc::VideoDecoder* decoder;
// true if |decoder| was created by external_decoder_factory_.
bool external;
};
void SetSize(int width, int height);
void RecreateWebRtcStream();
void ClearDecoders();
webrtc::Call* const call_;
webrtc::VideoReceiveStream* stream_;
webrtc::VideoReceiveStream::Config config_;
WebRtcVideoDecoderFactory* const external_decoder_factory_;
std::vector<AllocatedDecoder> allocated_decoders_;
rtc::CriticalSection renderer_lock_;
cricket::VideoRenderer* renderer_ GUARDED_BY(renderer_lock_);
int last_width_ GUARDED_BY(renderer_lock_);

View File

@ -174,9 +174,6 @@ void FakeVideoReceiveStream::Stop() {
receiving_ = false;
}
void FakeVideoReceiveStream::GetCurrentReceiveCodec(webrtc::VideoCodec* codec) {
}
FakeCall::FakeCall(const webrtc::Call::Config& config)
: config_(config), network_state_(kNetworkUp) {
SetVideoCodecs(GetDefaultVideoCodecs());
@ -1622,8 +1619,8 @@ TEST_F(WebRtcVideoChannel2Test, SetRecvCodecsAcceptDefaultCodecs) {
FakeVideoReceiveStream* stream = AddRecvStream();
webrtc::VideoReceiveStream::Config config = stream->GetConfig();
EXPECT_STREQ(engine_.codecs()[0].name.c_str(), config.codecs[0].plName);
EXPECT_EQ(engine_.codecs()[0].id, config.codecs[0].plType);
EXPECT_EQ(engine_.codecs()[0].name, config.decoders[0].payload_name);
EXPECT_EQ(engine_.codecs()[0].id, config.decoders[0].payload_type);
}
TEST_F(WebRtcVideoChannel2Test, SetRecvCodecsRejectUnsupportedCodec) {

View File

@ -87,7 +87,6 @@ class FakeVideoReceiveStream : public webrtc::VideoReceiveStream {
virtual void Start() OVERRIDE;
virtual void Stop() OVERRIDE;
virtual void GetCurrentReceiveCodec(webrtc::VideoCodec* codec);
webrtc::VideoReceiveStream::Config config_;
bool receiving_;

View File

@ -18,6 +18,7 @@
#include "webrtc/modules/interface/module_common_types.h"
#include "webrtc/modules/video_coding/codecs/interface/video_error_codes.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_decoder.h"
#include "webrtc/video_encoder.h"
namespace webrtc
@ -62,96 +63,6 @@ struct CodecSpecificInfo
CodecSpecificInfoUnion codecSpecific;
};
class DecodedImageCallback
{
public:
virtual ~DecodedImageCallback() {};
// Callback function which is called when an image has been decoded.
//
// Input:
// - decodedImage : The decoded image.
//
// Return value : 0 if OK, < 0 otherwise.
virtual int32_t Decoded(I420VideoFrame& decodedImage) = 0;
virtual int32_t ReceivedDecodedReferenceFrame(const uint64_t pictureId) {return -1;}
virtual int32_t ReceivedDecodedFrame(const uint64_t pictureId) {return -1;}
};
class VideoDecoder
{
public:
virtual ~VideoDecoder() {};
// Initialize the decoder with the information from the VideoCodec.
//
// Input:
// - inst : Codec settings
// - numberOfCores : Number of cores available for the decoder
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual int32_t InitDecode(const VideoCodec* codecSettings, int32_t numberOfCores) = 0;
// Decode encoded image (as a part of a video stream). The decoded image
// will be returned to the user through the decode complete callback.
//
// Input:
// - inputImage : Encoded image to be decoded
// - missingFrames : True if one or more frames have been lost
// since the previous decode call.
// - fragmentation : Specifies where the encoded frame can be
// split into separate fragments. The meaning
// of fragment is codec specific, but often
// means that each fragment is decodable by
// itself.
// - codecSpecificInfo : Pointer to codec specific data
// - renderTimeMs : System time to render in milliseconds. Only
// used by decoders with internal rendering.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual int32_t
Decode(const EncodedImage& inputImage,
bool missingFrames,
const RTPFragmentationHeader* fragmentation,
const CodecSpecificInfo* codecSpecificInfo = NULL,
int64_t renderTimeMs = -1) = 0;
// Register an decode complete callback object.
//
// Input:
// - callback : Callback object which handles decoded images.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual int32_t RegisterDecodeCompleteCallback(DecodedImageCallback* callback) = 0;
// Free decoder memory.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual int32_t Release() = 0;
// Reset decoder state and prepare for a new call.
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual int32_t Reset() = 0;
// Codec configuration data sent out-of-band, i.e. in SIP call setup
//
// Input/Output:
// - buffer : Buffer pointer to the configuration data
// - size : The size of the configuration data in
// bytes
//
// Return value : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
virtual int32_t SetCodecConfigParameters(const uint8_t* /*buffer*/, int32_t /*size*/) { return WEBRTC_VIDEO_CODEC_ERROR; }
// Create a copy of the codec and its internal state.
//
// Return value : A copy of the instance if OK, NULL otherwise.
virtual VideoDecoder* Copy() { return NULL; }
};
} // namespace webrtc
#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H

View File

@ -186,7 +186,8 @@ class VideoReceiver {
void RegisterPreDecodeImageCallback(EncodedImageCallback* observer);
protected:
int32_t Decode(const webrtc::VCMEncodedFrame& frame);
int32_t Decode(const webrtc::VCMEncodedFrame& frame)
EXCLUSIVE_LOCKS_REQUIRED(_receiveCritSect);
int32_t RequestKeyFrame();
int32_t RequestSliceLossIndication(const uint64_t pictureID) const;
int32_t NackList(uint16_t* nackList, uint16_t* size);
@ -230,7 +231,7 @@ class VideoReceiver {
size_t max_nack_list_size_ GUARDED_BY(process_crit_sect_);
EncodedImageCallback* pre_decode_image_callback_ GUARDED_BY(_receiveCritSect);
VCMCodecDataBase _codecDataBase;
VCMCodecDataBase _codecDataBase GUARDED_BY(_receiveCritSect);
VCMProcessTimer _receiveStatsTimer;
VCMProcessTimer _retransmissionTimer;
VCMProcessTimer _keyRequestTimer;

View File

@ -280,11 +280,11 @@ int32_t VideoReceiver::InitializeReceiver() {
if (ret < 0) {
return ret;
}
_codecDataBase.ResetReceiver();
_timing.Reset();
{
CriticalSectionScoped receive_cs(_receiveCritSect);
_codecDataBase.ResetReceiver();
_timing.Reset();
_receiverInited = true;
}
@ -369,6 +369,7 @@ int VideoReceiver::RegisterRenderBufferSizeCallback(
// Should be called as often as possible to get the most out of the decoder.
int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
int64_t nextRenderTimeMs;
bool supports_render_scheduling;
{
CriticalSectionScoped cs(_receiveCritSect);
if (!_receiverInited) {
@ -377,6 +378,7 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
if (!_codecDataBase.DecoderRegistered()) {
return VCM_NO_CODEC_REGISTERED;
}
supports_render_scheduling = _codecDataBase.SupportsRenderScheduling();
}
const bool dualReceiverEnabledNotReceiving = (
@ -385,7 +387,7 @@ int32_t VideoReceiver::Decode(uint16_t maxWaitTimeMs) {
VCMEncodedFrame* frame =
_receiver.FrameForDecoding(maxWaitTimeMs,
nextRenderTimeMs,
_codecDataBase.SupportsRenderScheduling(),
supports_render_scheduling,
&_dualReceiver);
if (dualReceiverEnabledNotReceiving && _dualReceiver.State() == kReceiving) {

View File

@ -99,23 +99,15 @@ void CallTest::CreateSendConfig(size_t num_streams) {
void CallTest::CreateMatchingReceiveConfigs() {
assert(!send_config_.rtp.ssrcs.empty());
assert(receive_configs_.empty());
assert(fake_decoders_.empty());
assert(allocated_decoders_.empty());
VideoReceiveStream::Config config;
VideoCodec codec =
test::CreateDecoderVideoCodec(send_config_.encoder_settings);
config.codecs.push_back(codec);
config.rtp.local_ssrc = kReceiverLocalSsrc;
if (send_config_.encoder_settings.encoder == &fake_encoder_) {
config.external_decoders.resize(1);
config.external_decoders[0].payload_type =
send_config_.encoder_settings.payload_type;
}
for (size_t i = 0; i < send_config_.rtp.ssrcs.size(); ++i) {
if (send_config_.encoder_settings.encoder == &fake_encoder_) {
FakeDecoder* decoder = new FakeDecoder();
fake_decoders_.push_back(decoder);
config.external_decoders[0].decoder = decoder;
}
VideoReceiveStream::Decoder decoder =
test::CreateMatchingDecoder(send_config_.encoder_settings);
allocated_decoders_.push_back(decoder.decoder);
config.decoders.clear();
config.decoders.push_back(decoder);
config.rtp.remote_ssrc = send_config_.rtp.ssrcs[i];
receive_configs_.push_back(config);
}
@ -150,7 +142,7 @@ void CallTest::DestroyStreams() {
for (size_t i = 0; i < receive_streams_.size(); ++i)
receiver_call_->DestroyVideoReceiveStream(receive_streams_[i]);
receive_streams_.clear();
fake_decoders_.clear();
allocated_decoders_.clear();
}
const unsigned int CallTest::kDefaultTimeoutMs = 30 * 1000;

View File

@ -74,7 +74,7 @@ class CallTest : public ::testing::Test {
scoped_ptr<test::FrameGeneratorCapturer> frame_generator_capturer_;
test::FakeEncoder fake_encoder_;
ScopedVector<test::FakeDecoder> fake_decoders_;
ScopedVector<VideoDecoder> allocated_decoders_;
};
class BaseTest : public RtpRtcpObserver {

View File

@ -12,8 +12,8 @@
#include <assert.h>
#include <string.h>
#include "webrtc/video_encoder.h"
#include "webrtc/video_engine/vie_defines.h"
#include "webrtc/test/fake_decoder.h"
#include "webrtc/video_decoder.h"
namespace webrtc {
namespace test {
@ -53,33 +53,17 @@ std::vector<VideoStream> CreateVideoStreams(size_t num_streams) {
return stream_settings;
}
VideoCodec CreateDecoderVideoCodec(
VideoReceiveStream::Decoder CreateMatchingDecoder(
const VideoSendStream::Config::EncoderSettings& encoder_settings) {
VideoCodec codec;
memset(&codec, 0, sizeof(codec));
codec.plType = encoder_settings.payload_type;
strcpy(codec.plName, encoder_settings.payload_name.c_str());
VideoReceiveStream::Decoder decoder;
decoder.payload_type = encoder_settings.payload_type;
decoder.payload_name = encoder_settings.payload_name;
if (encoder_settings.payload_name == "VP8") {
codec.codecType = kVideoCodecVP8;
} else if (encoder_settings.payload_name == "H264") {
codec.codecType = kVideoCodecH264;
decoder.decoder = VideoDecoder::Create(VideoDecoder::kVp8);
} else {
codec.codecType = kVideoCodecGeneric;
decoder.decoder = new FakeDecoder();
}
if (codec.codecType == kVideoCodecVP8) {
codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings();
} else if (codec.codecType == kVideoCodecH264) {
codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings();
}
codec.width = 320;
codec.height = 180;
codec.startBitrate = codec.minBitrate = codec.maxBitrate = 300;
return codec;
return decoder;
}
} // namespace test
} // namespace webrtc

View File

@ -10,13 +10,14 @@
#ifndef WEBRTC_TEST_ENCODER_SETTINGS_H_
#define WEBRTC_TEST_ENCODER_SETTINGS_H_
#include "webrtc/video_receive_stream.h"
#include "webrtc/video_send_stream.h"
namespace webrtc {
namespace test {
std::vector<VideoStream> CreateVideoStreams(size_t num_streams);
VideoCodec CreateDecoderVideoCodec(
VideoReceiveStream::Decoder CreateMatchingDecoder(
const VideoSendStream::Config::EncoderSettings& encoder_settings);
} // namespace test
} // namespace webrtc

View File

@ -151,11 +151,7 @@ class BitrateEstimatorTest : public test::CallTest {
encoder_config_.streams = test::CreateVideoStreams(1);
receive_config_ = VideoReceiveStream::Config();
assert(receive_config_.codecs.empty());
VideoCodec codec =
test::CreateDecoderVideoCodec(send_config_.encoder_settings);
receive_config_.codecs.push_back(codec);
// receive_config_.external_decoders will be set by every stream separately.
// receive_config_.decoders will be set by every stream separately.
receive_config_.rtp.remote_ssrc = send_config_.rtp.ssrcs[0];
receive_config_.rtp.local_ssrc = kReceiverLocalSsrc;
receive_config_.rtp.extensions.push_back(
@ -206,12 +202,13 @@ class BitrateEstimatorTest : public test::CallTest {
send_stream_->Start();
frame_generator_capturer_->Start();
ExternalVideoDecoder decoder;
VideoReceiveStream::Decoder decoder;
decoder.decoder = &fake_decoder_;
decoder.payload_type = test_->send_config_.encoder_settings.payload_type;
decoder.payload_name = test_->send_config_.encoder_settings.payload_name;
test_->receive_config_.decoders.push_back(decoder);
test_->receive_config_.rtp.remote_ssrc = test_->send_config_.rtp.ssrcs[0];
test_->receive_config_.rtp.local_ssrc++;
test_->receive_config_.external_decoders.push_back(decoder);
receive_stream_ = test_->receiver_call_->CreateVideoReceiveStream(
test_->receive_config_);
receive_stream_->Start();

View File

@ -51,6 +51,15 @@ VideoEncoder* VideoEncoder::Create(VideoEncoder::EncoderType codec_type) {
return NULL;
}
// Factory for built-in software decoders, mirroring VideoEncoder::Create().
// Returns a newly allocated decoder owned by the caller.
VideoDecoder* VideoDecoder::Create(VideoDecoder::DecoderType codec_type) {
switch (codec_type) {
case kVp8:
return VP8Decoder::Create();
}
// Unreachable as long as every DecoderType is handled above; the missing
// default keeps the compiler warning when a new enum value is added.
assert(false);
return NULL;
}
const int Call::Config::kDefaultStartBitrateBps = 300000;
namespace internal {

View File

@ -247,14 +247,12 @@ TEST_F(EndToEndTest, SendsAndReceivesH264) {
encoder_config->streams[0].max_bitrate_bps = 2000000;
(*receive_configs)[0].renderer = this;
VideoCodec codec =
test::CreateDecoderVideoCodec(send_config->encoder_settings);
(*receive_configs)[0].codecs.resize(1);
(*receive_configs)[0].codecs[0] = codec;
(*receive_configs)[0].external_decoders.resize(1);
(*receive_configs)[0].external_decoders[0].payload_type =
(*receive_configs)[0].decoders.resize(1);
(*receive_configs)[0].decoders[0].payload_type =
send_config->encoder_settings.payload_type;
(*receive_configs)[0].external_decoders[0].decoder = &fake_decoder_;
(*receive_configs)[0].decoders[0].payload_name =
send_config->encoder_settings.payload_name;
(*receive_configs)[0].decoders[0].decoder = &fake_decoder_;
}
virtual void RenderFrame(const I420VideoFrame& video_frame,
@ -977,6 +975,7 @@ TEST_F(EndToEndTest, SendsAndReceivesMultipleStreams) {
for (size_t i = 0; i < kNumStreams; ++i)
encoders[i].reset(VideoEncoder::Create(VideoEncoder::kVp8));
ScopedVector<VideoDecoder> allocated_decoders;
for (size_t i = 0; i < kNumStreams; ++i) {
uint32_t ssrc = codec_settings[i].ssrc;
int width = codec_settings[i].width;
@ -1004,9 +1003,10 @@ TEST_F(EndToEndTest, SendsAndReceivesMultipleStreams) {
receive_config.renderer = observers[i];
receive_config.rtp.remote_ssrc = ssrc;
receive_config.rtp.local_ssrc = kReceiverLocalSsrc;
VideoCodec codec =
test::CreateDecoderVideoCodec(send_config.encoder_settings);
receive_config.codecs.push_back(codec);
VideoReceiveStream::Decoder decoder =
test::CreateMatchingDecoder(send_config.encoder_settings);
allocated_decoders.push_back(decoder.decoder);
receive_config.decoders.push_back(decoder);
receive_streams[i] =
receiver_call->CreateVideoReceiveStream(receive_config);
receive_streams[i]->Start();

View File

@ -159,9 +159,9 @@ void Loopback() {
receive_config.rtp.rtx[kRtxPayloadType].ssrc = kSendRtxSsrc;
receive_config.rtp.rtx[kRtxPayloadType].payload_type = kRtxPayloadType;
receive_config.renderer = loopback_video.get();
VideoCodec codec =
test::CreateDecoderVideoCodec(send_config.encoder_settings);
receive_config.codecs.push_back(codec);
VideoReceiveStream::Decoder decoder =
test::CreateMatchingDecoder(send_config.encoder_settings);
receive_config.decoders.push_back(decoder);
VideoReceiveStream* receive_stream =
call->CreateVideoReceiveStream(receive_config);
@ -179,6 +179,8 @@ void Loopback() {
call->DestroyVideoReceiveStream(receive_stream);
call->DestroyVideoSendStream(send_stream);
delete decoder.decoder;
transport.StopSending();
}
} // namespace webrtc

View File

@ -30,6 +30,7 @@
#include "webrtc/test/video_capturer.h"
#include "webrtc/test/video_renderer.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_decoder.h"
namespace webrtc {
namespace flags {
@ -212,8 +213,9 @@ void RtpReplay() {
VideoSendStream::Config::EncoderSettings encoder_settings;
encoder_settings.payload_name = flags::Codec();
encoder_settings.payload_type = flags::PayloadType();
VideoCodec codec = test::CreateDecoderVideoCodec(encoder_settings);
receive_config.codecs.push_back(codec);
VideoReceiveStream::Decoder decoder =
test::CreateMatchingDecoder(encoder_settings);
receive_config.decoders.push_back(decoder);
VideoReceiveStream* receive_stream =
call->CreateVideoReceiveStream(receive_config);
@ -271,6 +273,8 @@ void RtpReplay() {
}
call->DestroyVideoReceiveStream(receive_stream);
delete decoder.decoder;
}
} // namespace webrtc

View File

@ -19,6 +19,7 @@
#include "webrtc/system_wrappers/interface/clock.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/video/receive_statistics_proxy.h"
#include "webrtc/video_encoder.h"
#include "webrtc/video_engine/include/vie_base.h"
#include "webrtc/video_engine/include/vie_capture.h"
#include "webrtc/video_engine/include/vie_codec.h"
@ -31,6 +32,35 @@
namespace webrtc {
namespace internal {
namespace {
// Builds a legacy VideoCodec struct from the new-API Decoder settings so the
// underlying ViECodec interface will accept it. Only payload type/name and
// the codec-specific defaults are meaningful on the receive side.
VideoCodec CreateDecoderVideoCodec(const VideoReceiveStream::Decoder& decoder) {
VideoCodec codec;
memset(&codec, 0, sizeof(codec));
codec.plType = decoder.payload_type;
strcpy(codec.plName, decoder.payload_name.c_str());
// Map the payload name to a codec type; unrecognized names are treated as
// generic codecs.
if (decoder.payload_name == "VP8") {
codec.codecType = kVideoCodecVP8;
} else if (decoder.payload_name == "H264") {
codec.codecType = kVideoCodecH264;
} else {
codec.codecType = kVideoCodecGeneric;
}
if (codec.codecType == kVideoCodecVP8) {
codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings();
} else if (codec.codecType == kVideoCodecH264) {
codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings();
}
// Dimensions and bitrates are ignored for receiving but must be non-zero
// for the VideoCodec to pass validation.
codec.width = 320;
codec.height = 180;
codec.startBitrate = codec.minBitrate = codec.maxBitrate =
Call::Config::kDefaultStartBitrateBps / 1000;
return codec;
}
} // namespace
VideoReceiveStream::VideoReceiveStream(webrtc::VideoEngine* video_engine,
const VideoReceiveStream::Config& config,
@ -118,15 +148,6 @@ VideoReceiveStream::VideoReceiveStream(webrtc::VideoEngine* video_engine,
}
}
assert(!config_.codecs.empty());
for (size_t i = 0; i < config_.codecs.size(); ++i) {
if (codec_->SetReceiveCodec(channel_, config_.codecs[i]) != 0) {
// TODO(pbos): Abort gracefully, this can be a runtime error.
// Factor out to an Init() method.
abort();
}
}
stats_proxy_.reset(new ReceiveStatisticsProxy(
config_.rtp.local_ssrc, clock_, rtp_rtcp_, codec_, channel_));
@ -142,8 +163,9 @@ VideoReceiveStream::VideoReceiveStream(webrtc::VideoEngine* video_engine,
abort();
external_codec_ = ViEExternalCodec::GetInterface(video_engine);
for (size_t i = 0; i < config_.external_decoders.size(); ++i) {
const ExternalVideoDecoder& decoder = config_.external_decoders[i];
assert(!config_.decoders.empty());
for (size_t i = 0; i < config_.decoders.size(); ++i) {
const Decoder& decoder = config_.decoders[i];
if (external_codec_->RegisterExternalReceiveCodec(
channel_,
decoder.payload_type,
@ -153,6 +175,14 @@ VideoReceiveStream::VideoReceiveStream(webrtc::VideoEngine* video_engine,
// TODO(pbos): Abort gracefully? Can this be a runtime error?
abort();
}
VideoCodec codec = CreateDecoderVideoCodec(decoder);
if (codec_->SetReceiveCodec(channel_, codec) != 0) {
// TODO(pbos): Abort gracefully, this can be a runtime error.
// Factor out to an Init() method.
abort();
}
}
render_ = ViERender::GetInterface(video_engine);
@ -183,9 +213,9 @@ VideoReceiveStream::~VideoReceiveStream() {
render_->RemoveRenderer(channel_);
for (size_t i = 0; i < config_.external_decoders.size(); ++i) {
for (size_t i = 0; i < config_.decoders.size(); ++i) {
external_codec_->DeRegisterExternalReceiveCodec(
channel_, config_.external_decoders[i].payload_type);
channel_, config_.decoders[i].payload_type);
}
network_->DeregisterSendTransport(channel_);
@ -225,10 +255,6 @@ VideoReceiveStream::Stats VideoReceiveStream::GetStats() const {
return stats_proxy_->GetStats();
}
void VideoReceiveStream::GetCurrentReceiveCodec(VideoCodec* receive_codec) {
// TODO(pbos): Implement
}
bool VideoReceiveStream::DeliverRtcp(const uint8_t* packet, size_t length) {
return network_->ReceivedRTCPPacket(
channel_, packet, static_cast<int>(length)) == 0;

View File

@ -53,8 +53,6 @@ class VideoReceiveStream : public webrtc::VideoReceiveStream,
virtual void Stop() OVERRIDE;
virtual Stats GetStats() const OVERRIDE;
virtual void GetCurrentReceiveCodec(VideoCodec* receive_codec) OVERRIDE;
// Overrides I420FrameCallback.
virtual void FrameCallback(I420VideoFrame* video_frame) OVERRIDE;

74
webrtc/video_decoder.h Normal file
View File

@ -0,0 +1,74 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_VIDEO_DECODER_H_
#define WEBRTC_VIDEO_DECODER_H_
#include <vector>
#include "webrtc/common_types.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_frame.h"
namespace webrtc {
class RTPFragmentationHeader;
// TODO(pbos): Expose these through a public (root) header or change these APIs.
struct CodecSpecificInfo;
struct VideoCodec;
// Callback interface through which decoded frames are returned to the user.
class DecodedImageCallback {
public:
virtual ~DecodedImageCallback() {}
// Called when an image has been decoded. Returns 0 if OK, < 0 otherwise.
virtual int32_t Decoded(I420VideoFrame& decodedImage) = 0;
virtual int32_t ReceivedDecodedReferenceFrame(const uint64_t pictureId) {
return -1;
}
virtual int32_t ReceivedDecodedFrame(const uint64_t pictureId) { return -1; }
};
// Decoder interface exposed through webrtc/video_decoder.h, similar to
// VideoEncoder in webrtc/video_encoder.h.
class VideoDecoder {
public:
enum DecoderType {
kVp8,
};
// Creates a built-in decoder of the given type; ownership passes to the
// caller.
static VideoDecoder* Create(DecoderType codec_type);
virtual ~VideoDecoder() {}
// Initializes the decoder with settings from |codecSettings| and the number
// of cores available to it. Returns WEBRTC_VIDEO_CODEC_OK if OK, < 0
// otherwise.
virtual int32_t InitDecode(const VideoCodec* codecSettings,
int32_t numberOfCores) = 0;
// Decodes an encoded image; the decoded image is returned through the
// registered DecodedImageCallback.
// - missingFrames: true if one or more frames were lost since the previous
// decode call.
// - fragmentation: where the encoded frame can be split into separate
// fragments; the meaning is codec specific, but often each fragment is
// decodable by itself.
// - renderTimeMs: system time to render in milliseconds; only used by
// decoders with internal rendering.
virtual int32_t Decode(const EncodedImage& inputImage,
bool missingFrames,
const RTPFragmentationHeader* fragmentation,
const CodecSpecificInfo* codecSpecificInfo = NULL,
int64_t renderTimeMs = -1) = 0;
// Registers the callback object which handles decoded images.
virtual int32_t RegisterDecodeCompleteCallback(
DecodedImageCallback* callback) = 0;
// Frees decoder memory.
virtual int32_t Release() = 0;
// Resets decoder state and prepares for a new call.
virtual int32_t Reset() = 0;
// Codec configuration data sent out-of-band (e.g. in SIP call setup);
// returns an error by default as most decoders do not support it.
virtual int32_t SetCodecConfigParameters(const uint8_t* /*buffer*/,
int32_t /*size*/) {
return -1;
}
// Creates a copy of the decoder and its internal state, or NULL if
// copying is not supported.
virtual VideoDecoder* Copy() { return NULL; }
};
} // namespace webrtc
#endif // WEBRTC_VIDEO_DECODER_H_

View File

@ -31,30 +31,38 @@ enum RtcpMode { kRtcpCompound, kRtcpReducedSize };
class VideoDecoder;
// TODO(mflodman) Move all these settings to VideoDecoder and move the
// declaration to common_types.h.
struct ExternalVideoDecoder {
ExternalVideoDecoder()
: decoder(NULL), payload_type(0), renderer(false), expected_delay_ms(0) {}
// The actual decoder.
VideoDecoder* decoder;
// Received RTP packets with this payload type will be sent to this decoder
// instance.
int payload_type;
// 'true' if the decoder handles rendering as well.
bool renderer;
// The expected delay for decoding and rendering, i.e. the frame will be
// delivered this many milliseconds, if possible, earlier than the ideal
// render time.
// Note: Ignored if 'renderer' is false.
int expected_delay_ms;
};
class VideoReceiveStream {
public:
// TODO(mflodman) Move all these settings to VideoDecoder and move the
// declaration to common_types.h.
struct Decoder {
Decoder()
: decoder(NULL),
payload_type(0),
renderer(false),
expected_delay_ms(0) {}
// The actual decoder instance.
VideoDecoder* decoder;
// Received RTP packets with this payload type will be sent to this decoder
// instance.
int payload_type;
// Name of the decoded payload (such as VP8). Maps back to the depacketizer
// used to unpack incoming packets.
std::string payload_name;
// 'true' if the decoder handles rendering as well.
bool renderer;
// The expected delay for decoding and rendering, i.e. the frame will be
// delivered this many milliseconds, if possible, earlier than the ideal
// render time.
// Note: Ignored if 'renderer' is false.
int expected_delay_ms;
};
struct Stats : public StreamStats {
Stats()
: network_frame_rate(0),
@ -81,8 +89,9 @@ class VideoReceiveStream {
pre_decode_callback(NULL),
pre_render_callback(NULL),
target_delay_ms(0) {}
// Codecs the receive stream can receive.
std::vector<VideoCodec> codecs;
// Decoders for every payload that we can receive.
std::vector<Decoder> decoders;
// Receive-stream specific RTP settings.
struct Rtp {
@ -162,10 +171,6 @@ class VideoReceiveStream {
// stream. 'NULL' disables the callback.
I420FrameCallback* pre_render_callback;
// External video decoders to be used if incoming payload type matches the
// registered type for an external decoder.
std::vector<ExternalVideoDecoder> external_decoders;
// Target delay in milliseconds. A positive value indicates this stream is
// used for streaming instead of a real-time call.
int target_delay_ms;
@ -173,10 +178,9 @@ class VideoReceiveStream {
virtual void Start() = 0;
virtual void Stop() = 0;
virtual Stats GetStats() const = 0;
// TODO(mflodman) Replace this with callback.
virtual void GetCurrentReceiveCodec(VideoCodec* receive_codec) = 0;
// TODO(pbos): Add info on currently-received codec to Stats.
virtual Stats GetStats() const = 0;
protected:
virtual ~VideoReceiveStream() {}