Reland "Avoid critsect for protection- and qm setting callbacks in

VideoSender."

The original CL is uploaded as patch set 1, the fix is in patch set 2, and I'll rebase in patch set 3.

BUG=4534
R=pbos@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/46769004

Cr-Commit-Position: refs/heads/master@{#9000}
mflodman, 2015-04-14 21:28:08 +02:00
parent 73ba7a690f
commit fcf54bdabb
18 changed files with 383 additions and 458 deletions
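The idea behind the reland: the protection- and QM-settings callbacks become construction-time dependencies of VideoSender (and RTPSenderVideo's FEC state moves behind its own lock), so the callbacks can be read without taking the send critical section. Below is a minimal sketch of that pattern using purely illustrative names, not the actual WebRTC classes:

// Illustrative sketch only (hypothetical names), showing the pattern this CL
// moves to: inject the callback once at construction instead of registering
// it later under a lock.
class Callback {
 public:
  virtual void OnSettings(int value) = 0;
  virtual ~Callback() {}
};

class IllustrativeSender {
 public:
  // The callback is fixed for the lifetime of the sender, so no lock is
  // needed when it is used from the encoding path.
  explicit IllustrativeSender(Callback* callback) : callback_(callback) {}

  void Process(int value) {
    if (callback_ != nullptr)
      callback_->OnSettings(value);  // Lock-free read of an immutable pointer.
  }

 private:
  Callback* const callback_;
};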


@@ -88,7 +88,6 @@ ProducerFec::ProducerFec(ForwardErrorCorrection* fec)
       media_packets_fec_(),
       fec_packets_(),
       num_frames_(0),
-      incomplete_frame_(false),
       num_first_partition_(0),
       minimum_media_packets_fec_(1),
       params_(),
@@ -125,9 +124,8 @@ RedPacket* ProducerFec::BuildRedPacket(const uint8_t* data_buffer,
                                        size_t payload_length,
                                        size_t rtp_header_length,
                                        int red_pl_type) {
-  RedPacket* red_packet = new RedPacket(payload_length +
-                                        kREDForFECHeaderLength +
-                                        rtp_header_length);
+  RedPacket* red_packet = new RedPacket(
+      payload_length + kREDForFECHeaderLength + rtp_header_length);
   int pl_type = data_buffer[1] & 0x7f;
   red_packet->CreateHeader(data_buffer, rtp_header_length,
                            red_pl_type, pl_type);
@@ -142,7 +140,7 @@ int ProducerFec::AddRtpPacketAndGenerateFec(const uint8_t* data_buffer,
   if (media_packets_fec_.empty()) {
     params_ = new_params_;
   }
-  incomplete_frame_ = true;
+  bool complete_frame = false;
   const bool marker_bit = (data_buffer[1] & kRtpMarkerBitMask) ? true : false;
   if (media_packets_fec_.size() < ForwardErrorCorrection::kMaxMediaPackets) {
     // Generic FEC can only protect up to kMaxMediaPackets packets.
@@ -153,13 +151,13 @@ int ProducerFec::AddRtpPacketAndGenerateFec(const uint8_t* data_buffer,
   }
   if (marker_bit) {
     ++num_frames_;
-    incomplete_frame_ = false;
+    complete_frame = true;
   }
   // Produce FEC over at most |params_.max_fec_frames| frames, or as soon as:
   // (1) the excess overhead (actual overhead - requested/target overhead) is
   // less than |kMaxExcessOverhead|, and
   // (2) at least |minimum_media_packets_fec_| media packets is reached.
-  if (!incomplete_frame_ &&
+  if (complete_frame &&
       (num_frames_ == params_.max_fec_frames ||
        (ExcessOverheadBelowMax() && MinimumMediaPacketsReached()))) {
     assert(num_first_partition_ <=
@@ -206,37 +204,43 @@ bool ProducerFec::MinimumMediaPacketsReached() {
 }
 
 bool ProducerFec::FecAvailable() const {
-  return (fec_packets_.size() > 0);
+  return !fec_packets_.empty();
 }
 
-RedPacket* ProducerFec::GetFecPacket(int red_pl_type,
-                                     int fec_pl_type,
-                                     uint16_t seq_num,
-                                     size_t rtp_header_length) {
-  if (fec_packets_.empty())
-    return NULL;
-  // Build FEC packet. The FEC packets in |fec_packets_| doesn't
-  // have RTP headers, so we're reusing the header from the last
-  // media packet.
-  ForwardErrorCorrection::Packet* packet_to_send = fec_packets_.front();
-  ForwardErrorCorrection::Packet* last_media_packet = media_packets_fec_.back();
-  RedPacket* return_packet = new RedPacket(packet_to_send->length +
-                                           kREDForFECHeaderLength +
-                                           rtp_header_length);
-  return_packet->CreateHeader(last_media_packet->data,
-                              rtp_header_length,
-                              red_pl_type,
-                              fec_pl_type);
-  return_packet->SetSeqNum(seq_num);
-  return_packet->ClearMarkerBit();
-  return_packet->AssignPayload(packet_to_send->data, packet_to_send->length);
-  fec_packets_.pop_front();
-  if (fec_packets_.empty()) {
-    // Done with all the FEC packets. Reset for next run.
-    DeletePackets();
-    num_frames_ = 0;
+size_t ProducerFec::NumAvailableFecPackets() const {
+  return fec_packets_.size();
+}
+
+std::vector<RedPacket*> ProducerFec::GetFecPackets(int red_pl_type,
+                                                   int fec_pl_type,
+                                                   uint16_t first_seq_num,
+                                                   size_t rtp_header_length) {
+  std::vector<RedPacket*> fec_packets;
+  fec_packets.reserve(fec_packets_.size());
+  uint16_t sequence_number = first_seq_num;
+  while (!fec_packets_.empty()) {
+    // Build FEC packet. The FEC packets in |fec_packets_| doesn't
+    // have RTP headers, so we're reusing the header from the last
+    // media packet.
+    ForwardErrorCorrection::Packet* packet_to_send = fec_packets_.front();
+    ForwardErrorCorrection::Packet* last_media_packet =
+        media_packets_fec_.back();
+
+    RedPacket* red_packet = new RedPacket(
+        packet_to_send->length + kREDForFECHeaderLength + rtp_header_length);
+    red_packet->CreateHeader(last_media_packet->data, rtp_header_length,
+                             red_pl_type, fec_pl_type);
+    red_packet->SetSeqNum(sequence_number++);
+    red_packet->ClearMarkerBit();
+    red_packet->AssignPayload(packet_to_send->data, packet_to_send->length);
+    fec_packets.push_back(red_packet);
+    fec_packets_.pop_front();
   }
-  return return_packet;
+  DeletePackets();
+  num_frames_ = 0;
+  return fec_packets;
 }
 
 int ProducerFec::Overhead() const {


@@ -12,6 +12,7 @@
 #define WEBRTC_MODULES_RTP_RTCP_SOURCE_PRODUCER_FEC_H_
 
 #include <list>
+#include <vector>
 
 #include "webrtc/modules/rtp_rtcp/source/forward_error_correction.h"
@@ -45,6 +46,7 @@ class ProducerFec {
   void SetFecParameters(const FecProtectionParams* params,
                         int max_fec_frames);
 
+  // The caller is expected to delete the memory when done.
   RedPacket* BuildRedPacket(const uint8_t* data_buffer,
                             size_t payload_length,
                             size_t rtp_header_length,
@@ -59,11 +61,14 @@ class ProducerFec {
   bool MinimumMediaPacketsReached();
 
   bool FecAvailable() const;
+  size_t NumAvailableFecPackets() const;
 
-  RedPacket* GetFecPacket(int red_pl_type,
-                          int fec_pl_type,
-                          uint16_t seq_num,
-                          size_t rtp_header_length);
+  // GetFecPackets allocates memory and creates FEC packets, but the caller is
+  // assumed to delete the memory when done with the packets.
+  std::vector<RedPacket*> GetFecPackets(int red_pl_type,
+                                        int fec_pl_type,
+                                        uint16_t first_seq_num,
+                                        size_t rtp_header_length);
 
  private:
   void DeletePackets();
@@ -72,7 +77,6 @@ class ProducerFec {
   std::list<ForwardErrorCorrection::Packet*> media_packets_fec_;
   std::list<ForwardErrorCorrection::Packet*> fec_packets_;
   int num_frames_;
-  bool incomplete_frame_;
   int num_first_partition_;
   int minimum_media_packets_fec_;
   FecProtectionParams params_;
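A hedged usage sketch of the new batch API declared above, condensed from the way rtp_sender_video.cc uses it later in this CL. It assumes an already configured ProducerFec and RTPSenderInterface; the function name SendPendingFec is illustrative only:

// Hedged sketch: assumes the headers above plus rtp_sender.h are included and
// that |producer| and |rtp_sender| are already configured.
void SendPendingFec(ProducerFec* producer,
                    RTPSenderInterface* rtp_sender,
                    int red_pl_type,
                    int fec_pl_type,
                    size_t rtp_header_length) {
  uint16_t num_fec_packets = producer->NumAvailableFecPackets();
  if (num_fec_packets == 0)
    return;
  // Reserve a contiguous block of sequence numbers, then build every pending
  // FEC packet in one call.
  uint16_t first_seq_num = rtp_sender->AllocateSequenceNumber(num_fec_packets);
  std::vector<RedPacket*> fec_packets = producer->GetFecPackets(
      red_pl_type, fec_pl_type, first_seq_num, rtp_header_length);
  for (RedPacket* packet : fec_packets) {
    // ... hand packet->data() / packet->length() to the transport here ...
    delete packet;  // GetFecPackets transfers ownership to the caller.
  }
}

GetFecPackets drains all pending FEC packets and hands ownership to the caller, which is why every returned RedPacket is deleted after sending.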


@@ -77,19 +77,19 @@ TEST_F(ProducerFecTest, OneFrameFec) {
   }
   EXPECT_TRUE(producer_->FecAvailable());
   uint16_t seq_num = generator_->NextSeqNum();
-  RedPacket* packet = producer_->GetFecPacket(kRedPayloadType,
-                                              kFecPayloadType,
-                                              seq_num,
-                                              kRtpHeaderSize);
+  std::vector<RedPacket*> packets = producer_->GetFecPackets(kRedPayloadType,
+                                                             kFecPayloadType,
+                                                             seq_num,
+                                                             kRtpHeaderSize);
   EXPECT_FALSE(producer_->FecAvailable());
-  ASSERT_TRUE(packet != NULL);
+  ASSERT_EQ(1u, packets.size());
   VerifyHeader(seq_num, last_timestamp,
-               kRedPayloadType, kFecPayloadType, packet, false);
+               kRedPayloadType, kFecPayloadType, packets.front(), false);
   while (!rtp_packets.empty()) {
     delete rtp_packets.front();
     rtp_packets.pop_front();
   }
-  delete packet;
+  delete packets.front();
 }
 
 TEST_F(ProducerFecTest, TwoFrameFec) {
@@ -120,39 +120,36 @@ TEST_F(ProducerFecTest, TwoFrameFec) {
   }
   EXPECT_TRUE(producer_->FecAvailable());
   uint16_t seq_num = generator_->NextSeqNum();
-  RedPacket* packet = producer_->GetFecPacket(kRedPayloadType,
-                                              kFecPayloadType,
-                                              seq_num,
-                                              kRtpHeaderSize);
+  std::vector<RedPacket*> packets = producer_->GetFecPackets(kRedPayloadType,
+                                                             kFecPayloadType,
+                                                             seq_num,
+                                                             kRtpHeaderSize);
   EXPECT_FALSE(producer_->FecAvailable());
-  EXPECT_TRUE(packet != NULL);
-  VerifyHeader(seq_num, last_timestamp,
-               kRedPayloadType, kFecPayloadType, packet, false);
+  ASSERT_EQ(1u, packets.size());
+  VerifyHeader(seq_num, last_timestamp, kRedPayloadType, kFecPayloadType,
+               packets.front(), false);
   while (!rtp_packets.empty()) {
     delete rtp_packets.front();
     rtp_packets.pop_front();
   }
-  delete packet;
+  delete packets.front();
 }
 
 TEST_F(ProducerFecTest, BuildRedPacket) {
   generator_->NewFrame(1);
   RtpPacket* packet = generator_->NextPacket(0, 10);
-  RedPacket* red_packet = producer_->BuildRedPacket(packet->data,
-                                                    packet->length -
-                                                    kRtpHeaderSize,
-                                                    kRtpHeaderSize,
-                                                    kRedPayloadType);
+  rtc::scoped_ptr<RedPacket> red_packet(producer_->BuildRedPacket(
+      packet->data, packet->length - kRtpHeaderSize, kRtpHeaderSize,
+      kRedPayloadType));
   EXPECT_EQ(packet->length + 1, red_packet->length());
   VerifyHeader(packet->header.header.sequenceNumber,
                packet->header.header.timestamp,
                kRedPayloadType,
               packet->header.header.payloadType,
-               red_packet,
+               red_packet.get(),
                true);  // Marker bit set.
   for (int i = 0; i < 10; ++i)
     EXPECT_EQ(i, red_packet->data()[kRtpHeaderSize + 1 + i]);
-  delete red_packet;
   delete packet;
 }


@@ -407,7 +407,7 @@ int32_t ModuleRtpRtcpImpl::SendOutgoingData(
   }
   return rtp_sender_.SendOutgoingData(
       frame_type, payload_type, time_stamp, capture_time_ms, payload_data,
-      payload_size, fragmentation, NULL, rtp_video_hdr);
+      payload_size, fragmentation, rtp_video_hdr);
 }
 
 bool ModuleRtpRtcpImpl::TimeToSendPacket(uint32_t ssrc,


@@ -323,14 +323,14 @@ int32_t RTPSender::RegisterPayload(
     }
     return -1;
   }
-  int32_t ret_val = -1;
+  int32_t ret_val = 0;
   RtpUtility::Payload* payload = NULL;
   if (audio_configured_) {
+    // TODO(mflodman): Change to CreateAudioPayload and make static.
     ret_val = audio_->RegisterAudioPayload(payload_name, payload_number,
                                            frequency, channels, rate, payload);
   } else {
-    ret_val = video_->RegisterVideoPayload(payload_name, payload_number, rate,
-                                           payload);
+    payload = video_->CreateVideoPayload(payload_name, payload_number, rate);
   }
   if (payload) {
     payload_type_map_[payload_number] = payload;
@@ -489,7 +489,6 @@ int32_t RTPSender::SendOutgoingData(FrameType frame_type,
                                     const uint8_t* payload_data,
                                     size_t payload_size,
                                     const RTPFragmentationHeader* fragmentation,
-                                    VideoCodecInformation* codec_info,
                                     const RTPVideoHeader* rtp_hdr) {
   uint32_t ssrc;
   {
@@ -526,7 +525,7 @@ int32_t RTPSender::SendOutgoingData(FrameType frame_type,
     ret_val =
         video_->SendVideo(video_type, frame_type, payload_type,
                           capture_timestamp, capture_time_ms, payload_data,
-                          payload_size, fragmentation, codec_info, rtp_hdr);
+                          payload_size, fragmentation, rtp_hdr);
   }
 
   CriticalSectionScoped cs(statistics_crit_.get());
@@ -745,7 +744,8 @@ int RTPSender::SelectiveRetransmissions() const {
 int RTPSender::SetSelectiveRetransmissions(uint8_t settings) {
   if (!video_)
     return -1;
-  return video_->SetSelectiveRetransmissions(settings);
+  video_->SetSelectiveRetransmissions(settings);
+  return 0;
 }
 
 void RTPSender::OnReceivedNACK(const std::list<uint16_t>& nack_sequence_numbers,
@@ -1086,9 +1086,11 @@ size_t RTPSender::RTPHeaderLength() const {
   return rtp_header_length;
 }
 
-uint16_t RTPSender::IncrementSequenceNumber() {
+uint16_t RTPSender::AllocateSequenceNumber(uint16_t packets_to_send) {
   CriticalSectionScoped cs(send_critsect_.get());
-  return sequence_number_++;
+  uint16_t first_allocated_sequence_number = sequence_number_;
+  sequence_number_ += packets_to_send;
+  return first_allocated_sequence_number;
 }
 
 void RTPSender::ResetDataCounters() {
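The allocation above relies on plain uint16_t wrap-around, which matches RTP sequence-number arithmetic. A small standalone illustration (not WebRTC code):

// Minimal sketch of the allocation arithmetic: unsigned 16-bit wrap-around
// yields contiguous sequence numbers across the 65535 -> 0 boundary without
// any special casing.
#include <cstdint>
#include <iostream>

int main() {
  uint16_t sequence_number = 65534;  // Assumed current sender state.
  const uint16_t packets_to_send = 3;
  const uint16_t first = sequence_number;
  sequence_number += packets_to_send;  // Wraps modulo 2^16.
  std::cout << "first=" << first << " next_free=" << sequence_number << "\n";
  // Prints: first=65534 next_free=1, i.e. the block {65534, 65535, 0} was
  // handed out, matching AllocateSequenceNumber() above.
  return 0;
}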
@@ -1729,14 +1731,6 @@ int32_t RTPSender::RED(int8_t *payload_type) const {
   return audio_->RED(*payload_type);
 }
 
-// Video
-VideoCodecInformation *RTPSender::CodecInformationVideo() {
-  if (audio_configured_) {
-    return NULL;
-  }
-  return video_->CodecInformationVideo();
-}
-
 RtpVideoCodecTypes RTPSender::VideoCodecType() const {
   assert(!audio_configured_ && "Sender is an audio stream!");
   return video_->VideoCodecType();
@@ -1762,8 +1756,8 @@ int32_t RTPSender::SetGenericFECStatus(bool enable,
   if (audio_configured_) {
     return -1;
   }
-  return video_->SetGenericFECStatus(enable, payload_type_red,
-                                     payload_type_fec);
+  video_->SetGenericFECStatus(enable, payload_type_red, payload_type_fec);
+  return 0;
 }
 
 int32_t RTPSender::GenericFECStatus(bool* enable,
@@ -1772,8 +1766,8 @@ int32_t RTPSender::GenericFECStatus(bool* enable,
   if (audio_configured_) {
     return -1;
   }
-  return video_->GenericFECStatus(
-      *enable, *payload_type_red, *payload_type_fec);
+  video_->GenericFECStatus(*enable, *payload_type_red, *payload_type_fec);
+  return 0;
 }
 
 int32_t RTPSender::SetFecParameters(
@@ -1782,7 +1776,8 @@ int32_t RTPSender::SetFecParameters(
   if (audio_configured_) {
     return -1;
   }
-  return video_->SetFecParameters(delta_params, key_params);
+  video_->SetFecParameters(delta_params, key_params);
+  return 0;
 }
 
 void RTPSender::BuildRtxPacket(uint8_t* buffer, size_t* length,


@@ -24,8 +24,8 @@
 #include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h"
 #include "webrtc/modules/rtp_rtcp/source/rtp_packet_history.h"
 #include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h"
+#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
 #include "webrtc/modules/rtp_rtcp/source/ssrc_database.h"
-#include "webrtc/modules/rtp_rtcp/source/video_codec_information.h"
 
 #define MAX_INIT_RTP_SEQ_NUMBER 32767  // 2^15 -1.
@@ -61,7 +61,10 @@ class RTPSenderInterface {
                               bool inc_sequence_number = true) = 0;
   virtual size_t RTPHeaderLength() const = 0;
-  virtual uint16_t IncrementSequenceNumber() = 0;
+  // Returns the next sequence number to use for a packet and allocates
+  // 'packets_to_send' number of sequence numbers. It's important all allocated
+  // sequence numbers are used in sequence to avoid perceived packet loss.
+  virtual uint16_t AllocateSequenceNumber(uint16_t packets_to_send) = 0;
   virtual uint16_t SequenceNumber() const = 0;
   virtual size_t MaxPayloadLength() const = 0;
   virtual size_t MaxDataPayloadLength() const = 0;
@@ -155,7 +158,6 @@ class RTPSender : public RTPSenderInterface {
                           const uint8_t* payload_data,
                           size_t payload_size,
                           const RTPFragmentationHeader* fragmentation,
-                          VideoCodecInformation* codec_info = NULL,
                           const RTPVideoHeader* rtp_hdr = NULL);
 
   // RTP header extension
@@ -227,7 +229,7 @@ class RTPSender : public RTPSenderInterface {
                       const bool inc_sequence_number = true) override;
   size_t RTPHeaderLength() const override;
-  uint16_t IncrementSequenceNumber() override;
+  uint16_t AllocateSequenceNumber(uint16_t packets_to_send) override;
   size_t MaxPayloadLength() const override;
   uint16_t PacketOverHead() const override;
@@ -261,9 +263,6 @@ class RTPSender : public RTPSenderInterface {
   // Get payload type for Redundant Audio Data RFC 2198.
   int32_t RED(int8_t *payload_type) const;
 
-  // Video.
-  VideoCodecInformation *CodecInformationVideo();
-
   RtpVideoCodecTypes VideoCodecType() const;
   uint32_t MaxConfiguredBitrateVideo() const;


@@ -1353,7 +1353,7 @@ TEST_F(RtpSenderVideoTest, SendVideoWithCVO) {
   rtp_sender_video_->SendVideo(kRtpVideoGeneric, kVideoFrameKey, kPayload,
                                kTimestamp, 0, packet_, sizeof(packet_), NULL,
-                               NULL, &hdr);
+                               &hdr);
 
   RtpHeaderExtensionMap map;
   map.Register(kRtpExtensionVideoRotation, kVideoRotationExtensionId);


@@ -10,10 +10,12 @@
 
 #include "webrtc/modules/rtp_rtcp/source/rtp_sender_video.h"
 
-#include <assert.h>
 #include <stdlib.h>
 #include <string.h>
 
+#include <vector>
+
+#include "webrtc/base/checks.h"
 #include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
 #include "webrtc/modules/rtp_rtcp/source/byte_io.h"
 #include "webrtc/modules/rtp_rtcp/source/producer_fec.h"
@@ -33,8 +35,8 @@ struct RtpPacket {
 
 RTPSenderVideo::RTPSenderVideo(Clock* clock, RTPSenderInterface* rtpSender)
     : _rtpSender(*rtpSender),
+      crit_(CriticalSectionWrapper::CreateCriticalSection()),
       _videoType(kRtpVideoGeneric),
-      _videoCodecInformation(NULL),
       _maxBitrate(0),
       _retransmissionSettings(kRetransmitBaseLayer),
@@ -43,7 +45,6 @@ RTPSenderVideo::RTPSenderVideo(Clock* clock, RTPSenderInterface* rtpSender)
       _fecEnabled(false),
       _payloadTypeRED(-1),
       _payloadTypeFEC(-1),
-      _numberFirstPartition(0),
       delta_fec_params_(),
       key_fec_params_(),
       producer_fec_(&_fec),
@@ -57,9 +58,6 @@ RTPSenderVideo::RTPSenderVideo(Clock* clock, RTPSenderInterface* rtpSender)
 }
 
 RTPSenderVideo::~RTPSenderVideo() {
-  if (_videoCodecInformation) {
-    delete _videoCodecInformation;
-  }
 }
 
 void RTPSenderVideo::SetVideoCodecType(RtpVideoCodecTypes videoType) {
@@ -70,11 +68,11 @@ RtpVideoCodecTypes RTPSenderVideo::VideoCodecType() const {
   return _videoType;
 }
 
-int32_t RTPSenderVideo::RegisterVideoPayload(
+// Static.
+RtpUtility::Payload* RTPSenderVideo::CreateVideoPayload(
     const char payloadName[RTP_PAYLOAD_NAME_SIZE],
     const int8_t payloadType,
-    const uint32_t maxBitRate,
-    RtpUtility::Payload*& payload) {
+    const uint32_t maxBitRate) {
   RtpVideoCodecTypes videoType = kRtpVideoGeneric;
   if (RtpUtility::StringCompare(payloadName, "VP8", 3)) {
     videoType = kRtpVideoVp8;
@@ -85,103 +83,94 @@ int32_t RTPSenderVideo::RegisterVideoPayload(
   } else {
     videoType = kRtpVideoGeneric;
   }
-  payload = new RtpUtility::Payload;
+  RtpUtility::Payload* payload = new RtpUtility::Payload();
   payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
   strncpy(payload->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
   payload->typeSpecific.Video.videoCodecType = videoType;
   payload->typeSpecific.Video.maxRate = maxBitRate;
   payload->audio = false;
-  return 0;
+  return payload;
 }
 
-int32_t RTPSenderVideo::SendVideoPacket(uint8_t* data_buffer,
-                                        const size_t payload_length,
-                                        const size_t rtp_header_length,
-                                        const uint32_t capture_timestamp,
-                                        int64_t capture_time_ms,
-                                        StorageType storage,
-                                        bool protect) {
-  if (_fecEnabled) {
-    int ret = 0;
-    size_t fec_overhead_sent = 0;
-    size_t video_sent = 0;
-
-    RedPacket* red_packet = producer_fec_.BuildRedPacket(
-        data_buffer, payload_length, rtp_header_length, _payloadTypeRED);
-
-    TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
-                         "Video::PacketRed", "timestamp", capture_timestamp,
-                         "seqnum", _rtpSender.SequenceNumber());
-    // Sending the media packet with RED header.
-    int packet_success =
-        _rtpSender.SendToNetwork(red_packet->data(),
-                                 red_packet->length() - rtp_header_length,
-                                 rtp_header_length,
-                                 capture_time_ms,
-                                 storage,
-                                 PacedSender::kNormalPriority);
-
-    ret |= packet_success;
-
-    if (packet_success == 0) {
-      video_sent += red_packet->length();
-    }
-    delete red_packet;
-    red_packet = NULL;
-
-    if (protect) {
-      ret = producer_fec_.AddRtpPacketAndGenerateFec(
-          data_buffer, payload_length, rtp_header_length);
-      if (ret != 0)
-        return ret;
-    }
-
-    while (producer_fec_.FecAvailable()) {
-      red_packet =
-          producer_fec_.GetFecPacket(_payloadTypeRED,
-                                     _payloadTypeFEC,
-                                     _rtpSender.IncrementSequenceNumber(),
-                                     rtp_header_length);
-      StorageType storage = kDontRetransmit;
-      if (_retransmissionSettings & kRetransmitFECPackets) {
-        storage = kAllowRetransmission;
-      }
-      TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
-                           "Video::PacketFec", "timestamp", capture_timestamp,
-                           "seqnum", _rtpSender.SequenceNumber());
-      // Sending FEC packet with RED header.
-      int packet_success =
-          _rtpSender.SendToNetwork(red_packet->data(),
-                                   red_packet->length() - rtp_header_length,
-                                   rtp_header_length,
-                                   capture_time_ms,
-                                   storage,
-                                   PacedSender::kNormalPriority);
-
-      ret |= packet_success;
-
-      if (packet_success == 0) {
-        fec_overhead_sent += red_packet->length();
-      }
-      delete red_packet;
-      red_packet = NULL;
-    }
-    _videoBitrate.Update(video_sent);
-    _fecOverheadRate.Update(fec_overhead_sent);
-    return ret;
-  }
-  TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
-                       "Video::PacketNormal", "timestamp", capture_timestamp,
-                       "seqnum", _rtpSender.SequenceNumber());
-  int ret = _rtpSender.SendToNetwork(data_buffer,
-                                     payload_length,
-                                     rtp_header_length,
-                                     capture_time_ms,
-                                     storage,
-                                     PacedSender::kNormalPriority);
-  if (ret == 0) {
-    _videoBitrate.Update(payload_length + rtp_header_length);
-  }
-  return ret;
+void RTPSenderVideo::SendVideoPacket(uint8_t* data_buffer,
+                                     const size_t payload_length,
+                                     const size_t rtp_header_length,
+                                     uint16_t seq_num,
+                                     const uint32_t capture_timestamp,
+                                     int64_t capture_time_ms,
+                                     StorageType storage) {
+  if (_rtpSender.SendToNetwork(data_buffer, payload_length, rtp_header_length,
+                               capture_time_ms, storage,
+                               PacedSender::kNormalPriority) == 0) {
+    _videoBitrate.Update(payload_length + rtp_header_length);
+    TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
+                         "Video::PacketNormal", "timestamp", capture_timestamp,
+                         "seqnum", seq_num);
+  } else {
+    LOG(LS_WARNING) << "Failed to send video packet " << seq_num;
+  }
+}
+
+void RTPSenderVideo::SendVideoPacketAsRed(uint8_t* data_buffer,
+                                          const size_t payload_length,
+                                          const size_t rtp_header_length,
+                                          uint16_t media_seq_num,
+                                          const uint32_t capture_timestamp,
+                                          int64_t capture_time_ms,
+                                          StorageType media_packet_storage,
+                                          bool protect) {
+  rtc::scoped_ptr<RedPacket> red_packet;
+  std::vector<RedPacket*> fec_packets;
+  StorageType fec_storage = kDontRetransmit;
+  uint16_t next_fec_sequence_number = 0;
+  {
+    // Only protect while creating RED and FEC packets, not when sending.
+    CriticalSectionScoped cs(crit_.get());
+    red_packet.reset(producer_fec_.BuildRedPacket(
+        data_buffer, payload_length, rtp_header_length, _payloadTypeRED));
+    if (protect) {
+      producer_fec_.AddRtpPacketAndGenerateFec(data_buffer, payload_length,
+                                               rtp_header_length);
+    }
+    uint16_t num_fec_packets = producer_fec_.NumAvailableFecPackets();
+    if (num_fec_packets > 0) {
+      next_fec_sequence_number =
+          _rtpSender.AllocateSequenceNumber(num_fec_packets);
+      fec_packets = producer_fec_.GetFecPackets(
+          _payloadTypeRED, _payloadTypeFEC, next_fec_sequence_number,
+          rtp_header_length);
+      DCHECK_EQ(num_fec_packets, fec_packets.size());
+      if (_retransmissionSettings & kRetransmitFECPackets)
+        fec_storage = kAllowRetransmission;
+    }
+  }
+  if (_rtpSender.SendToNetwork(
+          red_packet->data(), red_packet->length() - rtp_header_length,
+          rtp_header_length, capture_time_ms, media_packet_storage,
+          PacedSender::kNormalPriority) == 0) {
+    _videoBitrate.Update(red_packet->length());
+    TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
+                         "Video::PacketRed", "timestamp", capture_timestamp,
+                         "seqnum", media_seq_num);
+  } else {
+    LOG(LS_WARNING) << "Failed to send RED packet " << media_seq_num;
+  }
+  for (RedPacket* fec_packet : fec_packets) {
+    if (_rtpSender.SendToNetwork(
+            fec_packet->data(), fec_packet->length() - rtp_header_length,
+            rtp_header_length, capture_time_ms, fec_storage,
+            PacedSender::kNormalPriority) == 0) {
+      _fecOverheadRate.Update(fec_packet->length());
+      TRACE_EVENT_INSTANT2(TRACE_DISABLED_BY_DEFAULT("webrtc_rtp"),
+                           "Video::PacketFec", "timestamp", capture_timestamp,
+                           "seqnum", next_fec_sequence_number);
+    } else {
+      LOG(LS_WARNING) << "Failed to send FEC packet "
+                      << next_fec_sequence_number;
+    }
+    delete fec_packet;
+    ++next_fec_sequence_number;
+  }
 }
 
 int32_t RTPSenderVideo::SendRTPIntraRequest() {
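The scoped block above is the core of the CL: RED and FEC packets are built while crit_ is held, and all SendToNetwork calls happen after the lock has been released. A generic sketch of that build-under-lock, send-outside-lock pattern, using std::mutex and hypothetical names instead of the WebRTC wrappers:

// Illustrative sketch only; standard library primitives stand in for
// CriticalSectionWrapper, and the packet building is reduced to a copy.
#include <cstdint>
#include <mutex>
#include <vector>

class RedFecQueue {
 public:
  void ProduceAndSend(const std::vector<uint8_t>& media_packet) {
    std::vector<std::vector<uint8_t>> to_send;
    {
      std::lock_guard<std::mutex> lock(mutex_);  // Held only while building.
      to_send.push_back(media_packet);           // Stand-in for RED/FEC work.
    }
    for (const auto& packet : to_send)
      Send(packet);                              // Network I/O outside the lock.
  }

 private:
  void Send(const std::vector<uint8_t>& packet) { (void)packet; /* transport */ }
  std::mutex mutex_;
};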
@@ -204,9 +193,10 @@ int32_t RTPSenderVideo::SendRTPIntraRequest() {
       data, 0, length, -1, kDontStore, PacedSender::kNormalPriority);
 }
 
-int32_t RTPSenderVideo::SetGenericFECStatus(const bool enable,
-                                            const uint8_t payloadTypeRED,
-                                            const uint8_t payloadTypeFEC) {
+void RTPSenderVideo::SetGenericFECStatus(const bool enable,
+                                         const uint8_t payloadTypeRED,
+                                         const uint8_t payloadTypeFEC) {
+  CriticalSectionScoped cs(crit_.get());
   _fecEnabled = enable;
   _payloadTypeRED = payloadTypeRED;
   _payloadTypeFEC = payloadTypeFEC;
@@ -215,19 +205,19 @@ int32_t RTPSenderVideo::SetGenericFECStatus(const bool enable,
   delta_fec_params_.max_fec_frames = key_fec_params_.max_fec_frames = 1;
   delta_fec_params_.fec_mask_type = key_fec_params_.fec_mask_type =
       kFecMaskRandom;
-  return 0;
 }
 
-int32_t RTPSenderVideo::GenericFECStatus(bool& enable,
-                                         uint8_t& payloadTypeRED,
-                                         uint8_t& payloadTypeFEC) const {
+void RTPSenderVideo::GenericFECStatus(bool& enable,
+                                      uint8_t& payloadTypeRED,
+                                      uint8_t& payloadTypeFEC) const {
+  CriticalSectionScoped cs(crit_.get());
   enable = _fecEnabled;
   payloadTypeRED = _payloadTypeRED;
   payloadTypeFEC = _payloadTypeFEC;
-  return 0;
 }
 
 size_t RTPSenderVideo::FECPacketOverhead() const {
+  CriticalSectionScoped cs(crit_.get());
   if (_fecEnabled) {
     // Overhead is FEC headers plus RED for FEC header plus anything in RTP
     // header beyond the 12 bytes base header (CSRC list, extensions...)
@@ -240,14 +230,13 @@ size_t RTPSenderVideo::FECPacketOverhead() const {
   return 0;
 }
 
-int32_t RTPSenderVideo::SetFecParameters(
-    const FecProtectionParams* delta_params,
-    const FecProtectionParams* key_params) {
-  assert(delta_params);
-  assert(key_params);
+void RTPSenderVideo::SetFecParameters(const FecProtectionParams* delta_params,
+                                      const FecProtectionParams* key_params) {
+  CriticalSectionScoped cs(crit_.get());
+  DCHECK(delta_params);
+  DCHECK(key_params);
   delta_fec_params_ = *delta_params;
   key_fec_params_ = *key_params;
-  return 0;
 }
 
 int32_t RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
@@ -258,49 +247,26 @@ int32_t RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
                                   const uint8_t* payloadData,
                                   const size_t payloadSize,
                                   const RTPFragmentationHeader* fragmentation,
-                                  VideoCodecInformation* codecInfo,
                                   const RTPVideoHeader* rtpHdr) {
   if (payloadSize == 0) {
     return -1;
   }
 
-  if (frameType == kVideoFrameKey) {
-    producer_fec_.SetFecParameters(&key_fec_params_, _numberFirstPartition);
-  } else {
-    producer_fec_.SetFecParameters(&delta_fec_params_, _numberFirstPartition);
-  }
-
-  // Default setting for number of first partition packets:
-  // Will be extracted in SendVP8 for VP8 codec; other codecs use 0
-  _numberFirstPartition = 0;
-
-  return Send(videoType, frameType, payloadType, captureTimeStamp,
-              capture_time_ms, payloadData, payloadSize, fragmentation, rtpHdr)
-             ? 0
-             : -1;
-}
-
-VideoCodecInformation* RTPSenderVideo::CodecInformationVideo() {
-  return _videoCodecInformation;
-}
-
-void RTPSenderVideo::SetMaxConfiguredBitrateVideo(const uint32_t maxBitrate) {
-  _maxBitrate = maxBitrate;
-}
-
-uint32_t RTPSenderVideo::MaxConfiguredBitrateVideo() const {
-  return _maxBitrate;
-}
-
-bool RTPSenderVideo::Send(const RtpVideoCodecTypes videoType,
-                          const FrameType frameType,
-                          const int8_t payloadType,
-                          const uint32_t captureTimeStamp,
-                          int64_t capture_time_ms,
-                          const uint8_t* payloadData,
-                          const size_t payloadSize,
-                          const RTPFragmentationHeader* fragmentation,
-                          const RTPVideoHeader* rtpHdr) {
+  rtc::scoped_ptr<RtpPacketizer> packetizer(
+      RtpPacketizer::Create(videoType, _rtpSender.MaxDataPayloadLength(),
+                            &(rtpHdr->codecHeader), frameType));
+
+  StorageType storage = kDontStore;
+  bool fec_enabled = false;
+  {
+    CriticalSectionScoped cs(crit_.get());
+    FecProtectionParams* fec_params =
+        frameType == kVideoFrameKey ? &key_fec_params_ : &delta_fec_params_;
+    producer_fec_.SetFecParameters(fec_params, 0);
+    storage = packetizer->GetStorageType(_retransmissionSettings);
+    fec_enabled = _fecEnabled;
+  }
+
   // Register CVO rtp header extension at the first time when we receive a frame
   // with pending rotation.
   RTPSenderInterface::CVOMode cvo_mode = RTPSenderInterface::kCVONone;
@@ -311,10 +277,6 @@ bool RTPSenderVideo::Send(const RtpVideoCodecTypes videoType,
   uint16_t rtp_header_length = _rtpSender.RTPHeaderLength();
   size_t payload_bytes_to_send = payloadSize;
   const uint8_t* data = payloadData;
-  size_t max_payload_length = _rtpSender.MaxDataPayloadLength();
-
-  rtc::scoped_ptr<RtpPacketizer> packetizer(RtpPacketizer::Create(
-      videoType, max_payload_length, &(rtpHdr->codecHeader), frameType));
 
   // TODO(changbin): we currently don't support to configure the codec to
   // output multiple partitions for VP8. Should remove below check after the
@@ -328,16 +290,14 @@ bool RTPSenderVideo::Send(const RtpVideoCodecTypes videoType,
   while (!last) {
     uint8_t dataBuffer[IP_PACKET_SIZE] = {0};
     size_t payload_bytes_in_packet = 0;
-    if (!packetizer->NextPacket(
-            &dataBuffer[rtp_header_length], &payload_bytes_in_packet, &last)) {
-      return false;
+    if (!packetizer->NextPacket(&dataBuffer[rtp_header_length],
+                                &payload_bytes_in_packet, &last)) {
+      return -1;
     }
 
     // Write RTP header.
     // Set marker bit true if this is the last packet in frame.
     _rtpSender.BuildRTPheader(
         dataBuffer, payloadType, last, captureTimeStamp, capture_time_ms);
     // According to
     // http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/
     // ts_126114v120700p.pdf Section 7.4.5:
@@ -350,7 +310,7 @@ bool RTPSenderVideo::Send(const RtpVideoCodecTypes videoType,
     // value sent.
     // Here we are adding it to every packet of every frame at this point.
     if (!rtpHdr) {
-      assert(!_rtpSender.IsRtpHeaderExtensionRegistered(
+      DCHECK(!_rtpSender.IsRtpHeaderExtensionRegistered(
           kRtpExtensionVideoRotation));
     } else if (cvo_mode == RTPSenderInterface::kCVOActivated) {
       // Checking whether CVO header extension is registered will require taking
@@ -365,22 +325,29 @@ bool RTPSenderVideo::Send(const RtpVideoCodecTypes videoType,
       _rtpSender.UpdateVideoRotation(dataBuffer, packetSize, rtp_header,
                                      rtpHdr->rotation);
     }
-    if (SendVideoPacket(dataBuffer,
-                        payload_bytes_in_packet,
-                        rtp_header_length,
-                        captureTimeStamp,
-                        capture_time_ms,
-                        packetizer->GetStorageType(_retransmissionSettings),
-                        packetizer->GetProtectionType() == kProtectedPacket)) {
-      LOG(LS_WARNING) << packetizer->ToString()
-                      << " failed to send packet number "
-                      << _rtpSender.SequenceNumber();
+    if (fec_enabled) {
+      SendVideoPacketAsRed(dataBuffer, payload_bytes_in_packet,
+                           rtp_header_length, _rtpSender.SequenceNumber(),
+                           captureTimeStamp, capture_time_ms, storage,
+                           packetizer->GetProtectionType() == kProtectedPacket);
+    } else {
+      SendVideoPacket(dataBuffer, payload_bytes_in_packet, rtp_header_length,
+                      _rtpSender.SequenceNumber(), captureTimeStamp,
+                      capture_time_ms, storage);
     }
   }
 
   TRACE_EVENT_ASYNC_END1(
       "webrtc", "Video", capture_time_ms, "timestamp", _rtpSender.Timestamp());
-  return true;
+  return 0;
+}
+
+void RTPSenderVideo::SetMaxConfiguredBitrateVideo(const uint32_t maxBitrate) {
+  _maxBitrate = maxBitrate;
+}
+
+uint32_t RTPSenderVideo::MaxConfiguredBitrateVideo() const {
+  return _maxBitrate;
 }
 
 void RTPSenderVideo::ProcessBitrate() {
@@ -397,12 +364,13 @@ uint32_t RTPSenderVideo::FecOverheadRate() const {
 }
 
 int RTPSenderVideo::SelectiveRetransmissions() const {
+  CriticalSectionScoped cs(crit_.get());
   return _retransmissionSettings;
 }
 
-int RTPSenderVideo::SetSelectiveRetransmissions(uint8_t settings) {
+void RTPSenderVideo::SetSelectiveRetransmissions(uint8_t settings) {
+  CriticalSectionScoped cs(crit_.get());
   _retransmissionSettings = settings;
-  return 0;
 }
 
 }  // namespace webrtc


@@ -13,6 +13,8 @@
 
 #include <list>
 
+#include "webrtc/base/scoped_ptr.h"
+#include "webrtc/base/thread_annotations.h"
 #include "webrtc/common_types.h"
 #include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
 #include "webrtc/modules/rtp_rtcp/source/bitrate.h"
@@ -37,10 +39,10 @@ class RTPSenderVideo {
 
   size_t FECPacketOverhead() const;
 
-  int32_t RegisterVideoPayload(const char payloadName[RTP_PAYLOAD_NAME_SIZE],
-                               const int8_t payloadType,
-                               const uint32_t maxBitRate,
-                               RtpUtility::Payload*& payload);
+  static RtpUtility::Payload* CreateVideoPayload(
+      const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+      const int8_t payloadType,
+      const uint32_t maxBitRate);
 
   int32_t SendVideo(const RtpVideoCodecTypes videoType,
                     const FrameType frameType,
@@ -50,30 +52,27 @@ class RTPSenderVideo {
                     const uint8_t* payloadData,
                     const size_t payloadSize,
                     const RTPFragmentationHeader* fragmentation,
-                    VideoCodecInformation* codecInfo,
                    const RTPVideoHeader* rtpHdr);
 
   int32_t SendRTPIntraRequest();
 
   void SetVideoCodecType(RtpVideoCodecTypes type);
 
-  VideoCodecInformation* CodecInformationVideo();
-
   void SetMaxConfiguredBitrateVideo(const uint32_t maxBitrate);
 
   uint32_t MaxConfiguredBitrateVideo() const;
 
   // FEC
-  int32_t SetGenericFECStatus(const bool enable,
-                              const uint8_t payloadTypeRED,
-                              const uint8_t payloadTypeFEC);
+  void SetGenericFECStatus(const bool enable,
+                           const uint8_t payloadTypeRED,
+                           const uint8_t payloadTypeFEC);
 
-  int32_t GenericFECStatus(bool& enable,
-                           uint8_t& payloadTypeRED,
-                           uint8_t& payloadTypeFEC) const;
+  void GenericFECStatus(bool& enable,
                        uint8_t& payloadTypeRED,
                        uint8_t& payloadTypeFEC) const;
 
-  int32_t SetFecParameters(const FecProtectionParams* delta_params,
-                           const FecProtectionParams* key_params);
+  void SetFecParameters(const FecProtectionParams* delta_params,
+                        const FecProtectionParams* key_params);
 
   void ProcessBitrate();
@@ -81,45 +80,43 @@ class RTPSenderVideo {
   uint32_t FecOverheadRate() const;
 
   int SelectiveRetransmissions() const;
-  int SetSelectiveRetransmissions(uint8_t settings);
+  void SetSelectiveRetransmissions(uint8_t settings);
 
- protected:
-  virtual int32_t SendVideoPacket(uint8_t* dataBuffer,
-                                  const size_t payloadLength,
-                                  const size_t rtpHeaderLength,
-                                  const uint32_t capture_timestamp,
-                                  int64_t capture_time_ms,
-                                  StorageType storage,
-                                  bool protect);
-
- private:
-  bool Send(const RtpVideoCodecTypes videoType,
-            const FrameType frameType,
-            const int8_t payloadType,
-            const uint32_t captureTimeStamp,
-            int64_t capture_time_ms,
-            const uint8_t* payloadData,
-            const size_t payloadSize,
-            const RTPFragmentationHeader* fragmentation,
-            const RTPVideoHeader* rtpHdr);
-
  private:
+  void SendVideoPacket(uint8_t* dataBuffer,
+                       const size_t payloadLength,
+                       const size_t rtpHeaderLength,
+                       uint16_t seq_num,
+                       const uint32_t capture_timestamp,
+                       int64_t capture_time_ms,
+                       StorageType storage);
+
+  void SendVideoPacketAsRed(uint8_t* dataBuffer,
+                            const size_t payloadLength,
+                            const size_t rtpHeaderLength,
+                            uint16_t video_seq_num,
+                            const uint32_t capture_timestamp,
+                            int64_t capture_time_ms,
+                            StorageType media_packet_storage,
+                            bool protect);
+
   RTPSenderInterface& _rtpSender;
+  // Should never be held when calling out of this class.
+  const rtc::scoped_ptr<CriticalSectionWrapper> crit_;
 
   RtpVideoCodecTypes _videoType;
-  VideoCodecInformation* _videoCodecInformation;
   uint32_t _maxBitrate;
-  int32_t _retransmissionSettings;
+  int32_t _retransmissionSettings GUARDED_BY(crit_);
 
   // FEC
   ForwardErrorCorrection _fec;
-  bool _fecEnabled;
-  int8_t _payloadTypeRED;
-  int8_t _payloadTypeFEC;
-  unsigned int _numberFirstPartition;
-  FecProtectionParams delta_fec_params_;
-  FecProtectionParams key_fec_params_;
-  ProducerFec producer_fec_;
+  bool _fecEnabled GUARDED_BY(crit_);
+  int8_t _payloadTypeRED GUARDED_BY(crit_);
+  int8_t _payloadTypeFEC GUARDED_BY(crit_);
+  FecProtectionParams delta_fec_params_ GUARDED_BY(crit_);
+  FecProtectionParams key_fec_params_ GUARDED_BY(crit_);
+  ProducerFec producer_fec_ GUARDED_BY(crit_);
 
   // Bitrate used for FEC payload, RED headers, RTP headers for FEC packets
   // and any padding overhead.
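A hedged sketch of the GUARDED_BY pattern introduced above. It assumes webrtc/base/thread_annotations.h and the CriticalSectionWrapper/CriticalSectionScoped helpers used elsewhere in this CL; with Clang's thread-safety analysis, an unguarded access to fec_enabled_ can then be flagged at compile time.

// Illustrative class (hypothetical name), mirroring the annotation style of
// RTPSenderVideo: every access to the guarded member goes through crit_.
class AnnotatedExample {
 public:
  AnnotatedExample()
      : crit_(CriticalSectionWrapper::CreateCriticalSection()),
        fec_enabled_(false) {}

  void SetFecEnabled(bool enabled) {
    CriticalSectionScoped cs(crit_.get());
    fec_enabled_ = enabled;
  }

  bool fec_enabled() const {
    CriticalSectionScoped cs(crit_.get());
    return fec_enabled_;
  }

 private:
  const rtc::scoped_ptr<CriticalSectionWrapper> crit_;
  bool fec_enabled_ GUARDED_BY(crit_);
};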


@@ -82,7 +82,9 @@ public:
     };
 
     static VideoCodingModule* Create(
-        VideoEncoderRateObserver* encoder_rate_observer);
+        Clock* clock,
+        VideoEncoderRateObserver* encoder_rate_observer,
+        VCMQMSettingsCallback* qm_settings_callback);
 
     static VideoCodingModule* Create(Clock* clock, EventFactory* event_factory);
@@ -267,16 +269,6 @@ public:
     virtual int32_t RegisterSendStatisticsCallback(
         VCMSendStatisticsCallback* sendStats) = 0;
 
-    // Register a video quality settings callback which will be called when
-    // frame rate/dimensions need to be updated for video quality optimization
-    //
-    // Input:
-    //      - videoQMSettings  : The callback object to register.
-    //
-    // Return value      : VCM_OK, on success.
-    //                     < 0,    on error
-    virtual int32_t RegisterVideoQMCallback(VCMQMSettingsCallback* videoQMSettings) = 0;
-
     // Register a video protection callback which will be called to deliver
     // the requested FEC rate and NACK status (on/off).
     //
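A usage sketch for the new factory signature, mirroring the call sites updated later in this CL (for example vie_channel.cc); both the rate observer and the QM-settings callback may be null when they are not needed. It assumes the video_coding and clock headers are included:

webrtc::VideoCodingModule* vcm = webrtc::VideoCodingModule::Create(
    webrtc::Clock::GetRealTimeClock(),
    nullptr,   // VideoEncoderRateObserver* encoder_rate_observer
    nullptr);  // VCMQMSettingsCallback* qm_settings_callback
// ... use the module ...
webrtc::VideoCodingModule::Destroy(vcm);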


@@ -245,7 +245,7 @@ uint32_t MediaOptimization::SetTargetRates(
 
   // Update protection settings, when applicable.
   float sent_video_rate_kbps = 0.0f;
-  if (selected_method) {
+  if (loss_prot_logic_->SelectedType() != kNone) {
     // Update protection method with content metrics.
     selected_method->UpdateContentMetrics(content_->ShortTermAvgData());


@@ -74,11 +74,13 @@ class VideoCodingModuleImpl : public VideoCodingModule {
   VideoCodingModuleImpl(Clock* clock,
                         EventFactory* event_factory,
                         bool owns_event_factory,
-                        VideoEncoderRateObserver* encoder_rate_observer)
+                        VideoEncoderRateObserver* encoder_rate_observer,
+                        VCMQMSettingsCallback* qm_settings_callback)
       : VideoCodingModule(),
         sender_(new vcm::VideoSender(clock,
                                      &post_encode_callback_,
-                                     encoder_rate_observer)),
+                                     encoder_rate_observer,
+                                     qm_settings_callback)),
         receiver_(new vcm::VideoReceiver(clock, event_factory)),
         own_event_factory_(owns_event_factory ? event_factory : NULL) {}
@@ -161,11 +163,6 @@ class VideoCodingModuleImpl : public VideoCodingModule {
     return sender_->RegisterSendStatisticsCallback(sendStats);
   }
 
-  int32_t RegisterVideoQMCallback(
-      VCMQMSettingsCallback* videoQMSettings) override {
-    return sender_->RegisterVideoQMCallback(videoQMSettings);
-  }
-
   int32_t RegisterProtectionCallback(
       VCMProtectionCallback* protection) override {
     return sender_->RegisterProtectionCallback(protection);
@@ -359,10 +356,11 @@ int32_t VideoCodingModule::Codec(VideoCodecType codecType, VideoCodec* codec) {
 }
 
 VideoCodingModule* VideoCodingModule::Create(
-    VideoEncoderRateObserver* encoder_rate_observer) {
-  return new VideoCodingModuleImpl(Clock::GetRealTimeClock(),
-                                   new EventFactoryImpl, true,
-                                   encoder_rate_observer);
+    Clock* clock,
+    VideoEncoderRateObserver* encoder_rate_observer,
+    VCMQMSettingsCallback* qm_settings_callback) {
+  return new VideoCodingModuleImpl(clock, new EventFactoryImpl, true,
+                                   encoder_rate_observer, qm_settings_callback);
 }
 
 VideoCodingModule* VideoCodingModule::Create(
@@ -370,7 +368,8 @@ VideoCodingModule* VideoCodingModule::Create(
     EventFactory* event_factory) {
   assert(clock);
   assert(event_factory);
-  return new VideoCodingModuleImpl(clock, event_factory, false, nullptr);
+  return new VideoCodingModuleImpl(clock, event_factory, false, nullptr,
+                                   nullptr);
 }
 
 void VideoCodingModule::Destroy(VideoCodingModule* module) {


@@ -58,7 +58,8 @@ class VideoSender {
 
   VideoSender(Clock* clock,
               EncodedImageCallback* post_encode_callback,
-              VideoEncoderRateObserver* encoder_rate_observer);
+              VideoEncoderRateObserver* encoder_rate_observer,
+              VCMQMSettingsCallback* qm_settings_callback);
 
   ~VideoSender();
@@ -99,7 +100,6 @@ class VideoSender {
 
   int32_t RegisterTransportCallback(VCMPacketizationCallback* transport);
   int32_t RegisterSendStatisticsCallback(VCMSendStatisticsCallback* sendStats);
-  int32_t RegisterVideoQMCallback(VCMQMSettingsCallback* videoQMSettings);
   int32_t RegisterProtectionCallback(VCMProtectionCallback* protection);
   void SetVideoProtection(bool enable, VCMVideoProtection videoProtection);
@@ -139,7 +139,7 @@ class VideoSender {
   VideoCodec current_codec_;
   rtc::ThreadChecker main_thread_;
 
-  VCMQMSettingsCallback* qm_settings_callback_;
+  VCMQMSettingsCallback* const qm_settings_callback_;
   VCMProtectionCallback* protection_callback_;
 };


@@ -26,7 +26,7 @@ namespace vcm {
 class DebugRecorder {
  public:
   DebugRecorder()
-      : cs_(CriticalSectionWrapper::CreateCriticalSection()), file_(NULL) {}
+      : cs_(CriticalSectionWrapper::CreateCriticalSection()), file_(nullptr) {}
 
   ~DebugRecorder() { Stop(); }
@@ -44,7 +44,7 @@ class DebugRecorder {
     CriticalSectionScoped cs(cs_.get());
     if (file_) {
       fclose(file_);
-      file_ = NULL;
+      file_ = nullptr;
     }
   }
@@ -61,7 +61,8 @@ class DebugRecorder {
 
 VideoSender::VideoSender(Clock* clock,
                          EncodedImageCallback* post_encode_callback,
-                         VideoEncoderRateObserver* encoder_rate_observer)
+                         VideoEncoderRateObserver* encoder_rate_observer,
+                         VCMQMSettingsCallback* qm_settings_callback)
     : clock_(clock),
       recorder_(new DebugRecorder()),
      process_crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
@@ -70,16 +71,17 @@ VideoSender::VideoSender(Clock* clock,
       _encodedFrameCallback(post_encode_callback),
       _nextFrameTypes(1, kVideoFrameDelta),
       _mediaOpt(clock_),
-      _sendStatsCallback(NULL),
+      _sendStatsCallback(nullptr),
       _codecDataBase(encoder_rate_observer),
       frame_dropper_enabled_(true),
       _sendStatsTimer(1000, clock_),
       current_codec_(),
-      qm_settings_callback_(NULL),
-      protection_callback_(NULL) {
+      qm_settings_callback_(qm_settings_callback),
+      protection_callback_(nullptr) {
   // Allow VideoSender to be created on one thread but used on another, post
   // construction. This is currently how this class is being used by at least
   // one external project (diffractor).
+  _mediaOpt.EnableQM(qm_settings_callback_ != nullptr);
   main_thread_.DetachFromThread();
 }
@@ -93,7 +95,7 @@ int32_t VideoSender::Process() {
   if (_sendStatsTimer.TimeUntilProcess() == 0) {
     _sendStatsTimer.Processed();
     CriticalSectionScoped cs(process_crit_sect_.get());
-    if (_sendStatsCallback != NULL) {
+    if (_sendStatsCallback != nullptr) {
       uint32_t bitRate = _mediaOpt.SentBitRate();
       uint32_t frameRate = _mediaOpt.SentFrameRate();
       _sendStatsCallback->SendStatistics(bitRate, frameRate);
@@ -108,8 +110,8 @@ int32_t VideoSender::InitializeSender() {
   DCHECK(main_thread_.CalledOnValidThread());
   CriticalSectionScoped cs(_sendCritSect);
   _codecDataBase.ResetSender();
-  _encoder = NULL;
-  _encodedFrameCallback.SetTransportCallback(NULL);
+  _encoder = nullptr;
+  _encodedFrameCallback.SetTransportCallback(nullptr);
   _mediaOpt.Reset();  // Resetting frame dropper
   return VCM_OK;
 }
@@ -124,7 +126,7 @@ int32_t VideoSender::RegisterSendCodec(const VideoCodec* sendCodec,
                                        uint32_t maxPayloadSize) {
   DCHECK(main_thread_.CalledOnValidThread());
   CriticalSectionScoped cs(_sendCritSect);
-  if (sendCodec == NULL) {
+  if (sendCodec == nullptr) {
     return VCM_PARAMETER_ERROR;
   }
@@ -177,7 +179,7 @@ const VideoCodec& VideoSender::GetSendCodec() const {
 
 int32_t VideoSender::SendCodecBlocking(VideoCodec* currentSendCodec) const {
   CriticalSectionScoped cs(_sendCritSect);
-  if (currentSendCodec == NULL) {
+  if (currentSendCodec == nullptr) {
     return VCM_PARAMETER_ERROR;
   }
   return _codecDataBase.SendCodec(currentSendCodec) ? 0 : -1;
@@ -197,13 +199,13 @@ int32_t VideoSender::RegisterExternalEncoder(VideoEncoder* externalEncoder,
   CriticalSectionScoped cs(_sendCritSect);
 
-  if (externalEncoder == NULL) {
+  if (externalEncoder == nullptr) {
     bool wasSendCodec = false;
     const bool ret =
         _codecDataBase.DeregisterExternalEncoder(payloadType, &wasSendCodec);
     if (wasSendCodec) {
       // Make sure the VCM doesn't use the de-registered codec
-      _encoder = NULL;
+      _encoder = nullptr;
     }
     return ret ? 0 : -1;
   }
@@ -216,7 +218,7 @@ int32_t VideoSender::RegisterExternalEncoder(VideoEncoder* externalEncoder,
 int32_t VideoSender::CodecConfigParameters(uint8_t* buffer,
                                            int32_t size) const {
   CriticalSectionScoped cs(_sendCritSect);
-  if (_encoder != NULL) {
+  if (_encoder != nullptr) {
     return _encoder->CodecConfigParameters(buffer, size);
   }
   return VCM_UNINITIALIZED;
@@ -266,7 +268,6 @@ int32_t VideoSender::SetChannelParameters(uint32_t target_bitrate,
   // here? This effectively means that the network thread will be blocked for
   // as much as frame encoding period.
-  CriticalSectionScoped sendCs(_sendCritSect);
   uint32_t target_rate = _mediaOpt.SetTargetRates(target_bitrate,
                                                   lossRate,
                                                   rtt,
@@ -274,10 +275,11 @@ int32_t VideoSender::SetChannelParameters(uint32_t target_bitrate,
                                                   qm_settings_callback_);
   uint32_t input_frame_rate = _mediaOpt.InputFrameRate();
 
+  CriticalSectionScoped sendCs(_sendCritSect);
   int32_t ret = VCM_UNINITIALIZED;
   static_assert(VCM_UNINITIALIZED < 0, "VCM_UNINITIALIZED must be negative.");
-  if (_encoder != NULL) {
+  if (_encoder != nullptr) {
     ret = _encoder->SetChannelParameters(lossRate, rtt);
     if (ret >= 0) {
       ret = _encoder->SetRates(target_rate, input_frame_rate);
@ -304,27 +306,13 @@ int32_t VideoSender::RegisterSendStatisticsCallback(
return VCM_OK; return VCM_OK;
} }
// Register a video quality settings callback which will be called when frame
// rate/dimensions need to be updated for video quality optimization
int32_t VideoSender::RegisterVideoQMCallback(
VCMQMSettingsCallback* qm_settings_callback) {
CriticalSectionScoped cs(_sendCritSect);
DCHECK(qm_settings_callback_ == qm_settings_callback ||
!qm_settings_callback_ ||
!qm_settings_callback) << "Overwriting the previous callback?";
qm_settings_callback_ = qm_settings_callback;
_mediaOpt.EnableQM(qm_settings_callback_ != NULL);
return VCM_OK;
}
// Register a video protection callback which will be called to deliver the // Register a video protection callback which will be called to deliver the
// requested FEC rate and NACK status (on/off). // requested FEC rate and NACK status (on/off).
// Note: this callback is assumed to only be registered once and before it is
// used in this class.
int32_t VideoSender::RegisterProtectionCallback( int32_t VideoSender::RegisterProtectionCallback(
VCMProtectionCallback* protection_callback) { VCMProtectionCallback* protection_callback) {
CriticalSectionScoped cs(_sendCritSect); DCHECK(protection_callback == nullptr || protection_callback_ == nullptr);
DCHECK(protection_callback_ == protection_callback ||
!protection_callback_ ||
!protection_callback) << "Overwriting the previous callback?";
protection_callback_ = protection_callback; protection_callback_ = protection_callback;
return VCM_OK; return VCM_OK;
} }
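With `RegisterVideoQMCallback()` removed and the critsect dropped from `RegisterProtectionCallback()`, the remaining registration relies on the contract spelled out in the new comment: the callback is set once, before the class is used, and only read afterwards. A small self-contained sketch of that set-once-then-read-only pattern (illustrative types standing in for VCMProtectionCallback; DCHECK shown as a plain assert so the snippet stands alone):

#include <cassert>

struct ProtectionCallback {
  virtual ~ProtectionCallback() = default;
  virtual int ProtectionRequest() = 0;  // Simplified stand-in signature.
};

class Sender {
 public:
  // Called once during single-threaded setup, before encoding starts,
  // so the assignment needs no lock.
  void RegisterProtectionCallback(ProtectionCallback* callback) {
    assert(callback == nullptr || protection_callback_ == nullptr);
    protection_callback_ = callback;
  }

  void OnRatesUpdated() {
    // Read-only use on the running path; the pointer never changes once
    // the sender is live.
    if (protection_callback_ != nullptr)
      protection_callback_->ProtectionRequest();
  }

 private:
  ProtectionCallback* protection_callback_ = nullptr;
};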
@ -359,7 +347,7 @@ int32_t VideoSender::AddVideoFrame(const I420VideoFrame& videoFrame,
const VideoContentMetrics* contentMetrics, const VideoContentMetrics* contentMetrics,
const CodecSpecificInfo* codecSpecificInfo) { const CodecSpecificInfo* codecSpecificInfo) {
CriticalSectionScoped cs(_sendCritSect); CriticalSectionScoped cs(_sendCritSect);
if (_encoder == NULL) { if (_encoder == nullptr) {
return VCM_UNINITIALIZED; return VCM_UNINITIALIZED;
} }
// TODO(holmer): Add support for dropping frames per stream. Currently we // TODO(holmer): Add support for dropping frames per stream. Currently we
@ -398,7 +386,7 @@ int32_t VideoSender::IntraFrameRequest(int stream_index) {
return -1; return -1;
} }
_nextFrameTypes[stream_index] = kVideoFrameKey; _nextFrameTypes[stream_index] = kVideoFrameKey;
if (_encoder != NULL && _encoder->InternalSource()) { if (_encoder != nullptr && _encoder->InternalSource()) {
// Try to request the frame if we have an external encoder with // Try to request the frame if we have an external encoder with
// internal source since AddVideoFrame never will be called. // internal source since AddVideoFrame never will be called.
if (_encoder->RequestFrame(_nextFrameTypes) == WEBRTC_VIDEO_CODEC_OK) { if (_encoder->RequestFrame(_nextFrameTypes) == WEBRTC_VIDEO_CODEC_OK) {

View File

@ -177,7 +177,8 @@ class TestVideoSender : public ::testing::Test {
TestVideoSender() : clock_(1000), packetization_callback_(&clock_) {} TestVideoSender() : clock_(1000), packetization_callback_(&clock_) {}
void SetUp() override { void SetUp() override {
sender_.reset(new VideoSender(&clock_, &post_encode_callback_, nullptr)); sender_.reset(
new VideoSender(&clock_, &post_encode_callback_, nullptr, nullptr));
EXPECT_EQ(0, sender_->InitializeSender()); EXPECT_EQ(0, sender_->InitializeSender());
EXPECT_EQ(0, sender_->RegisterTransportCallback(&packetization_callback_)); EXPECT_EQ(0, sender_->RegisterTransportCallback(&packetization_callback_));
} }

View File

@ -104,7 +104,9 @@ ViEChannel::ViEChannel(int32_t channel_id,
rtp_rtcp_cs_(CriticalSectionWrapper::CreateCriticalSection()), rtp_rtcp_cs_(CriticalSectionWrapper::CreateCriticalSection()),
send_payload_router_(new PayloadRouter()), send_payload_router_(new PayloadRouter()),
vcm_protection_callback_(new ViEChannelProtectionCallback(this)), vcm_protection_callback_(new ViEChannelProtectionCallback(this)),
vcm_(VideoCodingModule::Create(nullptr)), vcm_(VideoCodingModule::Create(Clock::GetRealTimeClock(),
nullptr,
nullptr)),
vie_receiver_(channel_id, vcm_, remote_bitrate_estimator, this), vie_receiver_(channel_id, vcm_, remote_bitrate_estimator, this),
vie_sender_(channel_id), vie_sender_(channel_id),
vie_sync_(vcm_, this), vie_sync_(vcm_, this),

View File

@ -111,10 +111,12 @@ ViEEncoder::ViEEncoder(int32_t channel_id,
: channel_id_(channel_id), : channel_id_(channel_id),
number_of_cores_(number_of_cores), number_of_cores_(number_of_cores),
disable_default_encoder_(disable_default_encoder), disable_default_encoder_(disable_default_encoder),
vcm_(*webrtc::VideoCodingModule::Create(this)), vpm_(VideoProcessingModule::Create(ViEModuleId(-1, channel_id))),
vpm_(*webrtc::VideoProcessingModule::Create(ViEModuleId(-1, channel_id))), qm_callback_(new QMVideoSettingsCallback(vpm_.get())),
vcm_(VideoCodingModule::Create(Clock::GetRealTimeClock(),
this,
qm_callback_.get())),
send_payload_router_(NULL), send_payload_router_(NULL),
vcm_protection_callback_(NULL),
callback_cs_(CriticalSectionWrapper::CreateCriticalSection()), callback_cs_(CriticalSectionWrapper::CreateCriticalSection()),
data_cs_(CriticalSectionWrapper::CreateCriticalSection()), data_cs_(CriticalSectionWrapper::CreateCriticalSection()),
pacer_(pacer), pacer_(pacer),
@ -137,7 +139,6 @@ ViEEncoder::ViEEncoder(int32_t channel_id,
picture_id_sli_(0), picture_id_sli_(0),
has_received_rpsi_(false), has_received_rpsi_(false),
picture_id_rpsi_(0), picture_id_rpsi_(0),
qm_callback_(NULL),
video_suspended_(false), video_suspended_(false),
pre_encode_callback_(NULL), pre_encode_callback_(NULL),
start_ms_(Clock::GetRealTimeClock()->TimeInMilliseconds()), start_ms_(Clock::GetRealTimeClock()->TimeInMilliseconds()),
@ -146,18 +147,13 @@ ViEEncoder::ViEEncoder(int32_t channel_id,
} }
bool ViEEncoder::Init() { bool ViEEncoder::Init() {
if (vcm_.InitializeSender() != 0) { if (vcm_->InitializeSender() != 0) {
return false; return false;
} }
vpm_.EnableTemporalDecimation(true); vpm_->EnableTemporalDecimation(true);
// Enable/disable content analysis: off by default for now. // Enable/disable content analysis: off by default for now.
vpm_.EnableContentAnalysis(false); vpm_->EnableContentAnalysis(false);
if (qm_callback_) {
delete qm_callback_;
}
qm_callback_ = new QMVideoSettingsCallback(&vpm_);
if (!disable_default_encoder_) { if (!disable_default_encoder_) {
#ifdef VIDEOCODEC_VP8 #ifdef VIDEOCODEC_VP8
@ -166,25 +162,23 @@ bool ViEEncoder::Init() {
VideoCodecType codec_type = webrtc::kVideoCodecI420; VideoCodecType codec_type = webrtc::kVideoCodecI420;
#endif #endif
VideoCodec video_codec; VideoCodec video_codec;
if (vcm_.Codec(codec_type, &video_codec) != VCM_OK) { if (vcm_->Codec(codec_type, &video_codec) != VCM_OK) {
return false; return false;
} }
{ {
CriticalSectionScoped cs(data_cs_.get()); CriticalSectionScoped cs(data_cs_.get());
send_padding_ = video_codec.numberOfSimulcastStreams > 1; send_padding_ = video_codec.numberOfSimulcastStreams > 1;
} }
if (vcm_.RegisterSendCodec(&video_codec, number_of_cores_, if (vcm_->RegisterSendCodec(&video_codec, number_of_cores_,
PayloadRouter::DefaultMaxPayloadLength()) != 0) { PayloadRouter::DefaultMaxPayloadLength()) !=
0) {
return false; return false;
} }
} }
if (vcm_.RegisterTransportCallback(this) != 0) { if (vcm_->RegisterTransportCallback(this) != 0) {
return false; return false;
} }
if (vcm_.RegisterSendStatisticsCallback(this) != 0) { if (vcm_->RegisterSendStatisticsCallback(this) != 0) {
return false;
}
if (vcm_.RegisterVideoQMCallback(qm_callback_) != 0) {
return false; return false;
} }
return true; return true;
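Taken together, the constructor and Init() hunks above move the QM callback's lifetime to construction time: `vpm_` and `qm_callback_` become scoped_ptr members built in the initializer list, `qm_callback_.get()` is handed straight to `VideoCodingModule::Create()`, and the old Init()-time `new QMVideoSettingsCallback(&vpm_)` plus `RegisterVideoQMCallback()` call disappear. A hedged sketch of that construction-time injection, with stand-in types (std::unique_ptr instead of rtc::scoped_ptr) rather than the real WebRTC signatures:

#include <memory>

// Stand-ins for VideoProcessingModule, QMVideoSettingsCallback and the VCM.
struct Vpm {};

struct QmCallback {
  explicit QmCallback(Vpm* vpm) : vpm_(vpm) {}
  Vpm* const vpm_;
};

struct Vcm {
  // The callback arrives fully constructed; no later Register call needed.
  explicit Vcm(QmCallback* qm_callback) : qm_callback_(qm_callback) {}
  QmCallback* const qm_callback_;
};

class Encoder {
 public:
  // Members are constructed in declaration order, so vpm_ and qm_callback_
  // already exist when vcm_'s constructor runs.
  Encoder()
      : vpm_(new Vpm()),
        qm_callback_(new QmCallback(vpm_.get())),
        vcm_(new Vcm(qm_callback_.get())) {}

 private:
  const std::unique_ptr<Vpm> vpm_;
  const std::unique_ptr<QmCallback> qm_callback_;
  const std::unique_ptr<Vcm> vcm_;
};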
@ -194,28 +188,21 @@ void ViEEncoder::StartThreadsAndSetSharedMembers(
scoped_refptr<PayloadRouter> send_payload_router, scoped_refptr<PayloadRouter> send_payload_router,
VCMProtectionCallback* vcm_protection_callback) { VCMProtectionCallback* vcm_protection_callback) {
DCHECK(send_payload_router_ == NULL); DCHECK(send_payload_router_ == NULL);
DCHECK(vcm_protection_callback_ == NULL);
send_payload_router_ = send_payload_router; send_payload_router_ = send_payload_router;
vcm_protection_callback_ = vcm_protection_callback; vcm_->RegisterProtectionCallback(vcm_protection_callback);
module_process_thread_.RegisterModule(vcm_.get());
module_process_thread_.RegisterModule(&vcm_);
} }
void ViEEncoder::StopThreadsAndRemoveSharedMembers() { void ViEEncoder::StopThreadsAndRemoveSharedMembers() {
vcm_.RegisterProtectionCallback(NULL); if (bitrate_allocator_)
vcm_protection_callback_ = NULL; bitrate_allocator_->RemoveBitrateObserver(bitrate_observer_.get());
module_process_thread_.DeRegisterModule(&vcm_); module_process_thread_.DeRegisterModule(vcm_.get());
module_process_thread_.DeRegisterModule(&vpm_); module_process_thread_.DeRegisterModule(vpm_.get());
} }
ViEEncoder::~ViEEncoder() { ViEEncoder::~ViEEncoder() {
UpdateHistograms(); UpdateHistograms();
if (bitrate_allocator_)
bitrate_allocator_->RemoveBitrateObserver(bitrate_observer_.get());
VideoCodingModule::Destroy(&vcm_);
VideoProcessingModule::Destroy(&vpm_);
delete qm_callback_;
} }
void ViEEncoder::UpdateHistograms() { void ViEEncoder::UpdateHistograms() {
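The start/stop/destructor hunks shift teardown out of ~ViEEncoder(): the bitrate observer is now removed in `StopThreadsAndRemoveSharedMembers()`, `vcm_->RegisterProtectionCallback()` is called once in `StartThreadsAndSetSharedMembers()`, and the explicit `VideoCodingModule::Destroy()`, `VideoProcessingModule::Destroy()` and `delete qm_callback_` calls go away because the scoped_ptr members release their objects automatically, in reverse declaration order. A compact sketch of that RAII lifecycle (std::unique_ptr standing in for rtc::scoped_ptr; the module and process-thread types are illustrative):

#include <memory>

struct Module {};

struct ProcessThread {
  void RegisterModule(Module* /*module*/) {}    // Start periodic processing.
  void DeRegisterModule(Module* /*module*/) {}  // Stop periodic processing.
};

class Encoder {
 public:
  void StartThreadsAndSetSharedMembers() {
    process_thread_.RegisterModule(vcm_.get());
  }

  void StopThreadsAndRemoveSharedMembers() {
    // Undo cross-object wiring here, while everything is still alive.
    process_thread_.DeRegisterModule(vcm_.get());
    process_thread_.DeRegisterModule(vpm_.get());
  }

  // No explicit Destroy()/delete: the unique_ptr members free the modules
  // in reverse declaration order when Encoder itself is destroyed.
  ~Encoder() = default;

 private:
  ProcessThread process_thread_;
  const std::unique_ptr<Module> vpm_{new Module()};
  const std::unique_ptr<Module> vcm_{new Module()};
};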
@ -225,7 +212,7 @@ void ViEEncoder::UpdateHistograms() {
return; return;
} }
webrtc::VCMFrameCount frames; webrtc::VCMFrameCount frames;
if (vcm_.SentFrameCount(frames) != VCM_OK) { if (vcm_->SentFrameCount(frames) != VCM_OK) {
return; return;
} }
uint32_t total_frames = frames.numKeyFrames + frames.numDeltaFrames; uint32_t total_frames = frames.numKeyFrames + frames.numDeltaFrames;
@ -263,11 +250,11 @@ void ViEEncoder::Restart() {
} }
uint8_t ViEEncoder::NumberOfCodecs() { uint8_t ViEEncoder::NumberOfCodecs() {
return vcm_.NumberOfCodecs(); return vcm_->NumberOfCodecs();
} }
int32_t ViEEncoder::GetCodec(uint8_t list_index, VideoCodec* video_codec) { int32_t ViEEncoder::GetCodec(uint8_t list_index, VideoCodec* video_codec) {
if (vcm_.Codec(list_index, video_codec) != 0) { if (vcm_->Codec(list_index, video_codec) != 0) {
return -1; return -1;
} }
return 0; return 0;
@ -279,7 +266,7 @@ int32_t ViEEncoder::RegisterExternalEncoder(webrtc::VideoEncoder* encoder,
if (encoder == NULL) if (encoder == NULL)
return -1; return -1;
if (vcm_.RegisterExternalEncoder(encoder, pl_type, internal_source) != if (vcm_->RegisterExternalEncoder(encoder, pl_type, internal_source) !=
VCM_OK) { VCM_OK) {
return -1; return -1;
} }
@ -289,15 +276,15 @@ int32_t ViEEncoder::RegisterExternalEncoder(webrtc::VideoEncoder* encoder,
int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) { int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) {
DCHECK(send_payload_router_ != NULL); DCHECK(send_payload_router_ != NULL);
webrtc::VideoCodec current_send_codec; webrtc::VideoCodec current_send_codec;
if (vcm_.SendCodec(&current_send_codec) == VCM_OK) { if (vcm_->SendCodec(&current_send_codec) == VCM_OK) {
uint32_t current_bitrate_bps = 0; uint32_t current_bitrate_bps = 0;
if (vcm_.Bitrate(&current_bitrate_bps) != 0) { if (vcm_->Bitrate(&current_bitrate_bps) != 0) {
LOG(LS_WARNING) << "Failed to get the current encoder target bitrate."; LOG(LS_WARNING) << "Failed to get the current encoder target bitrate.";
} }
current_send_codec.startBitrate = (current_bitrate_bps + 500) / 1000; current_send_codec.startBitrate = (current_bitrate_bps + 500) / 1000;
} }
if (vcm_.RegisterExternalEncoder(NULL, pl_type) != VCM_OK) { if (vcm_->RegisterExternalEncoder(NULL, pl_type) != VCM_OK) {
return -1; return -1;
} }
@ -317,8 +304,8 @@ int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) {
// for realz. https://code.google.com/p/chromium/issues/detail?id=348222 // for realz. https://code.google.com/p/chromium/issues/detail?id=348222
current_send_codec.extra_options = NULL; current_send_codec.extra_options = NULL;
size_t max_data_payload_length = send_payload_router_->MaxPayloadLength(); size_t max_data_payload_length = send_payload_router_->MaxPayloadLength();
if (vcm_.RegisterSendCodec(&current_send_codec, number_of_cores_, if (vcm_->RegisterSendCodec(&current_send_codec, number_of_cores_,
max_data_payload_length) != VCM_OK) { max_data_payload_length) != VCM_OK) {
LOG(LS_INFO) << "De-registered the currently used external encoder (" LOG(LS_INFO) << "De-registered the currently used external encoder ("
<< static_cast<int>(pl_type) << ") and therefore tried to " << static_cast<int>(pl_type) << ") and therefore tried to "
<< "register the corresponding internal encoder, but none " << "register the corresponding internal encoder, but none "
@ -331,8 +318,8 @@ int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) {
int32_t ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) { int32_t ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
DCHECK(send_payload_router_ != NULL); DCHECK(send_payload_router_ != NULL);
// Setting target width and height for VPM. // Setting target width and height for VPM.
if (vpm_.SetTargetResolution(video_codec.width, video_codec.height, if (vpm_->SetTargetResolution(video_codec.width, video_codec.height,
video_codec.maxFramerate) != VPM_OK) { video_codec.maxFramerate) != VPM_OK) {
return -1; return -1;
} }
@ -372,15 +359,15 @@ int32_t ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
modified_video_codec.startBitrate = allocated_bitrate_bps / 1000; modified_video_codec.startBitrate = allocated_bitrate_bps / 1000;
size_t max_data_payload_length = send_payload_router_->MaxPayloadLength(); size_t max_data_payload_length = send_payload_router_->MaxPayloadLength();
if (vcm_.RegisterSendCodec(&modified_video_codec, number_of_cores_, if (vcm_->RegisterSendCodec(&modified_video_codec, number_of_cores_,
max_data_payload_length) != VCM_OK) { max_data_payload_length) != VCM_OK) {
return -1; return -1;
} }
return 0; return 0;
} }
int32_t ViEEncoder::GetEncoder(VideoCodec* video_codec) { int32_t ViEEncoder::GetEncoder(VideoCodec* video_codec) {
*video_codec = vcm_.GetSendCodec(); *video_codec = vcm_->GetSendCodec();
return 0; return 0;
} }
@ -388,7 +375,7 @@ int32_t ViEEncoder::GetCodecConfigParameters(
unsigned char config_parameters[kConfigParameterSize], unsigned char config_parameters[kConfigParameterSize],
unsigned char& config_parameters_size) { unsigned char& config_parameters_size) {
int32_t num_parameters = int32_t num_parameters =
vcm_.CodecConfigParameters(config_parameters, kConfigParameterSize); vcm_->CodecConfigParameters(config_parameters, kConfigParameterSize);
if (num_parameters <= 0) { if (num_parameters <= 0) {
config_parameters_size = 0; config_parameters_size = 0;
return -1; return -1;
@ -405,7 +392,7 @@ int32_t ViEEncoder::ScaleInputImage(bool enable) {
LOG_F(LS_ERROR) << "Not supported."; LOG_F(LS_ERROR) << "Not supported.";
return -1; return -1;
} }
vpm_.SetInputFrameResampleMode(resampling_mode); vpm_->SetInputFrameResampleMode(resampling_mode);
return 0; return 0;
} }
@ -424,14 +411,14 @@ int ViEEncoder::GetPaddingNeededBps(int bitrate_bps) const {
} }
VideoCodec send_codec; VideoCodec send_codec;
if (vcm_.SendCodec(&send_codec) != 0) if (vcm_->SendCodec(&send_codec) != 0)
return 0; return 0;
SimulcastStream* stream_configs = send_codec.simulcastStream; SimulcastStream* stream_configs = send_codec.simulcastStream;
// Allocate the bandwidth between the streams. // Allocate the bandwidth between the streams.
std::vector<uint32_t> stream_bitrates = AllocateStreamBitrates( std::vector<uint32_t> stream_bitrates = AllocateStreamBitrates(
bitrate_bps, stream_configs, send_codec.numberOfSimulcastStreams); bitrate_bps, stream_configs, send_codec.numberOfSimulcastStreams);
bool video_is_suspended = vcm_.VideoSuspended(); bool video_is_suspended = vcm_->VideoSuspended();
// Find the max amount of padding we can allow ourselves to send at this // Find the max amount of padding we can allow ourselves to send at this
// point, based on which streams are currently active and what our current // point, based on which streams are currently active and what our current
@ -551,7 +538,7 @@ void ViEEncoder::DeliverFrame(int id,
} }
// Pass frame via preprocessor. // Pass frame via preprocessor.
const int ret = vpm_.PreprocessFrame(video_frame, &decimated_frame); const int ret = vpm_->PreprocessFrame(video_frame, &decimated_frame);
if (ret == 1) { if (ret == 1) {
// Drop this frame. // Drop this frame.
return; return;
@ -587,7 +574,7 @@ void ViEEncoder::DeliverFrame(int id,
} }
#ifdef VIDEOCODEC_VP8 #ifdef VIDEOCODEC_VP8
if (vcm_.SendCodec() == webrtc::kVideoCodecVP8) { if (vcm_->SendCodec() == webrtc::kVideoCodecVP8) {
webrtc::CodecSpecificInfo codec_specific_info; webrtc::CodecSpecificInfo codec_specific_info;
codec_specific_info.codecType = webrtc::kVideoCodecVP8; codec_specific_info.codecType = webrtc::kVideoCodecVP8;
{ {
@ -604,12 +591,12 @@ void ViEEncoder::DeliverFrame(int id,
has_received_rpsi_ = false; has_received_rpsi_ = false;
} }
vcm_.AddVideoFrame(*output_frame, vpm_.ContentMetrics(), vcm_->AddVideoFrame(*output_frame, vpm_->ContentMetrics(),
&codec_specific_info); &codec_specific_info);
return; return;
} }
#endif #endif
vcm_.AddVideoFrame(*output_frame); vcm_->AddVideoFrame(*output_frame);
} }
void ViEEncoder::DelayChanged(int id, int frame_delay) { void ViEEncoder::DelayChanged(int id, int frame_delay) {
@ -620,7 +607,7 @@ int ViEEncoder::GetPreferedFrameSettings(int* width,
int* frame_rate) { int* frame_rate) {
webrtc::VideoCodec video_codec; webrtc::VideoCodec video_codec;
memset(&video_codec, 0, sizeof(video_codec)); memset(&video_codec, 0, sizeof(video_codec));
if (vcm_.SendCodec(&video_codec) != VCM_OK) { if (vcm_->SendCodec(&video_codec) != VCM_OK) {
return -1; return -1;
} }
@ -631,13 +618,13 @@ int ViEEncoder::GetPreferedFrameSettings(int* width,
} }
int ViEEncoder::SendKeyFrame() { int ViEEncoder::SendKeyFrame() {
return vcm_.IntraFrameRequest(0); return vcm_->IntraFrameRequest(0);
} }
int32_t ViEEncoder::SendCodecStatistics( int32_t ViEEncoder::SendCodecStatistics(
uint32_t* num_key_frames, uint32_t* num_delta_frames) { uint32_t* num_key_frames, uint32_t* num_delta_frames) {
webrtc::VCMFrameCount sent_frames; webrtc::VCMFrameCount sent_frames;
if (vcm_.SentFrameCount(sent_frames) != VCM_OK) { if (vcm_->SentFrameCount(sent_frames) != VCM_OK) {
return -1; return -1;
} }
*num_key_frames = sent_frames.numKeyFrames; *num_key_frames = sent_frames.numKeyFrames;
@ -651,14 +638,13 @@ uint32_t ViEEncoder::LastObservedBitrateBps() const {
} }
int ViEEncoder::CodecTargetBitrate(uint32_t* bitrate) const { int ViEEncoder::CodecTargetBitrate(uint32_t* bitrate) const {
if (vcm_.Bitrate(bitrate) != 0) if (vcm_->Bitrate(bitrate) != 0)
return -1; return -1;
return 0; return 0;
} }
int32_t ViEEncoder::UpdateProtectionMethod(bool nack, bool fec) { int32_t ViEEncoder::UpdateProtectionMethod(bool nack, bool fec) {
DCHECK(send_payload_router_ != NULL); DCHECK(send_payload_router_ != NULL);
DCHECK(vcm_protection_callback_ != NULL);
if (fec_enabled_ == fec && nack_enabled_ == nack) { if (fec_enabled_ == fec && nack_enabled_ == nack) {
// No change needed, we're already in correct state. // No change needed, we're already in correct state.
@ -669,35 +655,30 @@ int32_t ViEEncoder::UpdateProtectionMethod(bool nack, bool fec) {
// Set Video Protection for VCM. // Set Video Protection for VCM.
if (fec_enabled_ && nack_enabled_) { if (fec_enabled_ && nack_enabled_) {
vcm_.SetVideoProtection(webrtc::kProtectionNackFEC, true); vcm_->SetVideoProtection(webrtc::kProtectionNackFEC, true);
} else { } else {
vcm_.SetVideoProtection(webrtc::kProtectionFEC, fec_enabled_); vcm_->SetVideoProtection(webrtc::kProtectionFEC, fec_enabled_);
vcm_.SetVideoProtection(webrtc::kProtectionNackSender, nack_enabled_); vcm_->SetVideoProtection(webrtc::kProtectionNackSender, nack_enabled_);
vcm_.SetVideoProtection(webrtc::kProtectionNackFEC, false); vcm_->SetVideoProtection(webrtc::kProtectionNackFEC, false);
} }
if (fec_enabled_ || nack_enabled_) { if (fec_enabled_ || nack_enabled_) {
vcm_.RegisterProtectionCallback(vcm_protection_callback_);
// The send codec must be registered to set correct MTU. // The send codec must be registered to set correct MTU.
webrtc::VideoCodec codec; webrtc::VideoCodec codec;
if (vcm_.SendCodec(&codec) == 0) { if (vcm_->SendCodec(&codec) == 0) {
uint32_t current_bitrate_bps = 0; uint32_t current_bitrate_bps = 0;
if (vcm_.Bitrate(&current_bitrate_bps) != 0) { if (vcm_->Bitrate(&current_bitrate_bps) != 0) {
LOG_F(LS_WARNING) << LOG_F(LS_WARNING) <<
"Failed to get the current encoder target bitrate."; "Failed to get the current encoder target bitrate.";
} }
// Convert to start bitrate in kbps. // Convert to start bitrate in kbps.
codec.startBitrate = (current_bitrate_bps + 500) / 1000; codec.startBitrate = (current_bitrate_bps + 500) / 1000;
size_t max_payload_length = send_payload_router_->MaxPayloadLength(); size_t max_payload_length = send_payload_router_->MaxPayloadLength();
if (vcm_.RegisterSendCodec(&codec, number_of_cores_, if (vcm_->RegisterSendCodec(&codec, number_of_cores_,
max_payload_length) != 0) { max_payload_length) != 0) {
return -1; return -1;
} }
} }
return 0;
} else {
// FEC and NACK are disabled.
vcm_.RegisterProtectionCallback(NULL);
} }
return 0; return 0;
} }
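This hunk drops the per-call `RegisterProtectionCallback()` toggling from `UpdateProtectionMethod()`: the callback stays registered (it was wired up once in `StartThreadsAndSetSharedMembers()` above), so the function only switches the VCM protection modes and, when FEC or NACK is enabled, re-registers the send codec so the MTU accounts for protection overhead. A simplified sketch of the resulting control flow (illustrative enum and method names, not the real VCM API):

#include <cstdint>

enum class Protection { kFec, kNack, kNackFec };

struct Vcm {
  void SetVideoProtection(Protection /*method*/, bool /*enable*/) {}
  int32_t ReRegisterSendCodecWithCurrentBitrate() { return 0; }
};

int32_t UpdateProtectionMethod(Vcm* vcm, bool nack, bool fec) {
  // The protection callback is already registered for the VCM's lifetime;
  // only the protection modes change here.
  if (fec && nack) {
    vcm->SetVideoProtection(Protection::kNackFec, true);
  } else {
    vcm->SetVideoProtection(Protection::kFec, fec);
    vcm->SetVideoProtection(Protection::kNack, nack);
    vcm->SetVideoProtection(Protection::kNackFec, false);
  }
  if (fec || nack) {
    // The send codec must be re-registered so the MTU reflects the
    // protection overhead.
    if (vcm->ReRegisterSendCodecWithCurrentBitrate() != 0)
      return -1;
  }
  return 0;  // Single exit; no callback deregistration branch remains.
}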
@ -709,12 +690,12 @@ void ViEEncoder::SetSenderBufferingMode(int target_delay_ms) {
} }
if (target_delay_ms > 0) { if (target_delay_ms > 0) {
// Disable external frame-droppers. // Disable external frame-droppers.
vcm_.EnableFrameDropper(false); vcm_->EnableFrameDropper(false);
vpm_.EnableTemporalDecimation(false); vpm_->EnableTemporalDecimation(false);
} else { } else {
// Real-time mode - enable frame droppers. // Real-time mode - enable frame droppers.
vpm_.EnableTemporalDecimation(true); vpm_->EnableTemporalDecimation(true);
vcm_.EnableFrameDropper(true); vcm_->EnableFrameDropper(true);
} }
} }
@ -804,7 +785,7 @@ void ViEEncoder::OnReceivedIntraFrameRequest(uint32_t ssrc) {
idx = stream_it->second; idx = stream_it->second;
} }
// Release the critsect before triggering key frame. // Release the critsect before triggering key frame.
vcm_.IntraFrameRequest(idx); vcm_->IntraFrameRequest(idx);
} }
void ViEEncoder::OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) { void ViEEncoder::OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) {
@ -829,7 +810,7 @@ void ViEEncoder::OnLocalSsrcChanged(uint32_t old_ssrc, uint32_t new_ssrc) {
bool ViEEncoder::SetSsrcs(const std::list<unsigned int>& ssrcs) { bool ViEEncoder::SetSsrcs(const std::list<unsigned int>& ssrcs) {
VideoCodec codec; VideoCodec codec;
if (vcm_.SendCodec(&codec) != 0) if (vcm_->SendCodec(&codec) != 0)
return false; return false;
if (codec.numberOfSimulcastStreams > 0 && if (codec.numberOfSimulcastStreams > 0 &&
@ -863,11 +844,11 @@ void ViEEncoder::OnNetworkChanged(uint32_t bitrate_bps,
<< " packet loss " << fraction_lost << " packet loss " << fraction_lost
<< " rtt " << round_trip_time_ms; << " rtt " << round_trip_time_ms;
DCHECK(send_payload_router_ != NULL); DCHECK(send_payload_router_ != NULL);
vcm_.SetChannelParameters(bitrate_bps, fraction_lost, round_trip_time_ms); vcm_->SetChannelParameters(bitrate_bps, fraction_lost, round_trip_time_ms);
bool video_is_suspended = vcm_.VideoSuspended(); bool video_is_suspended = vcm_->VideoSuspended();
VideoCodec send_codec; VideoCodec send_codec;
if (vcm_.SendCodec(&send_codec) != 0) { if (vcm_->SendCodec(&send_codec) != 0) {
return; return;
} }
SimulcastStream* stream_configs = send_codec.simulcastStream; SimulcastStream* stream_configs = send_codec.simulcastStream;
@ -903,15 +884,15 @@ int32_t ViEEncoder::RegisterEffectFilter(ViEEffectFilter* effect_filter) {
} }
int ViEEncoder::StartDebugRecording(const char* fileNameUTF8) { int ViEEncoder::StartDebugRecording(const char* fileNameUTF8) {
return vcm_.StartDebugRecording(fileNameUTF8); return vcm_->StartDebugRecording(fileNameUTF8);
} }
int ViEEncoder::StopDebugRecording() { int ViEEncoder::StopDebugRecording() {
return vcm_.StopDebugRecording(); return vcm_->StopDebugRecording();
} }
void ViEEncoder::SuspendBelowMinBitrate() { void ViEEncoder::SuspendBelowMinBitrate() {
vcm_.SuspendBelowMinBitrate(); vcm_->SuspendBelowMinBitrate();
bitrate_allocator_->EnforceMinBitrate(false); bitrate_allocator_->EnforceMinBitrate(false);
} }
@ -928,11 +909,11 @@ void ViEEncoder::DeRegisterPreEncodeCallback() {
void ViEEncoder::RegisterPostEncodeImageCallback( void ViEEncoder::RegisterPostEncodeImageCallback(
EncodedImageCallback* post_encode_callback) { EncodedImageCallback* post_encode_callback) {
vcm_.RegisterPostEncodeImageCallback(post_encode_callback); vcm_->RegisterPostEncodeImageCallback(post_encode_callback);
} }
void ViEEncoder::DeRegisterPostEncodeImageCallback() { void ViEEncoder::DeRegisterPostEncodeImageCallback() {
vcm_.RegisterPostEncodeImageCallback(NULL); vcm_->RegisterPostEncodeImageCallback(NULL);
} }
void ViEEncoder::RegisterSendStatisticsProxy( void ViEEncoder::RegisterSendStatisticsProxy(

View File

@ -197,10 +197,10 @@ class ViEEncoder
const uint32_t number_of_cores_; const uint32_t number_of_cores_;
const bool disable_default_encoder_; const bool disable_default_encoder_;
VideoCodingModule& vcm_; const rtc::scoped_ptr<VideoProcessingModule> vpm_;
VideoProcessingModule& vpm_; const rtc::scoped_ptr<QMVideoSettingsCallback> qm_callback_;
const rtc::scoped_ptr<VideoCodingModule> vcm_;
scoped_refptr<PayloadRouter> send_payload_router_; scoped_refptr<PayloadRouter> send_payload_router_;
VCMProtectionCallback* vcm_protection_callback_;
rtc::scoped_ptr<CriticalSectionWrapper> callback_cs_; rtc::scoped_ptr<CriticalSectionWrapper> callback_cs_;
rtc::scoped_ptr<CriticalSectionWrapper> data_cs_; rtc::scoped_ptr<CriticalSectionWrapper> data_cs_;
@ -234,8 +234,6 @@ class ViEEncoder
uint64_t picture_id_rpsi_ GUARDED_BY(data_cs_); uint64_t picture_id_rpsi_ GUARDED_BY(data_cs_);
std::map<unsigned int, int> ssrc_streams_ GUARDED_BY(data_cs_); std::map<unsigned int, int> ssrc_streams_ GUARDED_BY(data_cs_);
// Quality modes callback
QMVideoSettingsCallback* qm_callback_;
bool video_suspended_ GUARDED_BY(data_cs_); bool video_suspended_ GUARDED_BY(data_cs_);
I420FrameCallback* pre_encode_callback_ GUARDED_BY(callback_cs_); I420FrameCallback* pre_encode_callback_ GUARDED_BY(callback_cs_);
const int64_t start_ms_; const int64_t start_ms_;
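The header hunk encodes the same dependency in declaration order: `vpm_` and `qm_callback_` are declared before `vcm_`, so they are constructed first and destroyed last, which is what lets `qm_callback_.get()` be passed into `VideoCodingModule::Create()` in the constructor's initializer list. A two-member reminder of the C++ rule this relies on, with stand-in types:

#include <memory>

struct A {};
struct B {
  explicit B(A* a) : a_(a) {}
  A* const a_;
};

class Owner {
  // Members are initialized in declaration order and destroyed in reverse,
  // so a_ is valid by the time b_'s initializer runs.
  std::unique_ptr<A> a_{new A()};
  std::unique_ptr<B> b_{new B(a_.get())};
};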