Wire up pacer-based padding.

This connects the pacer-based padding with the RTP modules, which will
generate padding packets roughly according to what the pacer suggests.
It will only generate padding packets of maximum size to keep the number
of padding packets as small as possible. This also sets a limit on how
much padding + media bitrate the pacer is allowed to "request" from the
RTP modules.

For now, padding will only be generated by the first sending RTP module.

BUG=1837
R=mflodman@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/1612005

git-svn-id: http://webrtc.googlecode.com/svn/trunk@4234 4adac7df-926f-26a2-2b94-8c16560cd09d
stefan@webrtc.org 2013-06-17 12:53:37 +00:00
parent 50fb4afade
commit 508a84b255
11 changed files with 160 additions and 65 deletions
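
For orientation, below is a minimal, self-contained C++ sketch of the call chain this CL wires up (pacer -> ViEEncoder -> RTP module -> RTP sender). The names PaddingCallback, RtpModule, PaddingDispatcher and the kMaxPaddingLength value are illustrative stand-ins, not the real WebRTC classes; they only mirror the roles of PacedSender::Callback, ViEEncoder::TimeToSendPadding and RTPSender::SendPadData seen in the diff.

// Simplified sketch (not the real WebRTC classes) of the padding flow:
// the pacer asks its callback for padding, the callback forwards the
// request to the first sending RTP module, and that module emits
// full-size padding packets until the request is covered.
#include <cstddef>
#include <cstdio>
#include <vector>

namespace sketch {

const int kMaxPaddingLength = 224;  // Illustrative max padding payload size.

// Mirrors PacedSender::Callback::TimeToSendPadding(): returns bytes sent.
class PaddingCallback {
 public:
  virtual ~PaddingCallback() {}
  virtual int TimeToSendPadding(int bytes) = 0;
};

// Stands in for an RTP module (RtpRtcp / RTPSender pair).
class RtpModule : public PaddingCallback {
 public:
  explicit RtpModule(bool sending) : sending_media_(sending) {}

  // Sends only maximum-size padding packets, so it may send slightly more
  // than requested and reports the number of bytes actually sent.
  virtual int TimeToSendPadding(int bytes) {
    if (!sending_media_) return 0;
    int bytes_sent = 0;
    while (bytes_sent < bytes) {
      printf("padding packet: %d bytes\n", kMaxPaddingLength);
      bytes_sent += kMaxPaddingLength;
    }
    return bytes_sent;
  }

 private:
  bool sending_media_;
};

// Plays the role of ViEEncoder: forwards the pacer's request to the first
// module that is currently sending media.
class PaddingDispatcher : public PaddingCallback {
 public:
  void AddModule(RtpModule* module) { modules_.push_back(module); }

  virtual int TimeToSendPadding(int bytes) {
    for (size_t i = 0; i < modules_.size(); ++i) {
      int sent = modules_[i]->TimeToSendPadding(bytes);
      if (sent > 0) return sent;  // Only the first sending module pads.
    }
    return 0;
  }

 private:
  std::vector<RtpModule*> modules_;
};

}  // namespace sketch

int main() {
  sketch::RtpModule muted(false), active(true);
  sketch::PaddingDispatcher dispatcher;
  dispatcher.AddModule(&muted);
  dispatcher.AddModule(&active);

  // The pacer would compute this budget as the gap between the media
  // bitrate and the pad-up-to target (capped at the codec's max bitrate).
  int padding_budget_bytes = 500;
  int sent = dispatcher.TimeToSendPadding(padding_budget_bytes);
  printf("requested %d bytes, sent %d bytes of padding\n",
         padding_budget_bytes, sent);
  return 0;
}

As in the real change, the stand-in module pads only with full-size packets, so it can overshoot the request slightly; this is why TimeToSendPadding returns the number of padding bytes actually sent rather than a bool.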


@ -489,6 +489,8 @@ class RtpRtcp : public Module {
virtual void TimeToSendPacket(uint32_t ssrc, uint16_t sequence_number,
int64_t capture_time_ms) = 0;
virtual int TimeToSendPadding(int bytes) = 0;
/**************************************************************************
*
* RTCP


@ -157,6 +157,8 @@ class MockRtpRtcp : public RtpRtcp {
const RTPVideoHeader* rtpVideoHdr));
MOCK_METHOD3(TimeToSendPacket,
void(uint32_t ssrc, uint16_t sequence_number, int64_t capture_time_ms));
MOCK_METHOD1(TimeToSendPadding,
int(int bytes));
MOCK_METHOD3(RegisterRtcpObservers,
void(RtcpIntraFrameObserver* intraFrameCallback,
RtcpBandwidthObserver* bandwidthCallback,


@ -977,7 +977,17 @@ void ModuleRtpRtcpImpl::TimeToSendPacket(uint32_t ssrc,
"TimeToSendPacket(ssrc:0x%x sequence_number:%u capture_time_ms:%ll)",
ssrc, sequence_number, capture_time_ms);
if (simulcast_) {
bool no_child_modules = false;
{
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
no_child_modules = child_modules_.empty();
}
if (no_child_modules) {
// Don't send from default module.
if (SendingMedia() && ssrc == rtp_sender_.SSRC()) {
rtp_sender_.TimeToSendPacket(sequence_number, capture_time_ms);
}
} else {
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
@ -987,25 +997,35 @@ void ModuleRtpRtcpImpl::TimeToSendPacket(uint32_t ssrc,
}
++it;
}
}
}
int ModuleRtpRtcpImpl::TimeToSendPadding(int bytes) {
WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, id_, "TimeToSendPadding(bytes: %d)",
bytes);
bool no_child_modules = false;
{
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
no_child_modules = child_modules_.empty();
}
if (no_child_modules) {
// Don't send from default module.
if (SendingMedia()) {
return rtp_sender_.TimeToSendPadding(bytes);
}
} else {
bool have_child_modules = !child_modules_.empty();
if (!have_child_modules) {
// Don't send from default module.
if (SendingMedia() && ssrc == rtp_sender_.SSRC()) {
rtp_sender_.TimeToSendPacket(sequence_number, capture_time_ms);
}
} else {
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
if ((*it)->SendingMedia() && ssrc == (*it)->rtp_sender_.SSRC()) {
(*it)->rtp_sender_.TimeToSendPacket(sequence_number, capture_time_ms);
return;
}
++it;
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
// Send padding on one of the modules sending media.
if ((*it)->SendingMedia()) {
return (*it)->rtp_sender_.TimeToSendPadding(bytes);
}
++it;
}
}
return 0;
}
uint16_t ModuleRtpRtcpImpl::MaxPayloadLength() const {


@ -190,6 +190,9 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
virtual void TimeToSendPacket(uint32_t ssrc, uint16_t sequence_number,
int64_t capture_time_ms);
// Returns the number of padding bytes actually sent, which can be more or
// less than |bytes|.
virtual int TimeToSendPadding(int bytes);
// RTCP part.
// Get RTCP status.


@ -12,7 +12,6 @@
#include <cstdlib> // srand
#include "webrtc/modules/pacing/include/paced_sender.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_packet_history.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender_video.h"
@ -377,9 +376,14 @@ int32_t RTPSender::SendOutgoingData(
assert(frame_type != kAudioFrameSpeech && frame_type != kAudioFrameCN);
if (frame_type == kFrameEmpty) {
if (paced_sender_->Enabled()) {
// Padding is driven by the pacer and not by the encoder.
return 0;
}
return SendPaddingAccordingToBitrate(payload_type, capture_timestamp,
capture_time_ms) ? 0 : -1;
}
capture_time_ms_ = capture_time_ms;
return video_->SendVideo(video_type, frame_type, payload_type,
capture_timestamp, capture_time_ms, payload_data,
payload_size, fragmentation, codec_info,
@ -409,7 +413,10 @@ bool RTPSender::SendPaddingAccordingToBitrate(
bytes = bytes_cap;
}
}
return SendPadData(payload_type, capture_timestamp, capture_time_ms, bytes);
int bytes_sent = SendPadData(payload_type, capture_time_ms, bytes,
kDontRetransmit, false);
// We did not manage to send all bytes. Comparing with 31 due to modulus 32.
return bytes - bytes_sent < 31;
}
int RTPSender::BuildPaddingPacket(uint8_t* packet, int header_length,
@ -436,35 +443,54 @@ int RTPSender::BuildPaddingPacket(uint8_t* packet, int header_length,
return padding_bytes_in_packet;
}
bool RTPSender::SendPadData(int8_t payload_type, uint32_t capture_timestamp,
int64_t capture_time_ms, int32_t bytes) {
int RTPSender::SendPadData(int payload_type, int64_t capture_time_ms,
int32_t bytes, StorageType store,
bool force_full_size_packets) {
// Drop this packet if we're not sending media packets.
if (!sending_media_) {
return true;
return bytes;
}
uint8_t data_buffer[IP_PACKET_SIZE];
for (; bytes > 0; bytes -= kMaxPaddingLength) {
// Correct seq num, timestamp and payload type.
int header_length = BuildRTPheader(data_buffer,
payload_type,
false, // No markerbit.
capture_timestamp,
true, // Timestamp provided.
true); // Increment sequence number.
int padding_bytes_in_packet = BuildPaddingPacket(data_buffer, header_length,
bytes);
// Send the packet.
if (SendToNetwork(data_buffer, padding_bytes_in_packet, header_length,
capture_time_ms, kDontRetransmit) < 0) {
uint32_t ssrc;
uint16_t sequence_number;
uint32_t timestamp;
{
CriticalSectionScoped cs(send_critsect_);
timestamp = timestamp_;
if (rtx_ == kRtxOff) {
ssrc = ssrc_;
sequence_number = sequence_number_;
++sequence_number_;
} else {
ssrc = ssrc_rtx_;
sequence_number = sequence_number_rtx_;
++sequence_number_rtx_;
}
}
int padding_bytes_in_packet = 0;
int bytes_sent = 0;
for (; bytes > 0; bytes -= padding_bytes_in_packet) {
// Generate an RTX packet which only contains random padding data.
uint8_t padding_packet[IP_PACKET_SIZE];
int header_length = CreateRTPHeader(padding_packet, payload_type, ssrc,
false, timestamp, sequence_number,
NULL, 0);
// Always send full padding packets.
if (force_full_size_packets && bytes < kMaxPaddingLength)
bytes = kMaxPaddingLength;
padding_bytes_in_packet = BuildPaddingPacket(padding_packet, header_length,
bytes);
if (padding_bytes_in_packet == 0) {
break;
}
if (0 > SendToNetwork(padding_packet, padding_bytes_in_packet,
header_length, capture_time_ms, store,
PacedSender::kLowPriority)) {
// Error sending the packet.
break;
}
bytes_sent += padding_bytes_in_packet;
}
if (bytes > 31) { // 31 due to our modulus 32.
// We did not manage to send all bytes.
return false;
}
return true;
return bytes_sent;
}
void RTPSender::SetStorePacketsStatus(const bool enable,
@ -524,7 +550,7 @@ int32_t RTPSender::ReSendPacket(uint16_t packet_id, uint32_t min_resend_time) {
header.ssrc,
header.sequenceNumber,
capture_time_ms,
length)) {
length - header.headerLength)) {
// We can't send the packet right now.
// We will be called when it is time.
return length;
@ -708,10 +734,24 @@ void RTPSender::TimeToSendPacket(uint16_t sequence_number,
SendPacketToNetwork(data_buffer, length);
}
int RTPSender::TimeToSendPadding(int bytes) {
if (!sending_media_) {
return 0;
}
int payload_type;
{
CriticalSectionScoped cs(send_critsect_);
payload_type = (rtx_ == kRtxOff) ? payload_type_ : payload_type_rtx_;
}
return SendPadData(payload_type, capture_time_ms_, bytes, kDontStore,
true);
}
// TODO(pwestin): send in the RTPHeaderParser to avoid parsing it again.
int32_t RTPSender::SendToNetwork(
uint8_t *buffer, int payload_length, int rtp_header_length,
int64_t capture_time_ms, StorageType storage) {
int64_t capture_time_ms, StorageType storage,
PacedSender::Priority priority) {
ModuleRTPUtility::RTPHeaderParser rtp_parser(
buffer, payload_length + rtp_header_length);
RTPHeader rtp_header;
@ -761,10 +801,9 @@ int32_t RTPSender::SendToNetwork(
}
if (paced_sender_ && storage != kDontStore) {
if (!paced_sender_->SendPacket(
PacedSender::kNormalPriority, rtp_header.ssrc,
rtp_header.sequenceNumber, capture_time_ms,
payload_length + rtp_header_length)) {
if (!paced_sender_->SendPacket(priority, rtp_header.ssrc,
rtp_header.sequenceNumber, capture_time_ms,
payload_length)) {
// We can't send the packet right now.
// We will be called when it is time.
return 0;
@ -865,7 +904,6 @@ int32_t RTPSender::BuildRTPheader(
if (time_stamp_provided) {
timestamp_ = start_time_stamp_ + capture_timestamp;
capture_timestamp_ = capture_timestamp;
} else {
// Make a unique time stamp.
// We can't inc by the actual time, since then we increase the risk of back


@ -16,6 +16,7 @@
#include <map>
#include "webrtc/common_types.h"
#include "webrtc/modules/pacing/include/paced_sender.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/bitrate.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h"
@ -28,7 +29,6 @@
namespace webrtc {
class CriticalSectionWrapper;
class PacedSender;
class RTPPacketHistory;
class RTPSenderAudio;
class RTPSenderVideo;
@ -57,7 +57,8 @@ class RTPSenderInterface {
virtual int32_t SendToNetwork(
uint8_t *data_buffer, int payload_length, int rtp_header_length,
int64_t capture_time_ms, StorageType storage) = 0;
int64_t capture_time_ms, StorageType storage,
PacedSender::Priority priority) = 0;
};
class RTPSender : public Bitrate, public RTPSenderInterface {
@ -131,8 +132,8 @@ class RTPSender : public Bitrate, public RTPSenderInterface {
const RTPVideoTypeHeader * rtp_type_hdr = NULL);
int BuildPaddingPacket(uint8_t* packet, int header_length, int32_t bytes);
bool SendPadData(int8_t payload_type, uint32_t capture_timestamp,
int64_t capture_time_ms, int32_t bytes);
int SendPadData(int payload_type, int64_t capture_time_ms, int32_t bytes,
StorageType store, bool force_full_size_packets);
// RTP header extension
int32_t SetTransmissionTimeOffset(
const int32_t transmission_time_offset);
@ -163,6 +164,7 @@ class RTPSender : public Bitrate, public RTPSenderInterface {
const int64_t now_ms) const;
void TimeToSendPacket(uint16_t sequence_number, int64_t capture_time_ms);
int TimeToSendPadding(int bytes);
// NACK.
int SelectiveRetransmissions() const;
@ -204,7 +206,8 @@ class RTPSender : public Bitrate, public RTPSenderInterface {
virtual int32_t SendToNetwork(
uint8_t *data_buffer, int payload_length, int rtp_header_length,
int64_t capture_time_ms, StorageType storage);
int64_t capture_time_ms, StorageType storage,
PacedSender::Priority priority);
// Audio.
@ -320,7 +323,7 @@ class RTPSender : public Bitrate, public RTPSenderInterface {
bool ssrc_forced_;
uint32_t ssrc_;
uint32_t timestamp_;
int64_t capture_timestamp_;
int64_t capture_time_ms_;
uint8_t num_csrcs_;
uint32_t csrcs_[kRtpCsrcSize];
bool include_csrcs_;


@ -480,7 +480,8 @@ int32_t RTPSenderAudio::SendAudio(
payloadSize,
static_cast<uint16_t>(rtpHeaderLength),
-1,
kAllowRetransmission);
kAllowRetransmission,
PacedSender::kHighPriority);
}
int32_t
@ -619,7 +620,8 @@ RTPSenderAudio::SendTelephoneEventPacket(const bool ended,
"timestamp", dtmfTimeStamp,
"seqnum", _rtpSender->SequenceNumber());
retVal = _rtpSender->SendToNetwork(dtmfbuffer, 4, 12, -1,
kAllowRetransmission);
kAllowRetransmission,
PacedSender::kHighPriority);
sendCount--;
}while (sendCount > 0 && retVal == 0);


@ -349,7 +349,8 @@ TEST_F(RtpSenderTest, TrafficSmoothingWithExtensions) {
0,
rtp_length,
capture_time_ms,
kAllowRetransmission));
kAllowRetransmission,
PacedSender::kNormalPriority));
EXPECT_EQ(0, transport_.packets_sent_);
@ -402,7 +403,8 @@ TEST_F(RtpSenderTest, TrafficSmoothingRetransmits) {
0,
rtp_length,
capture_time_ms,
kAllowRetransmission));
kAllowRetransmission,
PacedSender::kNormalPriority));
EXPECT_EQ(0, transport_.packets_sent_);


@ -132,7 +132,8 @@ RTPSenderVideo::SendVideoPacket(uint8_t* data_buffer,
red_packet->length() - rtp_header_length,
rtp_header_length,
capture_time_ms,
storage);
storage,
PacedSender::kNormalPriority);
ret |= packet_success;
@ -169,7 +170,8 @@ RTPSenderVideo::SendVideoPacket(uint8_t* data_buffer,
red_packet->length() - rtp_header_length,
rtp_header_length,
capture_time_ms,
storage);
storage,
PacedSender::kNormalPriority);
ret |= packet_success;
@ -190,7 +192,8 @@ RTPSenderVideo::SendVideoPacket(uint8_t* data_buffer,
payload_length,
rtp_header_length,
capture_time_ms,
storage);
storage,
PacedSender::kNormalPriority);
if (ret == 0) {
_videoBitrate.Update(payload_length + rtp_header_length);
}
@ -215,7 +218,8 @@ RTPSenderVideo::SendRTPIntraRequest()
TRACE_EVENT_INSTANT1("webrtc_rtp",
"Video::IntraRequest",
"seqnum", _rtpSender.SequenceNumber());
return _rtpSender.SendToNetwork(data, 0, length, -1, kDontStore);
return _rtpSender.SendToNetwork(data, 0, length, -1, kDontStore,
PacedSender::kNormalPriority);
}
int32_t


@ -93,8 +93,7 @@ class ViEPacedSenderCallback : public PacedSender::Callback {
owner_->TimeToSendPacket(ssrc, sequence_number, capture_time_ms);
}
virtual int TimeToSendPadding(int bytes) {
// TODO(pwestin): Hook up this.
return 0;
return owner_->TimeToSendPadding(bytes);
}
private:
ViEEncoder* owner_;
@ -117,6 +116,7 @@ ViEEncoder::ViEEncoder(int32_t engine_id,
callback_cs_(CriticalSectionWrapper::CreateCriticalSection()),
data_cs_(CriticalSectionWrapper::CreateCriticalSection()),
bitrate_controller_(bitrate_controller),
send_padding_(false),
target_delay_ms_(0),
network_is_transmitting_(true),
encoder_paused_(false),
@ -183,6 +183,7 @@ bool ViEEncoder::Init() {
"%s Codec failure", __FUNCTION__);
return false;
}
send_padding_ = video_codec.numberOfSimulcastStreams > 1;
if (vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
default_rtp_rtcp_->MaxDataPayloadLength()) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
@ -199,6 +200,7 @@ bool ViEEncoder::Init() {
#else
VideoCodec video_codec;
if (vcm_.Codec(webrtc::kVideoCodecI420, &video_codec) == VCM_OK) {
send_padding_ = video_codec.numberOfSimulcastStreams > 1;
vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
default_rtp_rtcp_->MaxDataPayloadLength());
default_rtp_rtcp_->RegisterSendPayload(video_codec);
@ -358,11 +360,12 @@ int32_t ViEEncoder::DeRegisterExternalEncoder(uint8_t pl_type) {
return -1;
}
// If the external encoder is the current send codeci, use vcm internal
// If the external encoder is the current send codec, use vcm internal
// encoder.
if (current_send_codec.plType == pl_type) {
uint16_t max_data_payload_length =
default_rtp_rtcp_->MaxDataPayloadLength();
send_padding_ = current_send_codec.numberOfSimulcastStreams > 1;
if (vcm_.RegisterSendCodec(&current_send_codec, number_of_cores_,
max_data_payload_length) != VCM_OK) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
@ -401,6 +404,7 @@ int32_t ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
uint16_t max_data_payload_length =
default_rtp_rtcp_->MaxDataPayloadLength();
send_padding_ = video_codec.numberOfSimulcastStreams > 1;
if (vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
max_data_payload_length) != VCM_OK) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
@ -483,6 +487,13 @@ void ViEEncoder::TimeToSendPacket(uint32_t ssrc, uint16_t sequence_number,
default_rtp_rtcp_->TimeToSendPacket(ssrc, sequence_number, capture_time_ms);
}
int ViEEncoder::TimeToSendPadding(int bytes) {
if (send_padding_) {
return default_rtp_rtcp_->TimeToSendPadding(bytes);
}
return 0;
}
bool ViEEncoder::EncoderPaused() const {
// Pause video if paused by caller or as long as the network is down or the
// pacer queue has grown too large in buffered mode.
@ -1002,7 +1013,13 @@ void ViEEncoder::OnNetworkChanged(const uint32_t bitrate_bps,
vcm_.SetChannelParameters(bitrate_bps, fraction_lost, round_trip_time_ms);
int bitrate_kbps = bitrate_bps / 1000;
paced_sender_->UpdateBitrate(bitrate_kbps, 0);
VideoCodec send_codec;
if (vcm_.SendCodec(&send_codec) != 0) {
return;
}
int pad_up_to_bitrate = std::min(bitrate_kbps,
static_cast<int>(send_codec.maxBitrate));
paced_sender_->UpdateBitrate(bitrate_kbps, pad_up_to_bitrate);
default_rtp_rtcp_->SetTargetSendBitrate(bitrate_bps);
}


@ -175,6 +175,7 @@ class ViEEncoder
// Called by PacedSender.
void TimeToSendPacket(uint32_t ssrc, uint16_t sequence_number,
int64_t capture_time_ms);
int TimeToSendPadding(int bytes);
private:
bool EncoderPaused() const;
@ -194,6 +195,7 @@ class ViEEncoder
BitrateController* bitrate_controller_;
bool send_padding_;
int target_delay_ms_;
bool network_is_transmitting_;
bool encoder_paused_;