Enable paced sender.

Review URL: https://webrtc-codereview.appspot.com/965016

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3089 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
pwestin@webrtc.org 2012-11-13 21:12:39 +00:00
parent 42aa10eba7
commit 571a1c035b
19 changed files with 399 additions and 738 deletions

View File

@ -17,7 +17,8 @@
#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
namespace webrtc {
// forward declaration
// Forward declarations.
class PacedSender;
class RemoteBitrateEstimator;
class RemoteBitrateObserver;
class Transport;
@ -37,7 +38,8 @@ class RtpRtcp : public Module {
intra_frame_callback(NULL),
bandwidth_callback(NULL),
audio_messages(NULL),
remote_bitrate_estimator(NULL) {
remote_bitrate_estimator(NULL),
paced_sender(NULL) {
}
/* id - Unique identifier of this RTP/RTCP module object
* audio - True for a audio version of the RTP/RTCP module
@ -58,6 +60,8 @@ class RtpRtcp : public Module {
* audio_messages - Telehone events.
* remote_bitrate_estimator - Estimates the bandwidth available for a set of
* streams from the same client.
* paced_sender - Spread any bursts of packets into smaller
* bursts to minimize packet loss.
*/
int32_t id;
bool audio;
@ -71,6 +75,7 @@ class RtpRtcp : public Module {
RtcpBandwidthObserver* bandwidth_callback;
RtpAudioFeedback* audio_messages;
RemoteBitrateEstimator* remote_bitrate_estimator;
PacedSender* paced_sender;
};
/*
* Create a RTP/RTCP module object using the system clock.
@ -345,13 +350,6 @@ class RtpRtcp : public Module {
virtual WebRtc_Word32 DeregisterSendRtpHeaderExtension(
const RTPExtensionType type) = 0;
/*
* Enable/disable traffic smoothing of sending stream.
*/
virtual void SetTransmissionSmoothingStatus(const bool enable) = 0;
virtual bool TransmissionSmoothingStatus() const = 0;
/*
* get start timestamp
*/
@ -503,6 +501,9 @@ class RtpRtcp : public Module {
const RTPFragmentationHeader* fragmentation = NULL,
const RTPVideoHeader* rtpVideoHdr = NULL) = 0;
virtual void TimeToSendPacket(uint32_t ssrc, uint16_t sequence_number,
int64_t capture_time_ms) = 0;
/**************************************************************************
*
* RTCP

View File

@ -109,10 +109,6 @@ class MockRtpRtcp : public RtpRtcp {
WebRtc_Word32(const RTPExtensionType type, const WebRtc_UWord8 id));
MOCK_METHOD1(DeregisterSendRtpHeaderExtension,
WebRtc_Word32(const RTPExtensionType type));
MOCK_METHOD1(SetTransmissionSmoothingStatus,
void(const bool enable));
MOCK_CONST_METHOD0(TransmissionSmoothingStatus,
bool());
MOCK_CONST_METHOD0(StartTimestamp,
WebRtc_UWord32());
MOCK_METHOD1(SetStartTimestamp,
@ -156,6 +152,8 @@ class MockRtpRtcp : public RtpRtcp {
const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader* fragmentation,
const RTPVideoHeader* rtpVideoHdr));
MOCK_METHOD3(TimeToSendPacket,
void(uint32_t ssrc, uint16_t sequence_number, int64_t capture_time_ms));
MOCK_METHOD3(RegisterRtcpObservers,
void(RtcpIntraFrameObserver* intraFrameCallback,
RtcpBandwidthObserver* bandwidthCallback,

View File

@ -14,6 +14,7 @@
'dependencies': [
'<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
'<(webrtc_root)/modules/modules.gyp:remote_bitrate_estimator',
'<(webrtc_root)/modules/modules.gyp:paced_sender',
],
'include_dirs': [
'../interface',
@ -81,8 +82,6 @@
'video_codec_information.h',
'rtp_format_vp8.cc',
'rtp_format_vp8.h',
'transmission_bucket.cc',
'transmission_bucket.h',
'vp8_partition_aggregator.cc',
'vp8_partition_aggregator.h',
# Mocks

View File

@ -56,7 +56,8 @@ ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
configuration.audio,
configuration.clock,
configuration.outgoing_transport,
configuration.audio_messages),
configuration.audio_messages,
configuration.paced_sender),
_rtpReceiver(configuration.id, configuration.audio, configuration.clock,
this),
_rtcpSender(configuration.id, configuration.audio, configuration.clock,
@ -186,8 +187,6 @@ WebRtc_Word32 ModuleRtpRtcpImpl::Process() {
const WebRtc_Word64 now = _clock.GetTimeInMS();
_lastProcessTime = now;
_rtpSender.ProcessSendToNetwork();
if (now >= _lastPacketTimeoutProcessTime +
kRtpRtcpPacketTimeoutProcessTimeMs) {
_rtpReceiver.PacketTimeout();
@ -818,11 +817,11 @@ WebRtc_Word32 ModuleRtpRtcpImpl::SendOutgoingData(
const RTPFragmentationHeader* fragmentation,
const RTPVideoHeader* rtpVideoHdr) {
WEBRTC_TRACE(
kTraceStream,
kTraceRtpRtcp,
_id,
"SendOutgoingData(frameType:%d payloadType:%d timeStamp:%u size:%u)",
frameType, payloadType, timeStamp, payloadSize);
kTraceStream,
kTraceRtpRtcp,
_id,
"SendOutgoingData(frameType:%d payloadType:%d timeStamp:%u size:%u)",
frameType, payloadType, timeStamp, payloadSize);
_rtcpSender.SetLastRtpTime(timeStamp, capture_time_ms);
@ -914,6 +913,37 @@ WebRtc_Word32 ModuleRtpRtcpImpl::SendOutgoingData(
return retVal;
}
void ModuleRtpRtcpImpl::TimeToSendPacket(uint32_t ssrc,
uint16_t sequence_number,
int64_t capture_time_ms) {
WEBRTC_TRACE(
kTraceStream,
kTraceRtpRtcp,
_id,
"TimeToSendPacket(ssrc:0x%x sequence_number:%u capture_time_ms:%ll)",
ssrc, sequence_number, capture_time_ms);
if (_simulcast) {
CriticalSectionScoped lock(_criticalSectionModulePtrs.get());
std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
while (it != _childModules.end()) {
if ((*it)->SendingMedia() && ssrc == (*it)->_rtpSender.SSRC()) {
(*it)->_rtpSender.TimeToSendPacket(sequence_number, capture_time_ms);
return;
}
it++;
}
} else {
bool have_child_modules(_childModules.empty() ? false : true);
if (!have_child_modules) {
// Don't send from default module.
if (SendingMedia() && ssrc == _rtpSender.SSRC()) {
_rtpSender.TimeToSendPacket(sequence_number, capture_time_ms);
}
}
}
}
WebRtc_UWord16 ModuleRtpRtcpImpl::MaxPayloadLength() const {
WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "MaxPayloadLength()");
@ -1338,14 +1368,6 @@ WebRtc_Word32 ModuleRtpRtcpImpl::DeregisterReceiveRtpHeaderExtension(
return _rtpReceiver.DeregisterRtpHeaderExtension(type);
}
void ModuleRtpRtcpImpl::SetTransmissionSmoothingStatus(const bool enable) {
_rtpSender.SetTransmissionSmoothingStatus(enable);
}
bool ModuleRtpRtcpImpl::TransmissionSmoothingStatus() const {
return _rtpSender.TransmissionSmoothingStatus();
}
/*
* (TMMBR) Temporary Max Media Bit Rate
*/

View File

@ -124,10 +124,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
virtual WebRtc_Word32 DeregisterSendRtpHeaderExtension(
const RTPExtensionType type);
virtual void SetTransmissionSmoothingStatus(const bool enable);
virtual bool TransmissionSmoothingStatus() const;
// get start timestamp
virtual WebRtc_UWord32 StartTimestamp() const;
@ -185,6 +181,8 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
const RTPFragmentationHeader* fragmentation = NULL,
const RTPVideoHeader* rtpVideoHdr = NULL);
virtual void TimeToSendPacket(uint32_t ssrc, uint16_t sequence_number,
int64_t capture_time_ms);
/*
* RTCP
*/

View File

@ -37,7 +37,6 @@
'rtp_sender_unittest.cc',
'rtcp_sender_unittest.cc',
'rtcp_receiver_unittest.cc',
'transmission_bucket_unittest.cc',
'vp8_partition_aggregator_unittest.cc',
],
},

View File

@ -8,28 +8,30 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/modules/rtp_rtcp/source/rtp_sender.h"
#include <cstdlib> // srand
#include "rtp_sender.h"
#include "critical_section_wrapper.h"
#include "trace.h"
#include "rtp_packet_history.h"
#include "rtp_sender_audio.h"
#include "rtp_sender_video.h"
#include "webrtc/modules/pacing/include/paced_sender.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_packet_history.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender_audio.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender_video.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"
namespace webrtc {
RTPSender::RTPSender(const WebRtc_Word32 id,
const bool audio,
RtpRtcpClock* clock,
Transport* transport,
RtpAudioFeedback* audio_feedback)
RtpAudioFeedback* audio_feedback,
PacedSender* paced_sender)
: Bitrate(clock),
_id(id),
_audioConfigured(audio),
_audio(NULL),
_video(NULL),
paced_sender_(paced_sender),
_sendCritsect(CriticalSectionWrapper::CreateCriticalSection()),
_transport(transport),
_sendingMedia(true), // Default to sending media
@ -44,15 +46,13 @@ RTPSender::RTPSender(const WebRtc_Word32 id,
_rtpHeaderExtensionMap(),
_transmissionTimeOffset(0),
// NACK
_nackByteCountTimes(),
_nackByteCount(),
_nackBitrate(clock),
_packetHistory(new RTPPacketHistory(clock)),
_sendBucket(clock),
_timeLastSendToNetworkUpdate(clock->GetTimeInMS()),
_transmissionSmoothing(false),
// statistics
_packetsSent(0),
_payloadBytesSent(0),
@ -74,7 +74,6 @@ RTPSender::RTPSender(const WebRtc_Word32 id,
memset(_nackByteCountTimes, 0, sizeof(_nackByteCountTimes));
memset(_nackByteCount, 0, sizeof(_nackByteCount));
memset(_CSRC, 0, sizeof(_CSRC));
// We need to seed the random generator.
srand( (WebRtc_UWord32)_clock.GetTimeInMS() );
_ssrc = _ssrcDB.CreateSSRC(); // Can't be 0.
@ -271,16 +270,6 @@ WebRtc_UWord16 RTPSender::PacketOverHead() const {
return _packetOverHead;
}
void RTPSender::SetTransmissionSmoothingStatus(const bool enable) {
CriticalSectionScoped cs(_sendCritsect);
_transmissionSmoothing = enable;
}
bool RTPSender::TransmissionSmoothingStatus() const {
CriticalSectionScoped cs(_sendCritsect);
return _transmissionSmoothing;
}
void RTPSender::SetRTXStatus(const bool enable,
const bool setSSRC,
const WebRtc_UWord32 SSRC) {
@ -690,110 +679,91 @@ void RTPSender::UpdateNACKBitRate(const WebRtc_UWord32 bytes,
}
}
void RTPSender::ProcessSendToNetwork() {
WebRtc_Word64 delta_time_ms;
{
CriticalSectionScoped cs(_sendCritsect);
void RTPSender::TimeToSendPacket(uint16_t sequence_number,
int64_t capture_time_ms) {
StorageType type;
uint16_t length = IP_PACKET_SIZE;
uint8_t data_buffer[IP_PACKET_SIZE];
int64_t stored_time_ms; // TODO(pwestin) can we depricate this?
if (!_transmissionSmoothing) {
return;
}
WebRtc_Word64 now = _clock.GetTimeInMS();
delta_time_ms = now - _timeLastSendToNetworkUpdate;
_timeLastSendToNetworkUpdate = now;
if (_packetHistory == NULL) {
return;
}
_sendBucket.UpdateBytesPerInterval(delta_time_ms, _targetSendBitrate);
if (!_packetHistory->GetRTPPacket(sequence_number, 0, data_buffer,
&length, &stored_time_ms, &type)) {
assert(false);
return;
}
assert(length > 0);
while (!_sendBucket.Empty()) {
WebRtc_Word32 seq_num = _sendBucket.GetNextPacket();
if (seq_num < 0) {
break;
}
ModuleRTPUtility::RTPHeaderParser rtpParser(data_buffer, length);
WebRtcRTPHeader rtp_header;
rtpParser.Parse(rtp_header);
WebRtc_UWord8 data_buffer[IP_PACKET_SIZE];
WebRtc_UWord16 length = IP_PACKET_SIZE;
int64_t stored_time_ms;
StorageType type;
bool found = _packetHistory->GetRTPPacket(seq_num, 0, data_buffer, &length,
&stored_time_ms, &type);
if (!found) {
assert(false);
return;
}
assert(length > 0);
WebRtc_Word64 diff_ms = _clock.GetTimeInMS() - stored_time_ms;
ModuleRTPUtility::RTPHeaderParser rtpParser(data_buffer, length);
WebRtcRTPHeader rtp_header;
rtpParser.Parse(rtp_header);
if (UpdateTransmissionTimeOffset(data_buffer, length, rtp_header,
diff_ms)) {
// Update stored packet in case of receiving a re-transmission request.
_packetHistory->ReplaceRTPHeader(data_buffer,
rtp_header.header.sequenceNumber,
rtp_header.header.headerLength);
}
// Send packet
WebRtc_Word32 bytes_sent = -1;
if (_transport) {
bytes_sent = _transport->SendPacket(_id, data_buffer, length);
}
// Update send statistics
if (bytes_sent > 0) {
CriticalSectionScoped cs(_sendCritsect);
Bitrate::Update(bytes_sent);
_packetsSent++;
if (bytes_sent > rtp_header.header.headerLength) {
_payloadBytesSent += bytes_sent - rtp_header.header.headerLength;
}
}
int64_t diff_ms = _clock.GetTimeInMS() - capture_time_ms;
if (UpdateTransmissionTimeOffset(data_buffer, length, rtp_header, diff_ms)) {
// Update stored packet in case of receiving a re-transmission request.
_packetHistory->ReplaceRTPHeader(data_buffer,
rtp_header.header.sequenceNumber,
rtp_header.header.headerLength);
}
int bytes_sent = -1;
if (_transport) {
bytes_sent = _transport->SendPacket(_id, data_buffer, length);
}
if (bytes_sent <= 0) {
return;
}
// Update send statistics
CriticalSectionScoped cs(_sendCritsect);
Bitrate::Update(bytes_sent);
_packetsSent++;
if (bytes_sent > rtp_header.header.headerLength) {
_payloadBytesSent += bytes_sent - rtp_header.header.headerLength;
}
}
WebRtc_Word32 RTPSender::SendToNetwork(WebRtc_UWord8* buffer,
WebRtc_UWord16 payload_length,
WebRtc_UWord16 rtp_header_length,
// TODO(pwestin): send in the RTPHeaderParser to avoid parsing it again
WebRtc_Word32 RTPSender::SendToNetwork(uint8_t* buffer,
int payload_length,
int rtp_header_length,
int64_t capture_time_ms,
StorageType storage) {
// Used for NACK or to spead out the transmission of packets.
if (_packetHistory->PutRTPPacket(buffer, rtp_header_length + payload_length,
_maxPayloadLength, capture_time_ms, storage) != 0) {
return -1;
}
if (_transmissionSmoothing) {
const WebRtc_UWord16 sequenceNumber = (buffer[2] << 8) + buffer[3];
const WebRtc_UWord32 timestamp = (buffer[4] << 24) + (buffer[5] << 16) +
(buffer[6] << 8) + buffer[7];
_sendBucket.Fill(sequenceNumber, timestamp,
rtp_header_length + payload_length);
// Packet will be sent at a later time.
return 0;
}
ModuleRTPUtility::RTPHeaderParser rtpParser(buffer,
payload_length + rtp_header_length);
WebRtcRTPHeader rtp_header;
rtpParser.Parse(rtp_header);
// |capture_time_ms| <= 0 is considered invalid.
// TODO(holmer): This should be changed all over Video Engine so that negative
// time is consider invalid, while 0 is considered a valid time.
if (capture_time_ms > 0) {
ModuleRTPUtility::RTPHeaderParser rtpParser(buffer,
rtp_header_length + payload_length);
WebRtcRTPHeader rtp_header;
rtpParser.Parse(rtp_header);
int64_t time_now = _clock.GetTimeInMS();
if (UpdateTransmissionTimeOffset(buffer, rtp_header_length + payload_length,
rtp_header, time_now - capture_time_ms)) {
// Update stored packet in case of receiving a re-transmission request.
_packetHistory->ReplaceRTPHeader(buffer, rtp_header.header.sequenceNumber,
rtp_header.header.headerLength);
UpdateTransmissionTimeOffset(buffer, payload_length + rtp_header_length,
rtp_header, time_now - capture_time_ms);
}
// Used for NACK and to spread out the transmission of packets.
if (_packetHistory->PutRTPPacket(buffer, rtp_header_length + payload_length,
_maxPayloadLength, capture_time_ms, storage) != 0) {
return -1;
}
if (paced_sender_) {
if (!paced_sender_ ->SendPacket(PacedSender::kNormalPriority,
rtp_header.header.ssrc,
rtp_header.header.sequenceNumber,
capture_time_ms,
payload_length + rtp_header_length)) {
// We can't send the packet right now.
// We will be called when it is time.
return payload_length + rtp_header_length;
}
}
// Send packet
WebRtc_Word32 bytes_sent = -1;
if (_transport) {
bytes_sent = _transport->SendPacket(_id, buffer,
payload_length + rtp_header_length);
bytes_sent = _transport->SendPacket(_id,
buffer,
payload_length + rtp_header_length);
}
if (bytes_sent <= 0) {
return -1;
@ -915,7 +885,7 @@ WebRtc_UWord16 RTPSender::BuildRTPHeaderExtension(
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
| header extension |
| .... |
*/
*/
const WebRtc_UWord32 kPosLength = 2;
const WebRtc_UWord32 kHeaderLength = RTP_ONE_BYTE_HEADER_LENGTH_IN_BYTES;
@ -1000,7 +970,6 @@ bool RTPSender::UpdateTransmissionTimeOffset(
"Failed to update transmission time offset, not registered.");
return false;
}
int block_pos = 12 + rtp_header.header.numCSRCs + transmission_block_pos;
if (rtp_packet_length < block_pos + 4 ||
rtp_header.header.headerLength < block_pos + 4) {

View File

@ -15,19 +15,19 @@
#include <cmath>
#include <map>
#include "rtp_rtcp_config.h" // misc. defines (e.g. MAX_PACKET_LENGTH)
#include "rtp_rtcp_defines.h"
#include "common_types.h"
#include "ssrc_database.h"
#include "Bitrate.h"
#include "rtp_header_extension.h"
#include "video_codec_information.h"
#include "transmission_bucket.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/Bitrate.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_rtcp_config.h"
#include "webrtc/modules/rtp_rtcp/source/ssrc_database.h"
#include "webrtc/modules/rtp_rtcp/source/video_codec_information.h"
#define MAX_INIT_RTP_SEQ_NUMBER 32767 // 2^15 -1
namespace webrtc {
class CriticalSectionWrapper;
class PacedSender;
class RTPPacketHistory;
class RTPSenderAudio;
class RTPSenderVideo;
@ -55,21 +55,24 @@ class RTPSenderInterface {
virtual WebRtc_UWord16 PacketOverHead() const = 0;
virtual WebRtc_UWord16 ActualSendBitrateKbit() const = 0;
virtual WebRtc_Word32 SendToNetwork(WebRtc_UWord8* data_buffer,
WebRtc_UWord16 payload_length,
WebRtc_UWord16 rtp_header_length,
virtual WebRtc_Word32 SendToNetwork(uint8_t* data_buffer,
int payload_length,
int rtp_header_length,
int64_t capture_time_ms,
StorageType storage) = 0;
};
class RTPSender : public Bitrate, public RTPSenderInterface {
public:
RTPSender(const WebRtc_Word32 id, const bool audio, RtpRtcpClock* clock,
Transport* transport, RtpAudioFeedback* audio_feedback);
RTPSender(const WebRtc_Word32 id,
const bool audio,
RtpRtcpClock* clock,
Transport* transport,
RtpAudioFeedback* audio_feedback,
PacedSender* paced_sender);
virtual ~RTPSender();
void ProcessBitrate();
void ProcessSendToNetwork();
WebRtc_UWord16 ActualSendBitrateKbit() const;
@ -143,204 +146,200 @@ class RTPSender : public Bitrate, public RTPSenderInterface {
/*
* RTP header extension
*/
WebRtc_Word32 SetTransmissionTimeOffset(
const WebRtc_Word32 transmissionTimeOffset);
WebRtc_Word32 SetTransmissionTimeOffset(
const WebRtc_Word32 transmissionTimeOffset);
WebRtc_Word32 RegisterRtpHeaderExtension(const RTPExtensionType type,
const WebRtc_UWord8 id);
WebRtc_Word32 RegisterRtpHeaderExtension(const RTPExtensionType type,
const WebRtc_UWord8 id);
WebRtc_Word32 DeregisterRtpHeaderExtension(const RTPExtensionType type);
WebRtc_Word32 DeregisterRtpHeaderExtension(const RTPExtensionType type);
WebRtc_UWord16 RtpHeaderExtensionTotalLength() const;
WebRtc_UWord16 RtpHeaderExtensionTotalLength() const;
WebRtc_UWord16 BuildRTPHeaderExtension(WebRtc_UWord8* dataBuffer) const;
WebRtc_UWord16 BuildRTPHeaderExtension(WebRtc_UWord8* dataBuffer) const;
WebRtc_UWord8 BuildTransmissionTimeOffsetExtension(
WebRtc_UWord8* dataBuffer) const;
WebRtc_UWord8 BuildTransmissionTimeOffsetExtension(
WebRtc_UWord8* dataBuffer) const;
bool UpdateTransmissionTimeOffset(WebRtc_UWord8* rtp_packet,
const WebRtc_UWord16 rtp_packet_length,
const WebRtcRTPHeader& rtp_header,
const WebRtc_Word64 time_diff_ms) const;
bool UpdateTransmissionTimeOffset(WebRtc_UWord8* rtp_packet,
const WebRtc_UWord16 rtp_packet_length,
const WebRtcRTPHeader& rtp_header,
const WebRtc_Word64 time_diff_ms) const;
void SetTransmissionSmoothingStatus(const bool enable);
void TimeToSendPacket(uint16_t sequence_number, int64_t capture_time_ms);
bool TransmissionSmoothingStatus() const;
/*
* NACK
*/
int SelectiveRetransmissions() const;
int SetSelectiveRetransmissions(uint8_t settings);
void OnReceivedNACK(const WebRtc_UWord16 nackSequenceNumbersLength,
const WebRtc_UWord16* nackSequenceNumbers,
const WebRtc_UWord16 avgRTT);
/*
* NACK
*/
int SelectiveRetransmissions() const;
int SetSelectiveRetransmissions(uint8_t settings);
void OnReceivedNACK(const WebRtc_UWord16 nackSequenceNumbersLength,
const WebRtc_UWord16* nackSequenceNumbers,
const WebRtc_UWord16 avgRTT);
void SetStorePacketsStatus(const bool enable,
const WebRtc_UWord16 numberToStore);
void SetStorePacketsStatus(const bool enable,
const WebRtc_UWord16 numberToStore);
bool StorePackets() const;
bool StorePackets() const;
WebRtc_Word32 ReSendPacket(WebRtc_UWord16 packet_id,
WebRtc_UWord32 min_resend_time = 0);
WebRtc_Word32 ReSendPacket(WebRtc_UWord16 packet_id,
WebRtc_UWord32 min_resend_time = 0);
WebRtc_Word32 ReSendToNetwork(const WebRtc_UWord8* packet,
const WebRtc_UWord32 size);
WebRtc_Word32 ReSendToNetwork(const WebRtc_UWord8* packet,
const WebRtc_UWord32 size);
bool ProcessNACKBitRate(const WebRtc_UWord32 now);
bool ProcessNACKBitRate(const WebRtc_UWord32 now);
/*
* RTX
*/
void SetRTXStatus(const bool enable,
const bool setSSRC,
const WebRtc_UWord32 SSRC);
/*
* RTX
*/
void SetRTXStatus(const bool enable,
const bool setSSRC,
const WebRtc_UWord32 SSRC);
void RTXStatus(bool* enable, WebRtc_UWord32* SSRC) const;
void RTXStatus(bool* enable, WebRtc_UWord32* SSRC) const;
/*
* Functions wrapping RTPSenderInterface
*/
virtual WebRtc_Word32 BuildRTPheader(WebRtc_UWord8* dataBuffer,
const WebRtc_Word8 payloadType,
const bool markerBit,
const WebRtc_UWord32 captureTimeStamp,
const bool timeStampProvided = true,
const bool incSequenceNumber = true);
/*
* Functions wrapping RTPSenderInterface
*/
virtual WebRtc_Word32 BuildRTPheader(WebRtc_UWord8* dataBuffer,
const WebRtc_Word8 payloadType,
const bool markerBit,
const WebRtc_UWord32 captureTimeStamp,
const bool timeStampProvided = true,
const bool incSequenceNumber = true);
virtual WebRtc_UWord16 RTPHeaderLength() const ;
virtual WebRtc_UWord16 IncrementSequenceNumber();
virtual WebRtc_UWord16 MaxPayloadLength() const;
virtual WebRtc_UWord16 PacketOverHead() const;
virtual WebRtc_UWord16 RTPHeaderLength() const ;
virtual WebRtc_UWord16 IncrementSequenceNumber();
virtual WebRtc_UWord16 MaxPayloadLength() const;
virtual WebRtc_UWord16 PacketOverHead() const;
// current timestamp
virtual WebRtc_UWord32 Timestamp() const;
virtual WebRtc_UWord32 SSRC() const;
// current timestamp
virtual WebRtc_UWord32 Timestamp() const;
virtual WebRtc_UWord32 SSRC() const;
virtual WebRtc_Word32 SendToNetwork(uint8_t* data_buffer,
int payload_length,
int rtp_header_length,
int64_t capture_time_ms,
StorageType storage);
/*
* Audio
*/
// Send a DTMF tone using RFC 2833 (4733)
WebRtc_Word32 SendTelephoneEvent(const WebRtc_UWord8 key,
const WebRtc_UWord16 time_ms,
const WebRtc_UWord8 level);
virtual WebRtc_Word32 SendToNetwork(WebRtc_UWord8* data_buffer,
WebRtc_UWord16 payload_length,
WebRtc_UWord16 rtp_header_length,
int64_t capture_time_ms,
StorageType storage);
/*
* Audio
*/
// Send a DTMF tone using RFC 2833 (4733)
WebRtc_Word32 SendTelephoneEvent(const WebRtc_UWord8 key,
const WebRtc_UWord16 time_ms,
const WebRtc_UWord8 level);
bool SendTelephoneEventActive(WebRtc_Word8& telephoneEvent) const;
bool SendTelephoneEventActive(WebRtc_Word8& telephoneEvent) const;
// Set audio packet size, used to determine when it's time to send a DTMF
// packet in silence (CNG)
WebRtc_Word32 SetAudioPacketSize(const WebRtc_UWord16 packetSizeSamples);
// Set audio packet size, used to determine when it's time to send a DTMF
// packet in silence (CNG)
WebRtc_Word32 SetAudioPacketSize(const WebRtc_UWord16 packetSizeSamples);
// Set status and ID for header-extension-for-audio-level-indication.
WebRtc_Word32 SetAudioLevelIndicationStatus(const bool enable,
const WebRtc_UWord8 ID);
// Set status and ID for header-extension-for-audio-level-indication.
WebRtc_Word32 SetAudioLevelIndicationStatus(const bool enable,
const WebRtc_UWord8 ID);
// Get status and ID for header-extension-for-audio-level-indication.
WebRtc_Word32 AudioLevelIndicationStatus(bool& enable,
WebRtc_UWord8& ID) const;
// Get status and ID for header-extension-for-audio-level-indication.
WebRtc_Word32 AudioLevelIndicationStatus(bool& enable,
WebRtc_UWord8& ID) const;
// Store the audio level in dBov for
// header-extension-for-audio-level-indication.
WebRtc_Word32 SetAudioLevel(const WebRtc_UWord8 level_dBov);
// Store the audio level in dBov for
// header-extension-for-audio-level-indication.
WebRtc_Word32 SetAudioLevel(const WebRtc_UWord8 level_dBov);
// Set payload type for Redundant Audio Data RFC 2198
WebRtc_Word32 SetRED(const WebRtc_Word8 payloadType);
// Set payload type for Redundant Audio Data RFC 2198
WebRtc_Word32 SetRED(const WebRtc_Word8 payloadType);
// Get payload type for Redundant Audio Data RFC 2198
WebRtc_Word32 RED(WebRtc_Word8& payloadType) const;
// Get payload type for Redundant Audio Data RFC 2198
WebRtc_Word32 RED(WebRtc_Word8& payloadType) const;
/*
* Video
*/
VideoCodecInformation* CodecInformationVideo();
/*
* Video
*/
VideoCodecInformation* CodecInformationVideo();
RtpVideoCodecTypes VideoCodecType() const;
RtpVideoCodecTypes VideoCodecType() const;
WebRtc_UWord32 MaxConfiguredBitrateVideo() const;
WebRtc_UWord32 MaxConfiguredBitrateVideo() const;
WebRtc_Word32 SendRTPIntraRequest();
WebRtc_Word32 SendRTPIntraRequest();
// FEC
WebRtc_Word32 SetGenericFECStatus(const bool enable,
const WebRtc_UWord8 payloadTypeRED,
const WebRtc_UWord8 payloadTypeFEC);
// FEC
WebRtc_Word32 SetGenericFECStatus(const bool enable,
const WebRtc_UWord8 payloadTypeRED,
const WebRtc_UWord8 payloadTypeFEC);
WebRtc_Word32 GenericFECStatus(bool& enable,
WebRtc_UWord8& payloadTypeRED,
WebRtc_UWord8& payloadTypeFEC) const;
WebRtc_Word32 GenericFECStatus(bool& enable,
WebRtc_UWord8& payloadTypeRED,
WebRtc_UWord8& payloadTypeFEC) const;
WebRtc_Word32 SetFecParameters(
const FecProtectionParams* delta_params,
const FecProtectionParams* key_params);
WebRtc_Word32 SetFecParameters(
const FecProtectionParams* delta_params,
const FecProtectionParams* key_params);
protected:
WebRtc_Word32 CheckPayloadType(const WebRtc_Word8 payloadType,
RtpVideoCodecTypes& videoType);
WebRtc_Word32 CheckPayloadType(const WebRtc_Word8 payloadType,
RtpVideoCodecTypes& videoType);
private:
void UpdateNACKBitRate(const WebRtc_UWord32 bytes,
const WebRtc_UWord32 now);
void UpdateNACKBitRate(const WebRtc_UWord32 bytes,
const WebRtc_UWord32 now);
WebRtc_Word32 SendPaddingAccordingToBitrate(
WebRtc_Word8 payload_type,
WebRtc_UWord32 capture_timestamp,
int64_t capture_time_ms);
WebRtc_Word32 SendPaddingAccordingToBitrate(
WebRtc_Word8 payload_type,
WebRtc_UWord32 capture_timestamp,
int64_t capture_time_ms);
WebRtc_Word32 _id;
const bool _audioConfigured;
RTPSenderAudio* _audio;
RTPSenderVideo* _video;
WebRtc_Word32 _id;
const bool _audioConfigured;
RTPSenderAudio* _audio;
RTPSenderVideo* _video;
CriticalSectionWrapper* _sendCritsect;
PacedSender* paced_sender_;
CriticalSectionWrapper* _sendCritsect;
Transport* _transport;
bool _sendingMedia;
Transport* _transport;
bool _sendingMedia;
WebRtc_UWord16 _maxPayloadLength;
WebRtc_UWord16 _targetSendBitrate;
WebRtc_UWord16 _packetOverHead;
WebRtc_UWord16 _maxPayloadLength;
WebRtc_UWord16 _targetSendBitrate;
WebRtc_UWord16 _packetOverHead;
WebRtc_Word8 _payloadType;
std::map<WebRtc_Word8, ModuleRTPUtility::Payload*> _payloadTypeMap;
WebRtc_Word8 _payloadType;
std::map<WebRtc_Word8, ModuleRTPUtility::Payload*> _payloadTypeMap;
RtpHeaderExtensionMap _rtpHeaderExtensionMap;
WebRtc_Word32 _transmissionTimeOffset;
RtpHeaderExtensionMap _rtpHeaderExtensionMap;
WebRtc_Word32 _transmissionTimeOffset;
// NACK
WebRtc_UWord32 _nackByteCountTimes[NACK_BYTECOUNT_SIZE];
WebRtc_Word32 _nackByteCount[NACK_BYTECOUNT_SIZE];
Bitrate _nackBitrate;
// NACK
WebRtc_UWord32 _nackByteCountTimes[NACK_BYTECOUNT_SIZE];
WebRtc_Word32 _nackByteCount[NACK_BYTECOUNT_SIZE];
Bitrate _nackBitrate;
RTPPacketHistory* _packetHistory;
TransmissionBucket _sendBucket;
WebRtc_Word64 _timeLastSendToNetworkUpdate;
bool _transmissionSmoothing;
RTPPacketHistory* _packetHistory;
// Statistics
WebRtc_UWord32 _packetsSent;
WebRtc_UWord32 _payloadBytesSent;
// Statistics
WebRtc_UWord32 _packetsSent;
WebRtc_UWord32 _payloadBytesSent;
// RTP variables
bool _startTimeStampForced;
WebRtc_UWord32 _startTimeStamp;
SSRCDatabase& _ssrcDB;
WebRtc_UWord32 _remoteSSRC;
bool _sequenceNumberForced;
WebRtc_UWord16 _sequenceNumber;
WebRtc_UWord16 _sequenceNumberRTX;
bool _ssrcForced;
WebRtc_UWord32 _ssrc;
WebRtc_UWord32 _timeStamp;
WebRtc_UWord8 _CSRCs;
WebRtc_UWord32 _CSRC[kRtpCsrcSize];
bool _includeCSRCs;
bool _RTX;
WebRtc_UWord32 _ssrcRTX;
// RTP variables
bool _startTimeStampForced;
WebRtc_UWord32 _startTimeStamp;
SSRCDatabase& _ssrcDB;
WebRtc_UWord32 _remoteSSRC;
bool _sequenceNumberForced;
WebRtc_UWord16 _sequenceNumber;
WebRtc_UWord16 _sequenceNumberRTX;
bool _ssrcForced;
WebRtc_UWord32 _ssrc;
WebRtc_UWord32 _timeStamp;
WebRtc_UWord8 _CSRCs;
WebRtc_UWord32 _CSRC[kRtpCsrcSize];
bool _includeCSRCs;
bool _RTX;
WebRtc_UWord32 _ssrcRTX;
};
} // namespace webrtc

View File

@ -14,11 +14,12 @@
#include <gtest/gtest.h>
#include "rtp_header_extension.h"
#include "rtp_rtcp_defines.h"
#include "rtp_sender.h"
#include "rtp_utility.h"
#include "typedefs.h"
#include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_header_extension.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_sender.h"
#include "webrtc/modules/rtp_rtcp/source/rtp_utility.h"
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
#include "webrtc/typedefs.h"
namespace webrtc {
@ -78,18 +79,14 @@ class RtpSenderTest : public ::testing::Test {
protected:
RtpSenderTest()
: fake_clock_(),
rtp_sender_(new RTPSender(0, false, &fake_clock_, &transport_, NULL)),
rtp_sender_(new RTPSender(0, false, &fake_clock_, &transport_, NULL,
NULL)),
kMarkerBit(true),
kType(kRtpExtensionTransmissionTimeOffset),
packet_() {
kType(kRtpExtensionTransmissionTimeOffset) {
rtp_sender_->SetSequenceNumber(kSeqNum);
}
~RtpSenderTest() {
delete rtp_sender_;
}
FakeClockTest fake_clock_;
RTPSender* rtp_sender_;
scoped_ptr<RTPSender> rtp_sender_;
LoopbackTransportTest transport_;
const bool kMarkerBit;
RTPExtensionType kType;
@ -213,17 +210,15 @@ TEST_F(RtpSenderTest, NoTrafficSmoothing) {
EXPECT_EQ(rtp_length, transport_.last_sent_packet_len_);
}
TEST_F(RtpSenderTest, TrafficSmoothing) {
rtp_sender_->SetTransmissionSmoothingStatus(true);
TEST_F(RtpSenderTest, DISABLED_TrafficSmoothing) {
// TODO(pwestin) we need to send in a pacer object.
rtp_sender_->SetStorePacketsStatus(true, 10);
EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(kType, kId));
rtp_sender_->SetTargetSendBitrate(300000);
WebRtc_Word32 rtp_length = rtp_sender_->BuildRTPheader(packet_,
kPayload,
kMarkerBit,
kTimestamp);
// Packet should be stored in a send bucket.
EXPECT_EQ(0, rtp_sender_->SendToNetwork(packet_,
0,
@ -231,25 +226,19 @@ TEST_F(RtpSenderTest, TrafficSmoothing) {
fake_clock_.GetTimeInMS(),
kAllowRetransmission));
EXPECT_EQ(0, transport_.packets_sent_);
const int kStoredTimeInMs = 100;
fake_clock_.IncrementTime(kStoredTimeInMs);
// Process send bucket. Packet should now be sent.
rtp_sender_->ProcessSendToNetwork();
EXPECT_EQ(1, transport_.packets_sent_);
EXPECT_EQ(rtp_length, transport_.last_sent_packet_len_);
// Parse sent packet.
webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(
transport_.last_sent_packet_, rtp_length);
webrtc::WebRtcRTPHeader rtp_header;
RtpHeaderExtensionMap map;
map.Register(kType, kId);
const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
ASSERT_TRUE(valid_rtp_header);
// Verify transmission time offset.
EXPECT_EQ(kStoredTimeInMs * 90, rtp_header.extension.transmissionTimeOffset);
}

View File

@ -1,155 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "transmission_bucket.h"
#include <assert.h>
#include "critical_section_wrapper.h"
#include "rtp_utility.h"
namespace webrtc {
namespace {
// Factor that is applied to the target bitrate to calculate the number of
// bytes that can be transmitted per interval.
// Increasing this factor will result in lower delays in cases of bitrate
// overshoots.
const float kBytesPerIntervalMargin = 1.5f;
// Time limit in ms between packets within a frame.
// A packet will be transmitted if the elapsed time since the last transmitted
// packet (for packets within same frames) has exceed this time limit.
const int kPacketLimitMs = 5;
// Time limit factor between frames.
// A packet in a new frame will be transmitted if the elapsed time since the
// last transmitted packet in the previous frame has exceeded the time
// difference for when the packets were stored, multiplied by this factor.
const float kFrameLimitFactor = 1.2f;
} // namespace
TransmissionBucket::TransmissionBucket(RtpRtcpClock* clock)
: clock_(clock),
critsect_(CriticalSectionWrapper::CreateCriticalSection()),
accumulator_(0),
bytes_rem_interval_(0),
packets_(),
last_transmitted_packet_(0, 0, 0, 0) {
}
TransmissionBucket::~TransmissionBucket() {
packets_.clear();
delete critsect_;
}
void TransmissionBucket::Reset() {
CriticalSectionScoped cs(critsect_);
accumulator_ = 0;
bytes_rem_interval_ = 0;
packets_.clear();
}
void TransmissionBucket::Fill(uint16_t seq_num,
uint32_t timestamp,
uint16_t num_bytes) {
CriticalSectionScoped cs(critsect_);
accumulator_ += num_bytes;
Packet p(seq_num, timestamp, num_bytes, clock_->GetTimeInMS());
packets_.push_back(p);
}
bool TransmissionBucket::Empty() {
CriticalSectionScoped cs(critsect_);
return packets_.empty();
}
void TransmissionBucket::UpdateBytesPerInterval(
uint32_t delta_time_ms,
uint16_t target_bitrate_kbps) {
CriticalSectionScoped cs(critsect_);
uint32_t bytes_per_interval =
kBytesPerIntervalMargin * (target_bitrate_kbps * delta_time_ms / 8);
if (bytes_rem_interval_ < 0) {
bytes_rem_interval_ += bytes_per_interval;
} else {
bytes_rem_interval_ = bytes_per_interval;
}
}
int32_t TransmissionBucket::GetNextPacket() {
CriticalSectionScoped cs(critsect_);
if (accumulator_ == 0) {
// Empty.
return -1;
}
std::vector<Packet>::const_iterator it_begin = packets_.begin();
const uint16_t num_bytes = (*it_begin).length;
const uint16_t seq_num = (*it_begin).sequence_number;
if (bytes_rem_interval_ <= 0 &&
!SameFrameAndPacketIntervalTimeElapsed(*it_begin) &&
!NewFrameAndFrameIntervalTimeElapsed(*it_begin)) {
// All bytes consumed for this interval.
return -1;
}
// Ok to transmit packet.
bytes_rem_interval_ -= num_bytes;
assert(accumulator_ >= num_bytes);
accumulator_ -= num_bytes;
last_transmitted_packet_ = packets_[0];
last_transmitted_packet_.transmitted_ms = clock_->GetTimeInMS();
packets_.erase(packets_.begin());
return seq_num;
}
bool TransmissionBucket::SameFrameAndPacketIntervalTimeElapsed(
const Packet& current_packet) {
if (last_transmitted_packet_.length == 0) {
// Not stored.
return false;
}
if (current_packet.timestamp != last_transmitted_packet_.timestamp) {
// Not same frame.
return false;
}
if ((clock_->GetTimeInMS() - last_transmitted_packet_.transmitted_ms) <
kPacketLimitMs) {
// Time has not elapsed.
return false;
}
return true;
}
bool TransmissionBucket::NewFrameAndFrameIntervalTimeElapsed(
const Packet& current_packet) {
if (last_transmitted_packet_.length == 0) {
// Not stored.
return false;
}
if (current_packet.timestamp == last_transmitted_packet_.timestamp) {
// Not a new frame.
return false;
}
if ((clock_->GetTimeInMS() - last_transmitted_packet_.transmitted_ms) <
kFrameLimitFactor *
(current_packet.stored_ms - last_transmitted_packet_.stored_ms)) {
// Time has not elapsed.
return false;
}
return true;
}
} // namespace webrtc

View File

@ -1,77 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef WEBRTC_MODULES_RTP_RTCP_TRANSMISSION_BUCKET_H_
#define WEBRTC_MODULES_RTP_RTCP_TRANSMISSION_BUCKET_H_
#include <vector>
#include "typedefs.h"
namespace webrtc
{
class CriticalSectionWrapper;
class RtpRtcpClock;
class TransmissionBucket {
public:
TransmissionBucket(RtpRtcpClock* clock);
~TransmissionBucket();
// Resets members to initial state.
void Reset();
// Adds packet to be sent.
void Fill(uint16_t seq_num, uint32_t timestamp, uint16_t num_bytes);
// Returns true if there is no packet to be sent.
bool Empty();
// Updates the number of bytes that can be sent for the next time interval.
void UpdateBytesPerInterval(uint32_t delta_time_in_ms,
uint16_t target_bitrate_kbps);
// Checks if next packet in line can be transmitted. Returns the sequence
// number of the packet on success, -1 otherwise. The packet is removed from
// the vector on success.
int32_t GetNextPacket();
private:
struct Packet {
Packet(uint16_t seq_number,
uint32_t time_stamp,
uint16_t length_in_bytes,
int64_t now)
: sequence_number(seq_number),
timestamp(time_stamp),
length(length_in_bytes),
stored_ms(now),
transmitted_ms(0) {
}
uint16_t sequence_number;
uint32_t timestamp;
uint16_t length;
int64_t stored_ms;
int64_t transmitted_ms;
};
bool SameFrameAndPacketIntervalTimeElapsed(const Packet& current_packet);
bool NewFrameAndFrameIntervalTimeElapsed(const Packet& current_packet);
RtpRtcpClock* clock_;
CriticalSectionWrapper* critsect_;
uint32_t accumulator_;
int32_t bytes_rem_interval_;
std::vector<Packet> packets_;
Packet last_transmitted_packet_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_RTP_RTCP_TRANSMISSION_BUCKET_H_

View File

@ -1,131 +0,0 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file includes unit tests for the TransmissionBucket.
*/
#include <gtest/gtest.h>
#include "rtp_rtcp_defines.h"
#include "transmission_bucket.h"
#include "typedefs.h"
namespace webrtc {
// TODO(asapersson): This class has been introduced in several test files.
// Break out into a unittest helper file.
class FakeClock : public RtpRtcpClock {
public:
FakeClock() {
time_in_ms_ = 123456;
}
// Return a timestamp in milliseconds relative to some arbitrary
// source; the source is fixed for this clock.
virtual WebRtc_Word64 GetTimeInMS() {
return time_in_ms_;
}
// Retrieve an NTP absolute timestamp.
virtual void CurrentNTP(WebRtc_UWord32& secs, WebRtc_UWord32& frac) {
secs = time_in_ms_ / 1000;
frac = (time_in_ms_ % 1000) * 4294967;
}
void IncrementTime(WebRtc_UWord32 time_increment_ms) {
time_in_ms_ += time_increment_ms;
}
private:
int64_t time_in_ms_;
};
class TransmissionBucketTest : public ::testing::Test {
protected:
TransmissionBucketTest()
: send_bucket_(new TransmissionBucket(&fake_clock_)) {
}
~TransmissionBucketTest() {
delete send_bucket_;
}
FakeClock fake_clock_;
TransmissionBucket* send_bucket_;
};
TEST_F(TransmissionBucketTest, Fill) {
EXPECT_TRUE(send_bucket_->Empty());
send_bucket_->Fill(1, 3000, 100);
EXPECT_FALSE(send_bucket_->Empty());
}
TEST_F(TransmissionBucketTest, Reset) {
send_bucket_->Fill(1, 3000, 100);
EXPECT_FALSE(send_bucket_->Empty());
send_bucket_->Reset();
EXPECT_TRUE(send_bucket_->Empty());
}
TEST_F(TransmissionBucketTest, GetNextPacket) {
EXPECT_EQ(-1, send_bucket_->GetNextPacket()); // empty
const int delta_time_ms = 1;
const int target_bitrate_kbps = 800; // 150 bytes per interval
send_bucket_->UpdateBytesPerInterval(delta_time_ms, target_bitrate_kbps);
send_bucket_->Fill(1235, 3000, 75);
send_bucket_->Fill(1236, 3000, 75);
EXPECT_EQ(1235, send_bucket_->GetNextPacket()); // ok
EXPECT_EQ(1236, send_bucket_->GetNextPacket()); // ok
EXPECT_TRUE(send_bucket_->Empty());
send_bucket_->Fill(1237, 3000, 75);
EXPECT_EQ(-1, send_bucket_->GetNextPacket()); // packet does not fit
}
TEST_F(TransmissionBucketTest, SameFrameAndPacketIntervalTimeElapsed) {
const int delta_time_ms = 1;
const int target_bitrate_kbps = 800; // 150 bytes per interval
send_bucket_->UpdateBytesPerInterval(delta_time_ms, target_bitrate_kbps);
send_bucket_->Fill(1235, 3000, 75);
send_bucket_->Fill(1236, 3000, 75);
EXPECT_EQ(1235, send_bucket_->GetNextPacket()); // ok
EXPECT_EQ(1236, send_bucket_->GetNextPacket()); // ok
EXPECT_TRUE(send_bucket_->Empty());
fake_clock_.IncrementTime(4);
send_bucket_->Fill(1237, 3000, 75);
EXPECT_EQ(-1, send_bucket_->GetNextPacket()); // packet does not fit
// Time limit (5ms) elapsed.
fake_clock_.IncrementTime(1);
EXPECT_EQ(1237, send_bucket_->GetNextPacket());
}
TEST_F(TransmissionBucketTest, NewFrameAndFrameIntervalTimeElapsed) {
const int delta_time_ms = 1;
const int target_bitrate_kbps = 800; // 150 bytes per interval
send_bucket_->UpdateBytesPerInterval(delta_time_ms, target_bitrate_kbps);
send_bucket_->Fill(1235, 3000, 75);
send_bucket_->Fill(1236, 3000, 75);
EXPECT_EQ(1235, send_bucket_->GetNextPacket()); // ok
EXPECT_EQ(1236, send_bucket_->GetNextPacket()); // ok
EXPECT_TRUE(send_bucket_->Empty());
fake_clock_.IncrementTime(4);
send_bucket_->Fill(1237, 6000, 75);
EXPECT_EQ(-1, send_bucket_->GetNextPacket()); // packet does not fit
// Time limit elapsed (4*1.2=4.8ms).
fake_clock_.IncrementTime(1);
EXPECT_EQ(1237, send_bucket_->GetNextPacket());
}
} // namespace webrtc

View File

@ -91,12 +91,6 @@ TEST_F(RtpRtcpAPITest, CSRC) {
EXPECT_EQ(test_CSRC[1], testOfCSRC[1]);
}
TEST_F(RtpRtcpAPITest, TrafficSmoothing) {
EXPECT_FALSE(module->TransmissionSmoothingStatus());
module->SetTransmissionSmoothingStatus(true);
EXPECT_TRUE(module->TransmissionSmoothingStatus());
}
TEST_F(RtpRtcpAPITest, RTCP) {
EXPECT_EQ(kRtcpOff, module->RTCP());
EXPECT_EQ(0, module->SetRTCPStatus(kRtcpCompound));

View File

@ -14,6 +14,7 @@
#include <vector>
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/pacing/include/paced_sender.h"
#include "modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "modules/udp_transport/interface/udp_transport.h"
#include "modules/utility/interface/process_thread.h"
@ -41,6 +42,7 @@ ViEChannel::ViEChannel(WebRtc_Word32 channel_id,
RtcpIntraFrameObserver* intra_frame_observer,
RtcpBandwidthObserver* bandwidth_observer,
RemoteBitrateEstimator* remote_bitrate_estimator,
PacedSender* paced_sender,
RtpRtcp* default_rtp_rtcp,
bool sender)
: ViEFrameProviderBase(channel_id, engine_id),
@ -67,6 +69,7 @@ ViEChannel::ViEChannel(WebRtc_Word32 channel_id,
rtcp_observer_(NULL),
networkObserver_(NULL),
intra_frame_observer_(intra_frame_observer),
paced_sender_(paced_sender),
bandwidth_observer_(bandwidth_observer),
rtp_packet_timeout_(false),
send_timestamp_extension_id_(kInvalidRtpExtensionId),
@ -97,6 +100,7 @@ ViEChannel::ViEChannel(WebRtc_Word32 channel_id,
configuration.intra_frame_callback = intra_frame_observer;
configuration.bandwidth_callback = bandwidth_observer;
configuration.remote_bitrate_estimator = remote_bitrate_estimator;
configuration.paced_sender = paced_sender;
rtp_rtcp_.reset(RtpRtcp::CreateRtpRtcp(configuration));
vie_receiver_.SetRtpRtcpModule(rtp_rtcp_.get());
@ -126,7 +130,12 @@ WebRtc_Word32 ViEChannel::Init() {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: RTP::SetRTCPStatus failure", __FUNCTION__);
}
if (paced_sender_) {
if (rtp_rtcp_->SetStorePacketsStatus(true, kNackHistorySize) != 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s:SetStorePacketsStatus failure", __FUNCTION__);
}
}
// VCM initialization
if (vcm_.InitializeReceiver() != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
@ -231,7 +240,6 @@ WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& video_codec,
rtp_rtcp_->SetSendingStatus(false);
}
NACKMethod nack_method = rtp_rtcp_->NACK();
bool transmission_smoothening = rtp_rtcp_->TransmissionSmoothingStatus();
bool fec_enabled = false;
WebRtc_UWord8 payload_type_red;
@ -254,6 +262,7 @@ WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& video_codec,
configuration.outgoing_transport = &vie_sender_;
configuration.intra_frame_callback = intra_frame_observer_;
configuration.bandwidth_callback = bandwidth_observer_.get();
configuration.paced_sender = paced_sender_;
RtpRtcp* rtp_rtcp = RtpRtcp::CreateRtpRtcp(configuration);
@ -266,6 +275,8 @@ WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& video_codec,
if (nack_method != kNackOff) {
rtp_rtcp->SetStorePacketsStatus(true, kNackHistorySize);
rtp_rtcp->SetNACKStatus(nack_method);
} else if (paced_sender_) {
rtp_rtcp->SetStorePacketsStatus(true, kNackHistorySize);
}
if (fec_enabled) {
rtp_rtcp->SetGenericFECStatus(fec_enabled, payload_type_red,
@ -301,7 +312,6 @@ WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& video_codec,
if (mtu_ != 0) {
rtp_rtcp->SetMaxTransferUnit(mtu_);
}
rtp_rtcp->SetTransmissionSmoothingStatus(transmission_smoothening);
if (restart_rtp) {
rtp_rtcp->SetSendingStatus(true);
}
@ -602,8 +612,8 @@ WebRtc_Word32 ViEChannel::ProcessNACKRequest(const bool enable) {
it != simulcast_rtp_rtcp_.end();
it++) {
RtpRtcp* rtp_rtcp = *it;
rtp_rtcp->SetStorePacketsStatus(true, kNackHistorySize);
rtp_rtcp->SetNACKStatus(nackMethod);
rtp_rtcp->SetStorePacketsStatus(true, kNackHistorySize);
}
} else {
CriticalSectionScoped cs(rtp_rtcp_cs_.get());
@ -611,11 +621,15 @@ WebRtc_Word32 ViEChannel::ProcessNACKRequest(const bool enable) {
it != simulcast_rtp_rtcp_.end();
it++) {
RtpRtcp* rtp_rtcp = *it;
rtp_rtcp->SetStorePacketsStatus(false);
if (paced_sender_ == NULL) {
rtp_rtcp->SetStorePacketsStatus(false, 0);
}
rtp_rtcp->SetNACKStatus(kNackOff);
}
rtp_rtcp_->SetStorePacketsStatus(false);
vcm_.RegisterPacketRequestCallback(NULL);
if (paced_sender_ == NULL) {
rtp_rtcp_->SetStorePacketsStatus(false, 0);
}
if (rtp_rtcp_->SetNACKStatus(kNackOff) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: Could not turn off NACK", __FUNCTION__);
@ -738,12 +752,8 @@ int ViEChannel::SetReceiveTimestampOffsetStatus(bool enable, int id) {
}
void ViEChannel::SetTransmissionSmoothingStatus(bool enable) {
CriticalSectionScoped cs(rtp_rtcp_cs_.get());
rtp_rtcp_->SetTransmissionSmoothingStatus(enable);
for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
it != simulcast_rtp_rtcp_.end(); ++it) {
(*it)->SetTransmissionSmoothingStatus(enable);
}
assert(paced_sender_ && "No paced sender registered.");
paced_sender_->SetStatus(enable);
}
WebRtc_Word32 ViEChannel::EnableTMMBR(const bool enable) {

View File

@ -33,6 +33,7 @@ namespace webrtc {
class CriticalSectionWrapper;
class Encryption;
class PacedSender;
class ProcessThread;
class RtpRtcp;
class ThreadWrapper;
@ -63,6 +64,7 @@ class ViEChannel
RtcpIntraFrameObserver* intra_frame_observer,
RtcpBandwidthObserver* bandwidth_observer,
RemoteBitrateEstimator* remote_bitrate_estimator,
PacedSender* paced_sender,
RtpRtcp* default_rtp_rtcp,
bool sender);
~ViEChannel();
@ -380,6 +382,8 @@ class ViEChannel
ViERTCPObserver* rtcp_observer_;
ViENetworkObserver* networkObserver_;
RtcpIntraFrameObserver* intra_frame_observer_;
PacedSender* paced_sender_;
scoped_ptr<RtcpBandwidthObserver> bandwidth_observer_;
bool rtp_packet_timeout_;
int send_timestamp_extension_id_;

View File

@ -142,14 +142,11 @@ int ViEChannelManager::CreateChannel(int* channel_id,
if (!channel_group) {
return -1;
}
int new_channel_id = FreeChannelId();
if (new_channel_id == -1) {
return -1;
}
BitrateController* bitrate_controller = channel_group->GetBitrateController();
RtcpBandwidthObserver* bandwidth_observer =
bitrate_controller->CreateRtcpBandwidthObserver();
RemoteBitrateEstimator* remote_bitrate_estimator =
@ -164,10 +161,12 @@ int ViEChannelManager::CreateChannel(int* channel_id,
*module_process_thread_,
bitrate_controller);
if (!(vie_encoder->Init() &&
CreateChannelObject(
new_channel_id, vie_encoder, bandwidth_observer,
remote_bitrate_estimator,
encoder_state_feedback->GetRtcpIntraFrameObserver(), sender))) {
CreateChannelObject(
new_channel_id,
vie_encoder,
bandwidth_observer,
remote_bitrate_estimator,
encoder_state_feedback->GetRtcpIntraFrameObserver(), sender))) {
delete vie_encoder;
vie_encoder = NULL;
}
@ -180,18 +179,19 @@ int ViEChannelManager::CreateChannel(int* channel_id,
vie_encoder = ViEEncoderPtr(original_channel);
assert(vie_encoder);
if (!CreateChannelObject(
new_channel_id, vie_encoder, bandwidth_observer,
new_channel_id,
vie_encoder,
bandwidth_observer,
remote_bitrate_estimator,
encoder_state_feedback->GetRtcpIntraFrameObserver(), sender)) {
encoder_state_feedback->GetRtcpIntraFrameObserver(),
sender)) {
vie_encoder = NULL;
}
}
if (!vie_encoder) {
ReturnChannelId(new_channel_id);
return -1;
}
*channel_id = new_channel_id;
channel_group->AddChannel(*channel_id);
return 0;
@ -401,6 +401,8 @@ bool ViEChannelManager::CreateChannelObject(
RemoteBitrateEstimator* remote_bitrate_estimator,
RtcpIntraFrameObserver* intra_frame_observer,
bool sender) {
PacedSender* paced_sender = vie_encoder->GetPacedSender();
// Register the channel at the encoder.
RtpRtcp* send_rtp_rtcp_module = vie_encoder->SendRtpRtcpModule();
@ -410,6 +412,7 @@ bool ViEChannelManager::CreateChannelObject(
intra_frame_observer,
bandwidth_observer,
remote_bitrate_estimator,
paced_sender,
send_rtp_rtcp_module,
sender);
if (vie_channel->Init() != 0) {

View File

@ -84,7 +84,8 @@ class ViEChannelManager: private ViEManagerBase {
private:
// Creates a channel object connected to |vie_encoder|. Assumed to be called
// protected.
bool CreateChannelObject(int channel_id, ViEEncoder* vie_encoder,
bool CreateChannelObject(int channel_id,
ViEEncoder* vie_encoder,
RtcpBandwidthObserver* bandwidth_observer,
RemoteBitrateEstimator* remote_bitrate_estimator,
RtcpIntraFrameObserver* intra_frame_observer,

View File

@ -13,6 +13,7 @@
#include <cassert>
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/pacing/include/paced_sender.h"
#include "modules/rtp_rtcp/interface/rtp_rtcp.h"
#include "modules/utility/interface/process_thread.h"
#include "modules/video_coding/codecs/interface/video_codec_interface.h"
@ -27,6 +28,9 @@
namespace webrtc {
// Pace in kbits/s until we receive first estimate.
const int kInitialPace = 2000;
class QMVideoSettingsCallback : public VCMQMSettingsCallback {
public:
explicit QMVideoSettingsCallback(VideoProcessingModule* vpm);
@ -56,6 +60,22 @@ class ViEBitrateObserver : public BitrateObserver {
ViEEncoder* owner_;
};
class ViEPacedSenderCallback : public PacedSender::Callback {
public:
explicit ViEPacedSenderCallback(ViEEncoder* owner)
: owner_(owner) {
}
virtual void TimeToSendPacket(uint32_t ssrc, uint16_t sequence_number,
int64_t capture_time_ms) {
owner_->TimeToSendPacket(ssrc, sequence_number, capture_time_ms);
}
virtual void TimeToSendPadding(int /*bytes*/) {
// TODO(pwestin): Hook up this.
}
private:
ViEEncoder* owner_;
};
ViEEncoder::ViEEncoder(WebRtc_Word32 engine_id,
WebRtc_Word32 channel_id,
WebRtc_UWord32 number_of_cores,
@ -97,6 +117,8 @@ ViEEncoder::ViEEncoder(WebRtc_Word32 engine_id,
default_rtp_rtcp_.reset(RtpRtcp::CreateRtpRtcp(configuration));
bitrate_observer_.reset(new ViEBitrateObserver(this));
pacing_callback_.reset(new ViEPacedSenderCallback(this));
paced_sender_.reset(new PacedSender(pacing_callback_.get(), kInitialPace));
}
bool ViEEncoder::Init() {
@ -111,19 +133,14 @@ bool ViEEncoder::Init() {
// Enable/disable content analysis: off by default for now.
vpm_.EnableContentAnalysis(false);
if (module_process_thread_.RegisterModule(&vcm_) != 0) {
if (module_process_thread_.RegisterModule(&vcm_) != 0 ||
module_process_thread_.RegisterModule(default_rtp_rtcp_.get()) != 0 ||
module_process_thread_.RegisterModule(paced_sender_.get()) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s RegisterModule failure", __FUNCTION__);
return false;
}
if (module_process_thread_.RegisterModule(default_rtp_rtcp_.get()) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s RegisterModule failure", __FUNCTION__);
return false;
}
if (qm_callback_) {
delete qm_callback_;
}
@ -196,6 +213,7 @@ ViEEncoder::~ViEEncoder() {
module_process_thread_.DeRegisterModule(&vcm_);
module_process_thread_.DeRegisterModule(&vpm_);
module_process_thread_.DeRegisterModule(default_rtp_rtcp_.get());
module_process_thread_.DeRegisterModule(paced_sender_.get());
delete &vcm_;
delete &vpm_;
delete qm_callback_;
@ -415,6 +433,11 @@ WebRtc_Word32 ViEEncoder::ScaleInputImage(bool enable) {
return 0;
}
void ViEEncoder::TimeToSendPacket(uint32_t ssrc, uint16_t sequence_number,
int64_t capture_time_ms) {
default_rtp_rtcp_->TimeToSendPacket(ssrc, sequence_number, capture_time_ms);
}
RtpRtcp* ViEEncoder::SendRtpRtcpModule() {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
@ -878,12 +901,16 @@ void ViEEncoder::OnNetworkChanged(const uint32_t bitrate_bps,
"%s(bitrate_bps: %u, fraction_lost: %u, rtt_ms: %u",
__FUNCTION__, bitrate_bps, fraction_lost, round_trip_time_ms);
vcm_.SetChannelParameters(bitrate_bps / 1000, fraction_lost,
round_trip_time_ms);
int bitrate_kbps = bitrate_bps / 1000;
vcm_.SetChannelParameters(bitrate_kbps, fraction_lost, round_trip_time_ms);
paced_sender_->UpdateBitrate(bitrate_kbps);
default_rtp_rtcp_->SetTargetSendBitrate(bitrate_bps);
}
PacedSender* ViEEncoder::GetPacedSender() {
return paced_sender_.get();
}
WebRtc_Word32 ViEEncoder::RegisterEffectFilter(ViEEffectFilter* effect_filter) {
CriticalSectionScoped cs(callback_cs_.get());
if (effect_filter == NULL) {

View File

@ -28,6 +28,7 @@
namespace webrtc {
class CriticalSectionWrapper;
class PacedSender;
class ProcessThread;
class QMVideoSettingsCallback;
class RtpRtcp;
@ -35,6 +36,7 @@ class VideoCodingModule;
class ViEBitrateObserver;
class ViEEffectFilter;
class ViEEncoderObserver;
class ViEPacedSenderCallback;
class ViEEncoder
: public RtcpIntraFrameObserver,
@ -44,6 +46,7 @@ class ViEEncoder
public ViEFrameCallback {
public:
friend class ViEBitrateObserver;
friend class ViEPacedSenderCallback;
ViEEncoder(WebRtc_Word32 engine_id,
WebRtc_Word32 channel_id,
@ -76,6 +79,8 @@ class ViEEncoder
unsigned char config_parameters[kConfigParameterSize],
unsigned char& config_parameters_size);
PacedSender* GetPacedSender();
// Scale or crop/pad image.
WebRtc_Word32 ScaleInputImage(bool enable);
@ -159,6 +164,10 @@ class ViEEncoder
const uint8_t fraction_lost,
const uint32_t round_trip_time_ms);
// Called by PacedSender.
void TimeToSendPacket(uint32_t ssrc, uint16_t sequence_number,
int64_t capture_time_ms);
private:
WebRtc_Word32 engine_id_;
const int channel_id_;
@ -170,6 +179,8 @@ class ViEEncoder
scoped_ptr<CriticalSectionWrapper> callback_cs_;
scoped_ptr<CriticalSectionWrapper> data_cs_;
scoped_ptr<BitrateObserver> bitrate_observer_;
scoped_ptr<PacedSender> paced_sender_;
scoped_ptr<ViEPacedSenderCallback> pacing_callback_;
BitrateController* bitrate_controller_;