Add interface to signal a network down event.

- In real-time mode, encoding is paused until the network is back up.
- In buffering mode, the encoder keeps encoding and packets are buffered at
  the sender. When the buffer grows above the target delay, encoding is
  paused (see the sketch after this list).
- Fixes a couple of pacing issues that were found with the new test.
- Introduces separate max bitrates for pacing and for encoding. This allows
  the pacer to drain its queue faster after a network down event.

(Work based on issue 1237004)

BUG=1524
TESTS=trybots,vie_auto_test

Review URL: https://webrtc-codereview.appspot.com/1258004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3730 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: stefan@webrtc.org
Date:   2013-03-27 16:36:01 +00:00
Parent: 686001dd96
Commit: bfacda60be
14 changed files with 263 additions and 61 deletions


@ -64,6 +64,9 @@ class PacedSender : public Module {
bool SendPacket(Priority priority, uint32_t ssrc, uint16_t sequence_number,
int64_t capture_time_ms, int bytes);
// Returns the time since the oldest queued packet was captured.
int QueueInMs() const;
// Returns the number of milliseconds until the module wants a worker thread
// to call Process.
virtual int32_t TimeUntilNextProcess();
@ -85,6 +88,9 @@ class PacedSender : public Module {
int64_t capture_time_ms_;
int bytes_;
};
typedef std::list<Packet> PacketList;
// Checks if next packet in line can be transmitted. Returns true on success.
bool GetNextPacket(uint32_t* ssrc, uint16_t* sequence_number,
int64_t* capture_time_ms);
@ -109,9 +115,9 @@ class PacedSender : public Module {
TickTime time_last_update_;
TickTime time_last_send_;
- std::list<Packet> high_priority_packets_;
- std::list<Packet> normal_priority_packets_;
- std::list<Packet> low_priority_packets_;
+ PacketList high_priority_packets_;
+ PacketList normal_priority_packets_;
+ PacketList low_priority_packets_;
};
} // namespace webrtc
#endif // WEBRTC_MODULES_PACED_SENDER_H_


@ -82,6 +82,9 @@ bool PacedSender::SendPacket(Priority priority, uint32_t ssrc,
UpdateState(bytes);
return true; // We can send now.
}
if (capture_time_ms < 0) {
capture_time_ms = TickTime::MillisecondTimestamp();
}
if (paused_) {
// Queue all packets when we are paused.
switch (priority) {
@ -135,6 +138,28 @@ bool PacedSender::SendPacket(Priority priority, uint32_t ssrc,
return false;
}
int PacedSender::QueueInMs() const {
CriticalSectionScoped cs(critsect_.get());
int64_t now_ms = TickTime::MillisecondTimestamp();
int64_t oldest_packet_capture_time = now_ms;
if (!high_priority_packets_.empty()) {
oldest_packet_capture_time = std::min(
oldest_packet_capture_time,
high_priority_packets_.front().capture_time_ms_);
}
if (!normal_priority_packets_.empty()) {
oldest_packet_capture_time = std::min(
oldest_packet_capture_time,
normal_priority_packets_.front().capture_time_ms_);
}
if (!low_priority_packets_.empty()) {
oldest_packet_capture_time = std::min(
oldest_packet_capture_time,
low_priority_packets_.front().capture_time_ms_);
}
return now_ms - oldest_packet_capture_time;
}
int32_t PacedSender::TimeUntilNextProcess() {
CriticalSectionScoped cs(critsect_.get());
int64_t elapsed_time_ms =


@ -185,8 +185,11 @@ TEST_F(PacedSenderTest, Pause) {
uint32_t ssrc_low_priority = 12345;
uint32_t ssrc = 12346;
uint16_t sequence_number = 1234;
- int64_t capture_time_ms = 56789;
- int64_t second_capture_time_ms = 67890;
+ int64_t capture_time_ms = TickTime::MillisecondTimestamp();
+ TickTime::AdvanceFakeClock(10000);
+ int64_t second_capture_time_ms = TickTime::MillisecondTimestamp();
EXPECT_EQ(0, send_bucket_->QueueInMs());
// Due to the multiplicative factor we can send 3 packets not 2 packets.
EXPECT_TRUE(send_bucket_->SendPacket(PacedSender::kLowPriority,
@ -208,6 +211,9 @@ TEST_F(PacedSenderTest, Pause) {
EXPECT_FALSE(send_bucket_->SendPacket(PacedSender::kHighPriority,
ssrc, sequence_number++, capture_time_ms, 250));
EXPECT_EQ(TickTime::MillisecondTimestamp() - capture_time_ms,
send_bucket_->QueueInMs());
// Expect no packet to come out while paused.
EXPECT_CALL(callback_, TimeToSendPadding(_)).Times(0);
EXPECT_CALL(callback_, TimeToSendPacket(_, _, _)).Times(0);
@ -235,6 +241,7 @@ TEST_F(PacedSenderTest, Pause) {
TickTime::AdvanceFakeClock(5);
EXPECT_EQ(0, send_bucket_->TimeUntilNextProcess());
EXPECT_EQ(0, send_bucket_->Process());
EXPECT_EQ(0, send_bucket_->QueueInMs());
}
} // namespace test


@ -987,15 +987,25 @@ void ModuleRtpRtcpImpl::TimeToSendPacket(uint32_t ssrc,
(*it)->rtp_sender_.TimeToSendPacket(sequence_number, capture_time_ms);
return;
}
- it++;
+ ++it;
}
} else {
- bool have_child_modules(child_modules_.empty() ? false : true);
+ bool have_child_modules = !child_modules_.empty();
if (!have_child_modules) {
// Don't send from default module.
if (SendingMedia() && ssrc == rtp_sender_.SSRC()) {
rtp_sender_.TimeToSendPacket(sequence_number, capture_time_ms);
}
} else {
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
if ((*it)->SendingMedia() && ssrc == (*it)->rtp_sender_.SSRC()) {
(*it)->rtp_sender_.TimeToSendPacket(sequence_number, capture_time_ms);
return;
}
++it;
}
}
}
}


@ -673,7 +673,7 @@ WebRtc_Word32 RTPSender::SendToNetwork(
}
}
- if (paced_sender_) {
+ if (paced_sender_ && storage != kDontStore) {
if (!paced_sender_->SendPacket(
PacedSender::kNormalPriority, rtp_header.header.ssrc,
rtp_header.header.sequenceNumber, capture_time_ms,


@ -205,7 +205,7 @@ RTPSenderVideo::SendRTPIntraRequest()
ModuleRTPUtility::AssignUWord32ToBuffer(data+4, _rtpSender.SSRC());
- return _rtpSender.SendToNetwork(data, 0, length, -1, kAllowRetransmission);
+ return _rtpSender.SendToNetwork(data, 0, length, -1, kDontStore);
}
WebRtc_Word32


@ -94,6 +94,12 @@ VCMMediaOptimization::SetTargetRates(WebRtc_UWord32 target_bitrate,
WebRtc_UWord8 &fractionLost,
WebRtc_UWord32 roundTripTimeMs)
{
// TODO(holmer): Consider putting this threshold only on the video bitrate,
// and not on protection.
if (_maxBitRate > 0 &&
target_bitrate > static_cast<uint32_t>(_maxBitRate)) {
target_bitrate = _maxBitRate;
}
VCMProtectionMethod *selectedMethod = _lossProtLogic->SelectedMethod();
float target_bitrate_kbps = static_cast<float>(target_bitrate) / 1000.0f;
_lossProtLogic->UpdateBitRate(target_bitrate_kbps);


@ -47,50 +47,47 @@ VideoCodingModuleImpl::VideoCodingModuleImpl(const WebRtc_Word32 id,
Clock* clock,
EventFactory* event_factory,
bool owns_event_factory)
:
_id(id),
clock_(clock),
_receiveCritSect(CriticalSectionWrapper::CreateCriticalSection()),
_receiverInited(false),
_timing(clock_, id, 1),
_dualTiming(clock_, id, 2, &_timing),
_receiver(&_timing, clock_, event_factory, id, 1, true),
_dualReceiver(&_dualTiming, clock_, event_factory, id, 2, false),
_decodedFrameCallback(_timing, clock_),
_dualDecodedFrameCallback(_dualTiming, clock_),
_frameTypeCallback(NULL),
_frameStorageCallback(NULL),
_receiveStatsCallback(NULL),
_packetRequestCallback(NULL),
_decoder(NULL),
_dualDecoder(NULL),
: _id(id),
clock_(clock),
_receiveCritSect(CriticalSectionWrapper::CreateCriticalSection()),
_receiverInited(false),
_timing(clock_, id, 1),
_dualTiming(clock_, id, 2, &_timing),
_receiver(&_timing, clock_, event_factory, id, 1, true),
_dualReceiver(&_dualTiming, clock_, event_factory, id, 2, false),
_decodedFrameCallback(_timing, clock_),
_dualDecodedFrameCallback(_dualTiming, clock_),
_frameTypeCallback(NULL),
_frameStorageCallback(NULL),
_receiveStatsCallback(NULL),
_packetRequestCallback(NULL),
_decoder(NULL),
_dualDecoder(NULL),
#ifdef DEBUG_DECODER_BIT_STREAM
_bitStreamBeforeDecoder(NULL),
_bitStreamBeforeDecoder(NULL),
#endif
_frameFromFile(),
_keyRequestMode(kKeyOnError),
_scheduleKeyRequest(false),
max_nack_list_size_(0),
_sendCritSect(CriticalSectionWrapper::CreateCriticalSection()),
_encoder(),
_encodedFrameCallback(),
_nextFrameTypes(1, kVideoFrameDelta),
_mediaOpt(id, clock_),
_sendCodecType(kVideoCodecUnknown),
_sendStatsCallback(NULL),
_encoderInputFile(NULL),
_codecDataBase(id),
_receiveStatsTimer(1000, clock_),
_sendStatsTimer(1000, clock_),
_retransmissionTimer(10, clock_),
_keyRequestTimer(500, clock_),
event_factory_(event_factory),
owns_event_factory_(owns_event_factory)
{
assert(clock_);
_frameFromFile(),
_keyRequestMode(kKeyOnError),
_scheduleKeyRequest(false),
max_nack_list_size_(0),
_sendCritSect(CriticalSectionWrapper::CreateCriticalSection()),
_encoder(),
_encodedFrameCallback(),
_nextFrameTypes(1, kVideoFrameDelta),
_mediaOpt(id, clock_),
_sendCodecType(kVideoCodecUnknown),
_sendStatsCallback(NULL),
_encoderInputFile(NULL),
_codecDataBase(id),
_receiveStatsTimer(1000, clock_),
_sendStatsTimer(1000, clock_),
_retransmissionTimer(10, clock_),
_keyRequestTimer(500, clock_),
event_factory_(event_factory),
owns_event_factory_(owns_event_factory) {
assert(clock_);
#ifdef DEBUG_DECODER_BIT_STREAM
_bitStreamBeforeDecoder = fopen("decoderBitStream.bit", "wb");
_bitStreamBeforeDecoder = fopen("decoderBitStream.bit", "wb");
#endif
}


@ -19,7 +19,7 @@
// - Packet timeout notification.
// - DeadorAlive connection observations.
#include "common_types.h"
#include "webrtc/common_types.h"
namespace webrtc {
@ -65,6 +65,11 @@ class WEBRTC_DLLEXPORT ViENetwork {
// for all sub-API:s before the VideoEngine object can be safely deleted.
virtual int Release() = 0;
// Inform the engine whether the network adapter is currently transmitting
// packets or not.
virtual void SetNetworkTransmissionState(const int video_channel,
const bool is_transmitting) = 0;
// Specifies the ports to receive RTP packets on. It is also possible to set
// port for RTCP and local IP address.
virtual int SetLocalReceiver(const int video_channel,

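A minimal usage sketch for the call declared above, assuming an application
that has already created a VideoEngine, obtained its ViENetwork interface, and
set up a video channel (the helper name and include path are illustrative);
the autotest below exercises the same API:

  #include "webrtc/video_engine/include/vie_network.h"  // assumed include path

  // Sketch only: forward platform connectivity changes to the engine for an
  // existing channel. vie_network and video_channel are assumed to have been
  // obtained and configured elsewhere.
  void OnConnectivityChanged(webrtc::ViENetwork* vie_network,
                             int video_channel,
                             bool network_up) {
    // While transmission is off the pacer is paused and, depending on the
    // buffering mode, the encoder is paused as well.
    vie_network->SetNetworkTransmissionState(video_channel, network_up);
  }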

@ -365,6 +365,81 @@ void ViEAutoTest::ViERtpRtcpStandardTest()
// short above?
// EXPECT_LT(inEndPos, outEndPos + 100);
EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
ViETest::Log("Testing Network Down...\n");
EXPECT_EQ(0, ViE.rtp_rtcp->SetNACKStatus(tbChannel.videoChannel, true));
EXPECT_EQ(0, ViE.rtp_rtcp->SetTransmissionSmoothingStatus(
tbChannel.videoChannel, true));
EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
// Real-time mode.
AutoTestSleep(kAutoTestSleepTimeMs);
EXPECT_EQ(0, ViE.rtp_rtcp->GetBandwidthUsage(
tbChannel.videoChannel, sentTotalBitrate, sentVideoBitrate,
sentFecBitrate, sentNackBitrate));
EXPECT_GT(sentTotalBitrate, 0u);
// Simulate lost reception and verify that nothing is sent during that time.
ViE.network->SetNetworkTransmissionState(tbChannel.videoChannel, false);
ViETest::Log("Network Down...\n");
AutoTestSleep(kAutoTestSleepTimeMs);
EXPECT_EQ(0, ViE.rtp_rtcp->GetBandwidthUsage(
tbChannel.videoChannel, sentTotalBitrate, sentVideoBitrate,
sentFecBitrate, sentNackBitrate));
EXPECT_EQ(sentTotalBitrate, 0u);
// Network reception back. Video should now be sent.
ViE.network->SetNetworkTransmissionState(tbChannel.videoChannel, true);
ViETest::Log("Network Up...\n");
AutoTestSleep(kAutoTestSleepTimeMs);
EXPECT_EQ(0, ViE.rtp_rtcp->GetBandwidthUsage(
tbChannel.videoChannel, sentTotalBitrate, sentVideoBitrate,
sentFecBitrate, sentNackBitrate));
EXPECT_GT(sentTotalBitrate, 0u);
// Buffering mode.
EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
ViE.rtp_rtcp->SetSenderBufferingMode(tbChannel.videoChannel,
kAutoTestSleepTimeMs / 2);
// Add extra delay to the receiver to make sure it doesn't flush due to
// too old packets being received (as the down-time introduced is longer
// than what we buffer at the sender).
ViE.rtp_rtcp->SetReceiverBufferingMode(tbChannel.videoChannel,
3 * kAutoTestSleepTimeMs / 2);
EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
AutoTestSleep(kAutoTestSleepTimeMs);
EXPECT_EQ(0, ViE.rtp_rtcp->GetBandwidthUsage(
tbChannel.videoChannel, sentTotalBitrate, sentVideoBitrate,
sentFecBitrate, sentNackBitrate));
EXPECT_GT(sentTotalBitrate, 0u);
// Simulate lost reception and verify that nothing is sent during that time.
ViETest::Log("Network Down...\n");
ViE.network->SetNetworkTransmissionState(tbChannel.videoChannel, false);
AutoTestSleep(kAutoTestSleepTimeMs);
EXPECT_EQ(0, ViE.rtp_rtcp->GetBandwidthUsage(
tbChannel.videoChannel, sentTotalBitrate, sentVideoBitrate,
sentFecBitrate, sentNackBitrate));
EXPECT_EQ(sentTotalBitrate, 0u);
// Network reception back. Video should now be sent.
ViETest::Log("Network Up...\n");
ViE.network->SetNetworkTransmissionState(tbChannel.videoChannel, true);
AutoTestSleep(kAutoTestSleepTimeMs);
EXPECT_EQ(0, ViE.rtp_rtcp->GetBandwidthUsage(
tbChannel.videoChannel, sentTotalBitrate, sentVideoBitrate,
sentFecBitrate, sentNackBitrate));
EXPECT_GT(sentTotalBitrate, 0u);
// TODO(holmer): Verify that the decoded framerate doesn't decrease on an
// outage when in buffering mode. This isn't currently possible because we
// don't have an API to get decoded framerate.
EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
// Deregister external transport
EXPECT_EQ(0, ViE.network->DeregisterSendTransport(tbChannel.videoChannel));


@ -30,7 +30,12 @@
namespace webrtc {
// Pace in kbits/s until we receive first estimate.
- const int kInitialPace = 2000;
+ static const int kInitialPace = 2000;
// Allow packets to be transmitted at up to 2 times the max video bitrate if
// the bandwidth estimate allows it.
// TODO(holmer): Expose transmission start, min and max bitrates in the
// VideoEngine API and remove the kTransmissionMaxBitrateMultiplier.
static const int kTransmissionMaxBitrateMultiplier = 2;
class QMVideoSettingsCallback : public VCMQMSettingsCallback {
public:
@ -93,7 +98,9 @@ ViEEncoder::ViEEncoder(WebRtc_Word32 engine_id,
callback_cs_(CriticalSectionWrapper::CreateCriticalSection()),
data_cs_(CriticalSectionWrapper::CreateCriticalSection()),
bitrate_controller_(bitrate_controller),
- paused_(false),
+ target_delay_ms_(0),
+ network_is_transmitting_(true),
+ encoder_paused_(false),
channels_dropping_delta_frames_(0),
drop_next_frame_(false),
fec_enabled_(false),
@ -227,12 +234,28 @@ int ViEEncoder::Owner() const {
return channel_id_;
}
void ViEEncoder::SetNetworkTransmissionState(bool is_transmitting) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s(%s)", __FUNCTION__,
is_transmitting ? "transmitting" : "not transmitting");
{
CriticalSectionScoped cs(data_cs_.get());
network_is_transmitting_ = is_transmitting;
}
if (is_transmitting) {
paced_sender_->Resume();
} else {
paced_sender_->Pause();
}
}
void ViEEncoder::Pause() {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
CriticalSectionScoped cs(data_cs_.get());
- paused_ = true;
+ encoder_paused_ = true;
}
void ViEEncoder::Restart() {
@ -240,7 +263,7 @@ void ViEEncoder::Restart() {
ViEId(engine_id_, channel_id_),
"%s", __FUNCTION__);
CriticalSectionScoped cs(data_cs_.get());
- paused_ = false;
+ encoder_paused_ = false;
}
WebRtc_Word32 ViEEncoder::DropDeltaAfterKey(bool enable) {
@ -386,6 +409,7 @@ WebRtc_Word32 ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
bitrate_controller_->SetBitrateObserver(bitrate_observer_.get(),
video_codec.startBitrate * 1000,
video_codec.minBitrate * 1000,
kTransmissionMaxBitrateMultiplier *
video_codec.maxBitrate * 1000);
return 0;
@ -446,6 +470,15 @@ void ViEEncoder::TimeToSendPacket(uint32_t ssrc, uint16_t sequence_number,
default_rtp_rtcp_->TimeToSendPacket(ssrc, sequence_number, capture_time_ms);
}
bool ViEEncoder::EncoderPaused() const {
// Pause video if paused by caller or as long as the network is down and the
// pacer queue has grown too large.
const bool max_send_buffer_reached =
paced_sender_->QueueInMs() >= target_delay_ms_;
return encoder_paused_ ||
(!network_is_transmitting_ && max_send_buffer_reached);
}
RtpRtcp* ViEEncoder::SendRtpRtcpModule() {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
@ -464,7 +497,7 @@ void ViEEncoder::DeliverFrame(int id,
video_frame->timestamp());
{
CriticalSectionScoped cs(data_cs_.get());
- if (paused_ || default_rtp_rtcp_->SendingMedia() == false) {
+ if (EncoderPaused() || default_rtp_rtcp_->SendingMedia() == false) {
// We've paused or we have no channels attached, don't encode.
return;
}
@ -708,10 +741,14 @@ WebRtc_Word32 ViEEncoder::UpdateProtectionMethod() {
}
void ViEEncoder::SetSenderBufferingMode(int target_delay_ms) {
{
CriticalSectionScoped cs(data_cs_.get());
target_delay_ms_ = target_delay_ms;
}
if (target_delay_ms > 0) {
// Disable external frame-droppers.
vcm_.EnableFrameDropper(false);
vpm_.EnableTemporalDecimation(false);
// Disable external frame-droppers.
vcm_.EnableFrameDropper(false);
vpm_.EnableTemporalDecimation(false);
} else {
// Real-time mode - enable frame droppers.
vpm_.EnableTemporalDecimation(true);
@ -730,7 +767,7 @@ WebRtc_Word32 ViEEncoder::SendData(
const RTPVideoHeader* rtp_video_hdr) {
{
CriticalSectionScoped cs(data_cs_.get());
- if (paused_) {
+ if (EncoderPaused()) {
// Paused, don't send this packet.
return 0;
}


@ -57,6 +57,8 @@ class ViEEncoder
bool Init();
void SetNetworkTransmissionState(bool is_transmitting);
// Returns the id of the owning channel.
int Owner() const;
@ -173,6 +175,8 @@ class ViEEncoder
int64_t capture_time_ms);
private:
bool EncoderPaused() const;
WebRtc_Word32 engine_id_;
const int channel_id_;
const WebRtc_UWord32 number_of_cores_;
@ -188,7 +192,9 @@ class ViEEncoder
BitrateController* bitrate_controller_;
- bool paused_;
+ int target_delay_ms_;
+ bool network_is_transmitting_;
+ bool encoder_paused_;
std::map<unsigned int, int64_t> time_last_intra_request_ms_;
WebRtc_Word32 channels_dropping_delta_frames_;
bool drop_next_frame_;


@ -60,6 +60,32 @@ int ViENetworkImpl::Release() {
return ref_count;
}
void ViENetworkImpl::SetNetworkTransmissionState(const int video_channel,
const bool is_transmitting) {
WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"%s(event: Network %s)", __FUNCTION__,
is_transmitting ? "transmitting" : "not transmitting");
if (!shared_data_->Initialized()) {
shared_data_->SetLastError(kViENotInitialized);
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
"%s - ViE instance %d not initialized", __FUNCTION__,
shared_data_->instance_id());
return;
}
ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
ViEEncoder* vie_encoder = cs.Encoder(video_channel);
if (!vie_encoder) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
ViEId(shared_data_->instance_id(), video_channel),
"An encoder doesn't exist for this channel");
shared_data_->SetLastError(kViENetworkInvalidChannelId);
return;
}
vie_encoder->SetNetworkTransmissionState(is_transmitting);
}
ViENetworkImpl::ViENetworkImpl(ViESharedData* shared_data)
: shared_data_(shared_data) {
WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),


@ -25,6 +25,8 @@ class ViENetworkImpl
public:
// Implements ViENetwork.
virtual int Release();
virtual void SetNetworkTransmissionState(const int video_channel,
const bool is_transmitting);
virtual int SetLocalReceiver(const int video_channel,
const uint16_t rtp_port,
const uint16_t rtcp_port,