Pass capture time (wallclock) to the RTP sender to compute transmission offset

- Change how the transmission offset is calculated, to
  incorporate the time since the frame was captured.
- Break out RtpRtcpClock and move it to system_wrappers.
- Use RtpRtcpClock to set the capture time in ms in the capture module.
  We must use the same clock as in the RTP module to be able to measure
  the time from capture until transmission.
- Enables the RTP header extension for packet transmission time offsets.

BUG=
TEST=trybots

Review URL: https://webrtc-codereview.appspot.com/666006

git-svn-id: http://webrtc.googlecode.com/svn/trunk@2489 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
stefan@webrtc.org 2012-07-03 13:21:22 +00:00
parent 1853005f37
commit ddfdfed3b5
36 changed files with 256 additions and 92 deletions

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -29,17 +29,29 @@ enum VideoFrameType
class RawImage
{
public:
RawImage() : _width(0), _height(0), _timeStamp(0), _buffer(NULL),
_length(0), _size(0) {}
RawImage()
: _width(0),
_height(0),
_timeStamp(0),
capture_time_ms_(0),
_buffer(NULL),
_length(0),
_size(0) {}
RawImage(WebRtc_UWord8* buffer, WebRtc_UWord32 length,
WebRtc_UWord32 size) :
_width(0), _height(0), _timeStamp(0),
_buffer(buffer), _length(length), _size(size) {}
WebRtc_UWord32 size)
: _width(0),
_height(0),
_timeStamp(0),
capture_time_ms_(0),
_buffer(buffer),
_length(length),
_size(size) {}
WebRtc_UWord32 _width;
WebRtc_UWord32 _height;
WebRtc_UWord32 _timeStamp;
int64_t capture_time_ms_;
WebRtc_UWord8* _buffer;
WebRtc_UWord32 _length;
WebRtc_UWord32 _size;
@ -48,21 +60,34 @@ public:
class EncodedImage
{
public:
EncodedImage() :
_encodedWidth(0), _encodedHeight(0), _timeStamp(0),
_frameType(kDeltaFrame), _buffer(NULL), _length(0),
_size(0), _completeFrame(false) {}
EncodedImage()
: _encodedWidth(0),
_encodedHeight(0),
_timeStamp(0),
capture_time_ms_(0),
_frameType(kDeltaFrame),
_buffer(NULL),
_length(0),
_size(0),
_completeFrame(false) {}
EncodedImage(WebRtc_UWord8* buffer,
WebRtc_UWord32 length,
WebRtc_UWord32 size) :
_encodedWidth(0), _encodedHeight(0), _timeStamp(0),
_frameType(kDeltaFrame), _buffer(buffer), _length(length),
_size(size), _completeFrame(false) {}
WebRtc_UWord32 size)
: _encodedWidth(0),
_encodedHeight(0),
_timeStamp(0),
capture_time_ms_(0),
_frameType(kDeltaFrame),
_buffer(buffer),
_length(length),
_size(size),
_completeFrame(false) {}
WebRtc_UWord32 _encodedWidth;
WebRtc_UWord32 _encodedHeight;
WebRtc_UWord32 _timeStamp;
int64_t capture_time_ms_;
VideoFrameType _frameType;
WebRtc_UWord8* _buffer;
WebRtc_UWord32 _length;

View File

@ -492,6 +492,7 @@ class RtpRtcp : public Module {
const FrameType frameType,
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 timeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader* fragmentation = NULL,

View File

@ -145,10 +145,11 @@ class MockRtpRtcp : public RtpRtcp {
void(WebRtc_UWord32* totalRate, WebRtc_UWord32* videoRate, WebRtc_UWord32* fecRate, WebRtc_UWord32* nackRate));
MOCK_CONST_METHOD1(EstimatedReceiveBandwidth,
int(WebRtc_UWord32* available_bandwidth));
MOCK_METHOD7(SendOutgoingData,
MOCK_METHOD8(SendOutgoingData,
WebRtc_Word32(const FrameType frameType,
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 timeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader* fragmentation,

View File

@ -13,7 +13,7 @@
#include <map>
#include "rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
#include "typedefs.h"
namespace webrtc {

View File

@ -108,6 +108,7 @@ void RTPPacketHistory::VerifyAndAllocatePacketLength(uint16_t packet_length) {
int32_t RTPPacketHistory::PutRTPPacket(const uint8_t* packet,
uint16_t packet_length,
uint16_t max_packet_length,
int64_t capture_time_ms,
StorageType type) {
if (type == kDontStore) {
return 0;
@ -138,7 +139,7 @@ int32_t RTPPacketHistory::PutRTPPacket(const uint8_t* packet,
stored_seq_nums_[prev_index_] = seq_num;
stored_lengths_[prev_index_] = packet_length;
stored_times_[prev_index_] = clock_.GetTimeInMS();
stored_times_[prev_index_] = capture_time_ms;
stored_resend_times_[prev_index_] = 0; // packet not resent
stored_types_[prev_index_] = type;
@ -173,7 +174,7 @@ bool RTPPacketHistory::GetRTPPacket(uint16_t sequence_number,
uint32_t min_elapsed_time_ms,
uint8_t* packet,
uint16_t* packet_length,
uint32_t* stored_time_ms,
int64_t* stored_time_ms,
StorageType* type) const {
webrtc::CriticalSectionScoped cs(*critsect_);
if (!store_) {

View File

@ -37,6 +37,7 @@ class RTPPacketHistory {
int32_t PutRTPPacket(const uint8_t* packet,
uint16_t packet_length,
uint16_t max_packet_length,
int64_t capture_time_ms,
StorageType type);
// Gets stored RTP packet corresponding to the input sequence number.
@ -54,7 +55,7 @@ class RTPPacketHistory {
uint32_t min_elapsed_time_ms,
uint8_t* packet,
uint16_t* packet_length,
uint32_t* stored_time_ms,
int64_t* stored_time_ms,
StorageType* type) const;
bool HasRTPPacket(uint16_t sequence_number) const;

View File

@ -87,12 +87,13 @@ TEST_F(RtpPacketHistoryTest, SetStoreStatus) {
TEST_F(RtpPacketHistoryTest, NoStoreStatus) {
EXPECT_FALSE(hist_->StorePackets());
uint16_t len = 0;
int64_t capture_time_ms = fake_clock_.GetTimeInMS();
CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
kAllowRetransmission));
capture_time_ms, kAllowRetransmission));
// Packet should not be stored.
len = kMaxPacketLength;
uint32_t time;
int64_t time;
StorageType type;
EXPECT_FALSE(hist_->GetRTPPacket(kSeqNum, 0, packet_, &len, &time, &type));
}
@ -100,32 +101,37 @@ TEST_F(RtpPacketHistoryTest, NoStoreStatus) {
TEST_F(RtpPacketHistoryTest, DontStore) {
hist_->SetStorePacketsStatus(true, 10);
uint16_t len = 0;
int64_t capture_time_ms = fake_clock_.GetTimeInMS();
CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength, kDontStore));
EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
capture_time_ms, kDontStore));
// Packet should not be stored.
len = kMaxPacketLength;
uint32_t time;
int64_t time;
StorageType type;
EXPECT_FALSE(hist_->GetRTPPacket(kSeqNum, 0, packet_, &len, &time, &type));
}
TEST_F(RtpPacketHistoryTest, PutRtpPacket_TooLargePacketLength) {
hist_->SetStorePacketsStatus(true, 10);
int64_t capture_time_ms = fake_clock_.GetTimeInMS();
EXPECT_EQ(-1, hist_->PutRTPPacket(packet_,
kMaxPacketLength + 1,
kMaxPacketLength,
capture_time_ms,
kAllowRetransmission));
}
TEST_F(RtpPacketHistoryTest, GetRtpPacket_TooSmallBuffer) {
hist_->SetStorePacketsStatus(true, 10);
uint16_t len = 0;
int64_t capture_time_ms = fake_clock_.GetTimeInMS();
CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
kAllowRetransmission));
capture_time_ms, kAllowRetransmission));
uint16_t len_out = len - 1;
uint32_t time;
int64_t time;
StorageType type;
EXPECT_FALSE(hist_->GetRTPPacket(kSeqNum, 0, packet_, &len_out, &time,
&type));
@ -134,7 +140,7 @@ TEST_F(RtpPacketHistoryTest, GetRtpPacket_TooSmallBuffer) {
TEST_F(RtpPacketHistoryTest, GetRtpPacket_NotStored) {
hist_->SetStorePacketsStatus(true, 10);
uint16_t len = kMaxPacketLength;
uint32_t time;
int64_t time;
StorageType type;
EXPECT_FALSE(hist_->GetRTPPacket(0, 0, packet_, &len, &time, &type));
}
@ -145,25 +151,28 @@ TEST_F(RtpPacketHistoryTest, PutRtpPacket) {
CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
EXPECT_FALSE(hist_->HasRTPPacket(kSeqNum));
int64_t capture_time_ms = fake_clock_.GetTimeInMS();
EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
kAllowRetransmission));
capture_time_ms, kAllowRetransmission));
EXPECT_TRUE(hist_->HasRTPPacket(kSeqNum));
}
TEST_F(RtpPacketHistoryTest, GetRtpPacket) {
hist_->SetStorePacketsStatus(true, 10);
uint16_t len = 0;
int64_t capture_time_ms = fake_clock_.GetTimeInMS();
CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
kAllowRetransmission));
capture_time_ms, kAllowRetransmission));
uint16_t len_out = kMaxPacketLength;
uint32_t time;
int64_t time;
StorageType type;
EXPECT_TRUE(hist_->GetRTPPacket(kSeqNum, 0, packet_out_, &len_out, &time,
&type));
EXPECT_EQ(len, len_out);
EXPECT_EQ(kAllowRetransmission, type);
EXPECT_EQ(capture_time_ms, time);
for (int i = 0; i < len; i++) {
EXPECT_EQ(packet_[i], packet_out_[i]);
}
@ -172,26 +181,28 @@ TEST_F(RtpPacketHistoryTest, GetRtpPacket) {
TEST_F(RtpPacketHistoryTest, DontRetransmit) {
hist_->SetStorePacketsStatus(true, 10);
uint16_t len = 0;
int64_t capture_time_ms = fake_clock_.GetTimeInMS();
CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
kDontRetransmit));
capture_time_ms, kDontRetransmit));
uint16_t len_out = kMaxPacketLength;
uint32_t time;
int64_t time;
StorageType type;
EXPECT_TRUE(hist_->GetRTPPacket(kSeqNum, 0, packet_out_, &len_out, &time,
&type));
EXPECT_EQ(len, len_out);
EXPECT_EQ(kDontRetransmit, type);
EXPECT_EQ(capture_time_ms, time);
}
TEST_F(RtpPacketHistoryTest, MinResendTime) {
hist_->SetStorePacketsStatus(true, 10);
WebRtc_Word64 store_time = fake_clock_.GetTimeInMS();
uint16_t len = 0;
int64_t capture_time_ms = fake_clock_.GetTimeInMS();
CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
kAllowRetransmission));
capture_time_ms, kAllowRetransmission));
hist_->UpdateResendTime(kSeqNum);
fake_clock_.IncrementTime(100);
@ -199,10 +210,10 @@ TEST_F(RtpPacketHistoryTest, MinResendTime) {
// Time has elapsed.
len = kMaxPacketLength;
StorageType type;
uint32_t time;
int64_t time;
EXPECT_TRUE(hist_->GetRTPPacket(kSeqNum, 100, packet_, &len, &time, &type));
EXPECT_GT(len, 0);
EXPECT_EQ(store_time, time);
EXPECT_EQ(capture_time_ms, time);
// Time has not elapsed. Packet should be found, but no bytes copied.
len = kMaxPacketLength;

View File

@ -92,10 +92,12 @@ WebRtc_Word32 RTPReceiverVideo::ParseVideoCodecSpecific(
// Ethernet header here as well.
const WebRtc_UWord16 packetSize = payloadDataLength + _packetOverHead +
rtpHeader->header.headerLength + rtpHeader->header.paddingLength;
uint32_t compensated_timestamp = rtpHeader->header.timestamp +
rtpHeader->extension.transmissionTimeOffset;
remote_bitrate_->IncomingPacket(rtpHeader->header.ssrc,
packetSize,
nowMS,
rtpHeader->header.timestamp,
compensated_timestamp,
-1);
if (isRED) {

View File

@ -798,6 +798,7 @@ WebRtc_Word32 ModuleRtpRtcpImpl::SendOutgoingData(
FrameType frameType,
WebRtc_Word8 payloadType,
WebRtc_UWord32 timeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader* fragmentation,
@ -818,6 +819,7 @@ WebRtc_Word32 ModuleRtpRtcpImpl::SendOutgoingData(
return _rtpSender.SendOutgoingData(frameType,
payloadType,
timeStamp,
capture_time_ms,
payloadData,
payloadSize,
fragmentation,
@ -847,6 +849,7 @@ WebRtc_Word32 ModuleRtpRtcpImpl::SendOutgoingData(
return rtpSender.SendOutgoingData(frameType,
payloadType,
timeStamp,
capture_time_ms,
payloadData,
payloadSize,
fragmentation,
@ -863,6 +866,7 @@ WebRtc_Word32 ModuleRtpRtcpImpl::SendOutgoingData(
retVal = rtpSender.SendOutgoingData(frameType,
payloadType,
timeStamp,
capture_time_ms,
payloadData,
payloadSize,
fragmentation,
@ -878,6 +882,7 @@ WebRtc_Word32 ModuleRtpRtcpImpl::SendOutgoingData(
retVal = rtpSender.SendOutgoingData(frameType,
payloadType,
timeStamp,
capture_time_ms,
payloadData,
payloadSize,
fragmentation,

View File

@ -176,6 +176,7 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
const FrameType frameType,
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 timeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader* fragmentation = NULL,

View File

@ -464,6 +464,7 @@ WebRtc_Word32
RTPSender::SendOutgoingData(const FrameType frame_type,
const WebRtc_Word8 payload_type,
const WebRtc_UWord32 capture_timestamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payload_data,
const WebRtc_UWord32 payload_size,
const RTPFragmentationHeader* fragmentation,
@ -500,12 +501,14 @@ RTPSender::SendOutgoingData(const FrameType frame_type,
frame_type != kAudioFrameCN);
if (frame_type == kFrameEmpty) {
return SendPaddingAccordingToBitrate(payload_type, capture_timestamp);
return SendPaddingAccordingToBitrate(payload_type, capture_timestamp,
capture_time_ms);
}
return _video->SendVideo(video_type,
frame_type,
payload_type,
capture_timestamp,
capture_time_ms,
payload_data,
payload_size,
fragmentation,
@ -516,7 +519,8 @@ RTPSender::SendOutgoingData(const FrameType frame_type,
WebRtc_Word32 RTPSender::SendPaddingAccordingToBitrate(
WebRtc_Word8 payload_type,
WebRtc_UWord32 capture_timestamp) {
WebRtc_UWord32 capture_timestamp,
int64_t capture_time_ms) {
// Current bitrate since last estimate(1 second) averaged with the
// estimate since then, to get the most up to date bitrate.
uint32_t current_bitrate = BitrateNow();
@ -535,13 +539,14 @@ WebRtc_Word32 RTPSender::SendPaddingAccordingToBitrate(
}
}
// Send padding data.
return SendPadData(payload_type, capture_timestamp, bytes);
return SendPadData(payload_type, capture_timestamp, capture_time_ms, bytes);
}
return 0;
}
WebRtc_Word32 RTPSender::SendPadData(WebRtc_Word8 payload_type,
WebRtc_UWord32 capture_timestamp,
int64_t capture_time_ms,
WebRtc_Word32 bytes) {
// Drop this packet if we're not sending media packets
if (!_sendingMedia) {
@ -586,6 +591,7 @@ WebRtc_Word32 RTPSender::SendPadData(WebRtc_Word8 payload_type,
if (0 > SendToNetwork(data_buffer,
padding_bytes_in_packet,
header_length,
capture_time_ms,
kDontRetransmit)) {
// Error sending the packet.
break;
@ -616,7 +622,7 @@ WebRtc_Word32 RTPSender::ReSendPacket(WebRtc_UWord16 packet_id,
WebRtc_UWord8 data_buffer[IP_PACKET_SIZE];
WebRtc_UWord8* buffer_to_send_ptr = data_buffer;
WebRtc_UWord32 stored_time_in_ms;
int64_t stored_time_in_ms;
StorageType type;
bool found = _packetHistory->GetRTPPacket(packet_id,
min_resend_time, data_buffer, &length, &stored_time_in_ms, &type);
@ -848,7 +854,7 @@ void RTPSender::ProcessSendToNetwork() {
WebRtc_UWord8 data_buffer[IP_PACKET_SIZE];
WebRtc_UWord16 length = IP_PACKET_SIZE;
WebRtc_UWord32 stored_time_ms;
int64_t stored_time_ms;
StorageType type;
bool found = _packetHistory->GetRTPPacket(seq_num, 0, data_buffer, &length,
&stored_time_ms, &type);
@ -888,14 +894,16 @@ void RTPSender::ProcessSendToNetwork() {
}
WebRtc_Word32
RTPSender::SendToNetwork(const WebRtc_UWord8* buffer,
RTPSender::SendToNetwork(WebRtc_UWord8* buffer,
const WebRtc_UWord16 length,
const WebRtc_UWord16 rtpLength,
int64_t capture_time_ms,
const StorageType storage)
{
// Used for NACK or to spread out the transmission of packets.
if (_packetHistory->PutRTPPacket(
buffer, rtpLength + length, _maxPayloadLength, storage) != 0) {
buffer, rtpLength + length, _maxPayloadLength, capture_time_ms, storage)
!= 0) {
return -1;
}
@ -906,6 +914,15 @@ RTPSender::SendToNetwork(const WebRtc_UWord8* buffer,
return 0;
}
if (capture_time_ms >= 0) {
ModuleRTPUtility::RTPHeaderParser rtpParser(buffer, length);
WebRtcRTPHeader rtp_header;
rtpParser.Parse(rtp_header);
int64_t time_now = _clock.GetTimeInMS();
UpdateTransmissionTimeOffset(buffer, length, rtp_header,
time_now - capture_time_ms);
}
// Send packet
WebRtc_Word32 bytes_sent = -1;
{

View File

@ -56,9 +56,10 @@ public:
virtual WebRtc_UWord16 PacketOverHead() const = 0;
virtual WebRtc_UWord16 ActualSendBitrateKbit() const = 0;
virtual WebRtc_Word32 SendToNetwork(const WebRtc_UWord8* dataBuffer,
virtual WebRtc_Word32 SendToNetwork(WebRtc_UWord8* dataBuffer,
const WebRtc_UWord16 payloadLength,
const WebRtc_UWord16 rtpHeaderLength,
int64_t capture_time_ms,
const StorageType storage) = 0;
};
@ -133,6 +134,7 @@ public:
WebRtc_Word32 SendOutgoingData(const FrameType frameType,
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 timeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader* fragmentation,
@ -141,6 +143,7 @@ public:
WebRtc_Word32 SendPadData(WebRtc_Word8 payload_type,
WebRtc_UWord32 capture_timestamp,
int64_t capture_time_ms,
WebRtc_Word32 bytes);
/*
* RTP header extension
@ -219,9 +222,10 @@ public:
virtual WebRtc_UWord32 Timestamp() const;
virtual WebRtc_UWord32 SSRC() const;
virtual WebRtc_Word32 SendToNetwork(const WebRtc_UWord8* dataBuffer,
virtual WebRtc_Word32 SendToNetwork(WebRtc_UWord8* dataBuffer,
const WebRtc_UWord16 payloadLength,
const WebRtc_UWord16 rtpHeaderLength,
int64_t capture_time_ms,
const StorageType storage);
/*
@ -290,7 +294,8 @@ private:
WebRtc_Word32 SendPaddingAccordingToBitrate(
WebRtc_Word8 payload_type,
WebRtc_UWord32 capture_timestamp);
WebRtc_UWord32 capture_timestamp,
int64_t capture_time_ms);
WebRtc_Word32 _id;
const bool _audioConfigured;

View File

@ -457,6 +457,7 @@ WebRtc_Word32 RTPSenderAudio::SendAudio(
return _rtpSender->SendToNetwork(dataBuffer,
payloadSize,
static_cast<WebRtc_UWord16>(rtpHeaderLength),
-1,
kAllowRetransmission);
}
@ -591,7 +592,7 @@ RTPSenderAudio::SendTelephoneEventPacket(const bool ended,
ModuleRTPUtility::AssignUWord16ToBuffer(dtmfbuffer+14, duration);
_sendAudioCritsect->Leave();
retVal = _rtpSender->SendToNetwork(dtmfbuffer, 4, 12,
retVal = _rtpSender->SendToNetwork(dtmfbuffer, 4, 12, -1,
kAllowRetransmission);
sendCount--;

View File

@ -207,7 +207,10 @@ TEST_F(RtpSenderTest, NoTrafficSmoothing) {
kTimestamp);
// Packet should be sent immediately.
EXPECT_EQ(0, rtp_sender_->SendToNetwork(packet_, 0, rtp_length,
EXPECT_EQ(0, rtp_sender_->SendToNetwork(packet_,
0,
rtp_length,
kTimestamp / 90,
kAllowRetransmission));
EXPECT_EQ(1, transport_.packets_sent_);
EXPECT_EQ(rtp_length, transport_.last_sent_packet_len_);
@ -225,7 +228,10 @@ TEST_F(RtpSenderTest, TrafficSmoothing) {
kTimestamp);
// Packet should be stored in a send bucket.
EXPECT_EQ(0, rtp_sender_->SendToNetwork(packet_, 0, rtp_length,
EXPECT_EQ(0, rtp_sender_->SendToNetwork(packet_,
0,
rtp_length,
fake_clock_.GetTimeInMS(),
kAllowRetransmission));
EXPECT_EQ(0, transport_.packets_sent_);

View File

@ -105,9 +105,10 @@ WebRtc_Word32 RTPSenderVideo::RegisterVideoPayload(
}
WebRtc_Word32
RTPSenderVideo::SendVideoPacket(const WebRtc_UWord8* data_buffer,
RTPSenderVideo::SendVideoPacket(WebRtc_UWord8* data_buffer,
const WebRtc_UWord16 payload_length,
const WebRtc_UWord16 rtp_header_length,
int64_t capture_time_ms,
StorageType storage,
bool protect) {
if(_fecEnabled) {
@ -124,6 +125,7 @@ RTPSenderVideo::SendVideoPacket(const WebRtc_UWord8* data_buffer,
red_packet->data(),
red_packet->length() - rtp_header_length,
rtp_header_length,
capture_time_ms,
storage);
ret |= packet_success;
@ -156,6 +158,7 @@ RTPSenderVideo::SendVideoPacket(const WebRtc_UWord8* data_buffer,
red_packet->data(),
red_packet->length() - rtp_header_length,
rtp_header_length,
capture_time_ms,
storage);
ret |= packet_success;
@ -173,6 +176,7 @@ RTPSenderVideo::SendVideoPacket(const WebRtc_UWord8* data_buffer,
int ret = _rtpSender.SendToNetwork(data_buffer,
payload_length,
rtp_header_length,
capture_time_ms,
storage);
if (ret == 0) {
_videoBitrate.Update(payload_length + rtp_header_length);
@ -195,7 +199,7 @@ RTPSenderVideo::SendRTPIntraRequest()
ModuleRTPUtility::AssignUWord32ToBuffer(data+4, _rtpSender.SSRC());
return _rtpSender.SendToNetwork(data, 0, length, kAllowRetransmission);
return _rtpSender.SendToNetwork(data, 0, length, -1, kAllowRetransmission);
}
WebRtc_Word32
@ -248,7 +252,8 @@ WebRtc_Word32
RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
const FrameType frameType,
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 captureTimeStamp,
const uint32_t captureTimeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader* fragmentation,
@ -276,12 +281,18 @@ RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
switch(videoType)
{
case kRtpNoVideo:
retVal = SendGeneric(payloadType,captureTimeStamp, payloadData,
payloadSize);
retVal = SendGeneric(payloadType, captureTimeStamp, capture_time_ms,
payloadData, payloadSize);
break;
case kRtpVp8Video:
retVal = SendVP8(frameType, payloadType, captureTimeStamp,
payloadData, payloadSize, fragmentation, rtpTypeHdr);
retVal = SendVP8(frameType,
payloadType,
captureTimeStamp,
capture_time_ms,
payloadData,
payloadSize,
fragmentation,
rtpTypeHdr);
break;
default:
assert(false);
@ -298,7 +309,8 @@ RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
WebRtc_Word32
RTPSenderVideo::SendGeneric(const WebRtc_Word8 payloadType,
const WebRtc_UWord32 captureTimeStamp,
const uint32_t captureTimeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize)
{
@ -348,6 +360,7 @@ RTPSenderVideo::SendGeneric(const WebRtc_Word8 payloadType,
if(-1 == SendVideoPacket(dataBuffer,
payloadBytesInPacket,
rtpHeaderLength,
capture_time_ms,
kAllowRetransmission,
true))
{
@ -378,7 +391,8 @@ RTPSenderVideo::MaxConfiguredBitrateVideo() const
WebRtc_Word32
RTPSenderVideo::SendVP8(const FrameType frameType,
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 captureTimeStamp,
const uint32_t captureTimeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader* fragmentation,
@ -438,7 +452,7 @@ RTPSenderVideo::SendVP8(const FrameType frameType,
_rtpSender.BuildRTPheader(dataBuffer, payloadType, last,
captureTimeStamp);
if (-1 == SendVideoPacket(dataBuffer, payloadBytesInPacket,
rtpHeaderLength, storage, protect))
rtpHeaderLength, capture_time_ms, storage, protect))
{
WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
"RTPSenderVideo::SendVP8 failed to send packet number"

View File

@ -50,7 +50,8 @@ public:
WebRtc_Word32 SendVideo(const RtpVideoCodecTypes videoType,
const FrameType frameType,
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 captureTimeStamp,
const uint32_t captureTimeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader* fragmentation,
@ -88,21 +89,24 @@ public:
int SetSelectiveRetransmissions(uint8_t settings);
protected:
virtual WebRtc_Word32 SendVideoPacket(const WebRtc_UWord8* dataBuffer,
virtual WebRtc_Word32 SendVideoPacket(WebRtc_UWord8* dataBuffer,
const WebRtc_UWord16 payloadLength,
const WebRtc_UWord16 rtpHeaderLength,
int64_t capture_time_ms,
StorageType storage,
bool protect);
private:
WebRtc_Word32 SendGeneric(const WebRtc_Word8 payloadType,
const WebRtc_UWord32 captureTimeStamp,
const uint32_t captureTimeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize);
WebRtc_Word32 SendVP8(const FrameType frameType,
const WebRtc_Word8 payloadType,
const WebRtc_UWord32 captureTimeStamp,
const uint32_t captureTimeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader* fragmentation,

View File

@ -203,7 +203,7 @@ TEST_F(RtpRtcpAudioTest, Basic) {
// Send an empty RTP packet.
// Should fail since we have not registered the payload type.
EXPECT_EQ(-1, module1->SendOutgoingData(webrtc::kAudioFrameSpeech,
96, 0, NULL, 0));
96, 0, -1, NULL, 0));
CodecInst voiceCodec;
voiceCodec.pltype = 96;
@ -219,7 +219,7 @@ TEST_F(RtpRtcpAudioTest, Basic) {
const WebRtc_UWord8 test[5] = "test";
EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 96,
0, test, 4));
0, -1, test, 4));
EXPECT_EQ(test_ssrc, module2->RemoteSSRC());
EXPECT_EQ(test_timestamp, module2->RemoteTimestamp());
@ -270,7 +270,7 @@ TEST_F(RtpRtcpAudioTest, RED) {
const WebRtc_UWord8 test[5] = "test";
// Send a RTP packet.
EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech,
96, 160, test, 4,
96, 160, -1, test, 4,
&fragmentation));
EXPECT_EQ(0, module1->SetSendREDPayloadType(-1));
@ -316,7 +316,7 @@ TEST_F(RtpRtcpAudioTest, DTMF) {
// pause between = 2560ms + 1600ms = 4160ms
for (;timeStamp <= 250 * 160; timeStamp += 160) {
EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 96,
timeStamp, test, 4));
timeStamp, -1, test, 4));
fake_clock.IncrementTime(20);
module1->Process();
}
@ -324,7 +324,7 @@ TEST_F(RtpRtcpAudioTest, DTMF) {
for (;timeStamp <= 740 * 160; timeStamp += 160) {
EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 96,
timeStamp, test, 4));
timeStamp, -1, test, 4));
fake_clock.IncrementTime(20);
module1->Process();
}

View File

@ -156,6 +156,7 @@ TEST_F(RtpRtcpNackTest, RTCP) {
for (int frame = 0; frame < 10; ++frame) {
EXPECT_EQ(0, video_module_->SendOutgoingData(webrtc::kVideoFrameDelta, 123,
timestamp,
timestamp / 90,
payload_data,
payload_data_length));
@ -209,10 +210,12 @@ TEST_F(RtpRtcpNackTest, RTX) {
WebRtc_UWord16 nack_list[kVideoNackListSize];
for (int frame = 0; frame < 10; ++frame) {
EXPECT_EQ(0, video_module_->SendOutgoingData(webrtc::kVideoFrameDelta, 123,
timestamp,
payload_data,
payload_data_length));
EXPECT_EQ(0, video_module_->SendOutgoingData(webrtc::kVideoFrameDelta,
123,
timestamp,
timestamp / 90,
payload_data,
payload_data_length));
std::sort(nack_receiver_->sequence_numbers_.begin(),
nack_receiver_->sequence_numbers_.end());

View File

@ -140,7 +140,7 @@ class RtpRtcpRtcpTest : public ::testing::Test {
// send RTP packet with the data "testtest"
const WebRtc_UWord8 test[9] = "testtest";
EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 96,
0, test, 8));
0, -1, test, 8));
}
virtual void TearDown() {

View File

@ -87,6 +87,7 @@ TEST_F(RtpRtcpVideoTest, BasicVideo) {
WebRtc_UWord32 timestamp = 3000;
EXPECT_EQ(0, video_module->SendOutgoingData(webrtc::kVideoFrameDelta, 123,
timestamp,
timestamp / 90,
payload_data,
payload_data_length));

View File

@ -127,9 +127,10 @@ WebRtc_Word32 VideoCoder::FrameToRender(VideoFrame& videoFrame)
}
WebRtc_Word32 VideoCoder::SendData(
FrameType frameType,
WebRtc_UWord8 payloadType,
WebRtc_UWord32 timeStamp,
const FrameType frameType,
const WebRtc_UWord8 payloadType,
const WebRtc_UWord32 timeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& fragmentationHeader,

View File

@ -54,6 +54,7 @@ private:
const FrameType /*frameType*/,
const WebRtc_UWord8 /*payloadType*/,
const WebRtc_UWord32 /*timeStamp*/,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& /* fragmentationHeader*/,

View File

@ -469,6 +469,7 @@ int VP8Encoder::GetEncodedFrame(const RawImage& input_image) {
if (encoded_image_._length > 0) {
encoded_image_._timeStamp = input_image._timeStamp;
encoded_image_.capture_time_ms_ = input_image.capture_time_ms_;
// Figure out where partition boundaries are located.
RTPFragmentationHeader fragInfo;
@ -542,6 +543,7 @@ int VP8Encoder::GetEncodedPartitions(const RawImage& input_image) {
}
if (encoded_image_._length > 0) {
encoded_image_._timeStamp = input_image._timeStamp;
encoded_image_.capture_time_ms_ = input_image.capture_time_ms_;
encoded_image_._encodedHeight = raw_->h;
encoded_image_._encodedWidth = raw_->w;
encoded_complete_callback_->Encoded(encoded_image_, &codec_specific,

View File

@ -69,9 +69,12 @@ struct VCMFrameCount {
class VCMPacketizationCallback {
public:
virtual WebRtc_Word32 SendData(
const FrameType frameType, const WebRtc_UWord8 payloadType,
const WebRtc_UWord32 timeStamp, const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
FrameType frameType,
WebRtc_UWord8 payloadType,
WebRtc_UWord32 timeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& fragmentationHeader,
const RTPVideoHeader* rtpVideoHdr) = 0;
protected:

View File

@ -67,6 +67,7 @@ VCMGenericEncoder::Encode(const VideoFrame& inputFrame,
rawImage._width = inputFrame.Width();
rawImage._height = inputFrame.Height();
rawImage._timeStamp = inputFrame.TimeStamp();
rawImage.capture_time_ms_ = inputFrame.RenderTimeMs();
VideoFrameType videoFrameType =
VCMEncodedFrame::ConvertFrameType(frameType);
@ -207,6 +208,7 @@ VCMEncodedFrameCallback::Encoded(
frameType,
_payloadType,
encodedImage._timeStamp,
encodedImage.capture_time_ms_,
encodedImage._buffer,
encodedBytes,
*fragmentationHeader,

View File

@ -570,6 +570,7 @@ VCMEncComplete_KeyReqTest::SendData(
const FrameType frameType,
const WebRtc_UWord8 payloadType,
const WebRtc_UWord32 timeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& /*fragmentationHeader*/,

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -94,6 +94,7 @@ public:
const webrtc::FrameType frameType,
const WebRtc_UWord8 payloadType,
WebRtc_UWord32 timeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
const webrtc::RTPFragmentationHeader& fragmentationHeader,

View File

@ -76,6 +76,7 @@ VCMNTEncodeCompleteCallback::SendData(
const FrameType frameType,
const WebRtc_UWord8 payloadType,
const WebRtc_UWord32 timeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& /*fragmentationHeader*/,

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -31,6 +31,7 @@ public:
WebRtc_Word32 SendData(const webrtc::FrameType frameType,
const WebRtc_UWord8 payloadType,
const WebRtc_UWord32 timeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
const webrtc::RTPFragmentationHeader& fragmentationHeader,

View File

@ -51,6 +51,7 @@ VCMEncodeCompleteCallback::SendData(
const FrameType frameType,
const WebRtc_UWord8 payloadType,
const WebRtc_UWord32 timeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& fragmentationHeader,
@ -144,6 +145,7 @@ VCMRTPEncodeCompleteCallback::SendData(
const FrameType frameType,
const WebRtc_UWord8 payloadType,
const WebRtc_UWord32 timeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& fragmentationHeader,
@ -155,6 +157,7 @@ VCMRTPEncodeCompleteCallback::SendData(
return _RTPModule->SendOutgoingData(frameType,
payloadType,
timeStamp,
capture_time_ms,
payloadData,
payloadSize,
&fragmentationHeader,

View File

@ -43,10 +43,13 @@ public:
// Process encoded data received from the encoder, pass stream to the
// VCMReceiver module
WebRtc_Word32 SendData(const FrameType frameType,
const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
const WebRtc_UWord8* payloadData, const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& fragmentationHeader,
const RTPVideoHeader* videoHdr);
const WebRtc_UWord8 payloadType,
const WebRtc_UWord32 timeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& fragmentationHeader,
const RTPVideoHeader* videoHdr);
// Register exisitng VCM. Currently - encode and decode under same module.
void RegisterReceiverVCM(VideoCodingModule *vcm) {_VCMReceiver = vcm;}
// Return size of last encoded frame data (all frames in the sequence)
@@ -99,10 +102,13 @@ public:
// Process encoded data received from the encoder, pass stream to the
// RTP module
WebRtc_Word32 SendData(const FrameType frameType,
const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
const WebRtc_UWord8* payloadData, const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& fragmentationHeader,
const RTPVideoHeader* videoHdr);
const WebRtc_UWord8 payloadType,
const WebRtc_UWord32 timeStamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payloadData,
const WebRtc_UWord32 payloadSize,
const RTPFragmentationHeader& fragmentationHeader,
const RTPVideoHeader* videoHdr);
// Return size of last encoded frame. Value good for one call
// (resets to zero after call to inform test of frame drop)
float EncodedBytes();

View File

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@@ -89,6 +89,7 @@ VPMSimpleSpatialResampler::ResampleFrame(const VideoFrame& inFrame,
(target_half_width * target_half_height));
outFrame.VerifyAndAllocate(requiredSize);
outFrame.SetTimeStamp(inFrame.TimeStamp());
outFrame.SetRenderTime(inFrame.RenderTimeMs());
outFrame.SetWidth(_targetWidth);
outFrame.SetHeight(_targetHeight);

View File

@@ -121,6 +121,21 @@ WebRtc_Word32 ViEChannel::Init() {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: RTP::SetRTCPStatus failure", __FUNCTION__);
}
if (rtp_rtcp_->RegisterSendRtpHeaderExtension(
kRtpExtensionTransmissionTimeOffset, 1) != 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: RTP::RegisterSendRtpHeaderExtension failure",
__FUNCTION__);
return -1;
}
if (rtp_rtcp_->RegisterReceiveRtpHeaderExtension(
kRtpExtensionTransmissionTimeOffset, 1) != 0) {
WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: RTP::RegisterReceiveRtpHeaderExtension failure",
__FUNCTION__);
return -1;
}
// VCM initialization
if (vcm_.InitializeReceiver() != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo,
@@ -284,6 +299,13 @@ WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& video_codec,
if (restart_rtp) {
rtp_rtcp->SetSendingStatus(true);
}
if (rtp_rtcp_->RegisterReceiveRtpHeaderExtension(
kRtpExtensionTransmissionTimeOffset, 1) != 0) {
WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
"%s: could not register transmission time offset extension",
__FUNCTION__);
return -1;
}
}
// |RegisterSimulcastRtpRtcpModules| resets all old weak pointers and old
// modules can be deleted after this step.

View File

@@ -150,6 +150,13 @@ bool ViEEncoder::Init() {
"%s RegisterSendPayload failure", __FUNCTION__);
return false;
}
if (default_rtp_rtcp_->RegisterSendRtpHeaderExtension(
kRtpExtensionTransmissionTimeOffset, 1) != 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
ViEId(engine_id_, channel_id_),
"%s RegisterSendRtpHeaderExtension failure", __FUNCTION__);
return false;
}
#else
VideoCodec video_codec;
if (vcm_.Codec(webrtc::kVideoCodecI420, &video_codec) == VCM_OK) {
@@ -682,6 +689,7 @@ WebRtc_Word32 ViEEncoder::SendData(
const FrameType frame_type,
const WebRtc_UWord8 payload_type,
const WebRtc_UWord32 time_stamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payload_data,
const WebRtc_UWord32 payload_size,
const webrtc::RTPFragmentationHeader& fragmentation_header,
@@ -702,8 +710,11 @@ WebRtc_Word32 ViEEncoder::SendData(
}
// New encoded data, hand over to the rtp module.
return default_rtp_rtcp_->SendOutgoingData(frame_type, payload_type,
time_stamp, payload_data,
return default_rtp_rtcp_->SendOutgoingData(frame_type,
payload_type,
time_stamp,
capture_time_ms,
payload_data,
payload_size,
&fragmentation_header,
rtp_video_hdr);

View File

@@ -109,6 +109,7 @@ class ViEEncoder
const FrameType frame_type,
const WebRtc_UWord8 payload_type,
const WebRtc_UWord32 time_stamp,
int64_t capture_time_ms,
const WebRtc_UWord8* payload_data,
const WebRtc_UWord32 payload_size,
const RTPFragmentationHeader& fragmentation_header,

View File

@@ -63,6 +63,10 @@ Channel::SendData(FrameType frameType,
if (_rtpRtcpModule->SendOutgoingData((FrameType&)frameType,
payloadType,
timeStamp,
// Leaving the time when this frame was
// received from the capture device as
// undefined for voice for now.
-1,
payloadData,
payloadSize,
fragmentation) == -1)
@@ -5728,6 +5732,10 @@ Channel::InsertExtraRTPPacket(unsigned char payloadType,
if (_rtpRtcpModule->SendOutgoingData(kAudioFrameSpeech,
_lastPayloadType,
_lastLocalTimeStamp,
// Leaving the time when this frame was
// received from the capture device as
// undefined for voice for now.
-1,
(const WebRtc_UWord8*) payloadData,
payloadSize) != 0)
{