With these changes we will assume that the capture time of a frame is based on NTP time. This makes the interface of the video engine better defined and makes it easier and cleaner to handle user-provided capture timestamps.

We should consider making the same change to the render timestamps generated at the receiver.

BUG=1563

Review URL: https://webrtc-codereview.appspot.com/1283005

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3799 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
stefan@webrtc.org 2013-04-09 13:37:40 +00:00
parent fd2bfc8fca
commit afcc6101d0
17 changed files with 136 additions and 141 deletions

View File

@ -8,7 +8,9 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/remote_bitrate_estimator/include/rtp_to_ntp.h"
#include "webrtc/modules/remote_bitrate_estimator/include/rtp_to_ntp.h"
#include "webrtc/system_wrappers/interface/clock.h"
#include <assert.h>
@ -57,12 +59,6 @@ bool CompensateForWrapAround(uint32_t new_timestamp,
return true;
}
// Converts an NTP timestamp (whole seconds plus a 32-bit binary fraction of
// a second) to a millisecond timestamp, rounded to the nearest millisecond.
// The seconds term must be widened to 64 bits before multiplying: computing
// ntp_secs * 1000 in 32-bit unsigned arithmetic wraps for any realistic NTP
// second count (~3.9e9 since 1900), producing a silently incorrect result.
int64_t NtpToMs(uint32_t ntp_secs, uint32_t ntp_frac) {
  // kNtpFracPerMs is the number of 2^-32-second fraction units per ms.
  const double ntp_frac_ms = static_cast<double>(ntp_frac) / kNtpFracPerMs;
  return 1000 * static_cast<int64_t>(ntp_secs) +
      static_cast<int64_t>(ntp_frac_ms + 0.5);
}
// Converts |rtp_timestamp| to the NTP time base using the NTP and RTP timestamp
// pairs in |rtcp|. The converted timestamp is returned in
// |rtp_timestamp_in_ms|. This function compensates for wrap arounds in RTP
@ -71,9 +67,9 @@ bool RtpToNtpMs(int64_t rtp_timestamp,
const synchronization::RtcpList& rtcp,
int64_t* rtp_timestamp_in_ms) {
assert(rtcp.size() == 2);
int64_t rtcp_ntp_ms_new = synchronization::NtpToMs(rtcp.front().ntp_secs,
int64_t rtcp_ntp_ms_new = Clock::NtpToMs(rtcp.front().ntp_secs,
rtcp.front().ntp_frac);
int64_t rtcp_ntp_ms_old = synchronization::NtpToMs(rtcp.back().ntp_secs,
int64_t rtcp_ntp_ms_old = Clock::NtpToMs(rtcp.back().ntp_secs,
rtcp.back().ntp_frac);
int64_t rtcp_timestamp_new = rtcp.front().rtp_timestamp;
int64_t rtcp_timestamp_old = rtcp.back().rtp_timestamp;

View File

@ -879,13 +879,6 @@ class RtpRtcp : public Module {
*
***************************************************************************/
/*
* Set the estimated camera delay in MS
*
* return -1 on failure else 0
*/
virtual int32_t SetCameraDelay(const int32_t delayMS) = 0;
/*
* Set the target send bitrate
*/

View File

@ -265,8 +265,6 @@ class MockRtpRtcp : public RtpRtcp {
int32_t(bool& enable, uint8_t& ID));
MOCK_METHOD1(SetAudioLevel,
int32_t(const uint8_t level_dBov));
MOCK_METHOD1(SetCameraDelay,
int32_t(const int32_t delayMS));
MOCK_METHOD1(SetTargetSendBitrate,
void(const uint32_t bitrate));
MOCK_METHOD3(SetGenericFECStatus,

View File

@ -512,8 +512,8 @@ RTCPReceiver::HandleReportBlock(const RTCPUtility::RTCPPacket& rtcpPacket,
_clock->CurrentNtp(lastReceivedRRNTPsecs, lastReceivedRRNTPfrac);
// time when we received this in MS
uint32_t receiveTimeMS = ModuleRTPUtility::ConvertNTPTimeToMS(
lastReceivedRRNTPsecs, lastReceivedRRNTPfrac);
uint32_t receiveTimeMS = Clock::NtpToMs(lastReceivedRRNTPsecs,
lastReceivedRRNTPfrac);
// Estimate RTT
uint32_t d = (delaySinceLastSendReport & 0x0000ffff) * 1000;

View File

@ -91,8 +91,6 @@ RTCPSender::RTCPSender(const int32_t id,
_reportBlocks(),
_csrcCNAMEs(),
_cameraDelayMS(0),
_lastSendReport(),
_lastRTCPTime(),
@ -170,7 +168,6 @@ RTCPSender::Init()
start_timestamp_ = -1;
_SSRC = 0;
_remoteSSRC = 0;
_cameraDelayMS = 0;
_sequenceNumberFIR = 0;
_tmmbr_Send = 0;
_packetOH_Send = 0;
@ -351,7 +348,7 @@ void RTCPSender::SetLastRtpTime(uint32_t rtp_timestamp,
last_rtp_timestamp_ = rtp_timestamp;
if (capture_time_ms < 0) {
// We don't currently get a capture time from VoiceEngine.
last_frame_capture_time_ms_ = _clock->TimeInMilliseconds();
last_frame_capture_time_ms_ = _clock->CurrentNtpInMilliseconds();
} else {
last_frame_capture_time_ms_ = capture_time_ms;
}
@ -380,19 +377,6 @@ RTCPSender::SetRemoteSSRC( const uint32_t ssrc)
return 0;
}
// Stores the estimated camera (capture pipeline) delay in milliseconds.
// Returns 0 on success, or -1 if |delayMS| is outside [-1000, 1000].
int32_t
RTCPSender::SetCameraDelay(const int32_t delayMS)
{
// Lock: _cameraDelayMS is shared RTCP-sender state.
CriticalSectionScoped lock(_criticalSectionRTCPSender);
// Reject delays larger than one second in either direction.
if(delayMS > 1000 || delayMS < -1000)
{
WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument, delay can't be larger than 1 sec", __FUNCTION__);
return -1;
}
_cameraDelayMS = delayMS;
return 0;
}
int32_t RTCPSender::CNAME(char cName[RTCP_CNAME_SIZE]) {
assert(cName);
CriticalSectionScoped lock(_criticalSectionRTCPSender);
@ -625,25 +609,13 @@ RTCPSender::BuildSR(uint8_t* rtcpbuffer,
_lastRTCPTime[i+1] =_lastRTCPTime[i];
}
_lastRTCPTime[0] = ModuleRTPUtility::ConvertNTPTimeToMS(NTPsec, NTPfrac);
_lastRTCPTime[0] = Clock::NtpToMs(NTPsec, NTPfrac);
_lastSendReport[0] = (NTPsec << 16) + (NTPfrac >> 16);
uint32_t freqHz = 90000; // For video
if(_audio) {
freqHz = _rtpRtcp.CurrentSendFrequencyHz();
if (!RtpTimestampNow(NTPsec, NTPfrac, &RTPtime)) {
RTPtime = 0;
}
// The timestamp of this RTCP packet should be estimated as the timestamp of
// the frame being captured at this moment. We are calculating that
// timestamp as the last frame's timestamp + the time since the last frame
// was captured.
{
// Needs protection since this method is called on the process thread.
CriticalSectionScoped lock(_criticalSectionRTCPSender);
RTPtime = start_timestamp_ + last_rtp_timestamp_ + (
_clock->TimeInMilliseconds() - last_frame_capture_time_ms_) *
(freqHz / 1000);
}
// Add sender data
// Save for our length field
@ -683,6 +655,24 @@ RTCPSender::BuildSR(uint8_t* rtcpbuffer,
return 0;
}
// Estimates the RTP timestamp corresponding to the NTP time given by
// (|ntp_secs_now|, |ntp_fracs_now|) and stores it in |timestamp_now|.
// Returns false if no frame has been captured yet (negative capture time),
// in which case no estimate can be made.
bool RTCPSender::RtpTimestampNow(uint32_t ntp_secs_now, uint32_t ntp_fracs_now,
uint32_t* timestamp_now) const {
if (last_frame_capture_time_ms_ < 0) {
return false;
}
// RTP clock rate in kHz: 90 kHz for video, the current send frequency for
// audio. NOTE(review): assumes CurrentSendFrequencyHz() is a whole multiple
// of 1000 — confirm, otherwise the integer division truncates.
uint32_t freq_khz = 90; // Default for video.
if(_audio) {
freq_khz = _rtpRtcp.CurrentSendFrequencyHz() / 1000;
}
// The timestamp of this RTCP packet should be estimated as the timestamp of
// the frame being captured at this moment. We are calculating that
// timestamp as the last frame's timestamp + the time since the last frame
// was captured.
*timestamp_now = start_timestamp_ + last_rtp_timestamp_ +
(Clock::NtpToMs(ntp_secs_now, ntp_fracs_now) -
last_frame_capture_time_ms_) * freq_khz;
return true;
}
int32_t RTCPSender::BuildSDEC(uint8_t* rtcpbuffer,
uint32_t& pos) {
@ -2091,7 +2081,7 @@ int32_t RTCPSender::AddReportBlocks(uint8_t* rtcpbuffer,
}
if (received) {
// answer to the one that sends to me
_lastRTCPTime[0] = ModuleRTPUtility::ConvertNTPTimeToMS(NTPsec, NTPfrac);
_lastRTCPTime[0] = Clock::NtpToMs(NTPsec, NTPfrac);
// Remote SSRC
ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);

View File

@ -72,8 +72,6 @@ public:
int32_t SetRemoteSSRC( const uint32_t ssrc);
int32_t SetCameraDelay(const int32_t delayMS);
int32_t CNAME(char cName[RTCP_CNAME_SIZE]);
int32_t SetCNAME(const char cName[RTCP_CNAME_SIZE]);
@ -197,6 +195,9 @@ private:
const uint16_t* nackList,
std::string* nackString);
bool RtpTimestampNow(uint32_t ntp_secs_now, uint32_t ntp_fracs_now,
uint32_t* timestamp_now) const;
private:
int32_t _id;
const bool _audio;
@ -229,8 +230,6 @@ private:
std::map<uint32_t, RTCPReportBlock*> _reportBlocks;
std::map<uint32_t, RTCPUtility::RTCPCnameInformation*> _csrcCNAMEs;
int32_t _cameraDelayMS;
// Sent
uint32_t _lastSendReport[RTCP_NUMBER_OF_SR]; // allow packet loss and RTT above 1 sec
uint32_t _lastRTCPTime[RTCP_NUMBER_OF_SR];

View File

@ -1811,30 +1811,6 @@ int32_t ModuleRtpRtcpImpl::SendRTCPSliceLossIndication(
return rtcp_sender_.SendRTCP(kRtcpSli, 0, 0, false, picture_id);
}
// Sets the estimated camera delay. On the default instance (one that owns
// child modules) the call is fanned out to every child module; otherwise it
// is delegated to this module's RTCP sender.
int32_t ModuleRtpRtcpImpl::SetCameraDelay(const int32_t delay_ms) {
WEBRTC_TRACE(kTraceModuleCall,
kTraceRtpRtcp,
id_,
"SetCameraDelay(%d)",
delay_ms);
// A non-empty child list marks this as the default (aggregating) instance.
const bool default_instance(child_modules_.empty() ? false : true);
if (default_instance) {
// Guard the child-module list while iterating.
CriticalSectionScoped lock(critical_section_module_ptrs_.get());
std::list<ModuleRtpRtcpImpl*>::iterator it = child_modules_.begin();
while (it != child_modules_.end()) {
RtpRtcp* module = *it;
if (module) {
module->SetCameraDelay(delay_ms);
}
it++;
}
// NOTE(review): per-child return codes are discarded; a child failure is
// not reported to the caller.
return 0;
}
return rtcp_sender_.SetCameraDelay(delay_ms);
}
int32_t ModuleRtpRtcpImpl::SetGenericFECStatus(
const bool enable,
const uint8_t payload_type_red,

View File

@ -390,8 +390,6 @@ class ModuleRtpRtcpImpl : public RtpRtcp {
// Send a request for a keyframe.
virtual int32_t RequestKeyFrame();
virtual int32_t SetCameraDelay(const int32_t delay_ms);
virtual void SetTargetSendBitrate(const uint32_t bitrate);
virtual int32_t SetGenericFECStatus(

View File

@ -609,7 +609,7 @@ void RTPSender::TimeToSendPacket(uint16_t sequence_number,
WebRtcRTPHeader rtp_header;
rtp_parser.Parse(rtp_header);
int64_t diff_ms = clock_->TimeInMilliseconds() - capture_time_ms;
int64_t diff_ms = clock_->CurrentNtpInMilliseconds() - capture_time_ms;
if (UpdateTransmissionTimeOffset(data_buffer, length, rtp_header, diff_ms)) {
// Update stored packet in case of receiving a re-transmission request.
packet_history_->ReplaceRTPHeader(data_buffer,
@ -645,9 +645,9 @@ int32_t RTPSender::SendToNetwork(
// TODO(holmer): This should be changed all over Video Engine so that negative
// time is consider invalid, while 0 is considered a valid time.
if (capture_time_ms > 0) {
int64_t time_now = clock_->TimeInMilliseconds();
int64_t diff_ms = clock_->CurrentNtpInMilliseconds() - capture_time_ms;
UpdateTransmissionTimeOffset(buffer, payload_length + rtp_header_length,
rtp_header, time_now - capture_time_ms);
rtp_header, diff_ms);
}
// Used for NACK and to spread out the transmission of packets.
if (packet_history_->PutRTPPacket(buffer, rtp_header_length + payload_length,

View File

@ -74,14 +74,6 @@ uint32_t ConvertNTPTimeToRTP(uint32_t NTPsec, uint32_t NTPfrac, uint32_t freq) {
return NTPsec * freq + tmp;
}
// Converts an NTP timestamp (seconds + binary fraction) to milliseconds.
// NOTE(review): single-precision float limits the fraction's accuracy, and
// NTPsec * freq wraps modulo 2^32 for real NTP second values (~3.9e9).
// Callers presumably only difference these values, where the wrap cancels —
// confirm before relying on absolute values.
uint32_t ConvertNTPTimeToMS(uint32_t NTPsec, uint32_t NTPfrac) {
int freq = 1000;
float ftemp = (float)NTPfrac / (float)NTP_FRAC;
uint32_t tmp = (uint32_t)(ftemp * freq);
uint32_t MStime = NTPsec * freq + tmp;
return MStime;
}
bool OldTimestamp(uint32_t newTimestamp,
uint32_t existingTimestamp,
bool* wrapped) {

View File

@ -70,11 +70,6 @@ namespace ModuleRTPUtility
uint32_t NTPfrac,
uint32_t freq);
// Return the time in milliseconds corresponding to the specified
// NTP timestamp.
uint32_t ConvertNTPTimeToMS(uint32_t NTPsec,
uint32_t NTPfrac);
uint32_t pow2(uint8_t exp);
// Returns a pointer to the payload data given a packet.

View File

@ -10,6 +10,7 @@
#include "video_capture_impl.h"
#include "clock.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "critical_section_wrapper.h"
#include "module_common_types.h"
@ -196,7 +197,8 @@ WebRtc_Word32 VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame&
captureFrame.set_render_time_ms(capture_time);
}
else {
captureFrame.set_render_time_ms(TickTime::MillisecondTimestamp());
captureFrame.set_render_time_ms(
Clock::GetRealTimeClock()->CurrentNtpInMilliseconds());
}
if (captureFrame.render_time_ms() == last_capture_time_) {

View File

@ -34,9 +34,15 @@ class Clock {
// source is fixed for this clock.
virtual int64_t TimeInMicroseconds() = 0;
// Retrieve an NTP absolute timestamp.
// Retrieve an NTP absolute timestamp in seconds and fractions of a second.
virtual void CurrentNtp(uint32_t& seconds, uint32_t& fractions) = 0;
// Retrieve an NTP absolute timestamp in milliseconds.
virtual int64_t CurrentNtpInMilliseconds() = 0;
// Converts an NTP timestamp to a millisecond timestamp.
static int64_t NtpToMs(uint32_t seconds, uint32_t fractions);
// Returns an instance of the real-time system clock implementation.
static Clock* GetRealTimeClock();
};
@ -55,9 +61,12 @@ class SimulatedClock : public Clock {
// source is fixed for this clock.
virtual int64_t TimeInMicroseconds();
// Retrieve an NTP absolute timestamp.
// Retrieve an NTP absolute timestamp in milliseconds.
virtual void CurrentNtp(uint32_t& seconds, uint32_t& fractions);
// Converts an NTP timestamp to a millisecond timestamp.
virtual int64_t CurrentNtpInMilliseconds();
// Advance the simulated clock with a given number of milliseconds or
// microseconds.
void AdvanceTimeMilliseconds(int64_t milliseconds);

View File

@ -23,6 +23,14 @@
namespace webrtc {
const double kNtpFracPerMs = 4.294967296E6;
// Converts an NTP timestamp (whole seconds plus a 32-bit binary fraction of
// a second) to milliseconds, rounded to the nearest millisecond. The seconds
// term is widened to 64 bits before multiplying so large NTP second counts
// (seconds since 1900) do not wrap.
int64_t Clock::NtpToMs(uint32_t ntp_secs, uint32_t ntp_frac) {
// kNtpFracPerMs: number of 2^-32-second fraction units per millisecond.
const double ntp_frac_ms = static_cast<double>(ntp_frac) / kNtpFracPerMs;
return 1000 * static_cast<int64_t>(ntp_secs) +
static_cast<int64_t>(ntp_frac_ms + 0.5);
}
#if defined(_WIN32)
struct reference_point {
@ -128,6 +136,42 @@ class RealTimeClock : public Clock {
virtual int64_t TimeInMicroseconds() {
return TickTime::MicrosecondTimestamp();
}
// Retrieve an NTP absolute timestamp in seconds and fractions of a second.
// The fraction is rounded to the nearest unit; kMagicNtpFractionalUnit is
// presumably 2^32 fraction units per second — confirm in its definition.
virtual void CurrentNtp(uint32_t& seconds, uint32_t& fractions) {
timeval tv = CurrentTimeVal();
double microseconds_in_seconds;
// Adjust() shifts the Unix epoch to the NTP epoch and normalizes the
// sub-second part into microseconds_in_seconds.
Adjust(tv, &seconds, &microseconds_in_seconds);
fractions = static_cast<uint32_t>(
microseconds_in_seconds * kMagicNtpFractionalUnit + 0.5);
}
// Retrieve an NTP absolute timestamp in milliseconds.
virtual int64_t CurrentNtpInMilliseconds() {
timeval tv = CurrentTimeVal();
uint32_t seconds;
double microseconds_in_seconds;
Adjust(tv, &seconds, &microseconds_in_seconds);
return 1000 * static_cast<int64_t>(seconds) +
static_cast<int64_t>(1000.0 * microseconds_in_seconds + 0.5);
}
protected:
virtual timeval CurrentTimeVal() const = 0;
static void Adjust(const timeval& tv, uint32_t* adjusted_s,
double* adjusted_us_in_s) {
*adjusted_s = tv.tv_sec + kNtpJan1970;
*adjusted_us_in_s = tv.tv_usec / 1e6;
if (*adjusted_us_in_s >= 1) {
*adjusted_us_in_s -= 1;
++*adjusted_s;
} else if (*adjusted_us_in_s < -1) {
*adjusted_us_in_s += 1;
--*adjusted_s;
}
}
};
#if defined(_WIN32)
@ -138,8 +182,8 @@ class WindowsRealTimeClock : public RealTimeClock {
virtual ~WindowsRealTimeClock() {}
// Retrieve an NTP absolute timestamp.
virtual void CurrentNtp(uint32_t& seconds, uint32_t& fractions) {
protected:
timeval CurrentTimeVal() const {
const uint64_t FILETIME_1970 = 0x019db1ded53e8000;
FILETIME StartTime;
@ -147,35 +191,20 @@ class WindowsRealTimeClock : public RealTimeClock {
struct timeval tv;
// We can't use query performance counter since they can change depending on
// speed steping
// speed stepping.
get_time(_helpTimer, StartTime);
Time = (((uint64_t) StartTime.dwHighDateTime) << 32) +
(uint64_t) StartTime.dwLowDateTime;
// Convert the hecto-nano second time to tv format
// Convert the hecto-nano second time to tv format.
Time -= FILETIME_1970;
tv.tv_sec = (uint32_t)(Time / (uint64_t)10000000);
tv.tv_usec = (uint32_t)((Time % (uint64_t)10000000) / 10);
double dtemp;
seconds = tv.tv_sec + kNtpJan1970;
dtemp = tv.tv_usec / 1e6;
if (dtemp >= 1) {
dtemp -= 1;
seconds++;
} else if (dtemp < -1) {
dtemp += 1;
seconds--;
}
dtemp *= kMagicNtpFractionalUnit;
fractions = (uint32_t)dtemp;
return tv;
}
private:
WindowsHelpTimer* _helpTimer;
};
@ -186,26 +215,14 @@ class UnixRealTimeClock : public RealTimeClock {
virtual ~UnixRealTimeClock() {}
// Retrieve an NTP absolute timestamp.
virtual void CurrentNtp(uint32_t& seconds, uint32_t& fractions) {
double dtemp;
protected:
timeval CurrentTimeVal() const {
struct timeval tv;
struct timezone tz;
tz.tz_minuteswest = 0;
tz.tz_dsttime = 0;
gettimeofday(&tv, &tz);
seconds = tv.tv_sec + kNtpJan1970;
dtemp = tv.tv_usec / 1e6;
if (dtemp >= 1) {
dtemp -= 1;
seconds++;
} else if (dtemp < -1) {
dtemp += 1;
seconds--;
}
dtemp *= kMagicNtpFractionalUnit;
fractions = (uint32_t)dtemp;
return tv;
}
};
#endif
@ -247,6 +264,10 @@ void SimulatedClock::CurrentNtp(uint32_t& seconds, uint32_t& fractions) {
kMagicNtpFractionalUnit / 1000);
}
// Returns the simulated time expressed as NTP milliseconds: the simulated
// wall-clock time shifted by kNtpJan1970 seconds (the 1900-to-1970 epoch
// offset), widened to 64 bits to avoid overflow.
int64_t SimulatedClock::CurrentNtpInMilliseconds() {
return TimeInMilliseconds() + 1000 * static_cast<int64_t>(kNtpJan1970);
}
void SimulatedClock::AdvanceTimeMilliseconds(int64_t milliseconds) {
AdvanceTimeMicroseconds(1000 * milliseconds);
}

View File

@ -0,0 +1,26 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/system_wrappers/interface/clock.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace webrtc {
// Checks that the two NTP accessors of the real-time clock agree: the
// millisecond reading taken just after CurrentNtp() must be at least the
// (seconds, fractions) reading converted to ms, and within 5 ms of it.
TEST(ClockTest, NtpTime) {
Clock* clock = Clock::GetRealTimeClock();
uint32_t seconds;
uint32_t fractions;
clock->CurrentNtp(seconds, fractions);
// Sampled second, so it can only be equal or later.
int64_t milliseconds = clock->CurrentNtpInMilliseconds();
EXPECT_GE(milliseconds, Clock::NtpToMs(seconds, fractions));
EXPECT_NEAR(milliseconds, Clock::NtpToMs(seconds, fractions), 5);
}
} // namespace webrtc

View File

@ -19,6 +19,7 @@
],
'sources': [
'aligned_malloc_unittest.cc',
'clock_unittest.cc',
'condition_variable_unittest.cc',
'critical_section_unittest.cc',
'event_tracer_unittest.cc',

View File

@ -609,7 +609,6 @@ void ViEEncoder::DelayChanged(int id, int frame_delay) {
ViEId(engine_id_, channel_id_), "%s: %u", __FUNCTION__,
frame_delay);
default_rtp_rtcp_->SetCameraDelay(frame_delay);
file_recorder_.SetFrameDelay(frame_delay);
}