Add playout buffer status to the VoE video sync.

Review URL: https://webrtc-codereview.appspot.com/1311004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3835 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
pwestin@webrtc.org 2013-04-11 20:23:35 +00:00
parent 9da751715f
commit 1de01354e6
7 changed files with 196 additions and 229 deletions

View File

@ -114,24 +114,19 @@ int32_t ViESyncModule::Process() {
assert(video_rtp_rtcp_ && voe_sync_interface_);
assert(sync_.get());
int current_audio_delay_ms = 0;
int audio_jitter_buffer_delay_ms = 0;
int playout_buffer_delay_ms = 0;
if (voe_sync_interface_->GetDelayEstimate(voe_channel_id_,
current_audio_delay_ms) != 0) {
// Could not get VoE delay value, probably not a valid channel Id.
&audio_jitter_buffer_delay_ms,
&playout_buffer_delay_ms) != 0) {
// Could not get VoE delay value, probably not a valid channel Id or
// the channel has not received enough packets.
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, vie_channel_->Id(),
"%s: VE_GetDelayEstimate error for voice_channel %d",
__FUNCTION__, voe_channel_id_);
return 0;
}
// VoiceEngine report delay estimates even when not started, ignore if the
// reported value is lower than 40 ms.
if (current_audio_delay_ms < 40) {
WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, vie_channel_->Id(),
"A/V Sync: Audio delay < 40, skipping.");
return 0;
}
RtpRtcp* voice_rtp_rtcp = NULL;
if (0 != voe_sync_interface_->GetRtpRtcp(voe_channel_id_, voice_rtp_rtcp)) {
return 0;
@ -153,14 +148,16 @@ int32_t ViESyncModule::Process() {
return 0;
}
TRACE_COUNTER1("webrtc", "SyncCurrentVideoDelay", total_video_delay_target_ms);
TRACE_COUNTER1("webrtc", "SyncCurrentAudioDelay", current_audio_delay_ms);
TRACE_COUNTER1("webrtc", "SyncCurrentVideoDelay",
total_video_delay_target_ms);
TRACE_COUNTER1("webrtc", "SyncCurrentAudioDelay",
audio_jitter_buffer_delay_ms);
TRACE_COUNTER1("webrtc", "SyncRelativeDelay", relative_delay_ms);
int extra_audio_delay_ms = 0;
// Calculate the necessary extra audio delay and desired total video
// delay to get the streams in sync.
if (!sync_->ComputeDelays(relative_delay_ms,
current_audio_delay_ms,
audio_jitter_buffer_delay_ms,
&extra_audio_delay_ms,
&total_video_delay_target_ms)) {
return 0;

View File

@ -628,7 +628,6 @@ Channel::OnReceivedPayloadData(const uint8_t* payloadData,
// Update the packet delay
UpdatePacketDelay(rtpHeader->header.timestamp,
rtpHeader->header.sequenceNumber);
return 0;
}
@ -898,8 +897,8 @@ Channel::Channel(const int32_t channelId,
_decryptionRTCPBufferPtr(NULL),
_timeStamp(0), // This is just an offset, RTP module will add it's own random offset
_sendTelephoneEventPayloadType(106),
_playoutTimeStampRTP(0),
_playoutTimeStampRTCP(0),
playout_timestamp_rtp_(0),
playout_timestamp_rtcp_(0),
_numberOfDiscardedPackets(0),
_engineStatisticsPtr(NULL),
_outputMixerPtr(NULL),
@ -950,8 +949,7 @@ Channel::Channel(const int32_t channelId,
_countAliveDetections(0),
_countDeadDetections(0),
_outputSpeechType(AudioFrame::kNormalSpeech),
_averageDelayMs(0),
_previousSequenceNumber(0),
_average_jitter_buffer_delay_us(0),
_previousTimestamp(0),
_recPacketDelayMs(20),
_RxVadDetection(false),
@ -2120,10 +2118,7 @@ int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length) {
"Channel::ReceivedRTPPacket()");
// Store playout timestamp for the received RTP packet
uint32_t playoutTimestamp(0);
if (GetPlayoutTimeStamp(playoutTimestamp) == 0) {
_playoutTimeStampRTP = playoutTimestamp;
}
UpdatePlayoutTimestamp(false);
// Dump the RTP packet to a file (if RTP dump is enabled).
if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
@ -2149,10 +2144,7 @@ int32_t Channel::ReceivedRTCPPacket(const int8_t* data, int32_t length) {
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::ReceivedRTCPPacket()");
// Store playout timestamp for the received RTCP packet
uint32_t playoutTimestamp(0);
if (GetPlayoutTimeStamp(playoutTimestamp) == 0) {
_playoutTimeStampRTCP = playoutTimestamp;
}
UpdatePlayoutTimestamp(true);
// Dump the RTCP packet to a file (if RTP dump is enabled).
if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
@ -3836,12 +3828,12 @@ Channel::GetRemoteRTCPData(
// This value is updated on each incoming RTCP packet (0 when no packet
// has been received)
playoutTimestamp = _playoutTimeStampRTCP;
playoutTimestamp = playout_timestamp_rtcp_;
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
VoEId(_instanceId, _channelId),
"GetRemoteRTCPData() => playoutTimestamp=%lu",
_playoutTimeStampRTCP);
playout_timestamp_rtcp_);
if (NULL != jitter || NULL != fractionLost)
{
@ -4659,13 +4651,19 @@ Channel::GetNetworkStatistics(NetworkStatistics& stats)
return return_value;
}
int
Channel::GetDelayEstimate(int& delayMs) const
{
bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
int* playout_buffer_delay_ms) const {
if (_average_jitter_buffer_delay_us == 0) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::GetDelayEstimate()");
delayMs = (_averageDelayMs + 5) / 10 + _recPacketDelayMs;
return 0;
"Channel::GetDelayEstimate() no valid estimate.");
return false;
}
*jitter_buffer_delay_ms = (_average_jitter_buffer_delay_us + 500) / 1000 +
_recPacketDelayMs;
*playout_buffer_delay_ms = playout_delay_ms_;
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::GetDelayEstimate()");
return true;
}
int Channel::SetInitialPlayoutDelay(int delay_ms)
@ -4714,24 +4712,69 @@ Channel::SetMinimumPlayoutDelay(int delayMs)
return 0;
}
int
Channel::GetPlayoutTimestamp(unsigned int& timestamp)
{
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::GetPlayoutTimestamp()");
uint32_t playoutTimestamp(0);
if (GetPlayoutTimeStamp(playoutTimestamp) != 0)
{
_engineStatisticsPtr->SetLastError(
VE_CANNOT_RETRIEVE_VALUE, kTraceError,
"GetPlayoutTimestamp() failed to retrieve timestamp");
return -1;
void Channel::UpdatePlayoutTimestamp(bool rtcp) {
uint32_t playout_timestamp = 0;
if (_audioCodingModule.PlayoutTimestamp(&playout_timestamp) == -1) {
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::UpdatePlayoutTimestamp() failed to read playout"
" timestamp from the ACM");
_engineStatisticsPtr->SetLastError(
VE_CANNOT_RETRIEVE_VALUE, kTraceError,
"UpdatePlayoutTimestamp() failed to retrieve timestamp");
return;
}
uint16_t delay_ms = 0;
if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::UpdatePlayoutTimestamp() failed to read playout"
" delay from the ADM");
_engineStatisticsPtr->SetLastError(
VE_CANNOT_RETRIEVE_VALUE, kTraceError,
"UpdatePlayoutTimestamp() failed to retrieve playout delay");
return;
}
int32_t playout_frequency = _audioCodingModule.PlayoutFrequency();
CodecInst current_recive_codec;
if (_audioCodingModule.ReceiveCodec(&current_recive_codec) == 0) {
if (STR_CASE_CMP("G722", current_recive_codec.plname) == 0) {
playout_frequency = 8000;
} else if (STR_CASE_CMP("opus", current_recive_codec.plname) == 0) {
playout_frequency = 48000;
}
timestamp = playoutTimestamp;
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
VoEId(_instanceId,_channelId),
"GetPlayoutTimestamp() => timestamp=%u", timestamp);
return 0;
}
// Remove the playout delay.
playout_timestamp -= (delay_ms * (playout_frequency / 1000));
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
playout_timestamp);
if (rtcp) {
playout_timestamp_rtcp_ = playout_timestamp;
} else {
playout_timestamp_rtp_ = playout_timestamp;
}
playout_delay_ms_ = delay_ms;
}
int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::GetPlayoutTimestamp()");
if (playout_timestamp_rtp_ == 0) {
_engineStatisticsPtr->SetLastError(
VE_CANNOT_RETRIEVE_VALUE, kTraceError,
"GetPlayoutTimestamp() failed to retrieve timestamp");
return -1;
}
timestamp = playout_timestamp_rtp_;
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
VoEId(_instanceId,_channelId),
"GetPlayoutTimestamp() => timestamp=%u", timestamp);
return 0;
}
int
@ -4983,47 +5026,6 @@ Channel::InsertInbandDtmfTone()
return 0;
}
// Computes the channel playout timestamp: the ACM's current playout
// timestamp with the sound-card playout delay (converted to RTP ticks)
// subtracted. Returns 0 on success, -1 if either the ACM timestamp or the
// ADM playout delay cannot be read.
int32_t
Channel::GetPlayoutTimeStamp(uint32_t& playoutTimestamp)
{
    uint32_t acm_timestamp(0);
    if (_audioCodingModule.PlayoutTimestamp(&acm_timestamp) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
                     "Channel::GetPlayoutTimeStamp() failed to read playout"
                     " timestamp from the ACM");
        return -1;
    }
    uint16_t playout_delay_ms(0);
    if (_audioDeviceModulePtr->PlayoutDelay(&playout_delay_ms) == -1)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
                     "Channel::GetPlayoutTimeStamp() failed to read playout"
                     " delay from the ADM");
        return -1;
    }
    // The RTP clock rate differs from the decode rate for some codecs:
    // G.722 is clocked at 8 kHz on the wire and Opus at 48 kHz.
    int32_t rtp_clock_hz = _audioCodingModule.PlayoutFrequency();
    CodecInst receive_codec;
    if (_audioCodingModule.ReceiveCodec(&receive_codec) == 0) {
        if (STR_CASE_CMP("G722", receive_codec.plname) == 0) {
            rtp_clock_hz = 8000;
        } else if (STR_CASE_CMP("opus", receive_codec.plname) == 0) {
            rtp_clock_hz = 48000;
        }
    }
    // Remove the playout delay, expressed in RTP ticks.
    playoutTimestamp = acm_timestamp - (playout_delay_ms * (rtp_clock_hz / 1000));
    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                 "Channel::GetPlayoutTimeStamp() => playoutTimestamp = %lu",
                 playoutTimestamp);
    return 0;
}
void
Channel::ResetDeadOrAliveCounters()
{
@ -5072,110 +5074,66 @@ Channel::SendPacketRaw(const void *data, int len, bool RTCP)
}
}
int32_t
Channel::UpdatePacketDelay(const uint32_t timestamp,
const uint16_t sequenceNumber)
{
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
timestamp, sequenceNumber);
// Called for incoming RTP packets after successful RTP header parsing.
void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
uint16_t sequence_number) {
WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
"Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
rtp_timestamp, sequence_number);
int32_t rtpReceiveFrequency(0);
// Get frequency of last received payload
int rtp_receive_frequency = _audioCodingModule.ReceiveFrequency();
// Get frequency of last received payload
rtpReceiveFrequency = _audioCodingModule.ReceiveFrequency();
CodecInst current_receive_codec;
if (_audioCodingModule.ReceiveCodec(&current_receive_codec) != 0) {
return;
}
CodecInst currRecCodec;
if (_audioCodingModule.ReceiveCodec(&currRecCodec) == 0) {
if (STR_CASE_CMP("G722", currRecCodec.plname) == 0) {
// Even though the actual sampling rate for G.722 audio is
// 16,000 Hz, the RTP clock rate for the G722 payload format is
// 8,000 Hz because that value was erroneously assigned in
// RFC 1890 and must remain unchanged for backward compatibility.
rtpReceiveFrequency = 8000;
} else if (STR_CASE_CMP("opus", currRecCodec.plname) == 0) {
// We are resampling Opus internally to 32,000 Hz until all our
// DSP routines can operate at 48,000 Hz, but the RTP clock
// rate for the Opus payload format is standardized to 48,000 Hz,
// because that is the maximum supported decoding sampling rate.
rtpReceiveFrequency = 48000;
}
}
if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
// Even though the actual sampling rate for G.722 audio is
// 16,000 Hz, the RTP clock rate for the G722 payload format is
// 8,000 Hz because that value was erroneously assigned in
// RFC 1890 and must remain unchanged for backward compatibility.
rtp_receive_frequency = 8000;
} else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
// We are resampling Opus internally to 32,000 Hz until all our
// DSP routines can operate at 48,000 Hz, but the RTP clock
// rate for the Opus payload format is standardized to 48,000 Hz,
// because that is the maximum supported decoding sampling rate.
rtp_receive_frequency = 48000;
}
const uint32_t timeStampDiff = timestamp - _playoutTimeStampRTP;
uint32_t timeStampDiffMs(0);
// playout_timestamp_rtp_ updated in UpdatePlayoutTimestamp for every incoming
// packet.
uint32_t timestamp_diff_ms = (rtp_timestamp - playout_timestamp_rtp_) /
(rtp_receive_frequency / 1000);
if (timeStampDiff > 0)
{
switch (rtpReceiveFrequency) {
case 8000:
timeStampDiffMs = static_cast<uint32_t>(timeStampDiff >> 3);
break;
case 16000:
timeStampDiffMs = static_cast<uint32_t>(timeStampDiff >> 4);
break;
case 32000:
timeStampDiffMs = static_cast<uint32_t>(timeStampDiff >> 5);
break;
case 48000:
timeStampDiffMs = static_cast<uint32_t>(timeStampDiff / 48);
break;
default:
WEBRTC_TRACE(kTraceWarning, kTraceVoice,
VoEId(_instanceId, _channelId),
"Channel::UpdatePacketDelay() invalid sample rate");
timeStampDiffMs = 0;
return -1;
}
if (timeStampDiffMs > (2 * kVoiceEngineMaxMinPlayoutDelayMs))
{
timeStampDiffMs = 0;
}
uint16_t packet_delay_ms = (rtp_timestamp - _previousTimestamp) /
(rtp_receive_frequency / 1000);
if (_averageDelayMs == 0)
{
_averageDelayMs = timeStampDiffMs * 10;
}
else
{
// Filter average delay value using exponential filter (alpha is
// 7/8). We derive 10*_averageDelayMs here (reduces risk of
// rounding error) and compensate for it in GetDelayEstimate()
// later. Adding 4/8 results in correct rounding.
_averageDelayMs = ((_averageDelayMs*7 + 10*timeStampDiffMs + 4)>>3);
}
_previousTimestamp = rtp_timestamp;
if (sequenceNumber - _previousSequenceNumber == 1)
{
uint16_t packetDelayMs = 0;
switch (rtpReceiveFrequency) {
case 8000:
packetDelayMs = static_cast<uint16_t>(
(timestamp - _previousTimestamp) >> 3);
break;
case 16000:
packetDelayMs = static_cast<uint16_t>(
(timestamp - _previousTimestamp) >> 4);
break;
case 32000:
packetDelayMs = static_cast<uint16_t>(
(timestamp - _previousTimestamp) >> 5);
break;
case 48000:
packetDelayMs = static_cast<uint16_t>(
(timestamp - _previousTimestamp) / 48);
break;
}
if (timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
timestamp_diff_ms = 0;
}
if (packetDelayMs >= 10 && packetDelayMs <= 60)
_recPacketDelayMs = packetDelayMs;
}
}
if (timestamp_diff_ms == 0) return;
_previousSequenceNumber = sequenceNumber;
_previousTimestamp = timestamp;
if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
_recPacketDelayMs = packet_delay_ms;
}
return 0;
if (_average_jitter_buffer_delay_us == 0) {
_average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
return;
}
// Filter average delay value using exponential filter (alpha is
// 7/8). We derive 1000 *_average_jitter_buffer_delay_us here (reduces
// risk of rounding error) and compensate for it in GetDelayEstimate()
// later.
_average_jitter_buffer_delay_us = (_average_jitter_buffer_delay_us * 7 +
1000 * timestamp_diff_ms + 500) / 8;
}
void

View File

@ -202,10 +202,12 @@ public:
int GetNetworkStatistics(NetworkStatistics& stats);
// VoEVideoSync
int GetDelayEstimate(int& delayMs) const;
bool GetDelayEstimate(int* jitter_buffer_delay_ms,
int* playout_buffer_delay_ms) const;
int SetInitialPlayoutDelay(int delay_ms);
int SetMinimumPlayoutDelay(int delayMs);
int GetPlayoutTimestamp(unsigned int& timestamp);
void UpdatePlayoutTimestamp(bool rtcp);
int SetInitTimestamp(unsigned int timestamp);
int SetInitSequenceNumber(short sequenceNumber);
@ -421,14 +423,12 @@ public:
private:
int InsertInbandDtmfTone();
int32_t
MixOrReplaceAudioWithFile(const int mixingFrequency);
int32_t MixOrReplaceAudioWithFile(const int mixingFrequency);
int32_t MixAudioWithFile(AudioFrame& audioFrame, const int mixingFrequency);
int32_t GetPlayoutTimeStamp(uint32_t& playoutTimestamp);
void UpdateDeadOrAliveCounters(bool alive);
int32_t SendPacketRaw(const void *data, int len, bool RTCP);
int32_t UpdatePacketDelay(const uint32_t timestamp,
const uint16_t sequenceNumber);
void UpdatePacketDelay(uint32_t timestamp,
uint16_t sequenceNumber);
void RegisterReceiveCodecsToRTPModule();
int ApmProcessRx(AudioFrame& audioFrame);
@ -470,10 +470,12 @@ private:
uint8_t* _decryptionRTCPBufferPtr;
uint32_t _timeStamp;
uint8_t _sendTelephoneEventPayloadType;
uint32_t _playoutTimeStampRTP;
uint32_t _playoutTimeStampRTCP;
uint32_t playout_timestamp_rtp_;
uint32_t playout_timestamp_rtcp_;
uint32_t playout_delay_ms_;
uint32_t _numberOfDiscardedPackets;
private:
private:
// uses
Statistics* _engineStatisticsPtr;
OutputMixer* _outputMixerPtr;
@ -532,8 +534,7 @@ private:
uint32_t _countDeadDetections;
AudioFrame::SpeechType _outputSpeechType;
// VoEVideoSync
uint32_t _averageDelayMs;
uint16_t _previousSequenceNumber;
uint32_t _average_jitter_buffer_delay_us;
uint32_t _previousTimestamp;
uint16_t _recPacketDelayMs;
// VoEAudioProcessing

View File

@ -55,18 +55,20 @@ public:
virtual int Release() = 0;
// Gets the current sound card buffer size (playout delay).
virtual int GetPlayoutBufferSize(int& bufferMs) = 0;
virtual int GetPlayoutBufferSize(int& buffer_ms) = 0;
// Sets an additional delay for the playout jitter buffer.
virtual int SetMinimumPlayoutDelay(int channel, int delayMs) = 0;
virtual int SetMinimumPlayoutDelay(int channel, int delay_ms) = 0;
// Sets an initial delay for the playout jitter buffer. The playout of the
// audio is delayed by |delay_ms| milliseconds.
virtual int SetInitialPlayoutDelay(int channel, int delay_ms) = 0;
// Gets the sum of the algorithmic delay, jitter buffer delay, and the
// playout buffer delay for a specified |channel|.
virtual int GetDelayEstimate(int channel, int& delayMs) = 0;
// Gets the |jitter_buffer_delay_ms| (including the algorithmic delay), and
// the |playout_buffer_delay_ms| for a specified |channel|.
virtual int GetDelayEstimate(int channel,
int* jitter_buffer_delay_ms,
int* playout_buffer_delay_ms) = 0;
// Manual initialization of the RTP timestamp.
virtual int SetInitTimestamp(int channel, unsigned int timestamp) = 0;

View File

@ -31,14 +31,17 @@ class VideoSyncTest : public AfterStreamingFixture {
std::vector<int> all_delay_estimates;
for (int second = 0; second < 15; second++) {
int delay_estimate = 0;
EXPECT_EQ(0, voe_vsync_->GetDelayEstimate(channel_, delay_estimate));
int jitter_buffer_delay_ms = 0;
int playout_buffer_delay_ms = 0;
EXPECT_EQ(0, voe_vsync_->GetDelayEstimate(channel_,
&jitter_buffer_delay_ms,
&playout_buffer_delay_ms));
EXPECT_GT(delay_estimate, min_estimate) <<
EXPECT_GT(jitter_buffer_delay_ms, min_estimate) <<
"The delay estimate can not conceivably get lower than " <<
min_estimate << " ms, it's unrealistic.";
all_delay_estimates.push_back(delay_estimate);
all_delay_estimates.push_back(jitter_buffer_delay_ms);
Sleep(1000);
}
@ -67,9 +70,10 @@ class VideoSyncTest : public AfterStreamingFixture {
}
};
TEST_F(VideoSyncTest,
       CanNotGetPlayoutTimestampWhilePlayingWithoutSettingItFirst) {
  // GetPlayoutTimestamp() fails (-1) while no playout timestamp is
  // available yet.
  unsigned int ignored;
  EXPECT_EQ(-1, voe_vsync_->GetPlayoutTimestamp(channel_, ignored));
}
TEST_F(VideoSyncTest, CannotSetInitTimestampWhilePlaying) {

View File

@ -164,26 +164,29 @@ int VoEVideoSyncImpl::SetInitialPlayoutDelay(int channel, int delay_ms)
return channel_ptr->SetInitialPlayoutDelay(delay_ms);
}
int VoEVideoSyncImpl::GetDelayEstimate(int channel, int& delayMs)
{
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"GetDelayEstimate(channel=%d, delayMs=?)", channel);
IPHONE_NOT_SUPPORTED(_shared->statistics());
int VoEVideoSyncImpl::GetDelayEstimate(int channel,
int* jitter_buffer_delay_ms,
int* playout_buffer_delay_ms) {
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
"GetDelayEstimate(channel=%d, delayMs=?)", channel);
IPHONE_NOT_SUPPORTED(_shared->statistics());
if (!_shared->statistics().Initialized())
{
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
voe::ScopedChannel sc(_shared->channel_manager(), channel);
voe::Channel* channelPtr = sc.ChannelPtr();
if (channelPtr == NULL)
{
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"GetDelayEstimate() failed to locate channel");
return -1;
}
return channelPtr->GetDelayEstimate(delayMs);
if (!_shared->statistics().Initialized()) {
_shared->SetLastError(VE_NOT_INITED, kTraceError);
return -1;
}
voe::ScopedChannel sc(_shared->channel_manager(), channel);
voe::Channel* channelPtr = sc.ChannelPtr();
if (channelPtr == NULL) {
_shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
"GetDelayEstimate() failed to locate channel");
return -1;
}
if (!channelPtr->GetDelayEstimate(jitter_buffer_delay_ms,
playout_buffer_delay_ms)) {
return -1;
}
return 0;
}
int VoEVideoSyncImpl::GetPlayoutBufferSize(int& bufferMs)

View File

@ -26,7 +26,9 @@ public:
virtual int SetInitialPlayoutDelay(int channel, int delay_ms);
virtual int GetDelayEstimate(int channel, int& delayMs);
virtual int GetDelayEstimate(int channel,
int* jitter_buffer_delay_ms,
int* playout_buffer_delay_ms);
virtual int SetInitTimestamp(int channel, unsigned int timestamp);