Fix the chain that propagates the audio frame's rtp and ntp timestamp including:

* In AudioCodingModuleImpl::PlayoutData10Ms, don't reset the timestamp obtained from GetAudio.
* When there is more than one participant, set the AudioFrame's RTP timestamp to 0.
* Copy ntp_time_ms_ in AudioFrame::CopyFrom method.
* In RemixAndResample, pass src frame's timestamp_ and ntp_time_ms_ to the dst frame.
* Fix how |elapsed_time_ms| is computed in channel.cc by adding GetPlayoutFrequency.

Tweaks on ntp_time_ms_:
* Init ntp_time_ms_ to -1 in AudioFrame ctor.
* When there is more than one participant, set the AudioFrame's ntp_time_ms_ to an invalid value, i.e. we don't support ntp_time_ms_ in the multiple-participant case until the mixing is moved to Chrome.

Added elapsed_time_ms to AudioFrame and pass it to Chrome, where we don't have information about the RTP timestamp's sample rate, i.e. we can't convert the RTP timestamp to milliseconds.

BUG=3111
R=henrik.lundin@webrtc.org, turaj@webrtc.org, xians@webrtc.org
TBR=andrew
andrew to take another look on audio_conference_mixer_impl.cc

Review URL: https://webrtc-codereview.appspot.com/14559004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@6346 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
wu@webrtc.org
2014-06-05 20:34:08 +00:00
parent 130fa64d4c
commit 94454b71ad
26 changed files with 168 additions and 100 deletions

View File

@@ -475,10 +475,17 @@ int AcmReceiver::GetAudio(int desired_freq_hz, AudioFrame* audio_frame) {
call_stats_.DecodedByNetEq(audio_frame->speech_type_);
// Computes the RTP timestamp of the first sample in |audio_frame| from
// |PlayoutTimestamp|, which is the timestamp of the last sample of
// |GetPlayoutTimestamp|, which is the timestamp of the last sample of
// |audio_frame|.
audio_frame->timestamp_ =
PlayoutTimestamp() - audio_frame->samples_per_channel_;
uint32_t playout_timestamp = 0;
if (GetPlayoutTimestamp(&playout_timestamp)) {
audio_frame->timestamp_ =
playout_timestamp - audio_frame->samples_per_channel_;
} else {
// Remain 0 until we have a valid |playout_timestamp|.
audio_frame->timestamp_ = 0;
}
return 0;
}
@@ -596,13 +603,14 @@ void AcmReceiver::set_id(int id) {
id_ = id;
}
uint32_t AcmReceiver::PlayoutTimestamp() {
bool AcmReceiver::GetPlayoutTimestamp(uint32_t* timestamp) {
if (av_sync_) {
assert(initial_delay_manager_.get());
if (initial_delay_manager_->buffering())
return initial_delay_manager_->playout_timestamp();
if (initial_delay_manager_->buffering()) {
return initial_delay_manager_->GetPlayoutTimestamp(timestamp);
}
}
return neteq_->PlayoutTimestamp();
return neteq_->GetPlayoutTimestamp(timestamp);
}
int AcmReceiver::last_audio_codec_id() const {

View File

@@ -242,9 +242,10 @@ class AcmReceiver {
void set_id(int id); // TODO(turajs): can be inline.
//
// Returns the RTP timestamp of the last sample delivered by GetAudio().
// Gets the RTP timestamp of the last sample delivered by GetAudio().
// Returns true if the RTP timestamp is valid, otherwise false.
//
uint32_t PlayoutTimestamp();
bool GetPlayoutTimestamp(uint32_t* timestamp);
//
// Return the index of the codec associated with the last non-CNG/non-DTMF

View File

@@ -1776,7 +1776,6 @@ int AudioCodingModuleImpl::PlayoutData10Ms(int desired_freq_hz,
}
audio_frame->id_ = id_;
audio_frame->timestamp_ = 0;
return 0;
}
@@ -1917,8 +1916,7 @@ int AudioCodingModuleImpl::ConfigISACBandwidthEstimator(
}
int AudioCodingModuleImpl::PlayoutTimestamp(uint32_t* timestamp) {
*timestamp = receiver_.PlayoutTimestamp();
return 0;
return receiver_.GetPlayoutTimestamp(timestamp) ? 0 : -1;
}
bool AudioCodingModuleImpl::HaveValidEncoder(const char* caller_name) const {

View File

@@ -219,6 +219,14 @@ void InitialDelayManager::LatePackets(
return;
}
// Reports the playout timestamp of the initially delayed stream.
// The timestamp is only meaningful while audio is being buffered; when
// buffering has ended (or never started) no value is written and false
// is returned.
bool InitialDelayManager::GetPlayoutTimestamp(uint32_t* playout_timestamp) {
  if (buffering_) {
    *playout_timestamp = playout_timestamp_;
    return true;
  }
  // Not buffering: |playout_timestamp_| is stale, so signal "no valid value".
  return false;
}
// Permanently stops the initial-delay buffering phase. After this call,
// GetPlayoutTimestamp() reports no valid timestamp (it requires
// |buffering_| to be true).
void InitialDelayManager::DisableBuffering() {
  buffering_ = false;
}

View File

@@ -65,8 +65,9 @@ class InitialDelayManager {
// sequence of late (or perhaps missing) packets is computed.
void LatePackets(uint32_t timestamp_now, SyncStream* sync_stream);
// Playout timestamp, valid when buffering.
uint32_t playout_timestamp() { return playout_timestamp_; }
// Get playout timestamp.
// Returns true if the timestamp is valid (when buffering), otherwise false.
bool GetPlayoutTimestamp(uint32_t* playout_timestamp);
// True if buffered audio is less than the given initial delay (specified at
// the constructor). Buffering might be disabled by the client of this class.

View File

@@ -359,7 +359,9 @@ TEST_F(InitialDelayManagerTest, BufferingAudio) {
EXPECT_TRUE(manager_->buffering());
const uint32_t expected_playout_timestamp = rtp_info_.header.timestamp -
kInitDelayMs * kSamplingRateHz / 1000;
EXPECT_EQ(expected_playout_timestamp, manager_->playout_timestamp());
uint32_t actual_playout_timestamp = 0;
EXPECT_TRUE(manager_->GetPlayoutTimestamp(&actual_playout_timestamp));
EXPECT_EQ(expected_playout_timestamp, actual_playout_timestamp);
NextRtpHeader(&rtp_info_, &rtp_receive_timestamp_);
}