(Auto)update libjingle 67555838 -> 67643194

git-svn-id: http://webrtc.googlecode.com/svn/trunk@6206 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
buildbot@webrtc.org 2014-05-21 00:24:54 +00:00
parent 82c4b8531c
commit 49a6a27bf0
2 changed files with 19 additions and 5 deletions

View File

@ -59,6 +59,8 @@ void CurrentSpeakerMonitor::Start() {
this, &CurrentSpeakerMonitor::OnAudioMonitor);
audio_source_context_->SignalMediaStreamsUpdate.connect(
this, &CurrentSpeakerMonitor::OnMediaStreamsUpdate);
audio_source_context_->SignalMediaStreamsReset.connect(
this, &CurrentSpeakerMonitor::OnMediaStreamsReset);
started_ = true;
}
@ -190,20 +192,28 @@ void CurrentSpeakerMonitor::OnAudioMonitor(
}
// Handles a media-streams-update notification for the monitored session,
// keeping ssrc_to_speaking_state_map_ in sync with the current stream set.
// NOTE(review): this is a diff rendering — adjacent duplicate lines below are
// the pre-change and post-change text of the same statement (parameter type
// Session* -> BaseSession*; iteration fixed from video() to audio() streams).
void CurrentSpeakerMonitor::OnMediaStreamsUpdate(
AudioSourceContext* audio_source_context, Session* session,
AudioSourceContext* audio_source_context, BaseSession* session,
const MediaStreams& added, const MediaStreams& removed) {
// Only react to events for the context/session this monitor was started on.
if (audio_source_context == audio_source_context_ && session == session_) {
// Update the speaking state map based on added and removed streams.
for (std::vector<cricket::StreamParams>::const_iterator
it = removed.video().begin(); it != removed.video().end(); ++it) {
it = removed.audio().begin(); it != removed.audio().end(); ++it) {
// Forget the speaking state of any SSRC whose stream went away.
ssrc_to_speaking_state_map_.erase(it->first_ssrc());
}
for (std::vector<cricket::StreamParams>::const_iterator
it = added.video().begin(); it != added.video().end(); ++it) {
it = added.audio().begin(); it != added.audio().end(); ++it) {
// Newly added streams start out marked as not speaking.
ssrc_to_speaking_state_map_[it->first_ssrc()] = SS_NOT_SPEAKING;
}
}
}
// Drops all cached per-SSRC speaking state when the media streams of the
// monitored session are reset; subsequent updates repopulate the map.
void CurrentSpeakerMonitor::OnMediaStreamsReset(
    AudioSourceContext* audio_source_context, BaseSession* session) {
  // Ignore resets that belong to a different audio source context or session.
  if (audio_source_context != audio_source_context_ || session != session_)
    return;
  ssrc_to_speaking_state_map_.clear();
}
} // namespace cricket

View File

@ -48,7 +48,9 @@ class AudioSourceContext {
public:
sigslot::signal2<AudioSourceContext*, const cricket::AudioInfo&>
SignalAudioMonitor;
sigslot::signal4<AudioSourceContext*, cricket::Session*,
sigslot::signal2<AudioSourceContext*, cricket::BaseSession*>
SignalMediaStreamsReset;
sigslot::signal4<AudioSourceContext*, cricket::BaseSession*,
const cricket::MediaStreams&, const cricket::MediaStreams&>
SignalMediaStreamsUpdate;
};
@ -85,9 +87,11 @@ class CurrentSpeakerMonitor : public sigslot::has_slots<> {
private:
void OnAudioMonitor(AudioSourceContext* call, const AudioInfo& info);
void OnMediaStreamsUpdate(AudioSourceContext* call,
Session* session,
BaseSession* session,
const MediaStreams& added,
const MediaStreams& removed);
void OnMediaStreamsReset(AudioSourceContext* audio_source_context,
BaseSession* session);
// These are states that a participant will pass through so that we gradually
// recognize that they have started and stopped speaking. This avoids