diff --git a/talk/media/webrtc/webrtcexport.h b/talk/media/webrtc/webrtcexport.h index 71ebe4e8e..b00b0cb97 100644 --- a/talk/media/webrtc/webrtcexport.h +++ b/talk/media/webrtc/webrtcexport.h @@ -27,6 +27,9 @@ #ifndef TALK_MEDIA_WEBRTC_WEBRTCEXPORT_H_ #define TALK_MEDIA_WEBRTC_WEBRTCEXPORT_H_ +// When building for Chrome a part of the code can be built into +// a shared library, which is controlled by these macros. +// For all other builds, we always build a static library. #if !defined(GOOGLE_CHROME_BUILD) && !defined(CHROMIUM_BUILD) #define LIBPEERCONNECTION_LIB 1 #endif diff --git a/talk/media/webrtc/webrtcvoiceengine.cc b/talk/media/webrtc/webrtcvoiceengine.cc index 8faa46bb2..470c1ce84 100644 --- a/talk/media/webrtc/webrtcvoiceengine.cc +++ b/talk/media/webrtc/webrtcvoiceengine.cc @@ -1009,6 +1009,9 @@ bool WebRtcVoiceEngine::SetDevices(const Device* in_device, LOG_RTCERR2(SetRecordingDevice, in_name, in_id); ret = false; } + webrtc::AudioProcessing* ap = voe()->base()->audio_processing(); + if (ap) + ap->Initialize(); } // Find the playout device id in VoiceEngine and set playout device. @@ -3136,6 +3139,23 @@ bool WebRtcVoiceMediaChannel::MuteStream(uint32 ssrc, bool muted) { LOG_RTCERR2(SetInputMute, channel, muted); return false; } + // We set the AGC to mute state only when all the channels are muted. + // This implementation is not ideal, instead we should signal the AGC when + // the mic channel is muted/unmuted. We can't do it today because there + // is no good way to know which stream is mapping to the mic channel. 
+ bool all_muted = muted; + for (ChannelMap::const_iterator iter = send_channels_.begin(); + iter != send_channels_.end() && all_muted; ++iter) { + if (engine()->voe()->volume()->GetInputMute(iter->second->channel(), + all_muted)) { + LOG_RTCERR1(GetInputMute, iter->second->channel()); + return false; + } + } + + webrtc::AudioProcessing* ap = engine()->voe()->base()->audio_processing(); + if (ap) + ap->set_output_will_be_muted(all_muted); return true; } diff --git a/talk/media/webrtc/webrtcvoiceengine.h b/talk/media/webrtc/webrtcvoiceengine.h index 38053e907..38c4e1811 100644 --- a/talk/media/webrtc/webrtcvoiceengine.h +++ b/talk/media/webrtc/webrtcvoiceengine.h @@ -47,6 +47,10 @@ #if !defined(LIBPEERCONNECTION_LIB) && \ !defined(LIBPEERCONNECTION_IMPLEMENTATION) +// If you hit this, then you've tried to include this header from outside +// the shared library. An instance of this class must only be created from +// within the library that actually implements it. Otherwise use the +// WebRtcMediaEngine to construct an instance. #error "Bogus include." #endif diff --git a/talk/media/webrtc/webrtcvoiceengine_unittest.cc b/talk/media/webrtc/webrtcvoiceengine_unittest.cc index 3786a8a42..1798d1d65 100644 --- a/talk/media/webrtc/webrtcvoiceengine_unittest.cc +++ b/talk/media/webrtc/webrtcvoiceengine_unittest.cc @@ -3185,22 +3185,3 @@ TEST(WebRtcVoiceEngineTest, CoInitialize) { CoUninitialize(); } #endif - - -#ifdef USE_WEBRTC_DEV_BRANCH -TEST_F(WebRtcVoiceEngineTestFake, ExperimentalNsConfigViaOptions) { - EXPECT_TRUE(SetupEngine()); - - cricket::FakeAudioProcessing* audio_processing = - static_cast<cricket::FakeAudioProcessing*>( - engine_.voe()->base()->audio_processing()); - - EXPECT_FALSE(audio_processing->experimental_ns_enabled()); - - cricket::AudioOptions options; - options.experimental_ns.Set(true); - EXPECT_TRUE(engine_.SetOptions(options)); - - EXPECT_TRUE(audio_processing->experimental_ns_enabled()); -} -#endif