(Auto)update libjingle 69634309-> 69640360
git-svn-id: http://webrtc.googlecode.com/svn/trunk@6512 4adac7df-926f-26a2-2b94-8c16560cd09d
@@ -27,9 +27,6 @@
 #ifndef TALK_MEDIA_WEBRTC_WEBRTCEXPORT_H_
 #define TALK_MEDIA_WEBRTC_WEBRTCEXPORT_H_
 
-// When building for Chrome a part of the code can be built into
-// a shared library, which is controlled by these macros.
-// For all other builds, we always build a static library.
 #if !defined(GOOGLE_CHROME_BUILD) && !defined(CHROMIUM_BUILD)
 #define LIBPEERCONNECTION_LIB 1
 #endif
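The guard kept above means that outside of Chromium builds (neither GOOGLE_CHROME_BUILD nor CHROMIUM_BUILD defined) the code is always compiled into a static library, while a Chromium build may place it in a shared library. As an illustration only, an export macro keyed off these defines usually looks roughly like the sketch below; WRTC_EXAMPLE_EXPORT is a made-up name, not the macro actually defined in webrtcexport.h.

    // Illustrative sketch only: a typical export macro driven by the same
    // defines. The real webrtcexport.h may differ.
    #if defined(LIBPEERCONNECTION_LIB)
    // Static library build: no symbol annotations are needed.
    #define WRTC_EXAMPLE_EXPORT
    #elif defined(WIN32)
    #if defined(LIBPEERCONNECTION_IMPLEMENTATION)
    #define WRTC_EXAMPLE_EXPORT __declspec(dllexport)  // building the DLL
    #else
    #define WRTC_EXAMPLE_EXPORT __declspec(dllimport)  // consuming the DLL
    #endif
    #else
    #define WRTC_EXAMPLE_EXPORT __attribute__((visibility("default")))
    #endif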
@@ -1002,9 +1002,6 @@ bool WebRtcVoiceEngine::SetDevices(const Device* in_device,
       LOG_RTCERR2(SetRecordingDevice, in_name, in_id);
       ret = false;
     }
-    webrtc::AudioProcessing* ap = voe()->base()->audio_processing();
-    if (ap)
-      ap->Initialize();
   }
 
   // Find the playout device id in VoiceEngine and set playout device.
@@ -3131,23 +3128,6 @@ bool WebRtcVoiceMediaChannel::MuteStream(uint32 ssrc, bool muted) {
     LOG_RTCERR2(SetInputMute, channel, muted);
     return false;
   }
-  // We set the AGC to mute state only when all the channels are muted.
-  // This implementation is not ideal, instead we should signal the AGC when
-  // the mic channel is muted/unmuted. We can't do it today because there
-  // is no good way to know which stream is mapping to the mic channel.
-  bool all_muted = muted;
-  for (ChannelMap::const_iterator iter = send_channels_.begin();
-       iter != send_channels_.end() && all_muted; ++iter) {
-    if (engine()->voe()->volume()->GetInputMute(iter->second->channel(),
-                                                all_muted)) {
-      LOG_RTCERR1(GetInputMute, iter->second->channel());
-      return false;
-    }
-  }
-
-  webrtc::AudioProcessing* ap = engine()->voe()->base()->audio_processing();
-  if (ap)
-    ap->set_output_will_be_muted(all_muted);
   return true;
 }
 
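The removed block aggregated the per-channel mute state before telling AudioProcessing whether its output would be muted: the flag was only set once every send channel reported muted. A generic, self-contained restatement of that pattern, assuming illustrative stand-in types rather than the actual WebRTC classes, is sketched below.

    // Sketch of the "act only when every channel is muted" pattern from the
    // removed block. Channel and AudioProcessor are illustrative stand-ins.
    #include <map>

    struct Channel { bool muted = false; };
    struct AudioProcessor {
      void set_output_will_be_muted(bool muted) { output_muted_ = muted; }
      bool output_muted_ = false;
    };

    bool UpdateAggregateMute(const std::map<int, Channel>& send_channels,
                             bool this_channel_muted,
                             AudioProcessor* ap) {
      // Start from the channel just changed and stop scanning as soon as one
      // unmuted channel is found, mirroring the original loop condition.
      bool all_muted = this_channel_muted;
      for (auto it = send_channels.begin();
           it != send_channels.end() && all_muted; ++it) {
        all_muted = it->second.muted;
      }
      if (ap)
        ap->set_output_will_be_muted(all_muted);
      return true;
    }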
@@ -47,10 +47,6 @@
 
 #if !defined(LIBPEERCONNECTION_LIB) && \
     !defined(LIBPEERCONNECTION_IMPLEMENTATION)
-// If you hit this, then you've tried to include this header from outside
-// the shared library. An instance of this class must only be created from
-// within the library that actually implements it. Otherwise use the
-// WebRtcMediaEngine to construct an instance.
 #error "Bogus include."
 #endif
 
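The #if/#error guard kept above lets the header compile only into the static library (LIBPEERCONNECTION_LIB) or from inside the shared library itself (LIBPEERCONNECTION_IMPLEMENTATION); any other include trips the error. A minimal sketch of the same pattern, with hypothetical file and macro names, is:

    // my_internal.h -- hypothetical header reusing the same guard pattern.
    #if !defined(MY_LIB) && !defined(MY_LIB_IMPLEMENTATION)
    #error "This header may only be included from inside the library."
    #endif
    class Internal {};

    // my_internal.cc -- a file inside the library; the build (or the file
    // itself) defines MY_LIB_IMPLEMENTATION, so the include is permitted.
    #define MY_LIB_IMPLEMENTATION
    #include "my_internal.h"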
@@ -3176,3 +3176,4 @@ TEST(WebRtcVoiceEngineTest, CoInitialize) {
   CoUninitialize();
 }
 #endif
+