(Auto)update libjingle 73399579-> 73626167

git-svn-id: http://webrtc.googlecode.com/svn/trunk@6928 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
henrike@webrtc.org 2014-08-19 14:56:59 +00:00
parent d5b292e450
commit 0481f15f02
14 changed files with 221 additions and 172 deletions

View File

@ -119,7 +119,6 @@ const char MediaConstraintsInterface::kHighBitrate[] =
const char MediaConstraintsInterface::kVeryHighBitrate[] =
"googVeryHighBitrate";
const char MediaConstraintsInterface::kPayloadPadding[] = "googPayloadPadding";
const char MediaConstraintsInterface::kOpusFec[] = "googOpusFec";
// Set |value| to the value associated with the first appearance of |key|, or

View File

@ -133,8 +133,6 @@ class MediaConstraintsInterface {
static const char kHighBitrate[]; // googHighBitrate
static const char kVeryHighBitrate[]; // googVeryHighBitrate
static const char kPayloadPadding[]; // googPayloadPadding
// kOpusFec controls whether we ask the other side to turn on FEC for Opus.
static const char kOpusFec[]; // googOpusFec
// The prefix of internal-only constraints whose JS set values should be
// stripped by Chrome before passed down to Libjingle.

View File

@ -239,14 +239,14 @@ bool PeerConnectionFactory::Initialize_s() {
new cricket::DummyDeviceManager());
// TODO: Need to make sure only one VoE is created inside
// WebRtcMediaEngine.
cricket::WebRtcMediaEngine* webrtc_media_engine(
new cricket::WebRtcMediaEngine(default_adm_.get(),
NULL, // No secondary adm.
video_encoder_factory_.get(),
video_decoder_factory_.get()));
cricket::MediaEngineInterface* media_engine(
cricket::WebRtcMediaEngineFactory::Create(default_adm_.get(),
NULL, // No secondary adm.
video_encoder_factory_.get(),
video_decoder_factory_.get()));
channel_manager_.reset(new cricket::ChannelManager(
webrtc_media_engine, device_manager, worker_thread_));
media_engine, device_manager, worker_thread_));
channel_manager_->SetVideoRtxEnabled(true);
if (!channel_manager_->Init()) {
return false;

View File

@ -622,10 +622,6 @@ bool WebRtcSession::Initialize(
cricket::VideoOptions::HIGH);
}
SetOptionFromOptionalConstraint(constraints,
MediaConstraintsInterface::kOpusFec,
&audio_options_.opus_fec);
const cricket::VideoCodec default_codec(
JsepSessionDescription::kDefaultVideoCodecId,
JsepSessionDescription::kDefaultVideoCodecName,

View File

@ -531,6 +531,7 @@
'media/webrtc/webrtcexport.h',
'media/webrtc/webrtcmediaengine.cc',
'media/webrtc/webrtcmediaengine.h',
'media/webrtc/webrtcmediaengine.cc',
'media/webrtc/webrtcpassthroughrender.cc',
'media/webrtc/webrtcpassthroughrender.h',
'media/webrtc/webrtctexturevideoframe.cc',

View File

@ -182,7 +182,6 @@ struct AudioOptions {
recording_sample_rate.SetFrom(change.recording_sample_rate);
playout_sample_rate.SetFrom(change.playout_sample_rate);
dscp.SetFrom(change.dscp);
opus_fec.SetFrom(change.opus_fec);
}
bool operator==(const AudioOptions& o) const {
@ -208,8 +207,7 @@ struct AudioOptions {
rx_agc_limiter == o.rx_agc_limiter &&
recording_sample_rate == o.recording_sample_rate &&
playout_sample_rate == o.playout_sample_rate &&
dscp == o.dscp &&
opus_fec == o.opus_fec;
dscp == o.dscp;
}
std::string ToString() const {
@ -240,7 +238,6 @@ struct AudioOptions {
ost << ToStringIfSet("recording_sample_rate", recording_sample_rate);
ost << ToStringIfSet("playout_sample_rate", playout_sample_rate);
ost << ToStringIfSet("dscp", dscp);
ost << ToStringIfSet("opus_fec", opus_fec);
ost << "}";
return ost.str();
}
@ -278,8 +275,6 @@ struct AudioOptions {
Settable<uint32> playout_sample_rate;
// Set DSCP value for packet sent from audio channel.
Settable<bool> dscp;
// Set Opus FEC
Settable<bool> opus_fec;
};
// Options that can be applied to a VideoMediaChannel or a VideoMediaEngine.

View File

@ -27,90 +27,40 @@
#include "talk/media/base/mediaengine.h"
namespace cricket {
const int MediaEngineInterface::kDefaultAudioDelayOffset = 0;
}
#if !defined(DISABLE_MEDIA_ENGINE_FACTORY)
#if defined(HAVE_LINPHONE)
#include "talk/media/other/linphonemediaengine.h"
#endif // HAVE_LINPHONE
#if defined(HAVE_WEBRTC_VOICE)
#include "talk/media/webrtc/webrtcvoiceengine.h"
#endif // HAVE_WEBRTC_VOICE
#if defined(HAVE_WEBRTC_VIDEO)
#include "talk/media/webrtc/webrtcvideoengine.h"
#endif // HAVE_WEBRTC_VIDEO
#if defined(HAVE_LMI)
#include "talk/media/base/hybridvideoengine.h"
#include "talk/media/lmi/lmimediaengine.h"
#endif // HAVE_LMI
#if defined(HAVE_WEBRTC_VOICE) && defined(HAVE_WEBRTC_VIDEO)
#include "talk/media/webrtc/webrtcmediaengine.h"
#endif // HAVE_WEBRTC_VOICE && HAVE_WEBRTC_VIDEO
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif // HAVE_CONFIG
namespace cricket {
#if defined(HAVE_WEBRTC_VOICE)
#define AUDIO_ENG_NAME WebRtcVoiceEngine
#else
#define AUDIO_ENG_NAME NullVoiceEngine
#endif
#if defined(HAVE_WEBRTC_VIDEO)
#if !defined(HAVE_LMI)
template<>
CompositeMediaEngine<WebRtcVoiceEngine, WebRtcVideoEngine>::
CompositeMediaEngine() {
video_.SetVoiceEngine(&voice_);
}
#define VIDEO_ENG_NAME WebRtcVideoEngine
#else
// If we have both WebRtcVideoEngine and LmiVideoEngine, enable dual-stack.
// This small class here allows us to hook the WebRtcVideoChannel up to
// the capturer owned by the LMI engine, without infecting the rest of the
// HybridVideoEngine classes with this abstraction violation.
class WebRtcLmiHybridVideoEngine
: public HybridVideoEngine<WebRtcVideoEngine, LmiVideoEngine> {
public:
void SetVoiceEngine(WebRtcVoiceEngine* engine) {
video1_.SetVoiceEngine(engine);
}
};
template<>
CompositeMediaEngine<WebRtcVoiceEngine, WebRtcLmiHybridVideoEngine>::
CompositeMediaEngine() {
video_.SetVoiceEngine(&voice_);
}
#define VIDEO_ENG_NAME WebRtcLmiHybridVideoEngine
#endif
#elif defined(HAVE_LMI)
#define VIDEO_ENG_NAME LmiVideoEngine
#else
#define VIDEO_ENG_NAME NullVideoEngine
#endif
MediaEngineFactory::MediaEngineCreateFunction
MediaEngineFactory::create_function_ = NULL;
MediaEngineFactory::MediaEngineCreateFunction
MediaEngineFactory::SetCreateFunction(MediaEngineCreateFunction function) {
MediaEngineCreateFunction old_function = create_function_;
create_function_ = function;
return old_function;
};
}
// TODO(pthatcher): Remove this method and require all the users of
// media engines to choose one explicitly, or at least remove the
// default behavior and require calling SetCreateFunction explicitly.
// Creates a media engine. If a create function was registered via
// SetCreateFunction(), it wins; otherwise falls back to a compile-time
// default chosen by the HAVE_* macros.
MediaEngineInterface* MediaEngineFactory::Create() {
if (create_function_) {
return create_function_();
} else {
#if defined(HAVE_LINPHONE)
return new LinphoneMediaEngine("", "");
#elif defined(AUDIO_ENG_NAME) && defined(VIDEO_ENG_NAME)
return new CompositeMediaEngine<AUDIO_ENG_NAME, VIDEO_ENG_NAME>();
#else
return new NullMediaEngine();
#endif
}
// NOTE(review): the code below is unreachable -- every branch of the
// if/else above returns. It looks like two alternative implementations
// (old macro-based selection vs. new WebRtcMediaEngineFactory path) were
// merged together; confirm which one is intended and delete the other.
#if defined(HAVE_WEBRTC_VOICE) && defined(HAVE_WEBRTC_VIDEO)
return WebRtcMediaEngineFactory::Create();
#else
return new NullMediaEngine();
#endif // HAVE_WEBRTC_VIDEO && HAVE_WEBRTC_VOICE
}
}; // namespace cricket

View File

@ -26,7 +26,49 @@
*/
#include "talk/media/webrtc/webrtcmediaengine.h"
#include "talk/media/webrtc/webrtcvideoengine.h"
#ifdef WEBRTC_CHROMIUM_BUILD
#include "talk/media/webrtc/webrtcvideoengine2.h"
#endif
#include "talk/media/webrtc/webrtcvoiceengine.h"
#ifdef WEBRTC_CHROMIUM_BUILD
#include "webrtc/system_wrappers/interface/field_trial.h"
#endif
namespace cricket {
// Composite engine pairing WebRtcVoiceEngine with the (original)
// WebRtcVideoEngine. The 4-arg constructor wires the voice engine's audio
// device modules, hooks the video engine up to the voice engine, enables
// timed rendering, and installs external codec factories.
class WebRtcMediaEngine :
public CompositeMediaEngine<WebRtcVoiceEngine, WebRtcVideoEngine> {
public:
WebRtcMediaEngine() {}
// adm/adm_sc: primary and secondary audio device modules (adm_sc may be
// NULL -- see callers). encoder_factory/decoder_factory: external video
// codec factories, installed on the video engine.
WebRtcMediaEngine(webrtc::AudioDeviceModule* adm,
webrtc::AudioDeviceModule* adm_sc,
WebRtcVideoEncoderFactory* encoder_factory,
WebRtcVideoDecoderFactory* decoder_factory) {
voice_.SetAudioDeviceModule(adm, adm_sc);
// Video must know about the voice engine before use (a/v sync etc. --
// ordering here mirrors the other engine classes in this file).
video_.SetVoiceEngine(&voice_);
video_.EnableTimedRender();
video_.SetExternalEncoderFactory(encoder_factory);
video_.SetExternalDecoderFactory(decoder_factory);
}
};
#ifdef WEBRTC_CHROMIUM_BUILD
// Composite engine pairing WebRtcVoiceEngine with the new WebRtcVideoEngine2
// (Chromium builds only; selected at runtime via the "WebRTC-NewVideoAPI"
// field trial -- see CreateWebRtcMediaEngine/DestroyWebRtcMediaEngine).
class WebRtcMediaEngine2 :
public CompositeMediaEngine<WebRtcVoiceEngine, WebRtcVideoEngine2> {
public:
// NOTE(review): encoder_factory and decoder_factory are accepted but never
// used here -- presumably WebRtcVideoEngine2 does not support external
// factories yet; confirm this is intentional.
WebRtcMediaEngine2(webrtc::AudioDeviceModule* adm,
webrtc::AudioDeviceModule* adm_sc,
WebRtcVideoEncoderFactory* encoder_factory,
WebRtcVideoDecoderFactory* decoder_factory) {
voice_.SetAudioDeviceModule(adm, adm_sc);
video_.SetVoiceEngine(&voice_);
video_.EnableTimedRender();
}
};
#endif // WEBRTC_CHROMIUM_BUILD
} // namespace cricket
WRME_EXPORT
cricket::MediaEngineInterface* CreateWebRtcMediaEngine(
@ -40,19 +82,154 @@ cricket::MediaEngineInterface* CreateWebRtcMediaEngine(
adm, adm_sc, encoder_factory, decoder_factory);
}
#endif // WEBRTC_CHROMIUM_BUILD
// This is just to get a diff to run pulse.
return new cricket::WebRtcMediaEngine(
adm, adm_sc, encoder_factory, decoder_factory);
}
// Destroys a media engine previously returned by CreateWebRtcMediaEngine().
// Exported so deletion happens in the same module that allocated the object.
WRME_EXPORT
void DestroyWebRtcMediaEngine(cricket::MediaEngineInterface* media_engine) {
#ifdef WEBRTC_CHROMIUM_BUILD
// Delete through the concrete type that CreateWebRtcMediaEngine() allocated;
// MediaEngineInterface is not shown here to have a virtual destructor, so a
// plain `delete media_engine` could be unsafe.
// NOTE(review): this re-queries the field trial at destruction time and
// assumes its value has not changed since creation -- confirm.
if (webrtc::field_trial::FindFullName("WebRTC-NewVideoAPI") == "Enabled") {
delete static_cast<cricket::WebRtcMediaEngine2*>(media_engine);
} else {
#endif  // WEBRTC_CHROMIUM_BUILD
delete static_cast<cricket::WebRtcMediaEngine*>(media_engine);
#ifdef WEBRTC_CHROMIUM_BUILD
}
#endif  // WEBRTC_CHROMIUM_BUILD
// BUG FIX: the original additionally executed `delete media_engine;` after
// the casted delete above, freeing the same object twice (double free).
}
namespace cricket {
// Thin wrapper that forwards every MediaEngineInterface call to an engine
// obtained via the exported CreateWebRtcMediaEngine() /
// DestroyWebRtcMediaEngine() pair, so that allocation and deletion happen in
// the implementation module (used when LIBPEERCONNECTION_LIB is not defined;
// see WebRtcMediaEngineFactory::Create below). Owns |delegate_| for its
// whole lifetime.
// NOTE(review): copy construction/assignment are not deleted even though
// this class holds a raw owning pointer -- a copy would double-destroy
// |delegate_|; confirm copies cannot occur.
class DelegatingWebRtcMediaEngine : public cricket::MediaEngineInterface {
public:
DelegatingWebRtcMediaEngine(
webrtc::AudioDeviceModule* adm,
webrtc::AudioDeviceModule* adm_sc,
WebRtcVideoEncoderFactory* encoder_factory,
WebRtcVideoDecoderFactory* decoder_factory)
: delegate_(CreateWebRtcMediaEngine(
adm, adm_sc, encoder_factory, decoder_factory)) {
}
virtual ~DelegatingWebRtcMediaEngine() {
// Destroy via the exported function so the delete runs in the same module
// that did the allocation.
DestroyWebRtcMediaEngine(delegate_);
}
// All methods below are pure pass-throughs to |delegate_|.
virtual bool Init(rtc::Thread* worker_thread) OVERRIDE {
return delegate_->Init(worker_thread);
}
virtual void Terminate() OVERRIDE {
delegate_->Terminate();
}
virtual int GetCapabilities() OVERRIDE {
return delegate_->GetCapabilities();
}
virtual VoiceMediaChannel* CreateChannel() OVERRIDE {
return delegate_->CreateChannel();
}
virtual VideoMediaChannel* CreateVideoChannel(
VoiceMediaChannel* voice_media_channel) OVERRIDE {
return delegate_->CreateVideoChannel(voice_media_channel);
}
virtual SoundclipMedia* CreateSoundclip() OVERRIDE {
return delegate_->CreateSoundclip();
}
virtual AudioOptions GetAudioOptions() const OVERRIDE {
return delegate_->GetAudioOptions();
}
virtual bool SetAudioOptions(const AudioOptions& options) OVERRIDE {
return delegate_->SetAudioOptions(options);
}
virtual bool SetVideoOptions(const VideoOptions& options) OVERRIDE {
return delegate_->SetVideoOptions(options);
}
virtual bool SetAudioDelayOffset(int offset) OVERRIDE {
return delegate_->SetAudioDelayOffset(offset);
}
virtual bool SetDefaultVideoEncoderConfig(
const VideoEncoderConfig& config) OVERRIDE {
return delegate_->SetDefaultVideoEncoderConfig(config);
}
virtual VideoEncoderConfig GetDefaultVideoEncoderConfig() const OVERRIDE {
return delegate_->GetDefaultVideoEncoderConfig();
}
virtual bool SetSoundDevices(
const Device* in_device, const Device* out_device) OVERRIDE {
return delegate_->SetSoundDevices(in_device, out_device);
}
virtual bool GetOutputVolume(int* level) OVERRIDE {
return delegate_->GetOutputVolume(level);
}
virtual bool SetOutputVolume(int level) OVERRIDE {
return delegate_->SetOutputVolume(level);
}
virtual int GetInputLevel() OVERRIDE {
return delegate_->GetInputLevel();
}
virtual bool SetLocalMonitor(bool enable) OVERRIDE {
return delegate_->SetLocalMonitor(enable);
}
virtual bool SetLocalRenderer(VideoRenderer* renderer) OVERRIDE {
return delegate_->SetLocalRenderer(renderer);
}
virtual const std::vector<AudioCodec>& audio_codecs() OVERRIDE {
return delegate_->audio_codecs();
}
virtual const std::vector<RtpHeaderExtension>&
audio_rtp_header_extensions() OVERRIDE {
return delegate_->audio_rtp_header_extensions();
}
virtual const std::vector<VideoCodec>& video_codecs() OVERRIDE {
return delegate_->video_codecs();
}
virtual const std::vector<RtpHeaderExtension>&
video_rtp_header_extensions() OVERRIDE {
return delegate_->video_rtp_header_extensions();
}
virtual void SetVoiceLogging(int min_sev, const char* filter) OVERRIDE {
delegate_->SetVoiceLogging(min_sev, filter);
}
virtual void SetVideoLogging(int min_sev, const char* filter) OVERRIDE {
delegate_->SetVideoLogging(min_sev, filter);
}
virtual bool StartAecDump(rtc::PlatformFile file) OVERRIDE {
return delegate_->StartAecDump(file);
}
// NOTE(review): the parameter is a VoiceProcessor but is named
// |video_processor| (here and in Unregister below) -- likely a copy/paste
// slip; consider renaming to voice_processor.
virtual bool RegisterVoiceProcessor(
uint32 ssrc, VoiceProcessor* video_processor,
MediaProcessorDirection direction) OVERRIDE {
return delegate_->RegisterVoiceProcessor(ssrc, video_processor, direction);
}
virtual bool UnregisterVoiceProcessor(
uint32 ssrc, VoiceProcessor* video_processor,
MediaProcessorDirection direction) OVERRIDE {
return delegate_->UnregisterVoiceProcessor(ssrc, video_processor,
direction);
}
virtual VideoFormat GetStartCaptureFormat() const OVERRIDE {
return delegate_->GetStartCaptureFormat();
}
// NOTE(review): unlike every other method, this one is not marked OVERRIDE;
// if it does override a base method, add OVERRIDE for consistency.
virtual sigslot::repeater2<VideoCapturer*, CaptureState>&
SignalVideoCaptureStateChange() {
return delegate_->SignalVideoCaptureStateChange();
}
private:
// Owned; created in the ctor, destroyed via DestroyWebRtcMediaEngine().
cricket::MediaEngineInterface* delegate_;
};
// Used by ChannelManager when no media engine is passed in to it
// explicitly (acts as a default).
// Uses the default constructor, so no audio device modules or external
// codec factories are installed on the returned engine.
MediaEngineInterface* WebRtcMediaEngineFactory::Create() {
return new cricket::WebRtcMediaEngine();
}
// Used by PeerConnectionFactory and to create a media engine passed
// into ChannelManager.
// In builds where libpeerconnection is neither linked in nor being compiled,
// returns a delegating wrapper that crosses the module boundary through the
// exported Create/Destroy functions; otherwise creates the engine directly.
MediaEngineInterface* WebRtcMediaEngineFactory::Create(
webrtc::AudioDeviceModule* adm,
webrtc::AudioDeviceModule* adm_sc,
WebRtcVideoEncoderFactory* encoder_factory,
WebRtcVideoDecoderFactory* decoder_factory) {
#if !defined(LIBPEERCONNECTION_LIB) && \
!defined(LIBPEERCONNECTION_IMPLEMENTATION)
// BUG FIX: DelegatingWebRtcMediaEngine declares only a four-argument
// constructor, so the original `new cricket::DelegatingWebRtcMediaEngine()`
// would not compile and silently dropped the caller's ADMs and factories.
return new cricket::DelegatingWebRtcMediaEngine(
adm, adm_sc, encoder_factory, decoder_factory);
#else
return CreateWebRtcMediaEngine(adm, adm_sc, encoder_factory, decoder_factory);
#endif // !defined(LIBPEERCONNECTION_LIB) &&
// !defined(LIBPEERCONNECTION_IMPLEMENTATION)
}
} // namespace cricket

View File

@ -40,7 +40,6 @@ class WebRtcVideoDecoderFactory;
class WebRtcVideoEncoderFactory;
}
#if !defined(LIBPEERCONNECTION_LIB) && \
!defined(LIBPEERCONNECTION_IMPLEMENTATION)
@ -53,11 +52,15 @@ cricket::MediaEngineInterface* CreateWebRtcMediaEngine(
WRME_EXPORT
void DestroyWebRtcMediaEngine(cricket::MediaEngineInterface* media_engine);
#endif // !defined(LIBPEERCONNECTION_LIB) &&
// !defined(LIBPEERCONNECTION_IMPLEMENTATION)
namespace cricket {
class WebRtcMediaEngine : public cricket::MediaEngineInterface {
class WebRtcMediaEngineFactory {
public:
WebRtcMediaEngine(
static MediaEngineInterface* Create();
static MediaEngineInterface* Create(
webrtc::AudioDeviceModule* adm,
webrtc::AudioDeviceModule* adm_sc,
cricket::WebRtcVideoEncoderFactory* encoder_factory,
@ -189,35 +192,9 @@ class WebRtcMediaEngine : public WebRtcCompositeMediaEngine {
WebRtcMediaEngine(webrtc::AudioDeviceModule* adm,
webrtc::AudioDeviceModule* adm_sc,
WebRtcVideoEncoderFactory* encoder_factory,
WebRtcVideoDecoderFactory* decoder_factory) {
voice_.SetAudioDeviceModule(adm, adm_sc);
video_.SetVoiceEngine(&voice_);
video_.EnableTimedRender();
video_.SetExternalEncoderFactory(encoder_factory);
video_.SetExternalDecoderFactory(decoder_factory);
}
WebRtcVideoDecoderFactory* decoder_factory);
};
#ifdef WEBRTC_CHROMIUM_BUILD
typedef CompositeMediaEngine<WebRtcVoiceEngine, WebRtcVideoEngine2>
WebRtcCompositeMediaEngine2;
class WebRtcMediaEngine2 : public WebRtcCompositeMediaEngine2 {
public:
WebRtcMediaEngine2(webrtc::AudioDeviceModule* adm,
webrtc::AudioDeviceModule* adm_sc,
WebRtcVideoEncoderFactory* encoder_factory,
WebRtcVideoDecoderFactory* decoder_factory) {
voice_.SetAudioDeviceModule(adm, adm_sc);
video_.SetVoiceEngine(&voice_);
video_.EnableTimedRender();
}
};
#endif
} // namespace cricket
#endif // !defined(LIBPEERCONNECTION_LIB) &&
// !defined(LIBPEERCONNECTION_IMPLEMENTATION)
#endif // TALK_MEDIA_WEBRTCMEDIAENGINE_H_

View File

@ -237,7 +237,6 @@ static AudioOptions GetDefaultEngineOptions() {
options.experimental_aec.Set(false);
options.experimental_ns.Set(false);
options.aec_dump.Set(false);
options.opus_fec.Set(false);
return options;
}
@ -430,15 +429,6 @@ static bool IsOpusFecEnabled(const AudioCodec& codec) {
return codec.GetParam(kCodecParamUseInbandFec, &value) && value == 1;
}
// Set params[kCodecParamUseInbandFec]. Caller should make sure codec is Opus.
static void SetOpusFec(AudioCodec* codec, bool opus_fec) {
if (opus_fec) {
codec->SetParam(kCodecParamUseInbandFec, 1);
} else {
codec->RemoveParam(kCodecParamUseInbandFec);
}
}
void WebRtcVoiceEngine::ConstructCodecs() {
LOG(LS_INFO) << "WebRtc VoiceEngine codecs:";
int ncodecs = voe_wrapper_->codec()->NumOfCodecs();
@ -483,7 +473,6 @@ void WebRtcVoiceEngine::ConstructCodecs() {
}
// TODO(hellner): Add ptime, sprop-stereo, stereo and useinbandfec
// when they can be set to values other than the default.
SetOpusFec(&codec, false);
}
codecs_.push_back(codec);
} else {
@ -916,16 +905,6 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) {
}
}
bool opus_fec;
if (options.opus_fec.Get(&opus_fec)) {
LOG(LS_INFO) << "Opus FEC is enabled? " << opus_fec;
for (std::vector<AudioCodec>::iterator it = codecs_.begin();
it != codecs_.end(); ++it) {
if (IsOpus(*it))
SetOpusFec(&(*it), opus_fec);
}
}
return true;
}

View File

@ -1230,31 +1230,6 @@ TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecIsacWithParamNoFec) {
}
#endif // USE_WEBRTC_DEV_BRANCH
// Test AudioOptions controls whether opus FEC is supported in codec list.
TEST_F(WebRtcVoiceEngineTestFake, OpusFecViaOptions) {
EXPECT_TRUE(SetupEngine());
std::vector<cricket::AudioCodec> codecs = engine_.codecs();
int value;
for (std::vector<cricket::AudioCodec>::const_iterator it = codecs.begin();
it != codecs.end(); ++it) {
if (_stricmp(it->name.c_str(), cricket::kOpusCodecName) == 0) {
EXPECT_FALSE(it->GetParam(cricket::kCodecParamUseInbandFec, &value));
}
}
cricket::AudioOptions options;
options.opus_fec.Set(true);
EXPECT_TRUE(engine_.SetOptions(options));
codecs = engine_.codecs();
for (std::vector<cricket::AudioCodec>::const_iterator it = codecs.begin();
it != codecs.end(); ++it) {
if (_stricmp(it->name.c_str(), cricket::kOpusCodecName) == 0) {
EXPECT_TRUE(it->GetParam(cricket::kCodecParamUseInbandFec, &value));
EXPECT_EQ(1, value);
}
}
}
// Test that we can apply CELT with stereo mode but fail with mono mode.
TEST_F(WebRtcVoiceEngineTestFake, SetSendCodecsCelt) {
EXPECT_TRUE(SetupEngine());

View File

@ -120,7 +120,7 @@ void ChannelManager::Construct(MediaEngineInterface* me,
audio_options_ = media_engine_->GetAudioOptions();
audio_in_device_ = DeviceManagerInterface::kDefaultDeviceName;
audio_out_device_ = DeviceManagerInterface::kDefaultDeviceName;
audio_delay_offset_ = MediaEngineInterface::kDefaultAudioDelayOffset;
audio_delay_offset_ = kDefaultAudioDelayOffset;
audio_output_volume_ = kNotSetOutputVolume;
local_renderer_ = NULL;
capturing_ = false;

View File

@ -42,6 +42,8 @@
namespace cricket {
const int kDefaultAudioDelayOffset = 0;
class Soundclip;
class VideoProcessor;
class VoiceChannel;

View File

@ -267,7 +267,7 @@ TEST_F(ChannelManagerTest, SetAudioOptionsBeforeInit) {
EXPECT_EQ(options, set_options);
// At this point, the media engine should also be initialized.
EXPECT_EQ(options, fme_->audio_options());
EXPECT_EQ(cricket::MediaEngineInterface::kDefaultAudioDelayOffset,
EXPECT_EQ(cricket::kDefaultAudioDelayOffset,
fme_->audio_delay_offset());
}
@ -294,7 +294,7 @@ TEST_F(ChannelManagerTest, SetAudioOptions) {
fme_->audio_in_device());
EXPECT_EQ(std::string(cricket::DeviceManagerInterface::kDefaultDeviceName),
fme_->audio_out_device());
EXPECT_EQ(cricket::MediaEngineInterface::kDefaultAudioDelayOffset,
EXPECT_EQ(cricket::kDefaultAudioDelayOffset,
fme_->audio_delay_offset());
// Test setting specific values.
AudioOptions options;
@ -306,7 +306,7 @@ TEST_F(ChannelManagerTest, SetAudioOptions) {
EXPECT_TRUE(
fme_->audio_options().auto_gain_control.Get(&auto_gain_control));
EXPECT_TRUE(auto_gain_control);
EXPECT_EQ(cricket::MediaEngineInterface::kDefaultAudioDelayOffset,
EXPECT_EQ(cricket::kDefaultAudioDelayOffset,
fme_->audio_delay_offset());
// Test setting bad values.
EXPECT_FALSE(cm_->SetAudioOptions("audio-in9", "audio-out2", options));