This is a change in the iOS audio device to use the VoiceProcessingIO API instead of RemoteIO. This way we don't need to use WebRTC's EC (echo cancellation) and NS (noise suppression), because they are performed in the device hardware.

Review URL: https://webrtc-codereview.appspot.com/1061007
Patch from Gil Osher <gil.osher@vonage.com>.

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3437 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
andrew@webrtc.org
2013-01-30 21:18:31 +00:00
parent 7ded92b71e
commit 73a702c979
3 changed files with 103 additions and 99 deletions

View File

@@ -4,6 +4,7 @@
Google Inc. Google Inc.
Mozilla Foundation Mozilla Foundation
Intel Corporation Intel Corporation
Vonage Holdings Corp.
Ben Strong <bstrong@gmail.com> Ben Strong <bstrong@gmail.com>
Petar Jovanovic <petarj@mips.com> Petar Jovanovic <petarj@mips.com>
Martin Storsjo <martin@martin.st> Martin Storsjo <martin@martin.st>

View File

@@ -23,7 +23,7 @@ AudioDeviceIPhone::AudioDeviceIPhone(const WebRtc_Word32 id)
_captureWorkerThread(NULL), _captureWorkerThread(NULL),
_captureWorkerThreadId(0), _captureWorkerThreadId(0),
_id(id), _id(id),
_auRemoteIO(NULL), _auVoiceProcessing(NULL),
_initialized(false), _initialized(false),
_isShutDown(false), _isShutDown(false),
_recording(false), _recording(false),
@@ -127,17 +127,6 @@ WebRtc_Word32 AudioDeviceIPhone::Init() {
WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
_id, "Thread already created"); _id, "Thread already created");
} }
// Set preferred hardware sample rate to 16 kHz
Float64 sampleRate(16000.0);
OSStatus result = AudioSessionSetProperty(
kAudioSessionProperty_PreferredHardwareSampleRate,
sizeof(sampleRate), &sampleRate);
if (0 != result) {
WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
"Could not set preferred sample rate (result=%d)", result);
}
_playWarning = 0; _playWarning = 0;
_playError = 0; _playError = 0;
_recWarning = 0; _recWarning = 0;
@@ -168,7 +157,7 @@ WebRtc_Word32 AudioDeviceIPhone::Terminate() {
_captureWorkerThread = NULL; _captureWorkerThread = NULL;
} }
// Shut down AU Remote IO // Shut down Audio Unit
ShutdownPlayOrRecord(); ShutdownPlayOrRecord();
_isShutDown = true; _isShutDown = true;
@@ -941,13 +930,13 @@ WebRtc_Word32 AudioDeviceIPhone::StartRecording() {
_recError = 0; _recError = 0;
if (!_playing) { if (!_playing) {
// Start AU Remote IO // Start Audio Unit
WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
" Starting AU Remote IO"); " Starting Audio Unit");
OSStatus result = AudioOutputUnitStart(_auRemoteIO); OSStatus result = AudioOutputUnitStart(_auVoiceProcessing);
if (0 != result) { if (0 != result) {
WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
" Error starting AU Remote IO (result=%d)", result); " Error starting Audio Unit (result=%d)", result);
return -1; return -1;
} }
} }
@@ -991,7 +980,7 @@ WebRtc_Word32 AudioDeviceIPhone::StartPlayout() {
// This lock is (among other things) needed to avoid concurrency issues // This lock is (among other things) needed to avoid concurrency issues
// with capture thread // with capture thread
// shutting down AU Remote IO // shutting down Audio Unit
CriticalSectionScoped lock(&_critSect); CriticalSectionScoped lock(&_critSect);
if (!_playIsInitialized) { if (!_playIsInitialized) {
@@ -1016,13 +1005,13 @@ WebRtc_Word32 AudioDeviceIPhone::StartPlayout() {
_playError = 0; _playError = 0;
if (!_recording) { if (!_recording) {
// Start AU Remote IO // Start Audio Unit
WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
" Starting AU Remote IO"); " Starting Audio Unit");
OSStatus result = AudioOutputUnitStart(_auRemoteIO); OSStatus result = AudioOutputUnitStart(_auVoiceProcessing);
if (0 != result) { if (0 != result) {
WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
" Error starting AU Remote IO (result=%d)", result); " Error starting Audio Unit (result=%d)", result);
return -1; return -1;
} }
} }
@@ -1196,7 +1185,7 @@ WebRtc_Word32 AudioDeviceIPhone::InitPlayOrRecord() {
OSStatus result = -1; OSStatus result = -1;
// Check if already initialized // Check if already initialized
if (NULL != _auRemoteIO) { if (NULL != _auVoiceProcessing) {
// We already have initialized before and created any of the audio unit, // We already have initialized before and created any of the audio unit,
// check that all exist // check that all exist
WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
@@ -1205,12 +1194,12 @@ WebRtc_Word32 AudioDeviceIPhone::InitPlayOrRecord() {
return 0; return 0;
} }
// Create AU Remote IO // Create Voice Processing Audio Unit
AudioComponentDescription desc; AudioComponentDescription desc;
AudioComponent comp; AudioComponent comp;
desc.componentType = kAudioUnitType_Output; desc.componentType = kAudioUnitType_Output;
desc.componentSubType = kAudioUnitSubType_RemoteIO; desc.componentSubType = kAudioUnitSubType_VoiceProcessingIO;
desc.componentManufacturer = kAudioUnitManufacturer_Apple; desc.componentManufacturer = kAudioUnitManufacturer_Apple;
desc.componentFlags = 0; desc.componentFlags = 0;
desc.componentFlagsMask = 0; desc.componentFlagsMask = 0;
@@ -1218,23 +1207,38 @@ WebRtc_Word32 AudioDeviceIPhone::InitPlayOrRecord() {
comp = AudioComponentFindNext(NULL, &desc); comp = AudioComponentFindNext(NULL, &desc);
if (NULL == comp) { if (NULL == comp) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
" Could not find audio component for AU Remote IO"); " Could not find audio component for Audio Unit");
return -1; return -1;
} }
result = AudioComponentInstanceNew(comp, &_auRemoteIO); result = AudioComponentInstanceNew(comp, &_auVoiceProcessing);
if (0 != result) { if (0 != result) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
" Could not create AU Remote IO instance (result=%d)", " Could not create Audio Unit instance (result=%d)",
result); result);
return -1; return -1;
} }
////////////////////// // Set preferred hardware sample rate to 16 kHz
// Setup AU remote IO Float64 sampleRate(16000.0);
result = AudioSessionSetProperty(
kAudioSessionProperty_PreferredHardwareSampleRate,
sizeof(sampleRate), &sampleRate);
if (0 != result) {
WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
"Could not set preferred sample rate (result=%d)", result);
}
// Note: For AU Remote IO element 0 is output bus, element 1 is input bus WebRtc_UWord32 voiceChat = kAudioSessionMode_VoiceChat;
// for global scope element is irrelevant (always use element 0) AudioSessionSetProperty(kAudioSessionProperty_Mode,
sizeof(voiceChat), &voiceChat);
//////////////////////
// Setup Voice Processing Audio Unit
// Note: For Signal Processing AU element 0 is output bus, element 1 is
// input bus for global scope element is irrelevant (always use
// element 0)
// Enable IO on both elements // Enable IO on both elements
@@ -1243,7 +1247,7 @@ WebRtc_Word32 AudioDeviceIPhone::InitPlayOrRecord() {
// todo: Log info about setup. // todo: Log info about setup.
UInt32 enableIO = 1; UInt32 enableIO = 1;
result = AudioUnitSetProperty(_auRemoteIO, result = AudioUnitSetProperty(_auVoiceProcessing,
kAudioOutputUnitProperty_EnableIO, kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Input, kAudioUnitScope_Input,
1, // input bus 1, // input bus
@@ -1254,7 +1258,7 @@ WebRtc_Word32 AudioDeviceIPhone::InitPlayOrRecord() {
" Could not enable IO on input (result=%d)", result); " Could not enable IO on input (result=%d)", result);
} }
result = AudioUnitSetProperty(_auRemoteIO, result = AudioUnitSetProperty(_auVoiceProcessing,
kAudioOutputUnitProperty_EnableIO, kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Output, kAudioUnitScope_Output,
0, // output bus 0, // output bus
@@ -1268,7 +1272,7 @@ WebRtc_Word32 AudioDeviceIPhone::InitPlayOrRecord() {
// Disable AU buffer allocation for the recorder, we allocate our own // Disable AU buffer allocation for the recorder, we allocate our own
UInt32 flag = 0; UInt32 flag = 0;
result = AudioUnitSetProperty( result = AudioUnitSetProperty(
_auRemoteIO, kAudioUnitProperty_ShouldAllocateBuffer, _auVoiceProcessing, kAudioUnitProperty_ShouldAllocateBuffer,
kAudioUnitScope_Output, 1, &flag, sizeof(flag)); kAudioUnitScope_Output, 1, &flag, sizeof(flag));
if (0 != result) { if (0 != result) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
@@ -1277,28 +1281,53 @@ WebRtc_Word32 AudioDeviceIPhone::InitPlayOrRecord() {
// Should work anyway // Should work anyway
} }
// Initialize here already to be able to get/set stream properties. // Set recording callback
result = AudioUnitInitialize(_auRemoteIO); AURenderCallbackStruct auCbS;
memset(&auCbS, 0, sizeof(auCbS));
auCbS.inputProc = RecordProcess;
auCbS.inputProcRefCon = this;
result = AudioUnitSetProperty(_auVoiceProcessing,
kAudioOutputUnitProperty_SetInputCallback,
kAudioUnitScope_Global, 1,
&auCbS, sizeof(auCbS));
if (0 != result) { if (0 != result) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
" Could not init AU Remote IO (result=%d)", result); " Could not set record callback for Audio Unit (result=%d)",
result);
}
// Set playout callback
memset(&auCbS, 0, sizeof(auCbS));
auCbS.inputProc = PlayoutProcess;
auCbS.inputProcRefCon = this;
result = AudioUnitSetProperty(_auVoiceProcessing,
kAudioUnitProperty_SetRenderCallback,
kAudioUnitScope_Global, 0,
&auCbS, sizeof(auCbS));
if (0 != result) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
" Could not set play callback for Audio Unit (result=%d)",
result);
} }
// Get stream format for out/0 // Get stream format for out/0
AudioStreamBasicDescription playoutDesc; AudioStreamBasicDescription playoutDesc;
UInt32 size = sizeof(playoutDesc); UInt32 size = sizeof(playoutDesc);
result = AudioUnitGetProperty(_auRemoteIO, kAudioUnitProperty_StreamFormat, result = AudioUnitGetProperty(_auVoiceProcessing,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Output, 0, &playoutDesc, kAudioUnitScope_Output, 0, &playoutDesc,
&size); &size);
if (0 != result) { if (0 != result) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
" Could not get stream format AU Remote IO out/0 (result=%d)", " Could not get stream format Audio Unit out/0 (result=%d)",
result); result);
} }
WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
" AU Remote IO playout opened in sampling rate %f", " Audio Unit playout opened in sampling rate %f",
playoutDesc.mSampleRate); playoutDesc.mSampleRate);
playoutDesc.mSampleRate = sampleRate;
// Store the sampling frequency to use towards the Audio Device Buffer // Store the sampling frequency to use towards the Audio Device Buffer
// todo: Add 48 kHz (increase buffer sizes). Other fs? // todo: Add 48 kHz (increase buffer sizes). Other fs?
if ((playoutDesc.mSampleRate > 44090.0) if ((playoutDesc.mSampleRate > 44090.0)
@@ -1313,7 +1342,7 @@ WebRtc_Word32 AudioDeviceIPhone::InitPlayOrRecord() {
} else { } else {
_adbSampFreq = 0; _adbSampFreq = 0;
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
" AU Remote IO out/0 opened in unknown sampling rate (%f)", " Audio Unit out/0 opened in unknown sampling rate (%f)",
playoutDesc.mSampleRate); playoutDesc.mSampleRate);
// todo: We should bail out here. // todo: We should bail out here.
} }
@@ -1341,37 +1370,32 @@ WebRtc_Word32 AudioDeviceIPhone::InitPlayOrRecord() {
playoutDesc.mBytesPerFrame = 2; playoutDesc.mBytesPerFrame = 2;
playoutDesc.mChannelsPerFrame = 1; playoutDesc.mChannelsPerFrame = 1;
playoutDesc.mBitsPerChannel = 16; playoutDesc.mBitsPerChannel = 16;
result = AudioUnitSetProperty(_auRemoteIO, kAudioUnitProperty_StreamFormat, result = AudioUnitSetProperty(_auVoiceProcessing,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Input, 0, &playoutDesc, size); kAudioUnitScope_Input, 0, &playoutDesc, size);
if (0 != result) { if (0 != result) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
" Could not set stream format AU Remote IO in/0 (result=%d)", " Could not set stream format Audio Unit in/0 (result=%d)",
result); result);
} }
// Get stream format for in/1 // Get stream format for in/1
AudioStreamBasicDescription recordingDesc; AudioStreamBasicDescription recordingDesc;
size = sizeof(recordingDesc); size = sizeof(recordingDesc);
result = AudioUnitGetProperty(_auRemoteIO, kAudioUnitProperty_StreamFormat, result = AudioUnitGetProperty(_auVoiceProcessing,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Input, 1, &recordingDesc, kAudioUnitScope_Input, 1, &recordingDesc,
&size); &size);
if (0 != result) { if (0 != result) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
" Could not get stream format AU Remote IO in/1 (result=%d)", " Could not get stream format Audio Unit in/1 (result=%d)",
result); result);
} }
WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
" AU Remote IO recording opened in sampling rate %f", " Audio Unit recording opened in sampling rate %f",
recordingDesc.mSampleRate); recordingDesc.mSampleRate);
if (static_cast<int>(playoutDesc.mSampleRate) recordingDesc.mSampleRate = sampleRate;
!= static_cast<int>(recordingDesc.mSampleRate)) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
" AU Remote IO recording and playout opened " \
"in different sampling rates");
// todo: Bail out if rec and play sampling rates are not the same?
// Add handling of different sampling rates?
}
// Set stream format for out/1 (use same sampling frequency as for in/1) // Set stream format for out/1 (use same sampling frequency as for in/1)
recordingDesc.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger recordingDesc.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger
@@ -1383,54 +1407,36 @@ WebRtc_Word32 AudioDeviceIPhone::InitPlayOrRecord() {
recordingDesc.mBytesPerFrame = 2; recordingDesc.mBytesPerFrame = 2;
recordingDesc.mChannelsPerFrame = 1; recordingDesc.mChannelsPerFrame = 1;
recordingDesc.mBitsPerChannel = 16; recordingDesc.mBitsPerChannel = 16;
result = AudioUnitSetProperty(_auRemoteIO, kAudioUnitProperty_StreamFormat, result = AudioUnitSetProperty(_auVoiceProcessing,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Output, 1, &recordingDesc, kAudioUnitScope_Output, 1, &recordingDesc,
size); size);
if (0 != result) { if (0 != result) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
" Could not set stream format AU Remote IO out/1 (result=%d)", " Could not set stream format Audio Unit out/1 (result=%d)",
result); result);
} }
// Set recording callback // Initialize here already to be able to get/set stream properties.
AURenderCallbackStruct auCbS; result = AudioUnitInitialize(_auVoiceProcessing);
memset(&auCbS, 0, sizeof(auCbS));
auCbS.inputProc = RecordProcess;
auCbS.inputProcRefCon = this;
result = AudioUnitSetProperty(_auRemoteIO,
kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, 1,
&auCbS, sizeof(auCbS));
if (0 != result) { if (0 != result) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
" Could not set record callback for AU Remote IO (result=%d)", " Could not init Audio Unit (result=%d)", result);
result);
}
// Set playout callback
memset(&auCbS, 0, sizeof(auCbS));
auCbS.inputProc = PlayoutProcess;
auCbS.inputProcRefCon = this;
result = AudioUnitSetProperty(_auRemoteIO,
kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Global, 0,
&auCbS, sizeof(auCbS));
if (0 != result) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
" Could not set play callback for AU Remote IO (result=%d)",
result);
} }
// Get hardware sample rate for logging (see if we get what we asked for) // Get hardware sample rate for logging (see if we get what we asked for)
Float64 sampleRate(0.0); Float64 hardwareSampleRate = 0.0;
size = sizeof(sampleRate); size = sizeof(hardwareSampleRate);
result = AudioSessionGetProperty( result = AudioSessionGetProperty(
kAudioSessionProperty_CurrentHardwareSampleRate, &size, &sampleRate); kAudioSessionProperty_CurrentHardwareSampleRate, &size,
&hardwareSampleRate);
if (0 != result) { if (0 != result) {
WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
" Could not get current HW sample rate (result=%d)", result); " Could not get current HW sample rate (result=%d)", result);
} }
WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
" Current HW sample rate is %f, ADB sample rate is %d", " Current HW sample rate is %f, ADB sample rate is %d",
sampleRate, _adbSampFreq); hardwareSampleRate, _adbSampFreq);
return 0; return 0;
} }
@@ -1440,23 +1446,18 @@ WebRtc_Word32 AudioDeviceIPhone::ShutdownPlayOrRecord() {
// Close and delete AU // Close and delete AU
OSStatus result = -1; OSStatus result = -1;
if (NULL != _auRemoteIO) { if (NULL != _auVoiceProcessing) {
result = AudioOutputUnitStop(_auRemoteIO); result = AudioOutputUnitStop(_auVoiceProcessing);
if (0 != result) { if (0 != result) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
" Error stopping AU Remote IO (result=%d)", result); " Error stopping Audio Unit (result=%d)", result);
} }
result = AudioUnitUninitialize(_auRemoteIO); result = AudioComponentInstanceDispose(_auVoiceProcessing);
if (0 != result) { if (0 != result) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
" Error uninitializing AU Remote IO (result=%d)", result); " Error disposing Audio Unit (result=%d)", result);
} }
result = AudioComponentInstanceDispose(_auRemoteIO); _auVoiceProcessing = NULL;
if (0 != result) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
" Error disposing AU Remote IO (result=%d)", result);
}
_auRemoteIO = NULL;
} }
return 0; return 0;
@@ -1502,7 +1503,8 @@ OSStatus
abList.mBuffers[0].mNumberChannels = 1; abList.mBuffers[0].mNumberChannels = 1;
// Get data from mic // Get data from mic
OSStatus res = AudioUnitRender(_auRemoteIO, ioActionFlags, inTimeStamp, OSStatus res = AudioUnitRender(_auVoiceProcessing,
ioActionFlags, inTimeStamp,
inBusNumber, inNumberFrames, &abList); inBusNumber, inNumberFrames, &abList);
if (res != 0) { if (res != 0) {
WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
@@ -1740,7 +1742,7 @@ void AudioDeviceIPhone::UpdatePlayoutDelay() {
// AU latency // AU latency
Float64 f64(0); Float64 f64(0);
size = sizeof(f64); size = sizeof(f64);
result = AudioUnitGetProperty(_auRemoteIO, result = AudioUnitGetProperty(_auVoiceProcessing,
kAudioUnitProperty_Latency, kAudioUnitScope_Global, 0, &f64, &size); kAudioUnitProperty_Latency, kAudioUnitScope_Global, 0, &f64, &size);
if (0 != result) { if (0 != result) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
@@ -1790,7 +1792,8 @@ void AudioDeviceIPhone::UpdateRecordingDelay() {
// AU latency // AU latency
Float64 f64(0); Float64 f64(0);
size = sizeof(f64); size = sizeof(f64);
result = AudioUnitGetProperty(_auRemoteIO, kAudioUnitProperty_Latency, result = AudioUnitGetProperty(_auVoiceProcessing,
kAudioUnitProperty_Latency,
kAudioUnitScope_Global, 0, &f64, &size); kAudioUnitScope_Global, 0, &f64, &size);
if (0 != result) { if (0 != result) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,

View File

@@ -227,7 +227,7 @@ private:
WebRtc_Word32 _id; WebRtc_Word32 _id;
AudioUnit _auRemoteIO; AudioUnit _auVoiceProcessing;
private: private:
bool _initialized; bool _initialized;