// (code-viewer metadata, not source: 662 lines, 20 KiB, C++)
/*
|
|
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
|
*
|
|
* Use of this source code is governed by a BSD-style license
|
|
* that can be found in the LICENSE file in the root of the source
|
|
* tree. An additional intellectual property rights grant can be found
|
|
* in the file PATENTS. All contributing project authors may
|
|
* be found in the AUTHORS file in the root of the source tree.
|
|
*/
|
|
|
|
#include "voe_volume_control_impl.h"
|
|
|
|
#include "channel.h"
|
|
#include "critical_section_wrapper.h"
|
|
#include "output_mixer.h"
|
|
#include "trace.h"
|
|
#include "transmit_mixer.h"
|
|
#include "voe_errors.h"
|
|
#include "voice_engine_impl.h"
|
|
|
|
namespace webrtc {
|
|
|
|
// Returns the VoEVolumeControl sub-API of |voiceEngine| and bumps its
// interface reference counter. Returns NULL when the volume-control API
// is compiled out or when no engine instance is supplied.
VoEVolumeControl* VoEVolumeControl::GetInterface(VoiceEngine* voiceEngine)
{
#ifndef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
    return NULL;
#else
    if (voiceEngine == NULL)
    {
        return NULL;
    }
    VoiceEngineImpl* engineImpl =
        reinterpret_cast<VoiceEngineImpl*>(voiceEngine);
    VoEVolumeControlImpl* volumeImpl = engineImpl;
    // Increase this sub-API's reference count; released via Release().
    (*volumeImpl)++;
    return (volumeImpl);
#endif
}
|
|
|
|
#ifdef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
|
|
|
|
// Constructor: emits a memory trace only; presumably all state lives in
// shared base classes (e.g. _instanceId) — no members are set up here.
VoEVolumeControlImpl::VoEVolumeControlImpl()
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
                 "VoEVolumeControlImpl::VoEVolumeControlImpl() - ctor");
}
|
|
|
|
// Destructor: emits a memory trace only; nothing is owned directly by
// this sub-API object, so there is no cleanup here.
VoEVolumeControlImpl::~VoEVolumeControlImpl()
{
    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
                 "VoEVolumeControlImpl::~VoEVolumeControlImpl() - dtor");
}
|
|
|
|
int VoEVolumeControlImpl::Release()
|
|
{
|
|
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
|
|
"VoEVolumeControl::Release()");
|
|
(*this)--;
|
|
int refCount = GetCount();
|
|
if (refCount < 0)
|
|
{
|
|
Reset(); // reset reference counter to zero => OK to delete VE
|
|
_engineStatistics.SetLastError(
|
|
VE_INTERFACE_NOT_FOUND, kTraceWarning);
|
|
return (-1);
|
|
}
|
|
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
|
|
"VoEVolumeControl reference counter = %d", refCount);
|
|
return (refCount);
|
|
}
|
|
|
|
int VoEVolumeControlImpl::SetSpeakerVolume(unsigned int volume)
|
|
{
|
|
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
|
|
"SetSpeakerVolume(volume=%u)", volume);
|
|
IPHONE_NOT_SUPPORTED();
|
|
|
|
if (!_engineStatistics.Initialized())
|
|
{
|
|
_engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
|
|
return -1;
|
|
}
|
|
if (volume > kMaxVolumeLevel)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_INVALID_ARGUMENT, kTraceError,
|
|
"SetSpeakerVolume() invalid argument");
|
|
return -1;
|
|
}
|
|
|
|
WebRtc_UWord32 maxVol(0);
|
|
WebRtc_UWord32 spkrVol(0);
|
|
|
|
// scale: [0,kMaxVolumeLevel] -> [0,MaxSpeakerVolume]
|
|
if (_audioDevicePtr->MaxSpeakerVolume(&maxVol) != 0)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_MIC_VOL_ERROR, kTraceError,
|
|
"SetSpeakerVolume() failed to get max volume");
|
|
return -1;
|
|
}
|
|
// round the value and avoid floating computation
|
|
spkrVol = (WebRtc_UWord32)((volume * maxVol +
|
|
(int)(kMaxVolumeLevel / 2)) / (kMaxVolumeLevel));
|
|
|
|
// set the actual volume using the audio mixer
|
|
if (_audioDevicePtr->SetSpeakerVolume(spkrVol) != 0)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_MIC_VOL_ERROR, kTraceError,
|
|
"SetSpeakerVolume() failed to set speaker volume");
|
|
return -1;
|
|
}
|
|
return 0;
|
|
}
|
|
|
|
int VoEVolumeControlImpl::GetSpeakerVolume(unsigned int& volume)
|
|
{
|
|
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
|
|
"GetSpeakerVolume()");
|
|
IPHONE_NOT_SUPPORTED();
|
|
|
|
if (!_engineStatistics.Initialized())
|
|
{
|
|
_engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
|
|
return -1;
|
|
}
|
|
|
|
WebRtc_UWord32 spkrVol(0);
|
|
WebRtc_UWord32 maxVol(0);
|
|
|
|
if (_audioDevicePtr->SpeakerVolume(&spkrVol) != 0)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_GET_MIC_VOL_ERROR, kTraceError,
|
|
"GetSpeakerVolume() unable to get speaker volume");
|
|
return -1;
|
|
}
|
|
|
|
// scale: [0, MaxSpeakerVolume] -> [0, kMaxVolumeLevel]
|
|
if (_audioDevicePtr->MaxSpeakerVolume(&maxVol) != 0)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_GET_MIC_VOL_ERROR, kTraceError,
|
|
"GetSpeakerVolume() unable to get max speaker volume");
|
|
return -1;
|
|
}
|
|
// round the value and avoid floating computation
|
|
volume = (WebRtc_UWord32) ((spkrVol * kMaxVolumeLevel +
|
|
(int)(maxVol / 2)) / (maxVol));
|
|
|
|
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
|
|
"GetSpeakerVolume() => volume=%d", volume);
|
|
return 0;
|
|
}
|
|
|
|
int VoEVolumeControlImpl::SetSystemOutputMute(bool enable)
|
|
{
|
|
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
|
|
"GetSystemOutputMute(enabled=%d)", enable);
|
|
|
|
if (!_engineStatistics.Initialized())
|
|
{
|
|
_engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
|
|
return -1;
|
|
}
|
|
|
|
if (_audioDevicePtr->SetSpeakerMute(enable) != 0)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_GET_MIC_VOL_ERROR, kTraceError,
|
|
"SpeakerMute() unable to Set speaker mute");
|
|
return -1;
|
|
}
|
|
|
|
return 0;
|
|
}
|
|
|
|
int VoEVolumeControlImpl::GetSystemOutputMute(bool& enabled)
|
|
{
|
|
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
|
|
"GetSystemOutputMute(enabled=?)");
|
|
|
|
if (!_engineStatistics.Initialized())
|
|
{
|
|
_engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
|
|
return -1;
|
|
}
|
|
|
|
if (_audioDevicePtr->SpeakerMute(&enabled) != 0)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_GET_MIC_VOL_ERROR, kTraceError,
|
|
"SpeakerMute() unable to get speaker mute state");
|
|
return -1;
|
|
}
|
|
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
|
|
"GetSystemOutputMute() => %d", enabled);
|
|
return 0;
|
|
}
|
|
|
|
int VoEVolumeControlImpl::SetMicVolume(unsigned int volume)
|
|
{
|
|
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
|
|
"SetMicVolume(volume=%u)", volume);
|
|
ANDROID_NOT_SUPPORTED();
|
|
IPHONE_NOT_SUPPORTED();
|
|
|
|
if (!_engineStatistics.Initialized())
|
|
{
|
|
_engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
|
|
return -1;
|
|
}
|
|
if (volume > kMaxVolumeLevel)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_INVALID_ARGUMENT, kTraceError,
|
|
"SetMicVolume() invalid argument");
|
|
return -1;
|
|
}
|
|
|
|
WebRtc_UWord32 maxVol(0);
|
|
WebRtc_UWord32 micVol(0);
|
|
|
|
// scale: [0, kMaxVolumeLevel] -> [0,MaxMicrophoneVolume]
|
|
if (_audioDevicePtr->MaxMicrophoneVolume(&maxVol) != 0)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_MIC_VOL_ERROR, kTraceError,
|
|
"SetMicVolume() failed to get max volume");
|
|
return -1;
|
|
}
|
|
// round the value and avoid floating point computation
|
|
micVol = (WebRtc_UWord32) ((volume * maxVol +
|
|
(int)(kMaxVolumeLevel / 2)) / (kMaxVolumeLevel));
|
|
|
|
// set the actual volume using the audio mixer
|
|
if (_audioDevicePtr->SetMicrophoneVolume(micVol) != 0)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_MIC_VOL_ERROR, kTraceError,
|
|
"SetMicVolume() failed to set mic volume");
|
|
return -1;
|
|
}
|
|
return 0;
|
|
}
|
|
|
|
int VoEVolumeControlImpl::GetMicVolume(unsigned int& volume)
|
|
{
|
|
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
|
|
"GetMicVolume()");
|
|
ANDROID_NOT_SUPPORTED();
|
|
IPHONE_NOT_SUPPORTED();
|
|
|
|
if (!_engineStatistics.Initialized())
|
|
{
|
|
_engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
|
|
return -1;
|
|
}
|
|
|
|
WebRtc_UWord32 micVol(0);
|
|
WebRtc_UWord32 maxVol(0);
|
|
|
|
if (_audioDevicePtr->MicrophoneVolume(&micVol) != 0)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_GET_MIC_VOL_ERROR, kTraceError,
|
|
"GetMicVolume() unable to get microphone volume");
|
|
return -1;
|
|
}
|
|
|
|
// scale: [0, MaxMicrophoneVolume] -> [0, kMaxVolumeLevel]
|
|
if (_audioDevicePtr->MaxMicrophoneVolume(&maxVol) != 0)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_GET_MIC_VOL_ERROR, kTraceError,
|
|
"GetMicVolume() unable to get max microphone volume");
|
|
return -1;
|
|
}
|
|
// round the value and avoid floating point calculation
|
|
volume = (WebRtc_UWord32) ((micVol * kMaxVolumeLevel +
|
|
(int)(maxVol / 2)) / (maxVol));
|
|
|
|
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
|
|
"GetMicVolume() => volume=%d", volume);
|
|
return 0;
|
|
}
|
|
|
|
int VoEVolumeControlImpl::SetInputMute(int channel, bool enable)
|
|
{
|
|
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
|
|
"SetInputMute(channel=%d, enable=%d)", channel, enable);
|
|
|
|
if (!_engineStatistics.Initialized())
|
|
{
|
|
_engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
|
|
return -1;
|
|
}
|
|
if (channel == -1)
|
|
{
|
|
// Mute before demultiplexing <=> affects all channels
|
|
return _transmitMixerPtr->SetMute(enable);
|
|
}
|
|
else
|
|
{
|
|
// Mute after demultiplexing <=> affects one channel only
|
|
voe::ScopedChannel sc(_channelManager, channel);
|
|
voe::Channel* channelPtr = sc.ChannelPtr();
|
|
if (channelPtr == NULL)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_CHANNEL_NOT_VALID, kTraceError,
|
|
"SetInputMute() failed to locate channel");
|
|
return -1;
|
|
}
|
|
return channelPtr->SetMute(enable);
|
|
}
|
|
return 0;
|
|
}
|
|
|
|
int VoEVolumeControlImpl::GetInputMute(int channel, bool& enabled)
|
|
{
|
|
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
|
|
"GetInputMute(channel=%d)", channel);
|
|
|
|
if (!_engineStatistics.Initialized())
|
|
{
|
|
_engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
|
|
return -1;
|
|
}
|
|
if (channel == -1)
|
|
{
|
|
enabled = _transmitMixerPtr->Mute();
|
|
}
|
|
else
|
|
{
|
|
voe::ScopedChannel sc(_channelManager, channel);
|
|
voe::Channel* channelPtr = sc.ChannelPtr();
|
|
if (channelPtr == NULL)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_CHANNEL_NOT_VALID, kTraceError,
|
|
"SetInputMute() failed to locate channel");
|
|
return -1;
|
|
}
|
|
enabled = channelPtr->Mute();
|
|
}
|
|
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
|
|
"GetInputMute() => enabled = %d", (int)enabled);
|
|
return 0;
|
|
}
|
|
|
|
int VoEVolumeControlImpl::SetSystemInputMute(bool enable)
|
|
{
|
|
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
|
|
"SetSystemInputMute(enabled=%d)", enable);
|
|
|
|
if (!_engineStatistics.Initialized())
|
|
{
|
|
_engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
|
|
return -1;
|
|
}
|
|
|
|
if (_audioDevicePtr->SetMicrophoneMute(enable) != 0)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_GET_MIC_VOL_ERROR, kTraceError,
|
|
"MicrophoneMute() unable to set microphone mute state");
|
|
return -1;
|
|
}
|
|
|
|
return 0;
|
|
}
|
|
|
|
int VoEVolumeControlImpl::GetSystemInputMute(bool& enabled)
|
|
{
|
|
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
|
|
"GetSystemInputMute(enabled=?)");
|
|
|
|
if (!_engineStatistics.Initialized())
|
|
{
|
|
_engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
|
|
return -1;
|
|
}
|
|
|
|
if (_audioDevicePtr->MicrophoneMute(&enabled) != 0)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_GET_MIC_VOL_ERROR, kTraceError,
|
|
"MicrophoneMute() unable to get microphone mute state");
|
|
return -1;
|
|
}
|
|
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
|
|
"GetSystemInputMute() => %d", enabled);
|
|
return 0;
|
|
}
|
|
|
|
int VoEVolumeControlImpl::GetSpeechInputLevel(unsigned int& level)
|
|
{
|
|
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
|
|
"GetSpeechInputLevel()");
|
|
ANDROID_NOT_SUPPORTED();
|
|
IPHONE_NOT_SUPPORTED();
|
|
|
|
if (!_engineStatistics.Initialized())
|
|
{
|
|
_engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
|
|
return -1;
|
|
}
|
|
WebRtc_Word8 currentLevel = _transmitMixerPtr->AudioLevel();
|
|
level = static_cast<unsigned int> (currentLevel);
|
|
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
|
|
"GetSpeechInputLevel() => %d", level);
|
|
return 0;
|
|
}
|
|
|
|
int VoEVolumeControlImpl::GetSpeechOutputLevel(int channel,
|
|
unsigned int& level)
|
|
{
|
|
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
|
|
"GetSpeechOutputLevel(channel=%d, level=?)", channel);
|
|
ANDROID_NOT_SUPPORTED();
|
|
IPHONE_NOT_SUPPORTED();
|
|
|
|
if (!_engineStatistics.Initialized())
|
|
{
|
|
_engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
|
|
return -1;
|
|
}
|
|
if (channel == -1)
|
|
{
|
|
return _outputMixerPtr->GetSpeechOutputLevel((WebRtc_UWord32&)level);
|
|
}
|
|
else
|
|
{
|
|
voe::ScopedChannel sc(_channelManager, channel);
|
|
voe::Channel* channelPtr = sc.ChannelPtr();
|
|
if (channelPtr == NULL)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_CHANNEL_NOT_VALID, kTraceError,
|
|
"GetSpeechOutputLevel() failed to locate channel");
|
|
return -1;
|
|
}
|
|
channelPtr->GetSpeechOutputLevel((WebRtc_UWord32&)level);
|
|
}
|
|
return 0;
|
|
}
|
|
|
|
int VoEVolumeControlImpl::GetSpeechInputLevelFullRange(unsigned int& level)
|
|
{
|
|
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
|
|
"GetSpeechInputLevelFullRange(level=?)");
|
|
ANDROID_NOT_SUPPORTED();
|
|
IPHONE_NOT_SUPPORTED();
|
|
if (!_engineStatistics.Initialized())
|
|
{
|
|
_engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
|
|
return -1;
|
|
}
|
|
WebRtc_Word16 currentLevel = _transmitMixerPtr->AudioLevelFullRange();
|
|
level = static_cast<unsigned int> (currentLevel);
|
|
WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
|
|
"GetSpeechInputLevelFullRange() => %d", level);
|
|
return 0;
|
|
}
|
|
|
|
int VoEVolumeControlImpl::GetSpeechOutputLevelFullRange(int channel,
|
|
unsigned int& level)
|
|
{
|
|
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
|
|
"GetSpeechOutputLevelFullRange(channel=%d, level=?)", channel);
|
|
ANDROID_NOT_SUPPORTED();
|
|
IPHONE_NOT_SUPPORTED();
|
|
if (!_engineStatistics.Initialized())
|
|
{
|
|
_engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
|
|
return -1;
|
|
}
|
|
if (channel == -1)
|
|
{
|
|
return _outputMixerPtr->GetSpeechOutputLevelFullRange(
|
|
(WebRtc_UWord32&)level);
|
|
}
|
|
else
|
|
{
|
|
voe::ScopedChannel sc(_channelManager, channel);
|
|
voe::Channel* channelPtr = sc.ChannelPtr();
|
|
if (channelPtr == NULL)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_CHANNEL_NOT_VALID, kTraceError,
|
|
"GetSpeechOutputLevelFullRange() failed to locate channel");
|
|
return -1;
|
|
}
|
|
channelPtr->GetSpeechOutputLevelFullRange((WebRtc_UWord32&)level);
|
|
}
|
|
return 0;
|
|
}
|
|
|
|
int VoEVolumeControlImpl::SetChannelOutputVolumeScaling(int channel,
|
|
float scaling)
|
|
{
|
|
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
|
|
"SetChannelOutputVolumeScaling(channel=%d, scaling=%3.2f)",
|
|
channel, scaling);
|
|
IPHONE_NOT_SUPPORTED();
|
|
if (!_engineStatistics.Initialized())
|
|
{
|
|
_engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
|
|
return -1;
|
|
}
|
|
if (scaling < kMinOutputVolumeScaling ||
|
|
scaling > kMaxOutputVolumeScaling)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_INVALID_ARGUMENT, kTraceError,
|
|
"SetChannelOutputVolumeScaling() invalid parameter");
|
|
return -1;
|
|
}
|
|
voe::ScopedChannel sc(_channelManager, channel);
|
|
voe::Channel* channelPtr = sc.ChannelPtr();
|
|
if (channelPtr == NULL)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_CHANNEL_NOT_VALID, kTraceError,
|
|
"SetChannelOutputVolumeScaling() failed to locate channel");
|
|
return -1;
|
|
}
|
|
return channelPtr->SetChannelOutputVolumeScaling(scaling);
|
|
}
|
|
|
|
int VoEVolumeControlImpl::GetChannelOutputVolumeScaling(int channel,
|
|
float& scaling)
|
|
{
|
|
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
|
|
"GetChannelOutputVolumeScaling(channel=%d, scaling=?)", channel);
|
|
IPHONE_NOT_SUPPORTED();
|
|
if (!_engineStatistics.Initialized())
|
|
{
|
|
_engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
|
|
return -1;
|
|
}
|
|
voe::ScopedChannel sc(_channelManager, channel);
|
|
voe::Channel* channelPtr = sc.ChannelPtr();
|
|
if (channelPtr == NULL)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_CHANNEL_NOT_VALID, kTraceError,
|
|
"GetChannelOutputVolumeScaling() failed to locate channel");
|
|
return -1;
|
|
}
|
|
return channelPtr->GetChannelOutputVolumeScaling(scaling);
|
|
}
|
|
|
|
int VoEVolumeControlImpl::SetOutputVolumePan(int channel,
|
|
float left,
|
|
float right)
|
|
{
|
|
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
|
|
"SetOutputVolumePan(channel=%d, left=%2.1f, right=%2.1f)",
|
|
channel, left, right);
|
|
ANDROID_NOT_SUPPORTED();
|
|
IPHONE_NOT_SUPPORTED();
|
|
|
|
if (!_engineStatistics.Initialized())
|
|
{
|
|
_engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
|
|
return -1;
|
|
}
|
|
|
|
bool available(false);
|
|
_audioDevicePtr->StereoPlayoutIsAvailable(&available);
|
|
if (!available)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_FUNC_NO_STEREO, kTraceError,
|
|
"SetOutputVolumePan() stereo playout not supported");
|
|
return -1;
|
|
}
|
|
if ((left < kMinOutputVolumePanning) ||
|
|
(left > kMaxOutputVolumePanning) ||
|
|
(right < kMinOutputVolumePanning) ||
|
|
(right > kMaxOutputVolumePanning))
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_INVALID_ARGUMENT, kTraceError,
|
|
"SetOutputVolumePan() invalid parameter");
|
|
return -1;
|
|
}
|
|
|
|
if (channel == -1)
|
|
{
|
|
// Master balance (affectes the signal after output mixing)
|
|
return _outputMixerPtr->SetOutputVolumePan(left, right);
|
|
}
|
|
else
|
|
{
|
|
// Per-channel balance (affects the signal before output mixing)
|
|
voe::ScopedChannel sc(_channelManager, channel);
|
|
voe::Channel* channelPtr = sc.ChannelPtr();
|
|
if (channelPtr == NULL)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_CHANNEL_NOT_VALID, kTraceError,
|
|
"SetOutputVolumePan() failed to locate channel");
|
|
return -1;
|
|
}
|
|
return channelPtr->SetOutputVolumePan(left, right);
|
|
}
|
|
return 0;
|
|
}
|
|
|
|
int VoEVolumeControlImpl::GetOutputVolumePan(int channel,
|
|
float& left,
|
|
float& right)
|
|
{
|
|
WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_instanceId,-1),
|
|
"GetOutputVolumePan(channel=%d, left=?, right=?)", channel);
|
|
ANDROID_NOT_SUPPORTED();
|
|
IPHONE_NOT_SUPPORTED();
|
|
|
|
if (!_engineStatistics.Initialized())
|
|
{
|
|
_engineStatistics.SetLastError(VE_NOT_INITED, kTraceError);
|
|
return -1;
|
|
}
|
|
|
|
bool available(false);
|
|
_audioDevicePtr->StereoPlayoutIsAvailable(&available);
|
|
if (!available)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_FUNC_NO_STEREO, kTraceError,
|
|
"GetOutputVolumePan() stereo playout not supported");
|
|
return -1;
|
|
}
|
|
|
|
if (channel == -1)
|
|
{
|
|
return _outputMixerPtr->GetOutputVolumePan(left, right);
|
|
}
|
|
else
|
|
{
|
|
voe::ScopedChannel sc(_channelManager, channel);
|
|
voe::Channel* channelPtr = sc.ChannelPtr();
|
|
if (channelPtr == NULL)
|
|
{
|
|
_engineStatistics.SetLastError(
|
|
VE_CHANNEL_NOT_VALID, kTraceError,
|
|
"GetOutputVolumePan() failed to locate channel");
|
|
return -1;
|
|
}
|
|
return channelPtr->GetOutputVolumePan(left, right);
|
|
}
|
|
return 0;
|
|
}
|
|
|
|
#endif // #ifdef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
|
|
|
|
} // namespace webrtc
|