Resolves data races in voe_auto_test reported by TSan v2.
--- Note that I will add more fixes to this CL ---

BUG=1590
Review URL: https://webrtc-codereview.appspot.com/1286005

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3770 4adac7df-926f-26a2-2b94-8c16560cd09d
commit 19da719a5f
parent 10eb92039b

Changed directories:
  webrtc/modules/audio_device
  webrtc/modules/audio_processing
  webrtc/modules/rtp_rtcp/source
  webrtc/system_wrappers/source
  webrtc/voice_engine
@@ -524,20 +524,29 @@ WebRtc_Word32 AudioDeviceBuffer::DeliverRecordedData()
 WebRtc_Word32 AudioDeviceBuffer::RequestPlayoutData(WebRtc_UWord32 nSamples)
 {
+    WebRtc_UWord32 playSampleRate = 0;
+    WebRtc_UWord8 playBytesPerSample = 0;
+    WebRtc_UWord8 playChannels = 0;
     {
         CriticalSectionScoped lock(&_critSect);
 
+        // Store copies under lock and use copies hereafter to avoid race with
+        // setter methods.
+        playSampleRate = _playSampleRate;
+        playBytesPerSample = _playBytesPerSample;
+        playChannels = _playChannels;
+
         // Ensure that user has initialized all essential members
-        if ((_playBytesPerSample == 0) ||
-            (_playChannels == 0) ||
-            (_playSampleRate == 0))
+        if ((playBytesPerSample == 0) ||
+            (playChannels == 0) ||
+            (playSampleRate == 0))
         {
             assert(false);
             return -1;
         }
 
         _playSamples = nSamples;
-        _playSize = _playBytesPerSample * nSamples; // {2,4}*nSamples
+        _playSize = playBytesPerSample * nSamples; // {2,4}*nSamples
         if (_playSize > kMaxBufferSizeBytes)
         {
             assert(false);
@@ -566,9 +575,9 @@ WebRtc_Word32 AudioDeviceBuffer::RequestPlayoutData(WebRtc_UWord32 nSamples)
     WebRtc_UWord32 res(0);
 
     res = _ptrCbAudioTransport->NeedMorePlayData(_playSamples,
-                                                 _playBytesPerSample,
-                                                 _playChannels,
-                                                 _playSampleRate,
+                                                 playBytesPerSample,
+                                                 playChannels,
+                                                 playSampleRate,
                                                  &_playBuffer[0],
                                                  nSamplesOut);
     if (res != 0)
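The change above follows a copy-under-lock pattern: the playout parameters are snapshotted into locals while _critSect is held, and only the local copies are used afterwards, so a concurrent setter cannot race with the later reads. A minimal standalone sketch of the same idea, with std::mutex standing in for CriticalSectionWrapper and purely illustrative names:

#include <cstdint>
#include <mutex>

class PlayoutConfig {
 public:
  // Writer thread: update the shared parameters under the lock.
  void Set(uint32_t sample_rate_hz, uint8_t bytes_per_sample) {
    std::lock_guard<std::mutex> lock(mutex_);
    sample_rate_hz_ = sample_rate_hz;
    bytes_per_sample_ = bytes_per_sample;
  }

  // Reader thread: take one consistent snapshot under the lock, then use
  // only the local copies so nothing below can race with Set().
  int32_t RequestPlayoutData(uint32_t samples) {
    uint32_t sample_rate_hz = 0;
    uint8_t bytes_per_sample = 0;
    {
      std::lock_guard<std::mutex> lock(mutex_);
      sample_rate_hz = sample_rate_hz_;
      bytes_per_sample = bytes_per_sample_;
    }
    if (sample_rate_hz == 0 || bytes_per_sample == 0)
      return -1;  // not initialized yet
    return static_cast<int32_t>(samples) * bytes_per_sample;  // bytes needed
  }

 private:
  std::mutex mutex_;
  uint32_t sample_rate_hz_ = 0;
  uint8_t bytes_per_sample_ = 0;
};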
@@ -1512,21 +1512,27 @@ WebRtc_Word32 AudioDeviceLinuxPulse::StartRecording()
     _timeEventRec.Set();
     if (kEventTimeout == _recStartEvent.Wait(10000))
     {
-        _startRec = false;
+        {
+            CriticalSectionScoped lock(&_critSect);
+            _startRec = false;
+        }
         StopRecording();
         WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                      " failed to activate recording");
         return -1;
     }
 
-    if (_recording)
-    {
-        // the recording state is set by the audio thread after recording has started
-    } else
     {
-        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
-                     " failed to activate recording");
-        return -1;
+        CriticalSectionScoped lock(&_critSect);
+        if (_recording)
+        {
+            // the recording state is set by the audio thread after recording has started
+        } else
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         " failed to activate recording");
+            return -1;
+        }
     }
 
     return 0;
@@ -1602,6 +1608,7 @@ bool AudioDeviceLinuxPulse::RecordingIsInitialized() const
 
 bool AudioDeviceLinuxPulse::Recording() const
 {
+    CriticalSectionScoped lock(&_critSect);
     return (_recording);
 }
 
@@ -1612,7 +1619,6 @@ bool AudioDeviceLinuxPulse::PlayoutIsInitialized() const
 
 WebRtc_Word32 AudioDeviceLinuxPulse::StartPlayout()
 {
-
     if (!_playIsInitialized)
     {
         return -1;
@@ -1626,25 +1632,34 @@ WebRtc_Word32 AudioDeviceLinuxPulse::StartPlayout()
     // set state to ensure that playout starts from the audio thread
     _startPlay = true;
 
+    // Both |_startPlay| and |_playing| need protection since they are also
+    // accessed on the playout thread.
+
     // the audio thread will signal when playout has started
    _timeEventPlay.Set();
     if (kEventTimeout == _playStartEvent.Wait(10000))
     {
-        _startPlay = false;
+        {
+            CriticalSectionScoped lock(&_critSect);
+            _startPlay = false;
+        }
         StopPlayout();
         WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                      " failed to activate playout");
         return -1;
     }
 
-    if (_playing)
-    {
-        // the playing state is set by the audio thread after playout has started
-    } else
     {
-        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
-                     " failed to activate playing");
-        return -1;
+        CriticalSectionScoped lock(&_critSect);
+        if (_playing)
+        {
+            // the playing state is set by the audio thread after playout has started
+        } else
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         " failed to activate playing");
+            return -1;
+        }
     }
 
     return 0;
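Note how both failure paths take the lock in a nested block so that it is released again before StopPlayout()/StopRecording() runs, presumably to avoid holding _critSect across calls that themselves synchronize with the audio thread. A rough sketch of that scoping, using std::mutex and invented names:

#include <mutex>

class Player {
 public:
  // Failure path after waiting for the audio thread to confirm playout timed
  // out: clear the flag under the lock, but release the lock before Stop(),
  // which itself waits on the audio thread (that thread also takes mutex_),
  // so holding it across the call would risk a deadlock.
  int OnStartTimeout() {
    {
      std::lock_guard<std::mutex> lock(mutex_);
      start_play_ = false;
    }
    Stop();
    return -1;
  }

 private:
  void Stop() {}  // stand-in for StopPlayout(): synchronizes with the audio thread

  std::mutex mutex_;
  bool start_play_ = false;
};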
@@ -1717,18 +1732,21 @@ WebRtc_Word32 AudioDeviceLinuxPulse::StopPlayout()
 
 WebRtc_Word32 AudioDeviceLinuxPulse::PlayoutDelay(WebRtc_UWord16& delayMS) const
 {
+    CriticalSectionScoped lock(&_critSect);
     delayMS = (WebRtc_UWord16) _sndCardPlayDelay;
     return 0;
 }
 
 WebRtc_Word32 AudioDeviceLinuxPulse::RecordingDelay(WebRtc_UWord16& delayMS) const
 {
+    CriticalSectionScoped lock(&_critSect);
     delayMS = (WebRtc_UWord16) _sndCardRecDelay;
     return 0;
 }
 
 bool AudioDeviceLinuxPulse::Playing() const
 {
+    CriticalSectionScoped lock(&_critSect);
     return (_playing);
 }
 
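The simple accessors (Recording(), Playing(), PlayoutDelay(), RecordingDelay()) now take _critSect as well, since the members they read are written from the audio thread. Because the accessors are const, the standard-C++ equivalent needs a mutable mutex; a short sketch with hypothetical names:

#include <cstdint>
#include <mutex>

class Device {
 public:
  // Written by the audio thread.
  void SetPlaying(bool playing) {
    std::lock_guard<std::mutex> lock(mutex_);
    playing_ = playing;
  }

  // Read by API threads; the lock makes the read race-free even though the
  // method is const (hence the mutable mutex).
  bool Playing() const {
    std::lock_guard<std::mutex> lock(mutex_);
    return playing_;
  }

  uint16_t PlayoutDelayMs() const {
    std::lock_guard<std::mutex> lock(mutex_);
    return play_delay_ms_;
  }

 private:
  mutable std::mutex mutex_;
  bool playing_ = false;
  uint16_t play_delay_ms_ = 0;
};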
@@ -209,6 +209,7 @@ int AudioProcessingImpl::set_sample_rate_hz(int rate) {
 }
 
 int AudioProcessingImpl::sample_rate_hz() const {
+  CriticalSectionScoped crit_scoped(crit_);
   return sample_rate_hz_;
 }
 
@@ -347,6 +347,7 @@ void RTCPSender::SetStartTimestamp(uint32_t start_timestamp) {
 
 void RTCPSender::SetLastRtpTime(uint32_t rtp_timestamp,
                                 int64_t capture_time_ms) {
+  CriticalSectionScoped lock(_criticalSectionRTCPSender);
   last_rtp_timestamp_ = rtp_timestamp;
   if (capture_time_ms < 0) {
     // We don't currently get a capture time from VoiceEngine.
@@ -631,13 +632,18 @@ RTCPSender::BuildSR(WebRtc_UWord8* rtcpbuffer,
     if(_audio) {
         freqHz = _rtpRtcp.CurrentSendFrequencyHz();
     }
 
     // The timestamp of this RTCP packet should be estimated as the timestamp of
     // the frame being captured at this moment. We are calculating that
     // timestamp as the last frame's timestamp + the time since the last frame
     // was captured.
-    RTPtime = start_timestamp_ + last_rtp_timestamp_ + (
-        _clock->TimeInMilliseconds() - last_frame_capture_time_ms_) *
-        (freqHz / 1000);
+    {
+        // Needs protection since this method is called on the process thread.
+        CriticalSectionScoped lock(_criticalSectionRTCPSender);
+        RTPtime = start_timestamp_ + last_rtp_timestamp_ + (
+            _clock->TimeInMilliseconds() - last_frame_capture_time_ms_) *
+            (freqHz / 1000);
+    }
 
     // Add sender data
     // Save for our length field
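BuildSR() estimates the SR's RTP timestamp by advancing last_rtp_timestamp_ by the time elapsed since the last capture, converted to RTP ticks: RTPtime = start_timestamp_ + last_rtp_timestamp_ + (now_ms - last_frame_capture_time_ms_) * (freqHz / 1000). Since SetLastRtpTime() updates those members from the send path while BuildSR() runs on the process thread, both now touch them only under _criticalSectionRTCPSender. A simplified sketch of the same computation guarded by a std::mutex (names are illustrative):

#include <cstdint>
#include <mutex>

class SrTimestamper {
 public:
  // Send path: remember the RTP timestamp / capture time of the last frame.
  void SetLastRtpTime(uint32_t rtp_timestamp, int64_t capture_time_ms) {
    std::lock_guard<std::mutex> lock(mutex_);
    last_rtp_timestamp_ = rtp_timestamp;
    last_frame_capture_time_ms_ = capture_time_ms;
  }

  // Process thread: estimate the RTP timestamp of "now" by advancing the
  // last frame's timestamp by the elapsed wall-clock time in RTP ticks.
  uint32_t EstimateRtpTimestamp(int64_t now_ms, int freq_hz) const {
    std::lock_guard<std::mutex> lock(mutex_);
    return start_timestamp_ + last_rtp_timestamp_ +
           static_cast<uint32_t>((now_ms - last_frame_capture_time_ms_) *
                                 (freq_hz / 1000));
  }

 private:
  mutable std::mutex mutex_;
  uint32_t start_timestamp_ = 0;
  uint32_t last_rtp_timestamp_ = 0;
  int64_t last_frame_capture_time_ms_ = 0;
};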
@@ -309,19 +309,31 @@ WebRtc_Word32 ModuleRtpRtcpImpl::Process() {
 }
 
 void ModuleRtpRtcpImpl::ProcessDeadOrAliveTimer() {
-  if (dead_or_alive_active_) {
-    const WebRtc_Word64 now = clock_->TimeInMilliseconds();
-    if (now > dead_or_alive_timeout_ms_ + dead_or_alive_last_timer_) {
-      // RTCP is alive if we have received a report the last 12 seconds.
-      dead_or_alive_last_timer_ += dead_or_alive_timeout_ms_;
-
-      bool RTCPalive = false;
-      if (rtcp_receiver_.LastReceived() + 12000 > now) {
-        RTCPalive = true;
+  bool RTCPalive = false;
+  WebRtc_Word64 now = 0;
+  bool do_callback = false;
+
+  // Do operations on members under lock but avoid making the
+  // ProcessDeadOrAlive() callback under the same lock.
+  {
+    CriticalSectionScoped lock(critical_section_module_ptrs_.get());
+    if (dead_or_alive_active_) {
+      now = clock_->TimeInMilliseconds();
+      if (now > dead_or_alive_timeout_ms_ + dead_or_alive_last_timer_) {
+        // RTCP is alive if we have received a report the last 12 seconds.
+        dead_or_alive_last_timer_ += dead_or_alive_timeout_ms_;
+
+        if (rtcp_receiver_.LastReceived() + 12000 > now)
+          RTCPalive = true;
+
+        do_callback = true;
       }
-      rtp_receiver_->ProcessDeadOrAlive(RTCPalive, now);
     }
   }
+
+  if (do_callback)
+    rtp_receiver_->ProcessDeadOrAlive(RTCPalive, now);
 }
 
 WebRtc_Word32 ModuleRtpRtcpImpl::SetPeriodicDeadOrAliveStatus(
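ProcessDeadOrAliveTimer() now makes its decision under critical_section_module_ptrs_ but invokes the ProcessDeadOrAlive() callback only after the lock has been released, so the callee is free to call back into the module or take its own locks. The general shape, sketched with std::mutex and a placeholder callback:

#include <cstdint>
#include <functional>
#include <mutex>

class DeadOrAliveTimer {
 public:
  explicit DeadOrAliveTimer(std::function<void(bool, int64_t)> callback)
      : callback_(std::move(callback)) {}

  void Process(int64_t now_ms, int64_t last_rtcp_ms) {
    bool alive = false;
    bool do_callback = false;

    {
      // Inspect and update the shared state under the lock...
      std::lock_guard<std::mutex> lock(mutex_);
      if (active_ && now_ms > last_timer_ms_ + timeout_ms_) {
        last_timer_ms_ += timeout_ms_;
        alive = (last_rtcp_ms + 12000 > now_ms);
        do_callback = true;
      }
    }

    // ...but run the user callback outside it, so the callback cannot
    // deadlock against this lock or observe it held while re-entering.
    if (do_callback)
      callback_(alive, now_ms);
  }

 private:
  std::mutex mutex_;
  bool active_ = true;
  int64_t timeout_ms_ = 1000;
  int64_t last_timer_ms_ = 0;
  std::function<void(bool, int64_t)> callback_;
};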
@@ -342,10 +354,13 @@ WebRtc_Word32 ModuleRtpRtcpImpl::SetPeriodicDeadOrAliveStatus(
   if (sample_time_seconds == 0) {
     return -1;
   }
-  dead_or_alive_active_ = enable;
-  dead_or_alive_timeout_ms_ = sample_time_seconds * 1000;
-  // Trigger the first after one period.
-  dead_or_alive_last_timer_ = clock_->TimeInMilliseconds();
+  {
+    CriticalSectionScoped lock(critical_section_module_ptrs_.get());
+    dead_or_alive_active_ = enable;
+    dead_or_alive_timeout_ms_ = sample_time_seconds * 1000;
+    // Trigger the first after one period.
+    dead_or_alive_last_timer_ = clock_->TimeInMilliseconds();
+  }
   return 0;
 }
 
@@ -38,13 +38,15 @@
 
 namespace webrtc {
 
-TracePosix::TracePosix() {
+TracePosix::TracePosix()
+    : crit_sect_(*CriticalSectionWrapper::CreateCriticalSection()) {
   struct timeval system_time_high_res;
   gettimeofday(&system_time_high_res, 0);
   prev_api_tick_count_ = prev_tick_count_ = system_time_high_res.tv_sec;
 }
 
 TracePosix::~TracePosix() {
+  delete &crit_sect_;
   StopThread();
 }
 
@@ -60,13 +62,17 @@ WebRtc_Word32 TracePosix::AddTime(char* trace_message,
 
   const WebRtc_UWord32 ms_time = system_time_high_res.tv_usec / 1000;
   WebRtc_UWord32 prev_tickCount = 0;
-  if (level == kTraceApiCall) {
-    prev_tickCount = prev_tick_count_;
-    prev_tick_count_ = ms_time;
-  } else {
-    prev_tickCount = prev_api_tick_count_;
-    prev_api_tick_count_ = ms_time;
+  {
+    CriticalSectionScoped lock(&crit_sect_);
+    if (level == kTraceApiCall) {
+      prev_tickCount = prev_tick_count_;
+      prev_tick_count_ = ms_time;
+    } else {
+      prev_tickCount = prev_api_tick_count_;
+      prev_api_tick_count_ = ms_time;
+    }
   }
 
   WebRtc_UWord32 dw_delta_time = ms_time - prev_tickCount;
   if (prev_tickCount == 0) {
     dw_delta_time = 0;
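AddTime() is const yet updates the mutable prev_tick_count_ / prev_api_tick_count_ counters, and Trace is used from many threads, so the read-modify-write now happens under the newly added crit_sect_. A condensed sketch of the pattern with a mutable std::mutex and made-up names:

#include <cstdint>
#include <mutex>

class TickTracker {
 public:
  // Const in the API sense, but it updates the cached tick count, so both the
  // counter and the mutex are mutable and every access goes through the lock.
  uint32_t DeltaSinceLastCall(uint32_t now_ms) const {
    uint32_t prev = 0;
    {
      std::lock_guard<std::mutex> lock(mutex_);
      prev = prev_tick_count_;
      prev_tick_count_ = now_ms;
    }
    return (prev == 0) ? 0 : now_ms - prev;
  }

 private:
  mutable std::mutex mutex_;
  mutable uint32_t prev_tick_count_ = 0;
};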
@@ -21,6 +21,8 @@ class TracePosix : public TraceImpl {
   TracePosix();
   virtual ~TracePosix();
 
+  // This method can be called on several different threads different from
+  // the creating thread.
   virtual WebRtc_Word32 AddTime(char* trace_message,
                                 const TraceLevel level) const;
 
@@ -30,6 +32,8 @@ class TracePosix : public TraceImpl {
  private:
   volatile mutable WebRtc_UWord32 prev_api_tick_count_;
   volatile mutable WebRtc_UWord32 prev_tick_count_;
 
+  CriticalSectionWrapper& crit_sect_;
 };
 
 }  // namespace webrtc
@@ -542,8 +542,11 @@ Channel::OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
                  "Channel::OnPeriodicDeadOrAlive(id=%d, alive=%d)", id, alive);
 
-    if (!_connectionObserver)
-        return;
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+        if (!_connectionObserver)
+            return;
+    }
 
     WebRtc_Word32 channel = VoEChannelId(id);
     assert(channel == _channelId);