Reduced the flakiness of the volume tests in linux pulseaudio

Review URL: https://webrtc-codereview.appspot.com/390013

git-svn-id: http://webrtc.googlecode.com/svn/trunk@1774 4adac7df-926f-26a2-2b94-8c16560cd09d
This commit is contained in:
xians@webrtc.org
2012-02-27 17:22:49 +00:00
parent 13e8528f32
commit cf1b6aec30
3 changed files with 124 additions and 113 deletions

View File

@ -784,7 +784,14 @@ WebRtc_Word32 AudioDeviceLinuxPulse::MicrophoneBoost(bool& enabled) const
WebRtc_Word32 AudioDeviceLinuxPulse::StereoRecordingIsAvailable(bool& available) WebRtc_Word32 AudioDeviceLinuxPulse::StereoRecordingIsAvailable(bool& available)
{ {
if (_recChannels == 2 && _recording) {
available = true;
return 0;
}
available = false;
bool wasInitialized = _mixerManager.MicrophoneIsInitialized(); bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
int error = 0;
if (!wasInitialized && InitMicrophone() == -1) if (!wasInitialized && InitMicrophone() == -1)
{ {
@ -794,10 +801,11 @@ WebRtc_Word32 AudioDeviceLinuxPulse::StereoRecordingIsAvailable(bool& available)
} }
#ifndef WEBRTC_PA_GTALK #ifndef WEBRTC_PA_GTALK
// Check if the selected microphone can record stereo // Check if the selected microphone can record stereo.
bool isAvailable(false); bool isAvailable(false);
_mixerManager.StereoRecordingIsAvailable(isAvailable); error = _mixerManager.StereoRecordingIsAvailable(isAvailable);
available = isAvailable; if (!error)
available = isAvailable;
#endif #endif
// Close the initialized input mixer // Close the initialized input mixer
@ -806,7 +814,7 @@ WebRtc_Word32 AudioDeviceLinuxPulse::StereoRecordingIsAvailable(bool& available)
_mixerManager.CloseMicrophone(); _mixerManager.CloseMicrophone();
} }
return 0; return error;
} }
WebRtc_Word32 AudioDeviceLinuxPulse::SetStereoRecording(bool enable) WebRtc_Word32 AudioDeviceLinuxPulse::SetStereoRecording(bool enable)
@ -836,20 +844,27 @@ WebRtc_Word32 AudioDeviceLinuxPulse::StereoRecording(bool& enabled) const
WebRtc_Word32 AudioDeviceLinuxPulse::StereoPlayoutIsAvailable(bool& available) WebRtc_Word32 AudioDeviceLinuxPulse::StereoPlayoutIsAvailable(bool& available)
{ {
if (_playChannels == 2 && _playing) {
available = true;
return 0;
}
available = false;
bool wasInitialized = _mixerManager.SpeakerIsInitialized(); bool wasInitialized = _mixerManager.SpeakerIsInitialized();
int error = 0;
if (!wasInitialized && InitSpeaker() == -1) if (!wasInitialized && InitSpeaker() == -1)
{ {
// Cannot open the specified device // Cannot open the specified device.
available = false; return -1;
return 0;
} }
#ifndef WEBRTC_PA_GTALK #ifndef WEBRTC_PA_GTALK
// Check if the selected microphone can record stereo // Check if the selected speaker can play stereo.
bool isAvailable(false); bool isAvailable(false);
_mixerManager.StereoPlayoutIsAvailable(isAvailable); error = _mixerManager.StereoPlayoutIsAvailable(isAvailable);
available = isAvailable; if (!error)
available = isAvailable;
#endif #endif
// Close the initialized input mixer // Close the initialized input mixer
@ -858,7 +873,7 @@ WebRtc_Word32 AudioDeviceLinuxPulse::StereoPlayoutIsAvailable(bool& available)
_mixerManager.CloseSpeaker(); _mixerManager.CloseSpeaker();
} }
return 0; return error;
} }
WebRtc_Word32 AudioDeviceLinuxPulse::SetStereoPlayout(bool enable) WebRtc_Word32 AudioDeviceLinuxPulse::SetStereoPlayout(bool enable)

View File

@ -24,6 +24,8 @@ extern webrtc_adm_linux_pulse::PulseAudioSymbolTable PaSymbolTable;
namespace webrtc namespace webrtc
{ {
enum { kMaxRetryOnFailure = 2 };
AudioMixerManagerLinuxPulse::AudioMixerManagerLinuxPulse(const WebRtc_Word32 id) : AudioMixerManagerLinuxPulse::AudioMixerManagerLinuxPulse(const WebRtc_Word32 id) :
_critSect(*CriticalSectionWrapper::CreateCriticalSection()), _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
_id(id), _id(id),
@ -315,27 +317,8 @@ AudioMixerManagerLinuxPulse::SpeakerVolume(WebRtc_UWord32& volume) const
!= PA_STREAM_UNCONNECTED)) != PA_STREAM_UNCONNECTED))
{ {
// We can only get the volume if we have a connected stream // We can only get the volume if we have a connected stream
pa_operation* paOperation = NULL; if (!GetSinkInputInfo())
ResetCallbackVariables(); return -1;
PaLock();
// Get info for this stream (sink input)
paOperation = LATE(pa_context_get_sink_input_info)(
_paContext,
LATE(pa_stream_get_index)(_paPlayStream),
PaSinkInputInfoCallback,
(void*) this);
WaitForOperationCompletion(paOperation);
PaUnLock();
if (!_callbackValues)
{
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"Error getting output volume: %d",
LATE(pa_context_errno)(_paContext));
return -1;
}
volume = static_cast<WebRtc_UWord32> (_paVolume); volume = static_cast<WebRtc_UWord32> (_paVolume);
ResetCallbackVariables(); ResetCallbackVariables();
@ -513,27 +496,8 @@ WebRtc_Word32 AudioMixerManagerLinuxPulse::SpeakerMute(bool& enabled) const
!= PA_STREAM_UNCONNECTED)) != PA_STREAM_UNCONNECTED))
{ {
// We can only get the mute status if we have a connected stream // We can only get the mute status if we have a connected stream
pa_operation* paOperation = NULL; if (!GetSinkInputInfo())
ResetCallbackVariables(); return -1;
PaLock();
// Get info for this stream (sink input)
paOperation = LATE(pa_context_get_sink_input_info)(
_paContext,
LATE(pa_stream_get_index)(_paPlayStream),
PaSinkInputInfoCallback,
(void*) this);
WaitForOperationCompletion(paOperation);
PaUnLock();
if (!_callbackValues)
{
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"Error getting output volume: %d",
LATE(pa_context_errno)(_paContext));
return -1;
}
enabled = static_cast<bool> (_paMute); enabled = static_cast<bool> (_paMute);
ResetCallbackVariables(); ResetCallbackVariables();
@ -572,33 +536,13 @@ AudioMixerManagerLinuxPulse::StereoPlayoutIsAvailable(bool& available)
deviceIndex = LATE(pa_stream_get_device_index)(_paPlayStream); deviceIndex = LATE(pa_stream_get_device_index)(_paPlayStream);
} }
pa_operation* paOperation = NULL;
ResetCallbackVariables();
// Get info for this sink
// We want to know if the actual device can play out in stereo
paOperation = LATE(pa_context_get_sink_info_by_index)(_paContext,
deviceIndex,
PaSinkInfoCallback,
(void*) this);
WaitForOperationCompletion(paOperation);
PaUnLock(); PaUnLock();
if (!_callbackValues) if (!GetSinkInfoByIndex(deviceIndex))
{ return -1;
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"Error getting number of output channels: %d",
LATE(pa_context_errno)(_paContext));
return -1;
}
available = static_cast<bool> (_paChannels == 2); available = static_cast<bool> (_paChannels == 2);
WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
" AudioMixerManagerLinuxPulse::StereoPlayoutIsAvailable() "
"=> available=%i, available");
// Reset members modified by callback // Reset members modified by callback
ResetCallbackVariables(); ResetCallbackVariables();
@ -762,26 +706,10 @@ WebRtc_Word32 AudioMixerManagerLinuxPulse::MicrophoneMute(bool& enabled) const
deviceIndex = LATE(pa_stream_get_device_index)(_paRecStream); deviceIndex = LATE(pa_stream_get_device_index)(_paRecStream);
} }
pa_operation* paOperation = NULL;
ResetCallbackVariables();
// Get info for this source
paOperation
= LATE(pa_context_get_source_info_by_index)(_paContext, deviceIndex,
PaSourceInfoCallback,
(void*) this);
WaitForOperationCompletion(paOperation);
PaUnLock(); PaUnLock();
if (!_callbackValues) if (!GetSourceInfoByIndex(deviceIndex))
{ return -1;
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"Error getting input mute status: %d",
LATE(pa_context_errno)(_paContext));
return -1;
}
enabled = static_cast<bool> (_paMute); enabled = static_cast<bool> (_paMute);
@ -997,26 +925,10 @@ AudioMixerManagerLinuxPulse::MicrophoneVolume(WebRtc_UWord32& volume) const
deviceIndex = LATE(pa_stream_get_device_index)(_paRecStream); deviceIndex = LATE(pa_stream_get_device_index)(_paRecStream);
} }
pa_operation* paOperation = NULL;
ResetCallbackVariables();
// Get info for this source
paOperation
= LATE(pa_context_get_source_info_by_index)(_paContext, deviceIndex,
PaSourceInfoCallback,
(void*) this);
WaitForOperationCompletion(paOperation);
PaUnLock(); PaUnLock();
if (!_callbackValues) if (!GetSourceInfoByIndex(deviceIndex))
{ return -1;
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"Error getting input volume: %d",
LATE(pa_context_errno)(_paContext));
return -1;
}
volume = static_cast<WebRtc_UWord32> (_paVolume); volume = static_cast<WebRtc_UWord32> (_paVolume);
@ -1197,7 +1109,7 @@ void AudioMixerManagerLinuxPulse::PaSinkInputInfoCallbackHandler(
{ {
if (eol) if (eol)
{ {
// Signal that we are done // Signal that we are done.
LATE(pa_threaded_mainloop_signal)(_paMainloop, 0); LATE(pa_threaded_mainloop_signal)(_paMainloop, 0);
return; return;
} }
@ -1276,4 +1188,84 @@ void AudioMixerManagerLinuxPulse::PaUnLock() const
LATE(pa_threaded_mainloop_unlock)(_paMainloop); LATE(pa_threaded_mainloop_unlock)(_paMainloop);
} }
bool AudioMixerManagerLinuxPulse::GetSinkInputInfo() const {
pa_operation* paOperation = NULL;
ResetCallbackVariables();
PaLock();
for (int retries = 0; retries < kMaxRetryOnFailure && !_callbackValues;
retries ++) {
// Get info for this stream (sink input).
paOperation = LATE(pa_context_get_sink_input_info)(
_paContext,
LATE(pa_stream_get_index)(_paPlayStream),
PaSinkInputInfoCallback,
(void*) this);
WaitForOperationCompletion(paOperation);
}
PaUnLock();
if (!_callbackValues) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"GetSinkInputInfo failed to get volume info : %d",
LATE(pa_context_errno)(_paContext));
return false;
}
return true;
} }
bool AudioMixerManagerLinuxPulse::GetSinkInfoByIndex(
int device_index) const {
pa_operation* paOperation = NULL;
ResetCallbackVariables();
PaLock();
for (int retries = 0; retries < kMaxRetryOnFailure && !_callbackValues;
retries ++) {
paOperation = LATE(pa_context_get_sink_info_by_index)(_paContext,
device_index, PaSinkInfoCallback, (void*) this);
WaitForOperationCompletion(paOperation);
}
PaUnLock();
if (!_callbackValues) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"GetSinkInfoByIndex failed to get volume info: %d",
LATE(pa_context_errno)(_paContext));
return false;
}
return true;
}
bool AudioMixerManagerLinuxPulse::GetSourceInfoByIndex(
int device_index) const {
pa_operation* paOperation = NULL;
ResetCallbackVariables();
PaLock();
for (int retries = 0; retries < kMaxRetryOnFailure && !_callbackValues;
retries ++) {
paOperation = LATE(pa_context_get_source_info_by_index)(
_paContext, device_index, PaSourceInfoCallback, (void*) this);
WaitForOperationCompletion(paOperation);
}
PaUnLock();
if (!_callbackValues) {
WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
"GetSourceInfoByIndex error: %d",
LATE(pa_context_errno)(_paContext));
return false;
}
return true;
}
}

View File

@ -1,5 +1,5 @@
/* /*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
* *
* Use of this source code is governed by a BSD-style license * Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source * that can be found in the LICENSE file in the root of the source
@ -87,6 +87,10 @@ private:
void PaLock() const; void PaLock() const;
void PaUnLock() const; void PaUnLock() const;
bool GetSinkInputInfo() const;
bool GetSinkInfoByIndex(int device_index)const ;
bool GetSourceInfoByIndex(int device_index) const;
private: private:
CriticalSectionWrapper& _critSect; CriticalSectionWrapper& _critSect;
WebRtc_Word32 _id; WebRtc_Word32 _id;