ACM/NetEq: Restructure how post-decode VAD is enabled

This change avoids calling neteq_->EnableVad() and neteq_->DisableVad()
from the AcmReceiver constructor. Instead, a new member,
enable_post_decode_vad, is added to NetEq's config struct. It is
disabled by default, but ACM sets it to enabled. This preserves the
behavior both of NetEq stand-alone (i.e., in tests) and of ACM.
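
For embedders that use NetEq directly, a minimal sketch of the new
opt-in (assuming the header path and the NetEq::Create() factory in
this tree; the function name is illustrative):

#include "webrtc/modules/audio_coding/neteq/include/neteq.h"

// Post-decode VAD is now enabled through the config struct instead of
// through neteq->EnableVad()/DisableVad().
webrtc::NetEq* CreateNetEqWithPostDecodeVad() {
  webrtc::NetEq::Config config;
  config.sample_rate_hz = 16000;         // Initial value; follows input data.
  config.enable_post_decode_vad = true;  // New flag; defaults to false.
  return webrtc::NetEq::Create(config);  // Caller takes ownership.
}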

BUG=webrtc:3520

Review URL: https://codereview.webrtc.org/1425133002

Cr-Commit-Position: refs/heads/master@{#10476}
Author: henrik.lundin
Date: 2015-11-02 03:25:57 -08:00
Committed by: Commit bot
Parent: d56d68cd27
Commit: 9bc2667fa6
5 changed files with 14 additions and 11 deletions

@@ -81,6 +81,7 @@ class NetEq {
     Config()
         : sample_rate_hz(16000),
           enable_audio_classifier(false),
+          enable_post_decode_vad(false),
           max_packets_in_buffer(50),
           // |max_delay_ms| has the same effect as calling SetMaximumDelay().
           max_delay_ms(2000),
@@ -92,6 +93,7 @@
     int sample_rate_hz;  // Initial value. Will change with input data.
     bool enable_audio_classifier;
+    bool enable_post_decode_vad;
     size_t max_packets_in_buffer;
     int max_delay_ms;
     BackgroundNoiseMode background_noise_mode;

@@ -32,6 +32,8 @@ std::string NetEq::Config::ToString() const {
   std::stringstream ss;
   ss << "sample_rate_hz=" << sample_rate_hz << ", enable_audio_classifier="
      << (enable_audio_classifier ? "true" : "false")
+     << ", enable_post_decode_vad="
+     << (enable_post_decode_vad ? "true" : "false")
      << ", max_packets_in_buffer=" << max_packets_in_buffer
      << ", background_noise_mode=" << background_noise_mode
      << ", playout_mode=" << playout_mode

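For illustration, with the new flag enabled and the other defaults
shown above, ToString() would produce roughly the following (fields
past this hunk elided):

sample_rate_hz=16000, enable_audio_classifier=false, enable_post_decode_vad=true, max_packets_in_buffer=50, ...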

@@ -112,6 +112,10 @@ NetEqImpl::NetEqImpl(const NetEq::Config& config,
   if (create_components) {
     SetSampleRateAndChannels(fs, 1);  // Default is 1 channel.
   }
+  RTC_DCHECK(!vad_->enabled());
+  if (config.enable_post_decode_vad) {
+    vad_->Enable();
+  }
 }
 
 NetEqImpl::~NetEqImpl() = default;
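
The RTC_DCHECK documents that the decision is now made in one place:
the VAD must still be disabled when the constructor reads the config.
The ACM side lives in the remaining changed files not shown here; a
hedged sketch of what the commit message describes (the helper name is
illustrative, not the actual AcmReceiver code):

// Instead of calling neteq_->EnableVad() from the AcmReceiver
// constructor, ACM flips the flag on the config it passes to
// NetEq::Create().
webrtc::NetEq::Config AcmNetEqConfig(webrtc::NetEq::Config config) {
  config.enable_post_decode_vad = true;  // ACM wants post-decode VAD on.
  return config;
}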