Check the channels in receive-side processing frames.
The number of channels must be set correctly before calling ProcessStream; failing to do so was preventing stereo frames from being processed. Also fix voe_cmd_test, which wasn't enabling rx NS properly.

BUG=issue713, 7375579
Review URL: https://webrtc-codereview.appspot.com/929013

git-svn-id: http://webrtc.googlecode.com/svn/trunk@3047 4adac7df-926f-26a2-2b94-8c16560cd09d
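For context, the call order this change enforces can be sketched as the helper below. This is a minimal, illustrative sketch, not the actual patch: it assumes the legacy AudioProcessing interface used in the diff (set_sample_rate_hz, set_num_channels, ProcessStream), the helper name is invented, and the include path may differ by revision.

#include "modules/audio_processing/include/audio_processing.h"  // path assumed

// Register the frame's parameters on the receive-side APM before running
// ProcessStream(); if the channel count is never registered, a stereo
// AudioFrame is rejected by ProcessStream().
int ProcessRxFrame(webrtc::AudioProcessing* apm, webrtc::AudioFrame& frame) {
  if (apm->set_sample_rate_hz(frame.sample_rate_hz_) != 0) {
    return -1;  // unsupported sample rate
  }
  if (apm->set_num_channels(frame.num_channels_, frame.num_channels_) != 0) {
    return -1;  // e.g. stereo input could not be registered
  }
  return apm->ProcessStream(&frame);  // safe: rate and channels now match
}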
@@ -4744,7 +4744,6 @@ Channel::SetRxAgcStatus(const bool enable, const AgcModes mode)
     }

     _rxAgcIsEnabled = enable;

     _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));

     return 0;
@@ -6622,26 +6621,27 @@ Channel::ApmProcessRx(AudioFrame& audioFrame)
     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
                  "Channel::ApmProcessRx()");

-    // Reset the APM frequency if the frequency has changed
-    if (_rxAudioProcessingModulePtr->sample_rate_hz() !=
-        audioFrame.sample_rate_hz_)
-    {
-        if (_rxAudioProcessingModulePtr->set_sample_rate_hz(
-            audioFrame.sample_rate_hz_) != 0)
-        {
-            WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
-                         "AudioProcessingModule::set_sample_rate_hz("
-                         "sample_rate_hz_=%u) => error",
-                         _audioFrame.sample_rate_hz_);
-        }
-    }
+    // Register the (possibly new) frame parameters.
+    if (_rxAudioProcessingModulePtr->set_sample_rate_hz(
+        audioFrame.sample_rate_hz_) != 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
+                     "AudioProcessingModule::set_sample_rate_hz(%u) => error",
+                     audioFrame.sample_rate_hz_);
+    }
+    if (_rxAudioProcessingModulePtr->set_num_channels(audioFrame.num_channels_,
+        audioFrame.num_channels_) != 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
+                     "AudioProcessingModule::set_num_channels(%u, %u) => error",
+                     audioFrame.num_channels_, audioFrame.num_channels_);
+    }

     if (_rxAudioProcessingModulePtr->ProcessStream(&audioFrame) != 0)
     {
         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
                      "AudioProcessingModule::ProcessStream() => error");
     }

     return 0;
 }

@@ -235,10 +235,10 @@ void RunTest(std::string out_path) {
   int codecinput;
   bool AEC = false;
   bool AGC = true;
-  bool AGC1 = false;
+  bool rx_agc = false;
   bool VAD = false;
   bool NS = false;
-  bool NS1 = false;
+  bool rx_ns = false;
   bool typing_detection = false;
   bool muted = false;
   bool on_hold = false;
@@ -677,20 +677,20 @@ void RunTest(std::string out_path) {
     }
     else if (codecinput == (noCodecs + 14)) {
       // Remote AGC
-      AGC1 = !AGC1;
-      res = apm->SetRxAgcStatus(chan, AGC1);
+      rx_agc = !rx_agc;
+      res = apm->SetRxAgcStatus(chan, rx_agc);
       VALIDATE;
-      if (AGC1)
+      if (rx_agc)
         printf("\n Receive-side AGC is now on! \n");
       else
         printf("\n Receive-side AGC is now off! \n");
     }
     else if (codecinput == (noCodecs + 15)) {
       // Remote NS
-      NS1 = !NS1;
-      res = apm->SetRxNsStatus(chan, NS);
+      rx_ns = !rx_ns;
+      res = apm->SetRxNsStatus(chan, rx_ns);
       VALIDATE;
-      if (NS1)
+      if (rx_ns)
         printf("\n Receive-side NS is now on! \n");
       else
         printf("\n Receive-side NS is now off! \n");

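The voe_cmd_test change above comes down to toggling and passing the same receive-side flag (the old code toggled NS1 but passed the transmit-side NS flag to SetRxNsStatus). A small illustrative sketch of that pattern follows; the helper name, error handling, and include path are assumptions, not part of the patch, and apm is a webrtc::VoEAudioProcessing*.

#include <cstdio>
#include "voe_audio_processing.h"  // path assumed

// Toggle receive-side NS for a channel, keeping the local flag in sync with
// what is actually requested from the engine.
void ToggleRxNs(webrtc::VoEAudioProcessing* apm, int chan, bool* rx_ns) {
  *rx_ns = !*rx_ns;
  if (apm->SetRxNsStatus(chan, *rx_ns) != 0) {
    fprintf(stderr, "SetRxNsStatus(%d, %d) failed\n", chan, *rx_ns);
  }
  printf("Receive-side NS is now %s\n", *rx_ns ? "on!" : "off!");
}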
@@ -356,7 +356,7 @@ int VoEAudioProcessingImpl::SetRxNsStatus(int channel,
   WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
                "SetRxNsStatus(channel=%d, enable=%d, mode=%d)",
                channel, (int)enable, (int)mode);
-#ifdef WEBRTC_VOICE_ENGINE_AGC
+#ifdef WEBRTC_VOICE_ENGINE_NR
   if (!_shared->statistics().Initialized()) {
     _shared->SetLastError(VE_NOT_INITED, kTraceError);
     return -1;
@@ -372,7 +372,7 @@ int VoEAudioProcessingImpl::SetRxNsStatus(int channel,
   return channelPtr->SetRxNsStatus(enable, mode);
 #else
   _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
-    "SetRxNsStatus() AGC is not supported");
+    "SetRxNsStatus() NS is not supported");
   return -1;
 #endif
 }
@@ -382,7 +382,7 @@ int VoEAudioProcessingImpl::GetRxNsStatus(int channel,
                                           NsModes& mode) {
   WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
                "GetRxNsStatus(channel=%d, enable=?, mode=?)", channel);
-#ifdef WEBRTC_VOICE_ENGINE_AGC
+#ifdef WEBRTC_VOICE_ENGINE_NR
   if (!_shared->statistics().Initialized()) {
     _shared->SetLastError(VE_NOT_INITED, kTraceError);
     return -1;
@@ -398,7 +398,7 @@ int VoEAudioProcessingImpl::GetRxNsStatus(int channel,
   return channelPtr->GetRxNsStatus(enabled, mode);
 #else
   _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
-    "GetRxNsStatus() Agc is not supported");
+    "GetRxNsStatus() NS is not supported");
   return -1;
 #endif
 }