Add a deinterleaved float interface to AudioProcessing.

This is mainly to support the native audio format in Chrome. Although
this implementation just moves the float->int conversion under the hood,
we will transition AudioProcessing towards supporting this format
throughout.

- Add a test which verifies we get identical output with the float and
int interfaces.
- The float and int wrappers are tasked with conversion to the
AudioBuffer format. A new shared Process/Analyze method does most of
the work.
- Add a new field to the debug.proto to hold deinterleaved data.
- Add helpers to audio_util.cc, and start using numeric_limits (see the
  sketch after this list).
- Note that there was no performance difference between numeric_limits
and a literal value when measured on Linux using gcc or clang.
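
For reference, the float<->int16 conversion helpers are roughly of the
following shape. This is a minimal sketch, not the committed audio_util.cc
code: the [-1, 1] float convention, the clamping behavior, and the exact
signatures are assumptions.

// Sketch of the scaling helpers; illustrative only, not the committed code.
// Assumes float samples in [-1, 1].
#include <cstdint>
#include <limits>

typedef std::numeric_limits<int16_t> limits_int16;

// Scale a float sample to int16 with rounding, clamping out-of-range input.
static inline int16_t ScaleAndRoundToInt16(float v) {
  if (v > 0.f) {
    return v >= 1.f ? limits_int16::max()
                    : static_cast<int16_t>(v * limits_int16::max() + 0.5f);
  }
  return v <= -1.f ? limits_int16::min()
                   : static_cast<int16_t>(-v * limits_int16::min() - 0.5f);
}

// Scale an int16 sample back to a float in [-1, 1].
static inline float ScaleToFloat(int16_t v) {
  const float kMaxInt16Inverse = 1.f / limits_int16::max();
  const float kMinInt16Inverse = 1.f / limits_int16::min();
  return v * (v > 0 ? kMaxInt16Inverse : -kMinInt16Inverse);
}

// Array forms matching the calls in AudioBuffer::CopyFrom()/CopyTo() below.
static void ScaleAndRoundToInt16(const float* src, int size, int16_t* dest) {
  for (int i = 0; i < size; ++i)
    dest[i] = ScaleAndRoundToInt16(src[i]);
}

static void ScaleToFloat(const int16_t* src, int size, float* dest) {
  for (int i = 0; i < size; ++i)
    dest[i] = ScaleToFloat(src[i]);
}

In this sketch the asymmetric scale factors (1/32767 for positive samples,
1/32768 for negative) let full-scale int16 values map exactly to +/-1.0, and
using std::numeric_limits avoids hard-coded 32767/32768 literals; as noted
above, the measured performance is the same either way.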

BUG=2894
R=aluebs@webrtc.org, bjornv@webrtc.org, henrikg@webrtc.org, tommi@webrtc.org, turaj@webrtc.org, xians@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/9179004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5641 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: andrew@webrtc.org
Date: 2014-03-04 20:58:13 +00:00
Commit: 17e40641b3
Parent: b90991dade

12 changed files with 660 additions and 250 deletions


@@ -10,6 +10,7 @@
 #include "webrtc/modules/audio_processing/audio_buffer.h"
 
+#include "webrtc/common_audio/include/audio_util.h"
 #include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
 
 namespace webrtc {
@@ -79,11 +80,9 @@ AudioBuffer::AudioBuffer(int max_num_channels,
       mixed_channels_(NULL),
       mixed_low_pass_channels_(NULL),
       low_pass_reference_channels_(NULL) {
-  if (max_num_channels_ > 1) {
-    channels_.reset(new AudioChannel[max_num_channels_]);
-    mixed_channels_.reset(new AudioChannel[max_num_channels_]);
-    mixed_low_pass_channels_.reset(new AudioChannel[max_num_channels_]);
-  }
+  channels_.reset(new AudioChannel[max_num_channels_]);
+  mixed_channels_.reset(new AudioChannel[max_num_channels_]);
+  mixed_low_pass_channels_.reset(new AudioChannel[max_num_channels_]);
   low_pass_reference_channels_.reset(new AudioChannel[max_num_channels_]);
 
   if (samples_per_channel_ == kSamplesPer32kHzChannel) {
@@ -94,6 +93,17 @@ AudioBuffer::AudioBuffer(int max_num_channels,
 
 AudioBuffer::~AudioBuffer() {}
 
+void AudioBuffer::InitForNewData(int num_channels) {
+  num_channels_ = num_channels;
+  data_ = NULL;
+  data_was_mixed_ = false;
+  num_mixed_channels_ = 0;
+  num_mixed_low_pass_channels_ = 0;
+  reference_copied_ = false;
+  activity_ = AudioFrame::kVadUnknown;
+  is_muted_ = false;
+}
+
 int16_t* AudioBuffer::data(int channel) const {
   assert(channel >= 0 && channel < num_channels_);
   if (data_ != NULL) {
@@ -191,13 +201,8 @@ void AudioBuffer::DeinterleaveFrom(AudioFrame* frame) {
   assert(frame->num_channels_ <= max_num_channels_);
   assert(frame->samples_per_channel_ == samples_per_channel_);
-  num_channels_ = frame->num_channels_;
-  data_was_mixed_ = false;
-  num_mixed_channels_ = 0;
-  num_mixed_low_pass_channels_ = 0;
-  reference_copied_ = false;
+  InitForNewData(frame->num_channels_);
   activity_ = frame->vad_activity_;
-  is_muted_ = false;
   if (frame->energy_ == 0) {
     is_muted_ = true;
   }
@@ -252,6 +257,26 @@ void AudioBuffer::InterleaveTo(AudioFrame* frame, bool data_changed) const {
   }
 }
 
+void AudioBuffer::CopyFrom(const float* const* data, int samples_per_channel,
+                           int num_channels) {
+  assert(num_channels <= max_num_channels_);
+  assert(samples_per_channel == samples_per_channel_);
+  InitForNewData(num_channels);
+  for (int i = 0; i < num_channels_; ++i) {
+    ScaleAndRoundToInt16(data[i], samples_per_channel, channels_[i].data);
+  }
+}
+
+void AudioBuffer::CopyTo(int samples_per_channel, int num_channels,
+                         float* const* data) const {
+  assert(num_channels == num_channels_);
+  assert(samples_per_channel == samples_per_channel_);
+  for (int i = 0; i < num_channels_; ++i) {
+    ScaleToFloat(channels_[i].data, samples_per_channel, data[i]);
+  }
+}
+
 // TODO(andrew): would be good to support the no-mix case with pointer
 // assignment.
 // TODO(andrew): handle mixing to multiple channels?
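
For context, a round trip through the new CopyFrom()/CopyTo() pair would look
roughly like the sketch below. The function name, channel count, frame size,
and the assumption that the AudioBuffer constructor takes (max_num_channels,
samples_per_channel) are illustrative only.

// Hypothetical caller of the deinterleaved float path; all names and sizes
// below are made up for illustration.
#include "webrtc/modules/audio_processing/audio_buffer.h"

namespace webrtc {

void FloatRoundTripExample() {
  const int kNumChannels = 2;
  const int kSamplesPerChannel = 160;  // e.g. 10 ms at 16 kHz.

  float left[kSamplesPerChannel] = {0.f};
  float right[kSamplesPerChannel] = {0.f};
  float* const channels[kNumChannels] = {left, right};

  AudioBuffer buffer(kNumChannels, kSamplesPerChannel);
  // Deinterleaved float in: converted to the internal int16 channels.
  buffer.CopyFrom(channels, kSamplesPerChannel, kNumChannels);
  // ... processing components operate on the int16 AudioBuffer data ...
  // Deinterleaved float out: converted back from the internal int16 channels.
  buffer.CopyTo(kSamplesPerChannel, kNumChannels, channels);
}

}  // namespace webrtc

Because the float data is folded into the same int16 path, the float and int
interfaces can be expected to produce identical output, which is what the new
test checks.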