Rename _t struct types in audio_processing.

_t names are reserved in POSIX.

R=bjornv@webrtc.org
BUG=162

Review URL: https://webrtc-codereview.appspot.com/34509005

git-svn-id: http://webrtc.googlecode.com/svn/trunk@7943 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: pbos@webrtc.org
Date:   2014-12-18 09:11:33 +00:00
Parent: cab1291745
Commit: e468bc9e60

25 changed files with 347 additions and 366 deletions
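A minimal sketch of the rename pattern, condensed from the AecResampler change below. The struct fields and helper names here are simplified placeholders, not the real WebRTC code; only the type-name substitution mirrors this commit:

/* POSIX reserves identifiers ending in "_t", so the typedef gets a
 * CamelCase name. Call sites only swap the type name; the opaque
 * void* handle API is unchanged. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

typedef struct {
  float buffer[64];  /* placeholder fields, not the real AecResampler layout */
  float position;
} AecResampler;       /* was: } resampler_t; */

static int CreateResamplerSketch(void** resampInst) {
  AecResampler* obj = malloc(sizeof(AecResampler));  /* was sizeof(resampler_t) */
  *resampInst = obj;
  if (obj == NULL) {
    return -1;
  }
  memset(obj->buffer, 0, sizeof(obj->buffer));
  obj->position = 0.0f;
  return 0;
}

static int FreeResamplerSketch(void* resampInst) {
  AecResampler* obj = (AecResampler*)resampInst;  /* was (resampler_t*)resampInst */
  free(obj);
  return 0;
}

int main(void) {
  void* inst = NULL;
  if (CreateResamplerSketch(&inst) == 0) {
    printf("created %zu-byte instance\n", sizeof(AecResampler));
    FreeResamplerSketch(inst);
  }
  return 0;
}

The diff below applies the same mechanical substitution across the AEC, AECM, AGC, and NS modules.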


@ -33,7 +33,7 @@ typedef struct {
int skewData[kEstimateLengthFrames];
int skewDataIndex;
float skewEstimate;
} resampler_t;
} AecResampler;
static int EstimateSkew(const int* rawSkew,
int size,
@ -41,7 +41,7 @@ static int EstimateSkew(const int* rawSkew,
float* skewEst);
int WebRtcAec_CreateResampler(void** resampInst) {
resampler_t* obj = malloc(sizeof(resampler_t));
AecResampler* obj = malloc(sizeof(AecResampler));
*resampInst = obj;
if (obj == NULL) {
return -1;
@ -51,7 +51,7 @@ int WebRtcAec_CreateResampler(void** resampInst) {
}
int WebRtcAec_InitResampler(void* resampInst, int deviceSampleRateHz) {
resampler_t* obj = (resampler_t*)resampInst;
AecResampler* obj = (AecResampler*)resampInst;
memset(obj->buffer, 0, sizeof(obj->buffer));
obj->position = 0.0;
@ -64,7 +64,7 @@ int WebRtcAec_InitResampler(void* resampInst, int deviceSampleRateHz) {
}
int WebRtcAec_FreeResampler(void* resampInst) {
resampler_t* obj = (resampler_t*)resampInst;
AecResampler* obj = (AecResampler*)resampInst;
free(obj);
return 0;
@ -76,7 +76,7 @@ void WebRtcAec_ResampleLinear(void* resampInst,
float skew,
float* outspeech,
int* size_out) {
resampler_t* obj = (resampler_t*)resampInst;
AecResampler* obj = (AecResampler*)resampInst;
float* y;
float be, tnew;
@ -123,7 +123,7 @@ void WebRtcAec_ResampleLinear(void* resampInst,
}
int WebRtcAec_GetSkew(void* resampInst, int rawSkew, float* skewEst) {
resampler_t* obj = (resampler_t*)resampInst;
AecResampler* obj = (AecResampler*)resampInst;
int err = 0;
if (obj->skewDataIndex < kEstimateLengthFrames) {


@ -101,9 +101,9 @@ int webrtc_aec_instance_count = 0;
// Estimates delay to set the position of the far-end buffer read pointer
// (controlled by knownDelay)
static void EstBufDelayNormal(aecpc_t* aecInst);
static void EstBufDelayExtended(aecpc_t* aecInst);
static int ProcessNormal(aecpc_t* self,
static void EstBufDelayNormal(Aec* aecInst);
static void EstBufDelayExtended(Aec* aecInst);
static int ProcessNormal(Aec* self,
const float* near,
const float* near_high,
float* out,
@ -111,7 +111,7 @@ static int ProcessNormal(aecpc_t* self,
int16_t num_samples,
int16_t reported_delay_ms,
int32_t skew);
static void ProcessExtended(aecpc_t* self,
static void ProcessExtended(Aec* self,
const float* near,
const float* near_high,
float* out,
@ -121,12 +121,12 @@ static void ProcessExtended(aecpc_t* self,
int32_t skew);
int32_t WebRtcAec_Create(void** aecInst) {
aecpc_t* aecpc;
Aec* aecpc;
if (aecInst == NULL) {
return -1;
}
aecpc = malloc(sizeof(aecpc_t));
aecpc = malloc(sizeof(Aec));
*aecInst = aecpc;
if (aecpc == NULL) {
return -1;
@ -174,7 +174,7 @@ int32_t WebRtcAec_Create(void** aecInst) {
}
int32_t WebRtcAec_Free(void* aecInst) {
aecpc_t* aecpc = aecInst;
Aec* aecpc = aecInst;
if (aecpc == NULL) {
return -1;
@ -196,7 +196,7 @@ int32_t WebRtcAec_Free(void* aecInst) {
}
int32_t WebRtcAec_Init(void* aecInst, int32_t sampFreq, int32_t scSampFreq) {
aecpc_t* aecpc = aecInst;
Aec* aecpc = aecInst;
AecConfig aecConfig;
if (sampFreq != 8000 && sampFreq != 16000 && sampFreq != 32000) {
@ -280,7 +280,7 @@ int32_t WebRtcAec_Init(void* aecInst, int32_t sampFreq, int32_t scSampFreq) {
int32_t WebRtcAec_BufferFarend(void* aecInst,
const float* farend,
int16_t nrOfSamples) {
aecpc_t* aecpc = aecInst;
Aec* aecpc = aecInst;
int newNrOfSamples = (int)nrOfSamples;
float new_farend[MAX_RESAMP_LEN];
const float* farend_ptr = farend;
@ -348,7 +348,7 @@ int32_t WebRtcAec_Process(void* aecInst,
int16_t nrOfSamples,
int16_t msInSndCardBuf,
int32_t skew) {
aecpc_t* aecpc = aecInst;
Aec* aecpc = aecInst;
int32_t retVal = 0;
if (nearend == NULL) {
aecpc->lastError = AEC_NULL_POINTER_ERROR;
@ -418,7 +418,7 @@ int32_t WebRtcAec_Process(void* aecInst,
}
int WebRtcAec_set_config(void* handle, AecConfig config) {
aecpc_t* self = (aecpc_t*)handle;
Aec* self = (Aec*)handle;
if (self->initFlag != initCheck) {
self->lastError = AEC_UNINITIALIZED_ERROR;
return -1;
@ -453,7 +453,7 @@ int WebRtcAec_set_config(void* handle, AecConfig config) {
}
int WebRtcAec_get_echo_status(void* handle, int* status) {
aecpc_t* self = (aecpc_t*)handle;
Aec* self = (Aec*)handle;
if (status == NULL) {
self->lastError = AEC_NULL_POINTER_ERROR;
return -1;
@ -472,7 +472,7 @@ int WebRtcAec_GetMetrics(void* handle, AecMetrics* metrics) {
const float kUpWeight = 0.7f;
float dtmp;
int stmp;
aecpc_t* self = (aecpc_t*)handle;
Aec* self = (Aec*)handle;
Stats erl;
Stats erle;
Stats a_nlp;
@ -566,7 +566,7 @@ int WebRtcAec_GetMetrics(void* handle, AecMetrics* metrics) {
}
int WebRtcAec_GetDelayMetrics(void* handle, int* median, int* std) {
aecpc_t* self = handle;
Aec* self = handle;
if (median == NULL) {
self->lastError = AEC_NULL_POINTER_ERROR;
return -1;
@ -589,7 +589,7 @@ int WebRtcAec_GetDelayMetrics(void* handle, int* median, int* std) {
}
int32_t WebRtcAec_get_error_code(void* aecInst) {
aecpc_t* aecpc = aecInst;
Aec* aecpc = aecInst;
return aecpc->lastError;
}
@ -597,10 +597,10 @@ AecCore* WebRtcAec_aec_core(void* handle) {
if (!handle) {
return NULL;
}
return ((aecpc_t*)handle)->aec;
return ((Aec*)handle)->aec;
}
static int ProcessNormal(aecpc_t* aecpc,
static int ProcessNormal(Aec* aecpc,
const float* nearend,
const float* nearendH,
float* out,
@ -757,7 +757,7 @@ static int ProcessNormal(aecpc_t* aecpc,
return retVal;
}
static void ProcessExtended(aecpc_t* self,
static void ProcessExtended(Aec* self,
const float* near,
const float* near_high,
float* out,
@ -834,7 +834,7 @@ static void ProcessExtended(aecpc_t* self,
}
}
static void EstBufDelayNormal(aecpc_t* aecpc) {
static void EstBufDelayNormal(Aec* aecpc) {
int nSampSndCard = aecpc->msInSndCardBuf * sampMsNb * aecpc->rate_factor;
int current_delay = nSampSndCard - WebRtcAec_system_delay(aecpc->aec);
int delay_difference = 0;
@ -887,7 +887,7 @@ static void EstBufDelayNormal(aecpc_t* aecpc) {
}
}
static void EstBufDelayExtended(aecpc_t* self) {
static void EstBufDelayExtended(Aec* self) {
int reported_delay = self->msInSndCardBuf * sampMsNb * self->rate_factor;
int current_delay = reported_delay - WebRtcAec_system_delay(self->aec);
int delay_difference = 0;


@ -62,6 +62,6 @@ typedef struct {
int farend_started;
AecCore* aec;
} aecpc_t;
} Aec;
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_ECHO_CANCELLATION_INTERNAL_H_


@ -43,7 +43,7 @@ class SystemDelayTest : public ::testing::Test {
int MapBufferSizeToSamples(int size_in_ms);
void* handle_;
aecpc_t* self_;
Aec* self_;
int samples_per_frame_;
// Dummy input/output speech data.
static const int kSamplesPerChunk = 160;
@ -64,7 +64,7 @@ SystemDelayTest::SystemDelayTest()
void SystemDelayTest::SetUp() {
ASSERT_EQ(0, WebRtcAec_Create(&handle_));
self_ = reinterpret_cast<aecpc_t*>(handle_);
self_ = reinterpret_cast<Aec*>(handle_);
}
void SystemDelayTest::TearDown() {


@ -152,7 +152,7 @@ static const int16_t kChannelStored16kHz[PART_LEN1] = {
// - far_spectrum : Pointer to the far end spectrum
// - far_q : Q-domain of far end spectrum
//
void WebRtcAecm_UpdateFarHistory(AecmCore_t* self,
void WebRtcAecm_UpdateFarHistory(AecmCore* self,
uint16_t* far_spectrum,
int far_q) {
// Get new buffer position
@ -185,7 +185,7 @@ void WebRtcAecm_UpdateFarHistory(AecmCore_t* self,
// - far_spectrum : Pointer to the aligned far end spectrum
// NULL - Error
//
const uint16_t* WebRtcAecm_AlignedFarend(AecmCore_t* self,
const uint16_t* WebRtcAecm_AlignedFarend(AecmCore* self,
int* far_q,
int delay) {
int buffer_position = 0;
@ -207,9 +207,8 @@ CalcLinearEnergies WebRtcAecm_CalcLinearEnergies;
StoreAdaptiveChannel WebRtcAecm_StoreAdaptiveChannel;
ResetAdaptiveChannel WebRtcAecm_ResetAdaptiveChannel;
int WebRtcAecm_CreateCore(AecmCore_t **aecmInst)
{
AecmCore_t *aecm = malloc(sizeof(AecmCore_t));
int WebRtcAecm_CreateCore(AecmCore** aecmInst) {
AecmCore* aecm = malloc(sizeof(AecmCore));
*aecmInst = aecm;
if (aecm == NULL)
{
@ -293,8 +292,7 @@ int WebRtcAecm_CreateCore(AecmCore_t **aecmInst)
return 0;
}
void WebRtcAecm_InitEchoPathCore(AecmCore_t* aecm, const int16_t* echo_path)
{
void WebRtcAecm_InitEchoPathCore(AecmCore* aecm, const int16_t* echo_path) {
int i = 0;
// Reset the stored channel
@ -313,13 +311,12 @@ void WebRtcAecm_InitEchoPathCore(AecmCore_t* aecm, const int16_t* echo_path)
aecm->mseChannelCount = 0;
}
static void CalcLinearEnergiesC(AecmCore_t* aecm,
static void CalcLinearEnergiesC(AecmCore* aecm,
const uint16_t* far_spectrum,
int32_t* echo_est,
uint32_t* far_energy,
uint32_t* echo_energy_adapt,
uint32_t* echo_energy_stored)
{
uint32_t* echo_energy_stored) {
int i;
// Get energy for the delayed far end signal and estimated
@ -334,10 +331,9 @@ static void CalcLinearEnergiesC(AecmCore_t* aecm,
}
}
static void StoreAdaptiveChannelC(AecmCore_t* aecm,
static void StoreAdaptiveChannelC(AecmCore* aecm,
const uint16_t* far_spectrum,
int32_t* echo_est)
{
int32_t* echo_est) {
int i;
// During startup we store the channel every block.
@ -358,8 +354,7 @@ static void StoreAdaptiveChannelC(AecmCore_t* aecm,
far_spectrum[i]);
}
static void ResetAdaptiveChannelC(AecmCore_t* aecm)
{
static void ResetAdaptiveChannelC(AecmCore* aecm) {
int i;
// The stored channel has a significantly lower MSE than the adaptive one for
@ -413,8 +408,7 @@ static void WebRtcAecm_InitMips(void)
// Return value : 0 - Ok
// -1 - Error
//
int WebRtcAecm_InitCore(AecmCore_t * const aecm, int samplingFreq)
{
int WebRtcAecm_InitCore(AecmCore* const aecm, int samplingFreq) {
int i = 0;
int32_t tmp32 = PART_LEN1 * PART_LEN1;
int16_t tmp16 = PART_LEN1;
@ -545,16 +539,14 @@ int WebRtcAecm_InitCore(AecmCore_t * const aecm, int samplingFreq)
// TODO(bjornv): This function is currently not used. Add support for these
// parameters from a higher level
int WebRtcAecm_Control(AecmCore_t *aecm, int delay, int nlpFlag)
{
int WebRtcAecm_Control(AecmCore* aecm, int delay, int nlpFlag) {
aecm->nlpFlag = nlpFlag;
aecm->fixedDelay = delay;
return 0;
}
int WebRtcAecm_FreeCore(AecmCore_t *aecm)
{
int WebRtcAecm_FreeCore(AecmCore* aecm) {
if (aecm == NULL)
{
return -1;
@ -574,12 +566,11 @@ int WebRtcAecm_FreeCore(AecmCore_t *aecm)
return 0;
}
int WebRtcAecm_ProcessFrame(AecmCore_t * aecm,
int WebRtcAecm_ProcessFrame(AecmCore* aecm,
const int16_t* farend,
const int16_t* nearendNoisy,
const int16_t* nearendClean,
int16_t * out)
{
int16_t* out) {
int16_t outBlock_buf[PART_LEN + 8]; // Align buffer to 8-byte boundary.
int16_t* outBlock = (int16_t*) (((uintptr_t) outBlock_buf + 15) & ~ 15);
@ -737,12 +728,11 @@ static int16_t LogOfEnergyInQ8(uint32_t energy, int q_domain) {
// Q(aecm->dfaQDomain).
// @param echoEst [out] Estimated echo in Q(xfa_q+RESOLUTION_CHANNEL16).
//
void WebRtcAecm_CalcEnergies(AecmCore_t * aecm,
void WebRtcAecm_CalcEnergies(AecmCore* aecm,
const uint16_t* far_spectrum,
const int16_t far_q,
const uint32_t nearEner,
int32_t * echoEst)
{
int32_t* echoEst) {
// Local variables
uint32_t tmpAdapt = 0;
uint32_t tmpStored = 0;
@ -871,9 +861,7 @@ void WebRtcAecm_CalcEnergies(AecmCore_t * aecm,
// @param mu [out] (Return value) Stepsize in log2(), i.e. number of shifts.
//
//
int16_t WebRtcAecm_CalcStepSize(AecmCore_t * const aecm)
{
int16_t WebRtcAecm_CalcStepSize(AecmCore* const aecm) {
int32_t tmp32;
int16_t tmp16;
int16_t mu = MU_MAX;
@ -919,14 +907,12 @@ int16_t WebRtcAecm_CalcStepSize(AecmCore_t * const aecm)
// @param mu [in] NLMS step size.
// @param echoEst [i/o] Estimated echo in Q(far_q+RESOLUTION_CHANNEL16).
//
void WebRtcAecm_UpdateChannel(AecmCore_t * aecm,
void WebRtcAecm_UpdateChannel(AecmCore* aecm,
const uint16_t* far_spectrum,
const int16_t far_q,
const uint16_t* const dfa,
const int16_t mu,
int32_t * echoEst)
{
int32_t* echoEst) {
uint32_t tmpU32no1, tmpU32no2;
int32_t tmp32no1, tmp32no2;
int32_t mseStored;
@ -1133,8 +1119,7 @@ void WebRtcAecm_UpdateChannel(AecmCore_t * aecm,
// level (Q14).
//
//
int16_t WebRtcAecm_CalcSuppressionGain(AecmCore_t * const aecm)
{
int16_t WebRtcAecm_CalcSuppressionGain(AecmCore* const aecm) {
int32_t tmp32no1;
int16_t supGain = SUPGAIN_DEFAULT;
@ -1202,10 +1187,9 @@ int16_t WebRtcAecm_CalcSuppressionGain(AecmCore_t * const aecm)
return aecm->supGain;
}
void WebRtcAecm_BufferFarFrame(AecmCore_t* const aecm,
void WebRtcAecm_BufferFarFrame(AecmCore* const aecm,
const int16_t* const farend,
const int farLen)
{
const int farLen) {
int writeLen = farLen, writePos = 0;
// Check if the write position must be wrapped
@ -1225,9 +1209,10 @@ void WebRtcAecm_BufferFarFrame(AecmCore_t* const aecm,
aecm->farBufWritePos += writeLen;
}
void WebRtcAecm_FetchFarFrame(AecmCore_t * const aecm, int16_t * const farend,
const int farLen, const int knownDelay)
{
void WebRtcAecm_FetchFarFrame(AecmCore* const aecm,
int16_t* const farend,
const int farLen,
const int knownDelay) {
int readLen = farLen;
int readPos = 0;
int delayChange = knownDelay - aecm->lastKnownDelay;


@ -29,7 +29,7 @@
typedef struct {
int16_t real;
int16_t imag;
} complex16_t;
} ComplexInt16;
typedef struct {
int farBufWritePos;
@ -131,7 +131,7 @@ typedef struct {
FILE *nearFile;
FILE *outFile;
#endif
} AecmCore_t;
} AecmCore;
////////////////////////////////////////////////////////////////////////////////
// WebRtcAecm_CreateCore(...)
@ -148,7 +148,7 @@ typedef struct {
// Return value : 0 - Ok
// -1 - Error
//
int WebRtcAecm_CreateCore(AecmCore_t **aecm);
int WebRtcAecm_CreateCore(AecmCore** aecm);
////////////////////////////////////////////////////////////////////////////////
// WebRtcAecm_InitCore(...)
@ -165,7 +165,7 @@ int WebRtcAecm_CreateCore(AecmCore_t **aecm);
// Return value : 0 - Ok
// -1 - Error
//
int WebRtcAecm_InitCore(AecmCore_t * const aecm, int samplingFreq);
int WebRtcAecm_InitCore(AecmCore* const aecm, int samplingFreq);
////////////////////////////////////////////////////////////////////////////////
// WebRtcAecm_FreeCore(...)
@ -178,9 +178,9 @@ int WebRtcAecm_InitCore(AecmCore_t * const aecm, int samplingFreq);
// -1 - Error
// 11001-11016: Error
//
int WebRtcAecm_FreeCore(AecmCore_t *aecm);
int WebRtcAecm_FreeCore(AecmCore* aecm);
int WebRtcAecm_Control(AecmCore_t *aecm, int delay, int nlpFlag);
int WebRtcAecm_Control(AecmCore* aecm, int delay, int nlpFlag);
////////////////////////////////////////////////////////////////////////////////
// WebRtcAecm_InitEchoPathCore(...)
@ -194,8 +194,7 @@ int WebRtcAecm_Control(AecmCore_t *aecm, int delay, int nlpFlag);
// Output:
// - aecm : Initialized instance
//
void WebRtcAecm_InitEchoPathCore(AecmCore_t* aecm,
const int16_t* echo_path);
void WebRtcAecm_InitEchoPathCore(AecmCore* aecm, const int16_t* echo_path);
////////////////////////////////////////////////////////////////////////////////
// WebRtcAecm_ProcessFrame(...)
@ -215,7 +214,8 @@ void WebRtcAecm_InitEchoPathCore(AecmCore_t* aecm,
// - out : Out buffer, one frame of nearend signal :
//
//
int WebRtcAecm_ProcessFrame(AecmCore_t * aecm, const int16_t * farend,
int WebRtcAecm_ProcessFrame(AecmCore* aecm,
const int16_t* farend,
const int16_t* nearendNoisy,
const int16_t* nearendClean,
int16_t* out);
@ -238,7 +238,8 @@ int WebRtcAecm_ProcessFrame(AecmCore_t * aecm, const int16_t * farend,
// - out : Out buffer, one block of nearend signal :
//
//
int WebRtcAecm_ProcessBlock(AecmCore_t * aecm, const int16_t * farend,
int WebRtcAecm_ProcessBlock(AecmCore* aecm,
const int16_t* farend,
const int16_t* nearendNoisy,
const int16_t* noisyClean,
int16_t* out);
@ -253,7 +254,7 @@ int WebRtcAecm_ProcessBlock(AecmCore_t * aecm, const int16_t * farend,
// - farend : In buffer containing one frame of farend signal
// - farLen : Length of frame
//
void WebRtcAecm_BufferFarFrame(AecmCore_t * const aecm,
void WebRtcAecm_BufferFarFrame(AecmCore* const aecm,
const int16_t* const farend,
const int farLen);
@ -268,10 +269,10 @@ void WebRtcAecm_BufferFarFrame(AecmCore_t * const aecm,
// - farLen : Length of frame
// - knownDelay : known delay
//
void WebRtcAecm_FetchFarFrame(AecmCore_t * const aecm,
void WebRtcAecm_FetchFarFrame(AecmCore* const aecm,
int16_t* const farend,
const int farLen, const int knownDelay);
const int farLen,
const int knownDelay);
// All the functions below are intended to be private
@ -286,7 +287,7 @@ void WebRtcAecm_FetchFarFrame(AecmCore_t * const aecm,
// - far_spectrum : Pointer to the far end spectrum
// - far_q : Q-domain of far end spectrum
//
void WebRtcAecm_UpdateFarHistory(AecmCore_t* self,
void WebRtcAecm_UpdateFarHistory(AecmCore* self,
uint16_t* far_spectrum,
int far_q);
@ -310,9 +311,7 @@ void WebRtcAecm_UpdateFarHistory(AecmCore_t* self,
// - far_spectrum : Pointer to the aligned far end spectrum
// NULL - Error
//
const uint16_t* WebRtcAecm_AlignedFarend(AecmCore_t* self,
int* far_q,
int delay);
const uint16_t* WebRtcAecm_AlignedFarend(AecmCore* self, int* far_q, int delay);
///////////////////////////////////////////////////////////////////////////////
// WebRtcAecm_CalcSuppressionGain()
@ -327,7 +326,7 @@ const uint16_t* WebRtcAecm_AlignedFarend(AecmCore_t* self,
// - supGain : Suppression gain with which to scale the noise
// level (Q14).
//
int16_t WebRtcAecm_CalcSuppressionGain(AecmCore_t * const aecm);
int16_t WebRtcAecm_CalcSuppressionGain(AecmCore* const aecm);
///////////////////////////////////////////////////////////////////////////////
// WebRtcAecm_CalcEnergies()
@ -346,7 +345,7 @@ int16_t WebRtcAecm_CalcSuppressionGain(AecmCore_t * const aecm);
// Output:
// - echoEst : Estimated echo in Q(xfa_q+RESOLUTION_CHANNEL16).
//
void WebRtcAecm_CalcEnergies(AecmCore_t * aecm,
void WebRtcAecm_CalcEnergies(AecmCore* aecm,
const uint16_t* far_spectrum,
const int16_t far_q,
const uint32_t nearEner,
@ -363,7 +362,7 @@ void WebRtcAecm_CalcEnergies(AecmCore_t * aecm,
// Return value:
// - mu : Stepsize in log2(), i.e. number of shifts.
//
int16_t WebRtcAecm_CalcStepSize(AecmCore_t * const aecm);
int16_t WebRtcAecm_CalcStepSize(AecmCore* const aecm);
///////////////////////////////////////////////////////////////////////////////
// WebRtcAecm_UpdateChannel(...)
@ -381,7 +380,7 @@ int16_t WebRtcAecm_CalcStepSize(AecmCore_t * const aecm);
// Input/Output:
// - echoEst : Estimated echo in Q(far_q+RESOLUTION_CHANNEL16).
//
void WebRtcAecm_UpdateChannel(AecmCore_t * aecm,
void WebRtcAecm_UpdateChannel(AecmCore* aecm,
const uint16_t* far_spectrum,
const int16_t far_q,
const uint16_t* const dfa,
@ -395,8 +394,7 @@ extern const int16_t WebRtcAecm_kSinTable[];
// Some function pointers, for internal functions shared by ARM NEON and
// generic C code.
//
typedef void (*CalcLinearEnergies)(
AecmCore_t* aecm,
typedef void (*CalcLinearEnergies)(AecmCore* aecm,
const uint16_t* far_spectrum,
int32_t* echoEst,
uint32_t* far_energy,
@ -404,13 +402,12 @@ typedef void (*CalcLinearEnergies)(
uint32_t* echo_energy_stored);
extern CalcLinearEnergies WebRtcAecm_CalcLinearEnergies;
typedef void (*StoreAdaptiveChannel)(
AecmCore_t* aecm,
typedef void (*StoreAdaptiveChannel)(AecmCore* aecm,
const uint16_t* far_spectrum,
int32_t* echo_est);
extern StoreAdaptiveChannel WebRtcAecm_StoreAdaptiveChannel;
typedef void (*ResetAdaptiveChannel)(AecmCore_t* aecm);
typedef void (*ResetAdaptiveChannel)(AecmCore* aecm);
extern ResetAdaptiveChannel WebRtcAecm_ResetAdaptiveChannel;
// For the above function pointers, functions for generic platforms are declared
@ -418,33 +415,33 @@ extern ResetAdaptiveChannel WebRtcAecm_ResetAdaptiveChannel;
// are declared below and defined in file aecm_core_neon.c.
#if (defined WEBRTC_DETECT_ARM_NEON) || defined (WEBRTC_ARCH_ARM_NEON) || \
defined (WEBRTC_ARCH_ARM64_NEON)
void WebRtcAecm_CalcLinearEnergiesNeon(AecmCore_t* aecm,
void WebRtcAecm_CalcLinearEnergiesNeon(AecmCore* aecm,
const uint16_t* far_spectrum,
int32_t* echo_est,
uint32_t* far_energy,
uint32_t* echo_energy_adapt,
uint32_t* echo_energy_stored);
void WebRtcAecm_StoreAdaptiveChannelNeon(AecmCore_t* aecm,
void WebRtcAecm_StoreAdaptiveChannelNeon(AecmCore* aecm,
const uint16_t* far_spectrum,
int32_t* echo_est);
void WebRtcAecm_ResetAdaptiveChannelNeon(AecmCore_t* aecm);
void WebRtcAecm_ResetAdaptiveChannelNeon(AecmCore* aecm);
#endif
#if defined(MIPS32_LE)
void WebRtcAecm_CalcLinearEnergies_mips(AecmCore_t* aecm,
void WebRtcAecm_CalcLinearEnergies_mips(AecmCore* aecm,
const uint16_t* far_spectrum,
int32_t* echo_est,
uint32_t* far_energy,
uint32_t* echo_energy_adapt,
uint32_t* echo_energy_stored);
#if defined(MIPS_DSP_R1_LE)
void WebRtcAecm_StoreAdaptiveChannel_mips(AecmCore_t* aecm,
void WebRtcAecm_StoreAdaptiveChannel_mips(AecmCore* aecm,
const uint16_t* far_spectrum,
int32_t* echo_est);
void WebRtcAecm_ResetAdaptiveChannel_mips(AecmCore_t* aecm);
void WebRtcAecm_ResetAdaptiveChannel_mips(AecmCore* aecm);
#endif
#endif


@ -57,15 +57,15 @@ static const uint16_t kBeta3 = 18927;
static const int16_t kNoiseEstQDomain = 15;
static const int16_t kNoiseEstIncCount = 5;
static void ComfortNoise(AecmCore_t* aecm,
static void ComfortNoise(AecmCore* aecm,
const uint16_t* dfa,
complex16_t* out,
ComplexInt16* out,
const int16_t* lambda);
static void WindowAndFFT(AecmCore_t* aecm,
static void WindowAndFFT(AecmCore* aecm,
int16_t* fft,
const int16_t* time_signal,
complex16_t* freq_signal,
ComplexInt16* freq_signal,
int time_signal_scaling) {
int i = 0;
@ -91,12 +91,11 @@ static void WindowAndFFT(AecmCore_t* aecm,
}
}
static void InverseFFTAndWindow(AecmCore_t* aecm,
static void InverseFFTAndWindow(AecmCore* aecm,
int16_t* fft,
complex16_t* efw,
ComplexInt16* efw,
int16_t* output,
const int16_t* nearendClean)
{
const int16_t* nearendClean) {
int i, j, outCFFT;
int32_t tmp32no1;
// Reuse |efw| for the inverse FFT output after transferring
@ -162,12 +161,11 @@ static void InverseFFTAndWindow(AecmCore_t* aecm,
// the frequency domain array
// return value The Q-domain of current frequency values
//
static int TimeToFrequencyDomain(AecmCore_t* aecm,
static int TimeToFrequencyDomain(AecmCore* aecm,
const int16_t* time_signal,
complex16_t* freq_signal,
ComplexInt16* freq_signal,
uint16_t* freq_signal_abs,
uint32_t* freq_signal_sum_abs)
{
uint32_t* freq_signal_sum_abs) {
int i = 0;
int time_signal_scaling = 0;
@ -283,12 +281,11 @@ static int TimeToFrequencyDomain(AecmCore_t* aecm,
return time_signal_scaling;
}
int WebRtcAecm_ProcessBlock(AecmCore_t * aecm,
int WebRtcAecm_ProcessBlock(AecmCore* aecm,
const int16_t* farend,
const int16_t* nearendNoisy,
const int16_t* nearendClean,
int16_t * output)
{
int16_t* output) {
int i;
uint32_t xfaSum;
@ -306,7 +303,7 @@ int WebRtcAecm_ProcessBlock(AecmCore_t * aecm,
const uint16_t* far_spectrum_ptr = NULL;
// 32 byte aligned buffers (with +8 or +16).
// TODO (kma): define fft with complex16_t.
// TODO(kma): define fft with ComplexInt16.
int16_t fft_buf[PART_LEN4 + 2 + 16]; // +2 to make a loop safe.
int32_t echoEst32_buf[PART_LEN1 + 8];
int32_t dfw_buf[PART_LEN2 + 8];
@ -314,8 +311,8 @@ int WebRtcAecm_ProcessBlock(AecmCore_t * aecm,
int16_t* fft = (int16_t*) (((uintptr_t) fft_buf + 31) & ~ 31);
int32_t* echoEst32 = (int32_t*) (((uintptr_t) echoEst32_buf + 31) & ~ 31);
complex16_t* dfw = (complex16_t*) (((uintptr_t) dfw_buf + 31) & ~ 31);
complex16_t* efw = (complex16_t*) (((uintptr_t) efw_buf + 31) & ~ 31);
ComplexInt16* dfw = (ComplexInt16*)(((uintptr_t)dfw_buf + 31) & ~31);
ComplexInt16* efw = (ComplexInt16*)(((uintptr_t)efw_buf + 31) & ~31);
int16_t hnl[PART_LEN1];
int16_t numPosCoef = 0;
@ -644,12 +641,10 @@ int WebRtcAecm_ProcessBlock(AecmCore_t * aecm,
return 0;
}
static void ComfortNoise(AecmCore_t* aecm,
static void ComfortNoise(AecmCore* aecm,
const uint16_t* dfa,
complex16_t* out,
const int16_t* lambda)
{
ComplexInt16* out,
const int16_t* lambda) {
int16_t i;
int16_t tmp16;
int32_t tmp32;


@ -67,20 +67,20 @@ static int16_t coefTable_ifft[] = {
120, 68, 376, 388, 248, 132, 504, 260
};
static void ComfortNoise(AecmCore_t* aecm,
static void ComfortNoise(AecmCore* aecm,
const uint16_t* dfa,
complex16_t* out,
ComplexInt16* out,
const int16_t* lambda);
static void WindowAndFFT(AecmCore_t* aecm,
static void WindowAndFFT(AecmCore* aecm,
int16_t* fft,
const int16_t* time_signal,
complex16_t* freq_signal,
ComplexInt16* freq_signal,
int time_signal_scaling) {
int i, j;
int32_t tmp1, tmp2, tmp3, tmp4;
int16_t* pfrfi;
complex16_t* pfreq_signal;
ComplexInt16* pfreq_signal;
int16_t f_coef, s_coef;
int32_t load_ptr, store_ptr1, store_ptr2, shift, shift1;
int32_t hann, hann1, coefs;
@ -199,9 +199,9 @@ static void WindowAndFFT(AecmCore_t* aecm,
);
}
static void InverseFFTAndWindow(AecmCore_t* aecm,
static void InverseFFTAndWindow(AecmCore* aecm,
int16_t* fft,
complex16_t* efw,
ComplexInt16* efw,
int16_t* output,
const int16_t* nearendClean) {
int i, outCFFT;
@ -209,7 +209,7 @@ static void InverseFFTAndWindow(AecmCore_t* aecm,
int16_t* pcoefTable_ifft = coefTable_ifft;
int16_t* pfft = fft;
int16_t* ppfft = fft;
complex16_t* pefw = efw;
ComplexInt16* pefw = efw;
int32_t out_aecm;
int16_t* paecm_buf = aecm->outBuf;
const int16_t* p_kSqrtHanning = WebRtcAecm_kSqrtHanning;
@ -432,7 +432,7 @@ static void InverseFFTAndWindow(AecmCore_t* aecm,
}
}
void WebRtcAecm_CalcLinearEnergies_mips(AecmCore_t* aecm,
void WebRtcAecm_CalcLinearEnergies_mips(AecmCore* aecm,
const uint16_t* far_spectrum,
int32_t* echo_est,
uint32_t* far_energy,
@ -521,7 +521,7 @@ void WebRtcAecm_CalcLinearEnergies_mips(AecmCore_t* aecm,
}
#if defined(MIPS_DSP_R1_LE)
void WebRtcAecm_StoreAdaptiveChannel_mips(AecmCore_t* aecm,
void WebRtcAecm_StoreAdaptiveChannel_mips(AecmCore* aecm,
const uint16_t* far_spectrum,
int32_t* echo_est) {
int i;
@ -568,7 +568,7 @@ void WebRtcAecm_StoreAdaptiveChannel_mips(AecmCore_t* aecm,
far_spectrum[i]);
}
void WebRtcAecm_ResetAdaptiveChannel_mips(AecmCore_t* aecm) {
void WebRtcAecm_ResetAdaptiveChannel_mips(AecmCore* aecm) {
int i;
int32_t* temp3;
int16_t* temp0;
@ -623,12 +623,11 @@ void WebRtcAecm_ResetAdaptiveChannel_mips(AecmCore_t* aecm) {
// the frequency domain array
// return value The Q-domain of current frequency values
//
static int TimeToFrequencyDomain(AecmCore_t* aecm,
static int TimeToFrequencyDomain(AecmCore* aecm,
const int16_t* time_signal,
complex16_t* freq_signal,
ComplexInt16* freq_signal,
uint16_t* freq_signal_abs,
uint32_t* freq_signal_sum_abs)
{
uint32_t* freq_signal_sum_abs) {
int i = 0;
int time_signal_scaling = 0;
@ -794,7 +793,7 @@ static int TimeToFrequencyDomain(AecmCore_t* aecm,
return time_signal_scaling;
}
int WebRtcAecm_ProcessBlock(AecmCore_t* aecm,
int WebRtcAecm_ProcessBlock(AecmCore* aecm,
const int16_t* farend,
const int16_t* nearendNoisy,
const int16_t* nearendClean,
@ -821,8 +820,8 @@ int WebRtcAecm_ProcessBlock(AecmCore_t* aecm,
int16_t* fft = (int16_t*)(((uint32_t)fft_buf + 31) & ~ 31);
int32_t* echoEst32 = (int32_t*)(((uint32_t)echoEst32_buf + 31) & ~ 31);
complex16_t* dfw = (complex16_t*)(((uint32_t)dfw_buf + 31) & ~ 31);
complex16_t* efw = (complex16_t*)(((uint32_t)efw_buf + 31) & ~ 31);
ComplexInt16* dfw = (ComplexInt16*)(((uint32_t)dfw_buf + 31) & ~31);
ComplexInt16* efw = (ComplexInt16*)(((uint32_t)efw_buf + 31) & ~31);
int16_t hnl[PART_LEN1];
int16_t numPosCoef = 0;
@ -1250,9 +1249,9 @@ int WebRtcAecm_ProcessBlock(AecmCore_t* aecm,
}
// Generate comfort noise and add to output signal.
static void ComfortNoise(AecmCore_t* aecm,
static void ComfortNoise(AecmCore* aecm,
const uint16_t* dfa,
complex16_t* out,
ComplexInt16* out,
const int16_t* lambda) {
int16_t i;
int16_t tmp16, tmp161, tmp162, tmp163, nrsh1, nrsh2;


@ -42,7 +42,7 @@ static inline void AddLanes(uint32_t* ptr, uint32x4_t v) {
#endif
}
void WebRtcAecm_CalcLinearEnergiesNeon(AecmCore_t* aecm,
void WebRtcAecm_CalcLinearEnergiesNeon(AecmCore* aecm,
const uint16_t* far_spectrum,
int32_t* echo_est,
uint32_t* far_energy,
@ -114,7 +114,7 @@ void WebRtcAecm_CalcLinearEnergiesNeon(AecmCore_t* aecm,
*echo_energy_adapt += aecm->channelAdapt16[PART_LEN] * far_spectrum[PART_LEN];
}
void WebRtcAecm_StoreAdaptiveChannelNeon(AecmCore_t* aecm,
void WebRtcAecm_StoreAdaptiveChannelNeon(AecmCore* aecm,
const uint16_t* far_spectrum,
int32_t* echo_est) {
assert((uintptr_t)echo_est % 32 == 0);
@ -173,7 +173,7 @@ void WebRtcAecm_StoreAdaptiveChannelNeon(AecmCore_t* aecm,
far_spectrum[PART_LEN]);
}
void WebRtcAecm_ResetAdaptiveChannelNeon(AecmCore_t* aecm) {
void WebRtcAecm_ResetAdaptiveChannelNeon(AecmCore* aecm) {
assert((uintptr_t)(aecm->channelStored) % 16 == 0);
assert((uintptr_t)(aecm->channelAdapt16) % 16 == 0);
assert((uintptr_t)(aecm->channelAdapt32) % 32 == 0);


@ -70,25 +70,25 @@ typedef struct
int lastError;
AecmCore_t *aecmCore;
} aecmob_t;
AecmCore* aecmCore;
} AecMobile;
// Estimates delay to set the position of the farend buffer read pointer
// (controlled by knownDelay)
static int WebRtcAecm_EstBufDelay(aecmob_t *aecmInst, short msInSndCardBuf);
static int WebRtcAecm_EstBufDelay(AecMobile* aecmInst, short msInSndCardBuf);
// Stuffs the farend buffer if the estimated delay is too large
static int WebRtcAecm_DelayComp(aecmob_t *aecmInst);
static int WebRtcAecm_DelayComp(AecMobile* aecmInst);
int32_t WebRtcAecm_Create(void **aecmInst)
{
aecmob_t *aecm;
AecMobile* aecm;
if (aecmInst == NULL)
{
return -1;
}
aecm = malloc(sizeof(aecmob_t));
aecm = malloc(sizeof(AecMobile));
*aecmInst = aecm;
if (aecm == NULL)
{
@ -132,7 +132,7 @@ int32_t WebRtcAecm_Create(void **aecmInst)
int32_t WebRtcAecm_Free(void *aecmInst)
{
aecmob_t *aecm = aecmInst;
AecMobile* aecm = aecmInst;
if (aecm == NULL)
{
@ -159,7 +159,7 @@ int32_t WebRtcAecm_Free(void *aecmInst)
int32_t WebRtcAecm_Init(void *aecmInst, int32_t sampFreq)
{
aecmob_t *aecm = aecmInst;
AecMobile* aecm = aecmInst;
AecmConfig aecConfig;
if (aecm == NULL)
@ -223,7 +223,7 @@ int32_t WebRtcAecm_Init(void *aecmInst, int32_t sampFreq)
int32_t WebRtcAecm_BufferFarend(void *aecmInst, const int16_t *farend,
int16_t nrOfSamples)
{
aecmob_t *aecm = aecmInst;
AecMobile* aecm = aecmInst;
int32_t retVal = 0;
if (aecm == NULL)
@ -264,7 +264,7 @@ int32_t WebRtcAecm_Process(void *aecmInst, const int16_t *nearendNoisy,
const int16_t *nearendClean, int16_t *out,
int16_t nrOfSamples, int16_t msInSndCardBuf)
{
aecmob_t *aecm = aecmInst;
AecMobile* aecm = aecmInst;
int32_t retVal = 0;
short i;
short nmbrOfFilledBuffers;
@ -466,7 +466,7 @@ int32_t WebRtcAecm_Process(void *aecmInst, const int16_t *nearendNoisy,
int32_t WebRtcAecm_set_config(void *aecmInst, AecmConfig config)
{
aecmob_t *aecm = aecmInst;
AecMobile* aecm = aecmInst;
if (aecm == NULL)
{
@ -548,7 +548,7 @@ int32_t WebRtcAecm_set_config(void *aecmInst, AecmConfig config)
int32_t WebRtcAecm_get_config(void *aecmInst, AecmConfig *config)
{
aecmob_t *aecm = aecmInst;
AecMobile* aecm = aecmInst;
if (aecm == NULL)
{
@ -577,7 +577,7 @@ int32_t WebRtcAecm_InitEchoPath(void* aecmInst,
const void* echo_path,
size_t size_bytes)
{
aecmob_t *aecm = aecmInst;
AecMobile* aecm = aecmInst;
const int16_t* echo_path_ptr = echo_path;
if (aecmInst == NULL) {
@ -608,7 +608,7 @@ int32_t WebRtcAecm_GetEchoPath(void* aecmInst,
void* echo_path,
size_t size_bytes)
{
aecmob_t *aecm = aecmInst;
AecMobile* aecm = aecmInst;
int16_t* echo_path_ptr = echo_path;
if (aecmInst == NULL) {
@ -641,7 +641,7 @@ size_t WebRtcAecm_echo_path_size_bytes()
int32_t WebRtcAecm_get_error_code(void *aecmInst)
{
aecmob_t *aecm = aecmInst;
AecMobile* aecm = aecmInst;
if (aecm == NULL)
{
@ -651,8 +651,7 @@ int32_t WebRtcAecm_get_error_code(void *aecmInst)
return aecm->lastError;
}
static int WebRtcAecm_EstBufDelay(aecmob_t *aecm, short msInSndCardBuf)
{
static int WebRtcAecm_EstBufDelay(AecMobile* aecm, short msInSndCardBuf) {
short delayNew, nSampSndCard;
short nSampFar = (short) WebRtc_available_read(aecm->farendBuf);
short diff;
@ -701,8 +700,7 @@ static int WebRtcAecm_EstBufDelay(aecmob_t *aecm, short msInSndCardBuf)
return 0;
}
static int WebRtcAecm_DelayComp(aecmob_t *aecm)
{
static int WebRtcAecm_DelayComp(AecMobile* aecm) {
int nSampFar = (int) WebRtc_available_read(aecm->farendBuf);
int nSampSndCard, delayNew, nSampAdd;
const int maxStuffSamp = 10 * FRAME_LEN;


@ -119,8 +119,8 @@ int WebRtcAgc_AddMic(void *state, int16_t* const* in_mic, int16_t num_bands,
int32_t *ptr;
uint16_t targetGainIdx, gain;
int16_t i, n, L, tmp16, tmp_speech[16];
Agc_t *stt;
stt = (Agc_t *)state;
LegacyAgc* stt;
stt = (LegacyAgc*)state;
if (stt->fs == 8000) {
L = 8;
@ -252,8 +252,8 @@ int WebRtcAgc_AddMic(void *state, int16_t* const* in_mic, int16_t num_bands,
int WebRtcAgc_AddFarend(void *state, const int16_t *in_far, int16_t samples)
{
Agc_t *stt;
stt = (Agc_t *)state;
LegacyAgc* stt;
stt = (LegacyAgc*)state;
if (stt == NULL)
{
@ -287,7 +287,7 @@ int WebRtcAgc_VirtualMic(void *agcInst, int16_t* const* in_near,
int32_t tmpFlt, micLevelTmp, gainIdx;
uint16_t gain;
int16_t ii, j;
Agc_t *stt;
LegacyAgc* stt;
uint32_t nrg;
int16_t sampleCntr;
@ -297,7 +297,7 @@ int WebRtcAgc_VirtualMic(void *agcInst, int16_t* const* in_near,
const int16_t kZeroCrossingLowLim = 15;
const int16_t kZeroCrossingHighLim = 20;
stt = (Agc_t *)agcInst;
stt = (LegacyAgc*)agcInst;
/*
* Before applying gain decide if this is a low-level signal.
@ -423,9 +423,7 @@ int WebRtcAgc_VirtualMic(void *agcInst, int16_t* const* in_near,
return 0;
}
void WebRtcAgc_UpdateAgcThresholds(Agc_t *stt)
{
void WebRtcAgc_UpdateAgcThresholds(LegacyAgc* stt) {
int16_t tmp16;
#ifdef MIC_LEVEL_FEEDBACK
int zeros;
@ -475,8 +473,9 @@ void WebRtcAgc_UpdateAgcThresholds(Agc_t *stt)
stt->lowerLimit = stt->startLowerLimit;
}
void WebRtcAgc_SaturationCtrl(Agc_t *stt, uint8_t *saturated, int32_t *env)
{
void WebRtcAgc_SaturationCtrl(LegacyAgc* stt,
uint8_t* saturated,
int32_t* env) {
int16_t i, tmpW16;
/* Check if the signal is saturated */
@ -500,8 +499,7 @@ void WebRtcAgc_SaturationCtrl(Agc_t *stt, uint8_t *saturated, int32_t *env)
(int16_t)32440, 15);
}
void WebRtcAgc_ZeroCtrl(Agc_t *stt, int32_t *inMicLevel, int32_t *env)
{
void WebRtcAgc_ZeroCtrl(LegacyAgc* stt, int32_t* inMicLevel, int32_t* env) {
int16_t i;
int32_t tmp32 = 0;
int32_t midVal;
@ -562,8 +560,7 @@ void WebRtcAgc_ZeroCtrl(Agc_t *stt, int32_t *inMicLevel, int32_t *env)
}
}
void WebRtcAgc_SpeakerInactiveCtrl(Agc_t *stt)
{
void WebRtcAgc_SpeakerInactiveCtrl(LegacyAgc* stt) {
/* Check if the near end speaker is inactive.
* If that is the case the VAD threshold is
* increased since the VAD speech model gets
@ -653,9 +650,9 @@ int32_t WebRtcAgc_ProcessAnalog(void *state, int32_t inMicLevel,
int32_t inMicLevelTmp, lastMicVol;
int16_t i;
uint8_t saturated = 0;
Agc_t *stt;
LegacyAgc* stt;
stt = (Agc_t *)state;
stt = (LegacyAgc*)state;
inMicLevelTmp = inMicLevel << stt->scale;
if (inMicLevelTmp > stt->maxAnalog)
@ -1146,9 +1143,9 @@ int WebRtcAgc_Process(void *agcInst, const int16_t* const* in_near,
int32_t *outMicLevel, int16_t echo,
uint8_t *saturationWarning)
{
Agc_t *stt;
LegacyAgc* stt;
stt = (Agc_t *)agcInst;
stt = (LegacyAgc*)agcInst;
//
if (stt == NULL)
@ -1237,10 +1234,9 @@ int WebRtcAgc_Process(void *agcInst, const int16_t* const* in_near,
return 0;
}
int WebRtcAgc_set_config(void *agcInst, WebRtcAgc_config_t agcConfig)
{
Agc_t *stt;
stt = (Agc_t *)agcInst;
int WebRtcAgc_set_config(void* agcInst, WebRtcAgcConfig agcConfig) {
LegacyAgc* stt;
stt = (LegacyAgc*)agcInst;
if (stt == NULL)
{
@ -1287,7 +1283,7 @@ int WebRtcAgc_set_config(void *agcInst, WebRtcAgc_config_t agcConfig)
#endif
return -1;
}
/* Store the config in a WebRtcAgc_config_t */
/* Store the config in a WebRtcAgcConfig */
stt->usedConfig.compressionGaindB = agcConfig.compressionGaindB;
stt->usedConfig.limiterEnable = agcConfig.limiterEnable;
stt->usedConfig.targetLevelDbfs = agcConfig.targetLevelDbfs;
@ -1295,10 +1291,9 @@ int WebRtcAgc_set_config(void *agcInst, WebRtcAgc_config_t agcConfig)
return 0;
}
int WebRtcAgc_get_config(void *agcInst, WebRtcAgc_config_t *config)
{
Agc_t *stt;
stt = (Agc_t *)agcInst;
int WebRtcAgc_get_config(void* agcInst, WebRtcAgcConfig* config) {
LegacyAgc* stt;
stt = (LegacyAgc*)agcInst;
if (stt == NULL)
{
@ -1326,12 +1321,12 @@ int WebRtcAgc_get_config(void *agcInst, WebRtcAgc_config_t *config)
int WebRtcAgc_Create(void **agcInst)
{
Agc_t *stt;
LegacyAgc* stt;
if (agcInst == NULL)
{
return -1;
}
stt = (Agc_t *)malloc(sizeof(Agc_t));
stt = (LegacyAgc*)malloc(sizeof(LegacyAgc));
*agcInst = stt;
if (stt == NULL)
@ -1353,9 +1348,9 @@ int WebRtcAgc_Create(void **agcInst)
int WebRtcAgc_Free(void *state)
{
Agc_t *stt;
LegacyAgc* stt;
stt = (Agc_t *)state;
stt = (LegacyAgc*)state;
#ifdef WEBRTC_AGC_DEBUG_DUMP
fclose(stt->fpt);
fclose(stt->agcLog);
@ -1375,10 +1370,10 @@ int WebRtcAgc_Init(void *agcInst, int32_t minLevel, int32_t maxLevel,
int32_t max_add, tmp32;
int16_t i;
int tmpNorm;
Agc_t *stt;
LegacyAgc* stt;
/* typecast state pointer */
stt = (Agc_t *)agcInst;
stt = (LegacyAgc*)agcInst;
if (WebRtcAgc_InitDigital(&stt->digitalAgc, agcMode) != 0)
{


@ -50,8 +50,8 @@ typedef struct
int16_t targetLevelDbfs; // Target level in -dBfs of envelope (default -3)
int16_t agcMode; // Hard coded mode (adaptAna/adaptDig/fixedDig)
uint8_t limiterEnable; // Enabling limiter (on/off (default off))
WebRtcAgc_config_t defaultConfig;
WebRtcAgc_config_t usedConfig;
WebRtcAgcConfig defaultConfig;
WebRtcAgcConfig usedConfig;
// General variables
int16_t initFlag;
@ -118,8 +118,8 @@ typedef struct
uint8_t micLvlSat;
#endif
// Structs for VAD and digital_agc
AgcVad_t vadMic;
DigitalAgc_t digitalAgc;
AgcVad vadMic;
DigitalAgc digitalAgc;
#ifdef WEBRTC_AGC_DEBUG_DUMP
FILE* fpt;
@ -128,6 +128,6 @@ typedef struct
#endif
int16_t lowLevelSignal;
} Agc_t;
} LegacyAgc;
#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AGC_LEGACY_ANALOG_AGC_H_


@ -256,9 +256,7 @@ int32_t WebRtcAgc_CalculateGainTable(int32_t *gainTable, // Q16
return 0;
}
int32_t WebRtcAgc_InitDigital(DigitalAgc_t *stt, int16_t agcMode)
{
int32_t WebRtcAgc_InitDigital(DigitalAgc* stt, int16_t agcMode) {
if (agcMode == kAgcModeFixedDigital)
{
// start at minimum to find correct gain faster
@ -283,9 +281,9 @@ int32_t WebRtcAgc_InitDigital(DigitalAgc_t *stt, int16_t agcMode)
return 0;
}
int32_t WebRtcAgc_AddFarendToDigital(DigitalAgc_t *stt, const int16_t *in_far,
int16_t nrSamples)
{
int32_t WebRtcAgc_AddFarendToDigital(DigitalAgc* stt,
const int16_t* in_far,
int16_t nrSamples) {
assert(stt != NULL);
// VAD for far end
WebRtcAgc_ProcessVad(&stt->vadFarend, in_far, nrSamples);
@ -293,13 +291,12 @@ int32_t WebRtcAgc_AddFarendToDigital(DigitalAgc_t *stt, const int16_t *in_far,
return 0;
}
int32_t WebRtcAgc_ProcessDigital(DigitalAgc_t *stt,
int32_t WebRtcAgc_ProcessDigital(DigitalAgc* stt,
const int16_t* const* in_near,
int16_t num_bands,
int16_t* const* out,
uint32_t FS,
int16_t lowlevelSignal)
{
int16_t lowlevelSignal) {
// array for gains (one value per ms, incl start & end)
int32_t gains[11];
@ -607,8 +604,7 @@ int32_t WebRtcAgc_ProcessDigital(DigitalAgc_t *stt,
return 0;
}
void WebRtcAgc_InitVad(AgcVad_t *state)
{
void WebRtcAgc_InitVad(AgcVad* state) {
int16_t k;
state->HPstate = 0; // state of high pass filter
@ -635,7 +631,7 @@ void WebRtcAgc_InitVad(AgcVad_t *state)
}
}
int16_t WebRtcAgc_ProcessVad(AgcVad_t *state, // (i) VAD state
int16_t WebRtcAgc_ProcessVad(AgcVad* state, // (i) VAD state
const int16_t* in, // (i) Speech signal
int16_t nrSamples) // (i) number of samples
{


@ -34,7 +34,7 @@ typedef struct
int16_t meanShortTerm; // Q10
int32_t varianceShortTerm; // Q8
int16_t stdShortTerm; // Q10
} AgcVad_t; // total = 54 bytes
} AgcVad; // total = 54 bytes
typedef struct
{
@ -44,30 +44,30 @@ typedef struct
int32_t gainTable[32];
int16_t gatePrevious;
int16_t agcMode;
AgcVad_t vadNearend;
AgcVad_t vadFarend;
AgcVad vadNearend;
AgcVad vadFarend;
#ifdef WEBRTC_AGC_DEBUG_DUMP
FILE* logFile;
int frameCounter;
#endif
} DigitalAgc_t;
} DigitalAgc;
int32_t WebRtcAgc_InitDigital(DigitalAgc_t *digitalAgcInst, int16_t agcMode);
int32_t WebRtcAgc_InitDigital(DigitalAgc* digitalAgcInst, int16_t agcMode);
int32_t WebRtcAgc_ProcessDigital(DigitalAgc_t *digitalAgcInst,
int32_t WebRtcAgc_ProcessDigital(DigitalAgc* digitalAgcInst,
const int16_t* const* inNear,
int16_t num_bands,
int16_t* const* out,
uint32_t FS,
int16_t lowLevelSignal);
int32_t WebRtcAgc_AddFarendToDigital(DigitalAgc_t *digitalAgcInst,
int32_t WebRtcAgc_AddFarendToDigital(DigitalAgc* digitalAgcInst,
const int16_t* inFar,
int16_t nrSamples);
void WebRtcAgc_InitVad(AgcVad_t *vadInst);
void WebRtcAgc_InitVad(AgcVad* vadInst);
int16_t WebRtcAgc_ProcessVad(AgcVad_t *vadInst, // (i) VAD state
int16_t WebRtcAgc_ProcessVad(AgcVad* vadInst, // (i) VAD state
const int16_t* in, // (i) Speech signal
int16_t nrSamples); // (i) number of samples


@ -42,7 +42,7 @@ typedef struct
int16_t targetLevelDbfs; // default 3 (-3 dBOv)
int16_t compressionGaindB; // default 9 dB
uint8_t limiterEnable; // default kAgcTrue (on)
} WebRtcAgc_config_t;
} WebRtcAgcConfig;
#if defined(__cplusplus)
extern "C"
@ -181,7 +181,7 @@ int WebRtcAgc_Process(void* agcInst,
* : 0 - Normal operation.
* : -1 - Error
*/
int WebRtcAgc_set_config(void* agcInst, WebRtcAgc_config_t config);
int WebRtcAgc_set_config(void* agcInst, WebRtcAgcConfig config);
/*
* This function returns the config parameters (targetLevelDbfs,
@ -197,7 +197,7 @@ int WebRtcAgc_set_config(void* agcInst, WebRtcAgc_config_t config);
* : 0 - Normal operation.
* : -1 - Error
*/
int WebRtcAgc_get_config(void* agcInst, WebRtcAgc_config_t* config);
int WebRtcAgc_get_config(void* agcInst, WebRtcAgcConfig* config);
/*
* This function creates an AGC instance, which will contain the state


@ -323,7 +323,7 @@ int GainControlImpl::InitializeHandle(void* handle) const {
}
int GainControlImpl::ConfigureHandle(void* handle) const {
WebRtcAgc_config_t config;
WebRtcAgcConfig config;
// TODO(ajm): Flip the sign here (since AGC expects a positive value) if we
// change the interface.
//assert(target_level_dbfs_ <= 0);


@ -18,9 +18,9 @@
#include "webrtc/modules/audio_processing/ns/ns_core.h"
int WebRtcNs_Create(NsHandle** NS_inst) {
*NS_inst = (NsHandle*) malloc(sizeof(NSinst_t));
*NS_inst = (NsHandle*)malloc(sizeof(NoiseSuppressionC));
if (*NS_inst != NULL) {
(*(NSinst_t**)NS_inst)->initFlag = 0;
(*(NoiseSuppressionC**)NS_inst)->initFlag = 0;
return 0;
} else {
return -1;
@ -35,26 +35,27 @@ int WebRtcNs_Free(NsHandle* NS_inst) {
int WebRtcNs_Init(NsHandle* NS_inst, uint32_t fs) {
return WebRtcNs_InitCore((NSinst_t*) NS_inst, fs);
return WebRtcNs_InitCore((NoiseSuppressionC*)NS_inst, fs);
}
int WebRtcNs_set_policy(NsHandle* NS_inst, int mode) {
return WebRtcNs_set_policy_core((NSinst_t*) NS_inst, mode);
return WebRtcNs_set_policy_core((NoiseSuppressionC*)NS_inst, mode);
}
void WebRtcNs_Analyze(NsHandle* NS_inst, const float* spframe) {
WebRtcNs_AnalyzeCore((NSinst_t*) NS_inst, spframe);
WebRtcNs_AnalyzeCore((NoiseSuppressionC*)NS_inst, spframe);
}
void WebRtcNs_Process(NsHandle* NS_inst,
const float* const* spframe,
int num_bands,
float* const* outframe) {
WebRtcNs_ProcessCore((NSinst_t*)NS_inst, spframe, num_bands, outframe);
WebRtcNs_ProcessCore((NoiseSuppressionC*)NS_inst, spframe, num_bands,
outframe);
}
float WebRtcNs_prior_speech_probability(NsHandle* handle) {
NSinst_t* self = (NSinst_t*) handle;
NoiseSuppressionC* self = (NoiseSuppressionC*)handle;
if (handle == NULL) {
return -1;
}


@ -17,7 +17,7 @@
#include "webrtc/modules/audio_processing/ns/nsx_defines.h"
int WebRtcNsx_Create(NsxHandle** nsxInst) {
NsxInst_t* self = malloc(sizeof(NsxInst_t));
NoiseSuppressionFixedC* self = malloc(sizeof(NoiseSuppressionFixedC));
*nsxInst = (NsxHandle*)self;
if (self != NULL) {
@ -32,22 +32,23 @@ int WebRtcNsx_Create(NsxHandle** nsxInst) {
}
int WebRtcNsx_Free(NsxHandle* nsxInst) {
WebRtcSpl_FreeRealFFT(((NsxInst_t*)nsxInst)->real_fft);
WebRtcSpl_FreeRealFFT(((NoiseSuppressionFixedC*)nsxInst)->real_fft);
free(nsxInst);
return 0;
}
int WebRtcNsx_Init(NsxHandle* nsxInst, uint32_t fs) {
return WebRtcNsx_InitCore((NsxInst_t*)nsxInst, fs);
return WebRtcNsx_InitCore((NoiseSuppressionFixedC*)nsxInst, fs);
}
int WebRtcNsx_set_policy(NsxHandle* nsxInst, int mode) {
return WebRtcNsx_set_policy_core((NsxInst_t*)nsxInst, mode);
return WebRtcNsx_set_policy_core((NoiseSuppressionFixedC*)nsxInst, mode);
}
void WebRtcNsx_Process(NsxHandle* nsxInst,
const short* const* speechFrame,
int num_bands,
short* const* outFrame) {
WebRtcNsx_ProcessCore((NsxInst_t*)nsxInst, speechFrame, num_bands, outFrame);
WebRtcNsx_ProcessCore((NoiseSuppressionFixedC*)nsxInst, speechFrame,
num_bands, outFrame);
}


@ -20,7 +20,7 @@
#include "webrtc/modules/audio_processing/utility/fft4g.h"
// Set Feature Extraction Parameters.
static void set_feature_extraction_parameters(NSinst_t* self) {
static void set_feature_extraction_parameters(NoiseSuppressionC* self) {
// Bin size of histogram.
self->featureExtractionParams.binSizeLrt = 0.1f;
self->featureExtractionParams.binSizeSpecFlat = 0.05f;
@ -71,7 +71,7 @@ static void set_feature_extraction_parameters(NSinst_t* self) {
}
// Initialize state.
int WebRtcNs_InitCore(NSinst_t* self, uint32_t fs) {
int WebRtcNs_InitCore(NoiseSuppressionC* self, uint32_t fs) {
int i;
// Check for valid pointer.
if (self == NULL) {
@ -214,7 +214,9 @@ int WebRtcNs_InitCore(NSinst_t* self, uint32_t fs) {
}
// Estimate noise.
static void NoiseEstimation(NSinst_t* self, float* magn, float* noise) {
static void NoiseEstimation(NoiseSuppressionC* self,
float* magn,
float* noise) {
int i, s, offset;
float lmagn[HALF_ANAL_BLOCKL], delta;
@ -288,7 +290,7 @@ static void NoiseEstimation(NSinst_t* self, float* magn, float* noise) {
// Thresholds and weights are extracted every window.
// |flag| = 0 updates histogram only, |flag| = 1 computes the threshold/weights.
// Threshold and weights are returned in: self->priorModelPars.
static void FeatureParameterExtraction(NSinst_t* self, int flag) {
static void FeatureParameterExtraction(NoiseSuppressionC* self, int flag) {
int i, useFeatureSpecFlat, useFeatureSpecDiff, numHistLrt;
int maxPeak1, maxPeak2;
int weightPeak1SpecFlat, weightPeak2SpecFlat, weightPeak1SpecDiff,
@ -518,7 +520,8 @@ static void FeatureParameterExtraction(NSinst_t* self, int flag) {
// Compute spectral flatness on input spectrum.
// |magnIn| is the magnitude spectrum.
// Spectral flatness is returned in self->featureData[0].
static void ComputeSpectralFlatness(NSinst_t* self, const float* magnIn) {
static void ComputeSpectralFlatness(NoiseSuppressionC* self,
const float* magnIn) {
int i;
int shiftLP = 1; // Option to remove first bin(s) from spectral measures.
float avgSpectralFlatnessNum, avgSpectralFlatnessDen, spectralTmp;
@ -560,7 +563,7 @@ static void ComputeSpectralFlatness(NSinst_t* self, const float* magnIn) {
// Outputs:
// * |snrLocPrior| is the computed prior SNR.
// * |snrLocPost| is the computed post SNR.
static void ComputeSnr(const NSinst_t* self,
static void ComputeSnr(const NoiseSuppressionC* self,
const float* magn,
const float* noise,
float* snrLocPrior,
@ -589,7 +592,7 @@ static void ComputeSnr(const NSinst_t* self,
// |magnIn| is the input spectrum.
// The reference/template spectrum is self->magnAvgPause[i].
// Returns (normalized) spectral difference in self->featureData[4].
static void ComputeSpectralDifference(NSinst_t* self,
static void ComputeSpectralDifference(NoiseSuppressionC* self,
const float* magnIn) {
// avgDiffNormMagn = var(magnIn) - cov(magnIn, magnAvgPause)^2 /
// var(magnAvgPause)
@ -636,7 +639,7 @@ static void ComputeSpectralDifference(NSinst_t* self,
// |noise| is the noise spectrum.
// |snrLocPrior| is the prior SNR for each frequency.
// |snrLocPost| is the post SNR for each frequency.
static void SpeechNoiseProb(NSinst_t* self,
static void SpeechNoiseProb(NoiseSuppressionC* self,
float* probSpeechFinal,
const float* snrLocPrior,
const float* snrLocPost) {
@ -749,7 +752,7 @@ static void SpeechNoiseProb(NSinst_t* self,
// Inputs:
// * |magn| is the signal magnitude spectrum estimate.
// * |updateParsFlag| is an update flag for parameters.
static void FeatureUpdate(NSinst_t* self,
static void FeatureUpdate(NoiseSuppressionC* self,
const float* magn,
int updateParsFlag) {
// Compute spectral flatness on input spectrum.
@ -794,7 +797,7 @@ static void FeatureUpdate(NSinst_t* self,
// * |snrLocPost| is the post SNR.
// Output:
// * |noise| is the updated noise magnitude spectrum estimate.
static void UpdateNoiseEstimate(NSinst_t* self,
static void UpdateNoiseEstimate(NoiseSuppressionC* self,
const float* magn,
const float* snrLocPrior,
const float* snrLocPost,
@ -880,7 +883,7 @@ static void UpdateBuffer(const float* frame,
// * |real| is the real part of the frequency domain.
// * |imag| is the imaginary part of the frequency domain.
// * |magn| is the calculated signal magnitude in the frequency domain.
static void FFT(NSinst_t* self,
static void FFT(NoiseSuppressionC* self,
float* time_data,
int time_data_length,
int magnitude_length,
@ -917,7 +920,7 @@ static void FFT(NSinst_t* self,
// (2 * (magnitude_length - 1)).
// Output:
// * |time_data| is the signal in the time domain.
static void IFFT(NSinst_t* self,
static void IFFT(NoiseSuppressionC* self,
const float* real,
const float* imag,
int magnitude_length,
@ -979,7 +982,7 @@ static void Windowing(const float* window,
// * |magn| is the signal magnitude spectrum estimate.
// Output:
// * |theFilter| is the frequency response of the computed Wiener filter.
static void ComputeDdBasedWienerFilter(const NSinst_t* self,
static void ComputeDdBasedWienerFilter(const NoiseSuppressionC* self,
const float* magn,
float* theFilter) {
int i;
@ -1007,7 +1010,7 @@ static void ComputeDdBasedWienerFilter(const NSinst_t* self,
// |mode| = 0 is mild (6dB), |mode| = 1 is medium (10dB) and |mode| = 2 is
// aggressive (15dB).
// Returns 0 on success and -1 otherwise.
int WebRtcNs_set_policy_core(NSinst_t* self, int mode) {
int WebRtcNs_set_policy_core(NoiseSuppressionC* self, int mode) {
// Allow for modes: 0, 1, 2, 3.
if (mode < 0 || mode > 3) {
return (-1);
@ -1037,7 +1040,7 @@ int WebRtcNs_set_policy_core(NSinst_t* self, int mode) {
return 0;
}
void WebRtcNs_AnalyzeCore(NSinst_t* self, const float* speechFrame) {
void WebRtcNs_AnalyzeCore(NoiseSuppressionC* self, const float* speechFrame) {
int i;
const int kStartBand = 5; // Skip first frequency bins during estimation.
int updateParsFlag;
@ -1177,7 +1180,7 @@ void WebRtcNs_AnalyzeCore(NSinst_t* self, const float* speechFrame) {
memcpy(self->magnPrevAnalyze, magn, sizeof(*magn) * self->magnLen);
}
void WebRtcNs_ProcessCore(NSinst_t* self,
void WebRtcNs_ProcessCore(NoiseSuppressionC* self,
const float* const* speechFrame,
int num_bands,
float* const* outFrame) {


@ -13,7 +13,7 @@
#include "webrtc/modules/audio_processing/ns/defines.h"
typedef struct NSParaExtract_t_ {
typedef struct NSParaExtract_ {
// Bin size of histogram.
float binSizeLrt;
float binSizeSpecFlat;
@ -47,9 +47,9 @@ typedef struct NSParaExtract_t_ {
int thresWeightSpecFlat;
int thresWeightSpecDiff;
} NSParaExtract_t;
} NSParaExtract;
typedef struct NSinst_t_ {
typedef struct NoiseSuppressionC_ {
uint32_t fs;
int blockLen;
int windShift;
@ -101,7 +101,7 @@ typedef struct NSinst_t_ {
float pinkNoiseExp; // Pink noise parameter: power of frequencies.
float parametricNoise[HALF_ANAL_BLOCKL];
// Parameters for feature extraction.
NSParaExtract_t featureExtractionParams;
NSParaExtract featureExtractionParams;
// Histograms for parameter estimation.
int histLrt[HIST_PAR_EST];
int histSpecFlat[HIST_PAR_EST];
@ -111,7 +111,7 @@ typedef struct NSinst_t_ {
// Buffering data for HB.
float dataBufHB[NUM_HIGH_BANDS_MAX][ANAL_BLOCKL_MAX];
} NSinst_t;
} NoiseSuppressionC;
#ifdef __cplusplus
extern "C" {
@ -132,7 +132,7 @@ extern "C" {
* Return value : 0 - Ok
* -1 - Error
*/
int WebRtcNs_InitCore(NSinst_t* self, uint32_t fs);
int WebRtcNs_InitCore(NoiseSuppressionC* self, uint32_t fs);
/****************************************************************************
* WebRtcNs_set_policy_core(...)
@ -149,7 +149,7 @@ int WebRtcNs_InitCore(NSinst_t* self, uint32_t fs);
* Return value : 0 - Ok
* -1 - Error
*/
int WebRtcNs_set_policy_core(NSinst_t* self, int mode);
int WebRtcNs_set_policy_core(NoiseSuppressionC* self, int mode);
/****************************************************************************
* WebRtcNs_AnalyzeCore
@ -163,7 +163,7 @@ int WebRtcNs_set_policy_core(NSinst_t* self, int mode);
* Output:
* - self : Updated instance
*/
void WebRtcNs_AnalyzeCore(NSinst_t* self, const float* speechFrame);
void WebRtcNs_AnalyzeCore(NoiseSuppressionC* self, const float* speechFrame);
/****************************************************************************
* WebRtcNs_ProcessCore
@ -179,7 +179,7 @@ void WebRtcNs_AnalyzeCore(NSinst_t* self, const float* speechFrame);
* - self : Updated instance
* - outFrame : Output speech frame for each band
*/
void WebRtcNs_ProcessCore(NSinst_t* self,
void WebRtcNs_ProcessCore(NoiseSuppressionC* self,
const float* const* inFrame,
int num_bands,
float* const* outFrame);


@ -300,7 +300,7 @@ static const int16_t kDeterminantEstMatrix[66] = {
};
// Update the noise estimation information.
static void UpdateNoiseEstimate(NsxInst_t* inst, int offset) {
static void UpdateNoiseEstimate(NoiseSuppressionFixedC* inst, int offset) {
int32_t tmp32no1 = 0;
int32_t tmp32no2 = 0;
int16_t tmp16 = 0;
@ -332,7 +332,7 @@ static void UpdateNoiseEstimate(NsxInst_t* inst, int offset) {
}
// Noise Estimation
static void NoiseEstimationC(NsxInst_t* inst,
static void NoiseEstimationC(NoiseSuppressionFixedC* inst,
uint16_t* magn,
uint32_t* noise,
int16_t* q_noise) {
@ -453,7 +453,7 @@ static void NoiseEstimationC(NsxInst_t* inst,
}
// Filter the data in the frequency domain, and create spectrum.
static void PrepareSpectrumC(NsxInst_t* inst, int16_t* freq_buf) {
static void PrepareSpectrumC(NoiseSuppressionFixedC* inst, int16_t* freq_buf) {
int i = 0, j = 0;
for (i = 0; i < inst->magnLen; i++) {
@ -474,7 +474,9 @@ static void PrepareSpectrumC(NsxInst_t* inst, int16_t* freq_buf) {
}
// Denormalize the real-valued signal |in|, the output from inverse FFT.
static void DenormalizeC(NsxInst_t* inst, int16_t* in, int factor) {
static void DenormalizeC(NoiseSuppressionFixedC* inst,
int16_t* in,
int factor) {
int i = 0;
int32_t tmp32 = 0;
for (i = 0; i < inst->anaLen; i += 1) {
@ -486,7 +488,7 @@ static void DenormalizeC(NsxInst_t* inst, int16_t* in, int factor) {
// For the noise supression process, synthesis, read out fully processed
// segment, and update synthesis buffer.
static void SynthesisUpdateC(NsxInst_t* inst,
static void SynthesisUpdateC(NoiseSuppressionFixedC* inst,
int16_t* out_frame,
int16_t gain_factor) {
int i = 0;
@ -519,7 +521,7 @@ static void SynthesisUpdateC(NsxInst_t* inst,
}
// Update analysis buffer for lower band, and window data before FFT.
static void AnalysisUpdateC(NsxInst_t* inst,
static void AnalysisUpdateC(NoiseSuppressionFixedC* inst,
int16_t* out,
int16_t* new_speech) {
int i = 0;
@ -539,7 +541,7 @@ static void AnalysisUpdateC(NsxInst_t* inst,
}
// Normalize the real-valued signal |in|, the input to forward FFT.
static void NormalizeRealBufferC(NsxInst_t* inst,
static void NormalizeRealBufferC(NoiseSuppressionFixedC* inst,
const int16_t* in,
int16_t* out) {
int i = 0;
@ -581,7 +583,7 @@ static void WebRtcNsx_InitMips(void) {
}
#endif
void WebRtcNsx_CalcParametricNoiseEstimate(NsxInst_t* inst,
void WebRtcNsx_CalcParametricNoiseEstimate(NoiseSuppressionFixedC* inst,
int16_t pink_noise_exp_avg,
int32_t pink_noise_num_avg,
int freq_index,
@ -627,7 +629,7 @@ void WebRtcNsx_CalcParametricNoiseEstimate(NsxInst_t* inst,
}
// Initialize state
int32_t WebRtcNsx_InitCore(NsxInst_t* inst, uint32_t fs) {
int32_t WebRtcNsx_InitCore(NoiseSuppressionFixedC* inst, uint32_t fs) {
int i;
//check for valid pointer
@ -782,7 +784,7 @@ int32_t WebRtcNsx_InitCore(NsxInst_t* inst, uint32_t fs) {
return 0;
}
int WebRtcNsx_set_policy_core(NsxInst_t* inst, int mode) {
int WebRtcNsx_set_policy_core(NoiseSuppressionFixedC* inst, int mode) {
// allow for modes:0,1,2,3
if (mode < 0 || mode > 3) {
return -1;
@ -817,7 +819,8 @@ int WebRtcNsx_set_policy_core(NsxInst_t* inst, int mode) {
// thresholds and weights are extracted every window
// flag 0 means update histogram only, flag 1 means compute the thresholds/weights
// threshold and weights are returned in: inst->priorModelPars
void WebRtcNsx_FeatureParameterExtraction(NsxInst_t* inst, int flag) {
void WebRtcNsx_FeatureParameterExtraction(NoiseSuppressionFixedC* inst,
int flag) {
uint32_t tmpU32;
uint32_t histIndex;
uint32_t posPeak1SpecFlatFX, posPeak2SpecFlatFX;
@ -1017,7 +1020,8 @@ void WebRtcNsx_FeatureParameterExtraction(NsxInst_t* inst, int flag) {
// Compute spectral flatness on input spectrum
// magn is the magnitude spectrum
// spectral flatness is returned in inst->featureSpecFlat
void WebRtcNsx_ComputeSpectralFlatness(NsxInst_t* inst, uint16_t* magn) {
void WebRtcNsx_ComputeSpectralFlatness(NoiseSuppressionFixedC* inst,
uint16_t* magn) {
uint32_t tmpU32;
uint32_t avgSpectralFlatnessNum, avgSpectralFlatnessDen;
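Spectral flatness is conventionally the ratio of the geometric mean to the arithmetic mean of the magnitude spectrum, which is the quantity the Num/Den accumulators above feed. A floating-point sketch of that definition (the helper name and the +1 offset are illustrative; the routine here works in fixed point):

#include <math.h>
#include <stdint.h>

// Sketch only: spectral flatness = geometric mean / arithmetic mean, in [0, 1].
static float SpectralFlatnessFloat(const uint16_t* magn, int magn_len) {
  double log_sum = 0.0;  // Accumulates log(magn[i]) for the geometric mean.
  double lin_sum = 0.0;  // Accumulates magn[i] for the arithmetic mean.
  int i;
  for (i = 0; i < magn_len; i++) {
    const double m = (double)magn[i] + 1.0;  // +1 avoids log(0).
    log_sum += log(m);
    lin_sum += m;
  }
  return (float)(exp(log_sum / magn_len) / (lin_sum / magn_len));
}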
@ -1085,7 +1089,8 @@ void WebRtcNsx_ComputeSpectralFlatness(NsxInst_t* inst, uint16_t* magn) {
// magnIn is the input spectrum
// the reference/template spectrum is inst->magn_avg_pause[i]
// returns (normalized) spectral difference in inst->featureSpecDiff
void WebRtcNsx_ComputeSpectralDifference(NsxInst_t* inst, uint16_t* magnIn) {
void WebRtcNsx_ComputeSpectralDifference(NoiseSuppressionFixedC* inst,
uint16_t* magnIn) {
// This is to be calculated:
// avgDiffNormMagn = var(magnIn) - cov(magnIn, magnAvgPause)^2 / var(magnAvgPause)
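A floating-point sketch of exactly that formula, with variance and covariance taken over the magnitude bins (the helper name and the use of doubles are illustrative; the routine here is fixed point):

// Sketch only: avgDiffNormMagn =
//     var(magnIn) - cov(magnIn, magnAvgPause)^2 / var(magnAvgPause).
static double SpectralDifferenceFloat(const double* magn_in,
                                      const double* magn_avg_pause,
                                      int magn_len) {
  double mean_in = 0.0, mean_pause = 0.0;
  double var_in = 0.0, var_pause = 0.0, cov = 0.0;
  int i;
  for (i = 0; i < magn_len; i++) {
    mean_in += magn_in[i];
    mean_pause += magn_avg_pause[i];
  }
  mean_in /= magn_len;
  mean_pause /= magn_len;
  for (i = 0; i < magn_len; i++) {
    const double di = magn_in[i] - mean_in;
    const double dp = magn_avg_pause[i] - mean_pause;
    var_in += di * di;
    var_pause += dp * dp;
    cov += di * dp;
  }
  var_in /= magn_len;
  var_pause /= magn_len;
  cov /= magn_len;
  if (var_pause == 0.0) return var_in;  // Degenerate template spectrum.
  return var_in - (cov * cov) / var_pause;
}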
@ -1177,8 +1182,9 @@ void WebRtcNsx_ComputeSpectralDifference(NsxInst_t* inst, uint16_t* magnIn) {
}
// Transform input (speechFrame) to frequency domain magnitude (magnU16)
void WebRtcNsx_DataAnalysis(NsxInst_t* inst, short* speechFrame, uint16_t* magnU16) {
void WebRtcNsx_DataAnalysis(NoiseSuppressionFixedC* inst,
short* speechFrame,
uint16_t* magnU16) {
uint32_t tmpU32no1;
int32_t tmp_1_w32 = 0;
@ -1414,7 +1420,7 @@ void WebRtcNsx_DataAnalysis(NsxInst_t* inst, short* speechFrame, uint16_t* magnU
}
}
void WebRtcNsx_DataSynthesis(NsxInst_t* inst, short* outFrame) {
void WebRtcNsx_DataSynthesis(NoiseSuppressionFixedC* inst, short* outFrame) {
int32_t energyOut;
int16_t realImag_buff[ANAL_BLOCKL_MAX * 2 + 16];
@ -1495,7 +1501,7 @@ void WebRtcNsx_DataSynthesis(NsxInst_t* inst, short* outFrame) {
WebRtcNsx_SynthesisUpdate(inst, outFrame, gainFactor);
}
void WebRtcNsx_ProcessCore(NsxInst_t* inst,
void WebRtcNsx_ProcessCore(NoiseSuppressionFixedC* inst,
const short* const* speechFrame,
int num_bands,
short* const* outFrame) {
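The renamed entry points keep the existing call pattern. A minimal usage sketch, assuming a 16 kHz single-band setup, an already-allocated instance, and the helper name RunNsxCoreOnce (none of which are part of this patch; production code reaches these functions through the WebRtcNsx_* wrapper API):

#include "webrtc/modules/audio_processing/ns/nsx_core.h"  // Assumed header path.

static int RunNsxCoreOnce(NoiseSuppressionFixedC* inst,
                          const short* in_frame,  // One 10 ms frame.
                          short* out_frame) {
  const short* const in_bands[1] = { in_frame };
  short* const out_bands[1] = { out_frame };
  if (WebRtcNsx_InitCore(inst, 16000) != 0) return -1;
  if (WebRtcNsx_set_policy_core(inst, 2) != 0) return -1;  // Aggressiveness 0-3.
  WebRtcNsx_ProcessCore(inst, in_bands, 1, out_bands);
  return 0;
}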

View File

@ -19,7 +19,7 @@
#include "webrtc/modules/audio_processing/ns/nsx_defines.h"
#include "webrtc/typedefs.h"
typedef struct NsxInst_t_ {
typedef struct NoiseSuppressionFixedC_ {
uint32_t fs;
const int16_t* window;
@ -107,7 +107,7 @@ typedef struct NsxInst_t_ {
int normData;
struct RealFFT* real_fft;
} NsxInst_t;
} NoiseSuppressionFixedC;
#ifdef __cplusplus
extern "C"
@ -129,7 +129,7 @@ extern "C"
* Return value : 0 - Ok
* -1 - Error
*/
int32_t WebRtcNsx_InitCore(NsxInst_t* inst, uint32_t fs);
int32_t WebRtcNsx_InitCore(NoiseSuppressionFixedC* inst, uint32_t fs);
/****************************************************************************
* WebRtcNsx_set_policy_core(...)
@ -146,7 +146,7 @@ int32_t WebRtcNsx_InitCore(NsxInst_t* inst, uint32_t fs);
* Return value : 0 - Ok
* -1 - Error
*/
int WebRtcNsx_set_policy_core(NsxInst_t* inst, int mode);
int WebRtcNsx_set_policy_core(NoiseSuppressionFixedC* inst, int mode);
/****************************************************************************
* WebRtcNsx_ProcessCore
@ -162,7 +162,7 @@ int WebRtcNsx_set_policy_core(NsxInst_t* inst, int mode);
* - inst : Updated instance
* - outFrame : Output speech frame for each band
*/
void WebRtcNsx_ProcessCore(NsxInst_t* inst,
void WebRtcNsx_ProcessCore(NoiseSuppressionFixedC* inst,
const short* const* inFrame,
int num_bands,
short* const* outFrame);
@ -172,43 +172,45 @@ void WebRtcNsx_ProcessCore(NsxInst_t* inst,
* generic C code.
*/
// Noise Estimation.
typedef void (*NoiseEstimation)(NsxInst_t* inst,
typedef void (*NoiseEstimation)(NoiseSuppressionFixedC* inst,
uint16_t* magn,
uint32_t* noise,
int16_t* q_noise);
extern NoiseEstimation WebRtcNsx_NoiseEstimation;
// Filter the data in the frequency domain, and create spectrum.
typedef void (*PrepareSpectrum)(NsxInst_t* inst,
typedef void (*PrepareSpectrum)(NoiseSuppressionFixedC* inst,
int16_t* freq_buff);
extern PrepareSpectrum WebRtcNsx_PrepareSpectrum;
// For the noise suppression process, synthesis, read out fully processed
// segment, and update synthesis buffer.
typedef void (*SynthesisUpdate)(NsxInst_t* inst,
typedef void (*SynthesisUpdate)(NoiseSuppressionFixedC* inst,
int16_t* out_frame,
int16_t gain_factor);
extern SynthesisUpdate WebRtcNsx_SynthesisUpdate;
// Update analysis buffer for lower band, and window data before FFT.
typedef void (*AnalysisUpdate)(NsxInst_t* inst,
typedef void (*AnalysisUpdate)(NoiseSuppressionFixedC* inst,
int16_t* out,
int16_t* new_speech);
extern AnalysisUpdate WebRtcNsx_AnalysisUpdate;
// Denormalize the real-valued signal |in|, the output from inverse FFT.
typedef void (*Denormalize) (NsxInst_t* inst, int16_t* in, int factor);
typedef void (*Denormalize)(NoiseSuppressionFixedC* inst,
int16_t* in,
int factor);
extern Denormalize WebRtcNsx_Denormalize;
// Normalize the real-valued signal |in|, the input to forward FFT.
typedef void (*NormalizeRealBuffer) (NsxInst_t* inst,
typedef void (*NormalizeRealBuffer)(NoiseSuppressionFixedC* inst,
const int16_t* in,
int16_t* out);
extern NormalizeRealBuffer WebRtcNsx_NormalizeRealBuffer;
// Compute speech/noise probability.
// Intended to be private.
void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst,
void WebRtcNsx_SpeechNoiseProb(NoiseSuppressionFixedC* inst,
uint16_t* nonSpeechProbFinal,
uint32_t* priorLocSnr,
uint32_t* postLocSnr);
@ -218,35 +220,39 @@ void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst,
// For the above function pointers, functions for generic platforms are declared
// and defined as static in file nsx_core.c, while those for ARM Neon platforms
// are declared below and defined in file nsx_core_neon.c.
void WebRtcNsx_NoiseEstimationNeon(NsxInst_t* inst,
void WebRtcNsx_NoiseEstimationNeon(NoiseSuppressionFixedC* inst,
uint16_t* magn,
uint32_t* noise,
int16_t* q_noise);
void WebRtcNsx_SynthesisUpdateNeon(NsxInst_t* inst,
void WebRtcNsx_SynthesisUpdateNeon(NoiseSuppressionFixedC* inst,
int16_t* out_frame,
int16_t gain_factor);
void WebRtcNsx_AnalysisUpdateNeon(NsxInst_t* inst,
void WebRtcNsx_AnalysisUpdateNeon(NoiseSuppressionFixedC* inst,
int16_t* out,
int16_t* new_speech);
void WebRtcNsx_PrepareSpectrumNeon(NsxInst_t* inst, int16_t* freq_buff);
void WebRtcNsx_PrepareSpectrumNeon(NoiseSuppressionFixedC* inst,
int16_t* freq_buff);
#endif
#if defined(MIPS32_LE)
// For the above function pointers, functions for generic platforms are declared
// and defined as static in file nsx_core.c, while those for MIPS platforms
// are declared below and defined in file nsx_core_mips.c.
void WebRtcNsx_SynthesisUpdate_mips(NsxInst_t* inst,
void WebRtcNsx_SynthesisUpdate_mips(NoiseSuppressionFixedC* inst,
int16_t* out_frame,
int16_t gain_factor);
void WebRtcNsx_AnalysisUpdate_mips(NsxInst_t* inst,
void WebRtcNsx_AnalysisUpdate_mips(NoiseSuppressionFixedC* inst,
int16_t* out,
int16_t* new_speech);
void WebRtcNsx_PrepareSpectrum_mips(NsxInst_t* inst, int16_t* freq_buff);
void WebRtcNsx_NormalizeRealBuffer_mips(NsxInst_t* inst,
void WebRtcNsx_PrepareSpectrum_mips(NoiseSuppressionFixedC* inst,
int16_t* freq_buff);
void WebRtcNsx_NormalizeRealBuffer_mips(NoiseSuppressionFixedC* inst,
const int16_t* in,
int16_t* out);
#if defined(MIPS_DSP_R1_LE)
void WebRtcNsx_Denormalize_mips(NsxInst_t* inst, int16_t* in, int factor);
void WebRtcNsx_Denormalize_mips(NoiseSuppressionFixedC* inst,
int16_t* in,
int factor);
#endif
#endif
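As the comments above note, these function pointers default to the generic C routines and are rebound to the platform-specific variants at init time. A sketch of what the MIPS init hook (the WebRtcNsx_InitMips seen earlier in this diff) plausibly does, inferred from the declarations above rather than taken from the patch itself:

// Sketch only: rebind the dispatch pointers to the MIPS-optimized routines.
static void WebRtcNsx_InitMips(void) {
  WebRtcNsx_SynthesisUpdate = WebRtcNsx_SynthesisUpdate_mips;
  WebRtcNsx_AnalysisUpdate = WebRtcNsx_AnalysisUpdate_mips;
  WebRtcNsx_PrepareSpectrum = WebRtcNsx_PrepareSpectrum_mips;
  WebRtcNsx_NormalizeRealBuffer = WebRtcNsx_NormalizeRealBuffer_mips;
#if defined(MIPS_DSP_R1_LE)
  WebRtcNsx_Denormalize = WebRtcNsx_Denormalize_mips;
#endif
}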

View File

@ -23,11 +23,10 @@ static const int16_t kIndicatorTable[17] = {
// speech/noise probability is returned in: probSpeechFinal
// snrLocPrior is the prior SNR for each frequency (in Q11)
// snrLocPost is the post SNR for each frequency (in Q11)
void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst,
void WebRtcNsx_SpeechNoiseProb(NoiseSuppressionFixedC* inst,
uint16_t* nonSpeechProbFinal,
uint32_t* priorLocSnr,
uint32_t* postLocSnr) {
uint32_t zeros, num, den, tmpU32no1, tmpU32no2, tmpU32no3;
int32_t invLrtFX, indPriorFX, tmp32, tmp32no1, tmp32no2, besselTmpFX32;
int32_t frac32, logTmp;
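Q11 in the comment above means 11 fractional bits: a linear SNR x is stored as x * 2^11, so 0 dB (linear 1.0) corresponds to 2048 in snrLocPrior/snrLocPost. A small conversion sketch (helper names are illustrative, not from the patch):

#include <stdint.h>

static uint32_t LinearSnrToQ11(double snr_linear) {
  return (uint32_t)(snr_linear * (1 << 11) + 0.5);  // e.g. 1.0 -> 2048.
}

static double Q11ToLinearSnr(uint32_t snr_q11) {
  return (double)snr_q11 / (1 << 11);
}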

View File

@ -22,11 +22,10 @@ static const int16_t kIndicatorTable[17] = {
// speech/noise probability is returned in: probSpeechFinal
// snrLocPrior is the prior SNR for each frequency (in Q11)
// snrLocPost is the post SNR for each frequency (in Q11)
void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst,
void WebRtcNsx_SpeechNoiseProb(NoiseSuppressionFixedC* inst,
uint16_t* nonSpeechProbFinal,
uint32_t* priorLocSnr,
uint32_t* postLocSnr) {
uint32_t tmpU32no1, tmpU32no2, tmpU32no3;
int32_t indPriorFX, tmp32no1;
int32_t logLrtTimeAvgKsumFX;
@ -328,10 +327,9 @@ void WebRtcNsx_SpeechNoiseProb(NsxInst_t* inst,
}
// Update analysis buffer for lower band, and window data before FFT.
void WebRtcNsx_AnalysisUpdate_mips(NsxInst_t* inst,
void WebRtcNsx_AnalysisUpdate_mips(NoiseSuppressionFixedC* inst,
int16_t* out,
int16_t* new_speech) {
int iters, after;
int anaLen = inst->anaLen;
int *window = (int*)inst->window;
@ -504,10 +502,9 @@ void WebRtcNsx_AnalysisUpdate_mips(NsxInst_t* inst,
// For the noise suppression process, synthesis, read out fully processed
// segment, and update synthesis buffer.
void WebRtcNsx_SynthesisUpdate_mips(NsxInst_t* inst,
void WebRtcNsx_SynthesisUpdate_mips(NoiseSuppressionFixedC* inst,
int16_t* out_frame,
int16_t gain_factor) {
int iters = inst->blockLen10ms >> 2;
int after = inst->blockLen10ms & 3;
int r0, r1, r2, r3, r4, r5, r6, r7;
@ -756,8 +753,8 @@ void WebRtcNsx_SynthesisUpdate_mips(NsxInst_t* inst,
}
// Filter the data in the frequency domain, and create spectrum.
void WebRtcNsx_PrepareSpectrum_mips(NsxInst_t* inst, int16_t* freq_buf) {
void WebRtcNsx_PrepareSpectrum_mips(NoiseSuppressionFixedC* inst,
int16_t* freq_buf) {
uint16_t *noiseSupFilter = inst->noiseSupFilter;
int16_t *real = inst->real;
int16_t *imag = inst->imag;
@ -862,7 +859,9 @@ void WebRtcNsx_PrepareSpectrum_mips(NsxInst_t* inst, int16_t* freq_buf) {
#if defined(MIPS_DSP_R1_LE)
// Denormalize the real-valued signal |in|, the output from inverse FFT.
void WebRtcNsx_Denormalize_mips(NsxInst_t* inst, int16_t* in, int factor) {
void WebRtcNsx_Denormalize_mips(NoiseSuppressionFixedC* inst,
int16_t* in,
int factor) {
int32_t r0, r1, r2, r3, t0;
int len = inst->anaLen;
int16_t *out = &inst->real[0];
@ -950,7 +949,7 @@ void WebRtcNsx_Denormalize_mips(NsxInst_t* inst, int16_t* in, int factor) {
#endif
// Normalize the real-valued signal |in|, the input to forward FFT.
void WebRtcNsx_NormalizeRealBuffer_mips(NsxInst_t* inst,
void WebRtcNsx_NormalizeRealBuffer_mips(NoiseSuppressionFixedC* inst,
const int16_t* in,
int16_t* out) {
int32_t r0, r1, r2, r3, t0;

View File

@ -56,7 +56,7 @@ const int16_t WebRtcNsx_kLogTableFrac[256] = {
};
// Update the noise estimation information.
static void UpdateNoiseEstimateNeon(NsxInst_t* inst, int offset) {
static void UpdateNoiseEstimateNeon(NoiseSuppressionFixedC* inst, int offset) {
const int16_t kExp2Const = 11819; // Q13
int16_t* ptr_noiseEstLogQuantile = NULL;
int16_t* ptr_noiseEstQuantile = NULL;
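For reference, the Q13 constant above, 11819 / 2^13 ≈ 1.4427, is log2(e) = 1/ln(2), which suggests it is the factor that maps the log-domain quantile onto a base-2 exponent (e^x = 2^(x * log2(e))) so the fixed-point code can exponentiate with shifts. A quick check of the constant (illustration only, not part of the patch):

#include <math.h>
#include <stdio.h>

int main(void) {
  // Both values print as roughly 1.4427.
  printf("11819 / 2^13 = %.6f, log2(e) = %.6f\n",
         11819.0 / 8192.0, 1.0 / log(2.0));
  return 0;
}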
@ -133,7 +133,7 @@ static void UpdateNoiseEstimateNeon(NsxInst_t* inst, int offset) {
}
// Noise Estimation
void WebRtcNsx_NoiseEstimationNeon(NsxInst_t* inst,
void WebRtcNsx_NoiseEstimationNeon(NoiseSuppressionFixedC* inst,
uint16_t* magn,
uint32_t* noise,
int16_t* q_noise) {
@ -353,7 +353,8 @@ void WebRtcNsx_NoiseEstimationNeon(NsxInst_t* inst,
}
// Filter the data in the frequency domain, and create spectrum.
void WebRtcNsx_PrepareSpectrumNeon(NsxInst_t* inst, int16_t* freq_buf) {
void WebRtcNsx_PrepareSpectrumNeon(NoiseSuppressionFixedC* inst,
int16_t* freq_buf) {
assert(inst->magnLen % 8 == 1);
assert(inst->anaLen2 % 16 == 0);
@ -442,7 +443,7 @@ void WebRtcNsx_PrepareSpectrumNeon(NsxInst_t* inst, int16_t* freq_buf) {
// For the noise suppression process, synthesis, read out fully processed segment,
// and update synthesis buffer.
void WebRtcNsx_SynthesisUpdateNeon(NsxInst_t* inst,
void WebRtcNsx_SynthesisUpdateNeon(NoiseSuppressionFixedC* inst,
int16_t* out_frame,
int16_t gain_factor) {
assert(inst->anaLen % 16 == 0);
@ -534,7 +535,7 @@ void WebRtcNsx_SynthesisUpdateNeon(NsxInst_t* inst,
}
// Update analysis buffer for lower band, and window data before FFT.
void WebRtcNsx_AnalysisUpdateNeon(NsxInst_t* inst,
void WebRtcNsx_AnalysisUpdateNeon(NoiseSuppressionFixedC* inst,
int16_t* out,
int16_t* new_speech) {
assert(inst->blockLen10ms % 16 == 0);