Reformat existing code. There should be no functional effects.

This includes changes like the following (illustrated by the sketch after the list):
* Attempt to break lines at better positions
* Use "override" in more places, don't use "virtual" with it
* Use {} where the body is more than one line
* Make declaration and definition arg names match
* Eliminate unused code
* EXPECT_EQ(expected, actual) (but use (actual, expected) for e.g. _GT)
* Correct #include order
* Use anonymous namespaces in preference to "static" for file-scoping
* Eliminate unnecessary casts
* Update reference code in comments of ARM assembly sources to match actual current C code
* Fix indenting to be more style-guide compliant
* Use arraysize() in more places
* Use bool instead of int for "boolean" values (0/1)
* Shorten and simplify code
* Spaces around operators
* 80 column limit
* Use const more consistently
* Space goes after '*' in type name, not before
* Remove unnecessary return values
* Use "(var == const)", not "(const == var)"
* Spelling
* Prefer true, typed constants to "enum hack" constants
* Avoid "virtual" on non-overridden functions
* ASSERT(x == y) -> ASSERT_EQ(y, x)
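
For illustration only, here is a hypothetical before/after sketch of a few of these transforms (not code from this CL; all names and the gtest-style test are made up):

    // Before: enum-hack constant, "static" for file scope, redundant "virtual",
    // and ASSERT(x == y)-style comparisons.
    #include <gtest/gtest.h>

    enum { kMaxFrames = 10 };                        // "Enum hack" constant.

    static bool IsFull(int count) {                  // File-scoped via "static".
      return count >= kMaxFrames;
    }

    class Counter {
     public:
      virtual ~Counter() = default;
      virtual int Count() const = 0;
    };

    class FrameCounter : public Counter {
     public:
      virtual int Count() const { return count_; }   // Overrides, but still says "virtual".
     private:
      int count_ = 0;
    };

    TEST(FrameCounterTest, StartsEmpty) {
      FrameCounter counter;
      ASSERT_TRUE(counter.Count() == 0);             // ASSERT(x == y) style.
      EXPECT_TRUE(!IsFull(counter.Count()));
    }

    // After: typed constant, anonymous namespace, "override" without "virtual",
    // and EXPECT/ASSERT_EQ(expected, actual).
    #include <gtest/gtest.h>

    const int kMaxFrames = 10;                       // True, typed constant.

    namespace {                                      // Anonymous namespace, not "static".
    bool IsFull(int count) {
      return count >= kMaxFrames;
    }
    }  // namespace

    class Counter {
     public:
      virtual ~Counter() = default;
      virtual int Count() const = 0;
    };

    class FrameCounter : public Counter {
     public:
      int Count() const override { return count_; }  // "override" without "virtual".
     private:
      int count_ = 0;
    };

    TEST(FrameCounterTest, StartsEmpty) {
      FrameCounter counter;
      ASSERT_EQ(0, counter.Count());                 // ASSERT_EQ(expected, actual).
      EXPECT_FALSE(IsFull(counter.Count()));
    }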

BUG=none
R=andrew@webrtc.org, asapersson@webrtc.org, henrika@webrtc.org, juberti@webrtc.org, kjellander@webrtc.org, kwiberg@webrtc.org

Review URL: https://codereview.webrtc.org/1172163004

Cr-Commit-Position: refs/heads/master@{#9420}
Author: Peter Kasting
Date:   2015-06-11 14:31:38 -07:00
Parent: b7e5054414
Commit: 728d9037c0

90 changed files with 416 additions and 490 deletions


@ -52,13 +52,6 @@ int16_t WebRtcG711_DecodeU(const uint8_t* encoded,
return len;
}
int WebRtcG711_DurationEst(const uint8_t* payload,
int payload_length_bytes) {
(void) payload;
/* G.711 is one byte per sample, so we can just return the number of bytes. */
return payload_length_bytes;
}
int16_t WebRtcG711_Version(char* version, int16_t lenBytes) {
strncpy(version, "2.0.0", lenBytes);
return 0;


@ -111,23 +111,6 @@ int16_t WebRtcG711_DecodeU(const uint8_t* encoded,
int16_t* decoded,
int16_t* speechType);
/****************************************************************************
* WebRtcG711_DurationEst(...)
*
* This function estimates the duration of a G711 packet in samples.
*
* Input:
* - payload : Encoded data
* - payloadLengthBytes : Bytes in encoded vector
*
* Return value : The duration of the packet in samples, which is
* just payload_length_bytes, since G.711 uses one
* byte per sample.
*/
int WebRtcG711_DurationEst(const uint8_t* payload,
int payload_length_bytes);
/**********************************************************************
* WebRtcG711_Version(...)
*


@ -24,18 +24,12 @@
#define CLOCKS_PER_SEC_G711 1000
/* function for reading audio data from PCM file */
int readframe(int16_t* data, FILE* inp, int length) {
short k, rlen, status = 0;
rlen = (short) fread(data, sizeof(int16_t), length, inp);
if (rlen < length) {
for (k = rlen; k < length; k++)
data[k] = 0;
status = 1;
}
return status;
bool readframe(int16_t* data, FILE* inp, int length) {
short rlen = (short) fread(data, sizeof(int16_t), length, inp);
if (rlen >= length)
return false;
memset(data + rlen, 0, (length - rlen) * sizeof(int16_t));
return true;
}
int main(int argc, char* argv[]) {
@ -43,7 +37,8 @@ int main(int argc, char* argv[]) {
FILE* inp;
FILE* outp;
FILE* bitp = NULL;
int framecnt, endfile;
int framecnt;
bool endfile;
int16_t framelength = 80;
@ -122,8 +117,8 @@ int main(int argc, char* argv[]) {
/* Initialize encoder and decoder */
framecnt = 0;
endfile = 0;
while (endfile == 0) {
endfile = false;
while (!endfile) {
framecnt++;
/* Read speech block */
endfile = readframe(shortdata, inp, framelength);


@ -52,8 +52,8 @@ int16_t WebRtcG722_Encode(G722EncInst *G722enc_inst,
{
unsigned char *codechar = (unsigned char*) encoded;
// Encode the input speech vector
return WebRtc_g722_encode((G722EncoderState*) G722enc_inst,
codechar, speechIn, len);
return WebRtc_g722_encode((G722EncoderState*) G722enc_inst, codechar,
speechIn, len);
}
int16_t WebRtcG722_CreateDecoder(G722DecInst **G722dec_inst)
@ -93,8 +93,8 @@ int16_t WebRtcG722_Decode(G722DecInst *G722dec_inst,
{
// Decode the G.722 encoder stream
*speechType=G722_WEBRTC_SPEECH;
return WebRtc_g722_decode((G722DecoderState*) G722dec_inst,
decoded, encoded, len);
return WebRtc_g722_decode((G722DecoderState*) G722dec_inst, decoded,
encoded, len);
}
int16_t WebRtcG722_Version(char *versionStr, short len)


@ -91,7 +91,7 @@ int16_t WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst);
* Output:
* - encoded : The encoded data vector
*
* Return value : Length (in bytes) of coded data
* Return value : Length (in bytes) of coded data
*/
int16_t WebRtcG722_Encode(G722EncInst* G722enc_inst,


@ -29,18 +29,13 @@ typedef struct WebRtcG722EncInst G722EncInst;
typedef struct WebRtcG722DecInst G722DecInst;
/* function for reading audio data from PCM file */
int readframe(int16_t *data, FILE *inp, int length)
bool readframe(int16_t *data, FILE *inp, int length)
{
short k, rlen, status = 0;
rlen = (short)fread(data, sizeof(int16_t), length, inp);
if (rlen < length) {
for (k = rlen; k < length; k++)
data[k] = 0;
status = 1;
}
return status;
short rlen = (short)fread(data, sizeof(int16_t), length, inp);
if (rlen >= length)
return false;
memset(data + rlen, 0, (length - rlen) * sizeof(int16_t));
return true;
}
int main(int argc, char* argv[])
@ -48,7 +43,8 @@ int main(int argc, char* argv[])
char inname[60], outbit[40], outname[40];
FILE *inp, *outbitp, *outp;
int framecnt, endfile;
int framecnt;
bool endfile;
int16_t framelength = 160;
G722EncInst *G722enc_inst;
G722DecInst *G722dec_inst;
@ -116,8 +112,8 @@ int main(int argc, char* argv[])
/* Initialize encoder and decoder */
framecnt = 0;
endfile = 0;
while (endfile == 0) {
endfile = false;
while (!endfile) {
framecnt++;
/* Read speech block */
@ -139,13 +135,13 @@ int main(int argc, char* argv[])
printf("Error in encoder/decoder\n");
} else {
/* Write coded bits to file */
if (fwrite(streamdata, sizeof(short), stream_len/2,
outbitp) != static_cast<size_t>(stream_len/2)) {
if (fwrite(streamdata, sizeof(short), stream_len / 2, outbitp) !=
static_cast<size_t>(stream_len / 2)) {
return -1;
}
/* Write coded speech to file */
if (fwrite(decoded, sizeof(short), framelength,
outp) != static_cast<size_t>(framelength)) {
if (fwrite(decoded, sizeof(short), framelength, outp) !=
static_cast<size_t>(framelength)) {
return -1;
}
}


@ -23,7 +23,7 @@
void WebRtcIlbcfix_AbsQuantLoop(int16_t *syntOutIN, int16_t *in_weightedIN,
int16_t *weightDenumIN, int16_t *quantLenIN,
int16_t *idxVecIN ) {
int n, k1, k2;
int k1, k2;
int16_t index;
int32_t toQW32;
int32_t toQ32;
@ -36,8 +36,6 @@ void WebRtcIlbcfix_AbsQuantLoop(int16_t *syntOutIN, int16_t *in_weightedIN,
int16_t *quantLen = quantLenIN;
int16_t *idxVec = idxVecIN;
n=0;
for(k1=0;k1<2;k1++) {
for(k2=0;k2<quantLen[k1];k2++){
@ -81,7 +79,6 @@ void WebRtcIlbcfix_AbsQuantLoop(int16_t *syntOutIN, int16_t *in_weightedIN,
*syntOut = (int16_t) (tmp16a + (int32_t)(*in_weighted) - toQW32);
n++;
syntOut++; in_weighted++;
}
/* Update perceptual weighting filter at subframe border */


@ -25,12 +25,9 @@ const int kSampleRateHz = 8000;
} // namespace
bool AudioEncoderIlbc::Config::IsOk() const {
if (!(frame_size_ms == 20 || frame_size_ms == 30 || frame_size_ms == 40 ||
frame_size_ms == 60))
return false;
if (kSampleRateHz / 100 * (frame_size_ms / 10) > kMaxSamplesPerPacket)
return false;
return true;
return (frame_size_ms == 20 || frame_size_ms == 30 || frame_size_ms == 40 ||
frame_size_ms == 60) &&
(kSampleRateHz / 100 * (frame_size_ms / 10)) <= kMaxSamplesPerPacket;
}
AudioEncoderIlbc::AudioEncoderIlbc(const Config& config)


@ -35,7 +35,7 @@ void WebRtcIlbcfix_CbMemEnergy(
int16_t *energyW16, /* (o) Energy in the CB vectors */
int16_t *energyShifts, /* (o) Shift value of the energy */
int16_t scale, /* (i) The scaling of all energy values */
int16_t base_size /* (i) Index to where the energy values should be stored */
int16_t base_size /* (i) Index to where energy values should be stored */
) {
int16_t *ppi, *ppo, *pp;
int32_t energy, tmp32;


@ -23,7 +23,7 @@ void WebRtcIlbcfix_CbMemEnergyAugmentation(
int16_t *interpSamples, /* (i) The interpolated samples */
int16_t *CBmem, /* (i) The CB memory */
int16_t scale, /* (i) The scaling of all energy values */
int16_t base_size, /* (i) Index to where the energy values should be stored */
int16_t base_size, /* (i) Index to where energy values should be stored */
int16_t *energyW16, /* (o) Energy in the CB vectors */
int16_t *energyShifts /* (o) Shift value of the energy */
){


@ -23,7 +23,7 @@ void WebRtcIlbcfix_CbMemEnergyAugmentation(
int16_t *interpSamples, /* (i) The interpolated samples */
int16_t *CBmem, /* (i) The CB memory */
int16_t scale, /* (i) The scaling of all energy values */
int16_t base_size, /* (i) Index to where the energy values should be stored */
int16_t base_size, /* (i) Index to where energy values should be stored */
int16_t *energyW16, /* (o) Energy in the CB vectors */
int16_t *energyShifts /* (o) Shift value of the energy */
);


@ -29,7 +29,7 @@ void WebRtcIlbcfix_CbMemEnergyCalc(
int16_t *energyW16, /* (o) Energy in the CB vectors */
int16_t *energyShifts, /* (o) Shift value of the energy */
int16_t scale, /* (i) The scaling of all energy values */
int16_t base_size /* (i) Index to where the energy values should be stored */
int16_t base_size /* (i) Index to where energy values should be stored */
)
{
int16_t j,shft;


@ -27,7 +27,7 @@ void WebRtcIlbcfix_CbMemEnergyCalc(
int16_t *energyW16, /* (o) Energy in the CB vectors */
int16_t *energyShifts, /* (o) Shift value of the energy */
int16_t scale, /* (i) The scaling of all energy values */
int16_t base_size /* (i) Index to where the energy values should be stored */
int16_t base_size /* (i) Index to where energy values should be stored */
);
#endif


@ -147,7 +147,8 @@ void WebRtcIlbcfix_CbSearch(
/* Compute the CB vectors' energies for the second cb section (filtered cb) */
WebRtcIlbcfix_CbMemEnergyAugmentation(interpSamplesFilt, cbvectors,
scale, (int16_t)(base_size+20), energyW16, energyShifts);
scale, (int16_t)(base_size + 20),
energyW16, energyShifts);
/* Compute the CB vectors' energies and store them in the vector
* energyW16. Also the corresponding shift values are stored. The
@ -238,9 +239,12 @@ void WebRtcIlbcfix_CbSearch(
if (lTarget==SUBL) {
i=sInd;
if (sInd<20) {
WebRtcIlbcfix_AugmentedCbCorr(target, cbvectors+lMem,
WebRtcIlbcfix_AugmentedCbCorr(target, cbvectors + lMem,
interpSamplesFilt, cDot,
(int16_t)(sInd+20), (int16_t)(WEBRTC_SPL_MIN(39, (eInd+20))), scale);
(int16_t)(sInd + 20),
(int16_t)(WEBRTC_SPL_MIN(39,
(eInd + 20))),
scale);
i=20;
cDotPtr = &cDot[20 - sInd];
} else {
@ -250,14 +254,16 @@ void WebRtcIlbcfix_CbSearch(
cb_vecPtr = cbvectors+lMem-20-i;
/* Calculate the cross correlations (main part of the filtered CB) */
WebRtcSpl_CrossCorrelation(cDotPtr, target, cb_vecPtr, lTarget, (int16_t)(eInd-i+1), scale, -1);
WebRtcSpl_CrossCorrelation(cDotPtr, target, cb_vecPtr, lTarget,
(int16_t)(eInd - i + 1), scale, -1);
} else {
cDotPtr = cDot;
cb_vecPtr = cbvectors+lMem-lTarget-sInd;
/* Calculate the cross correlations (main part of the filtered CB) */
WebRtcSpl_CrossCorrelation(cDotPtr, target, cb_vecPtr, lTarget, (int16_t)(eInd-sInd+1), scale, -1);
WebRtcSpl_CrossCorrelation(cDotPtr, target, cb_vecPtr, lTarget,
(int16_t)(eInd - sInd + 1), scale, -1);
}


@ -103,9 +103,10 @@ void WebRtcIlbcfix_DecodeImpl(
WebRtcIlbcfix_DecodeResidual(iLBCdec_inst, iLBCbits_inst, decresidual, syntdenum);
/* preparing the plc for a future loss! */
WebRtcIlbcfix_DoThePlc( PLCresidual, PLClpc, 0,
decresidual, syntdenum + (LPC_FILTERORDER + 1)*(iLBCdec_inst->nsub - 1),
(int16_t)(iLBCdec_inst->last_lag), iLBCdec_inst);
WebRtcIlbcfix_DoThePlc(
PLCresidual, PLClpc, 0, decresidual,
syntdenum + (LPC_FILTERORDER + 1) * (iLBCdec_inst->nsub - 1),
(int16_t)(iLBCdec_inst->last_lag), iLBCdec_inst);
/* Use the output from doThePLC */
WEBRTC_SPL_MEMCPY_W16(decresidual, PLCresidual, iLBCdec_inst->blockl);
@ -120,8 +121,8 @@ void WebRtcIlbcfix_DecodeImpl(
/* packet loss conceal */
WebRtcIlbcfix_DoThePlc( PLCresidual, PLClpc, 1,
decresidual, syntdenum, (int16_t)(iLBCdec_inst->last_lag), iLBCdec_inst);
WebRtcIlbcfix_DoThePlc(PLCresidual, PLClpc, 1, decresidual, syntdenum,
(int16_t)(iLBCdec_inst->last_lag), iLBCdec_inst);
WEBRTC_SPL_MEMCPY_W16(decresidual, PLCresidual, iLBCdec_inst->blockl);


@ -336,8 +336,8 @@ int WebRtcIlbcfix_EnhancerInterface( /* (o) Estimated lag in end of in[] */
enh_bufPtr1,
synt,
&iLBCdec_inst->old_syntdenum[
(iLBCdec_inst->nsub-1)*(LPC_FILTERORDER+1)],
LPC_FILTERORDER+1, lag);
(iLBCdec_inst->nsub-1)*(LPC_FILTERORDER+1)],
LPC_FILTERORDER+1, lag);
WEBRTC_SPL_MEMCPY_W16(&synt[-LPC_FILTERORDER], &synt[lag-LPC_FILTERORDER],
LPC_FILTERORDER);
@ -347,8 +347,8 @@ int WebRtcIlbcfix_EnhancerInterface( /* (o) Estimated lag in end of in[] */
WebRtcSpl_FilterARFastQ12(
enh_bufPtr1, synt,
&iLBCdec_inst->old_syntdenum[
(iLBCdec_inst->nsub-1)*(LPC_FILTERORDER+1)],
LPC_FILTERORDER+1, lag);
(iLBCdec_inst->nsub-1)*(LPC_FILTERORDER+1)],
LPC_FILTERORDER+1, lag);
WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->syntMem, &synt[lag-LPC_FILTERORDER],
LPC_FILTERORDER);


@ -23,10 +23,10 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_MyCorr(
int32_t *corr, /* (o) correlation of seq1 and seq2 */
int16_t *seq1, /* (i) first sequence */
int32_t* corr, /* (o) correlation of seq1 and seq2 */
const int16_t* seq1, /* (i) first sequence */
int16_t dim1, /* (i) dimension first seq1 */
const int16_t *seq2, /* (i) second sequence */
const int16_t* seq2, /* (i) second sequence */
int16_t dim2 /* (i) dimension seq2 */
){
int16_t max, scale, loops;


@ -26,10 +26,10 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_MyCorr(
int32_t *corr, /* (o) correlation of seq1 and seq2 */
int16_t *seq1, /* (i) first sequence */
int32_t* corr, /* (o) correlation of seq1 and seq2 */
const int16_t* seq1, /* (i) first sequence */
int16_t dim1, /* (i) dimension first seq1 */
const int16_t *seq2, /* (i) second sequence */
const int16_t* seq2, /* (i) second sequence */
int16_t dim2 /* (i) dimension seq2 */
);


@ -52,6 +52,7 @@ int main(int argc, char* argv[])
int blockcount = 0;
int packetlosscount = 0;
int frameLen;
size_t len_i16s;
int16_t speechType;
IlbcEncoderInstance *Enc_Inst;
IlbcDecoderInstance *Dec_Inst;
@ -173,9 +174,8 @@ int main(int argc, char* argv[])
/* write byte file */
if (fwrite(encoded_data, sizeof(int16_t),
((len+1)/sizeof(int16_t)), efileid) !=
(size_t)(((len+1)/sizeof(int16_t)))) {
len_i16s = (len + 1) / sizeof(int16_t);
if (fwrite(encoded_data, sizeof(int16_t), len_i16s, efileid) != len_i16s) {
return -1;
}


@ -42,6 +42,7 @@ int main(int argc, char* argv[])
FILE *ifileid,*efileid,*ofileid, *chfileid;
short encoded_data[55], data[240], speechType;
short len, mode, pli;
size_t readlen;
int blockcount = 0;
IlbcEncoderInstance *Enc_Inst;
@ -125,19 +126,16 @@ int main(int argc, char* argv[])
/* loop over input blocks */
#ifdef SPLIT_10MS
while(fread(data, sizeof(short), 80, ifileid) == 80) {
readlen = 80;
#else
while((short)fread(data,sizeof(short),(mode<<3),ifileid)==(mode<<3)) {
readlen = (size_t)(mode << 3);
#endif
while(fread(data, sizeof(short), readlen, ifileid) == readlen) {
blockcount++;
/* encoding */
fprintf(stderr, "--- Encoding block %i --- ",blockcount);
#ifdef SPLIT_10MS
len=WebRtcIlbcfix_Encode(Enc_Inst, data, 80, encoded_data);
#else
len=WebRtcIlbcfix_Encode(Enc_Inst, data, (short)(mode<<3), encoded_data);
#endif
len=WebRtcIlbcfix_Encode(Enc_Inst, data, (short)readlen, encoded_data);
if (len < 0) {
fprintf(stderr, "Error encoding\n");
exit(0);
@ -152,9 +150,7 @@ int main(int argc, char* argv[])
/* write byte file */
if(len != 0){ //len may be 0 in 10ms split case
fwrite(encoded_data,1,len,efileid);
}
if(len != 0){ //len may be 0 in 10ms split case
/* get channel data if provided */
if (argc==6) {
if (fread(&pli, sizeof(int16_t), 1, chfileid)) {


@ -57,11 +57,11 @@ int WebRtcIlbcfix_XcorrCoef(
if (step==1) {
max=WebRtcSpl_MaxAbsValueW16(regressor, subl + searchLen - 1);
rp_beg = regressor;
rp_end = &regressor[subl];
rp_end = regressor + subl;
} else { /* step==-1 */
max=WebRtcSpl_MaxAbsValueW16(&regressor[-searchLen], subl + searchLen - 1);
rp_beg = &regressor[-1];
rp_end = &regressor[subl-1];
max = WebRtcSpl_MaxAbsValueW16(regressor - searchLen, subl + searchLen - 1);
rp_beg = regressor - 1;
rp_end = regressor + subl - 1;
}
/* Introduce a scale factor on the Energy in int32_t in


@ -227,10 +227,10 @@ extern "C" {
int16_t WebRtcIsacfix_UpdateBwEstimate(ISACFIX_MainStruct *ISAC_main_inst,
const uint8_t* encoded,
int32_t packet_size,
uint16_t rtp_seq_number,
uint32_t send_ts,
uint32_t arr_ts);
int32_t packet_size,
uint16_t rtp_seq_number,
uint32_t send_ts,
uint32_t arr_ts);
/****************************************************************************
* WebRtcIsacfix_Decode(...)


@ -36,9 +36,9 @@ int16_t WebRtcIsacfix_DecodeImpl(int16_t* signal_out16,
IsacFixDecoderInstance* ISACdec_obj,
int16_t* current_framesamples);
int16_t WebRtcIsacfix_DecodePlcImpl(int16_t* decoded,
IsacFixDecoderInstance* ISACdec_obj,
int16_t* current_framesample );
void WebRtcIsacfix_DecodePlcImpl(int16_t* decoded,
IsacFixDecoderInstance* ISACdec_obj,
int16_t* current_framesample );
int WebRtcIsacfix_EncodeImpl(int16_t* in,
IsacFixEncoderInstance* ISACenc_obj,


@ -175,7 +175,10 @@ static void MemshipValQ15( int16_t in, int16_t *A, int16_t *B )
static void LinearResampler( int16_t *in, int16_t *out, int16_t lenIn, int16_t lenOut )
static void LinearResampler(int16_t* in,
int16_t* out,
int16_t lenIn,
int16_t lenOut)
{
int32_t n = (lenIn - 1) * RESAMP_RES;
int16_t resOut, i, j, relativePos, diff; /* */
@ -230,12 +233,11 @@ static void LinearResampler( int16_t *in, int16_t *out, int16_t lenIn, int16_t l
int16_t WebRtcIsacfix_DecodePlcImpl(int16_t *signal_out16,
IsacFixDecoderInstance *ISACdec_obj,
int16_t *current_framesamples )
void WebRtcIsacfix_DecodePlcImpl(int16_t *signal_out16,
IsacFixDecoderInstance *ISACdec_obj,
int16_t *current_framesamples )
{
int subframecnt;
int16_t len = 0;
int16_t* Vector_Word16_1;
int16_t Vector_Word16_Extended_1[FRAMESAMPLES_HALF + NOISE_FILTER_LEN];
@ -797,6 +799,4 @@ int16_t WebRtcIsacfix_DecodePlcImpl(int16_t *signal_out16,
(ISACdec_obj->plcstr_obj).used = PLC_WAS_USED;
*current_framesamples = 480;
return len;
}


@ -1675,7 +1675,7 @@ int WebRtcIsacfix_DecodePitchLag(Bitstr_dec *streamdata,
int32_t meangainQ12;
int32_t CQ11, CQ10,tmp32a,tmp32b;
int16_t shft,tmp16a,tmp16c;
int16_t shft;
meangainQ12=0;
for (k = 0; k < 4; k++)
@ -1725,22 +1725,19 @@ int WebRtcIsacfix_DecodePitchLag(Bitstr_dec *streamdata,
CQ11 = WEBRTC_SPL_SHIFT_W32(CQ11,11-shft); // Scale with StepSize, Q11
for (k=0; k<PITCH_SUBFRAMES; k++) {
tmp32a = WEBRTC_SPL_MUL_16_32_RSFT11(WebRtcIsacfix_kTransform[0][k], CQ11);
tmp16a = (int16_t)(tmp32a >> 5);
PitchLags_Q7[k] = tmp16a;
PitchLags_Q7[k] = (int16_t)(tmp32a >> 5);
}
CQ10 = mean_val2Q10[index[1]];
for (k=0; k<PITCH_SUBFRAMES; k++) {
tmp32b = WebRtcIsacfix_kTransform[1][k] * (int16_t)CQ10 >> 10;
tmp16c = (int16_t)(tmp32b >> 5);
PitchLags_Q7[k] += tmp16c;
PitchLags_Q7[k] += (int16_t)(tmp32b >> 5);
}
CQ10 = mean_val4Q10[index[3]];
for (k=0; k<PITCH_SUBFRAMES; k++) {
tmp32b = WebRtcIsacfix_kTransform[3][k] * (int16_t)CQ10 >> 10;
tmp16c = (int16_t)(tmp32b >> 5);
PitchLags_Q7[k] += tmp16c;
PitchLags_Q7[k] += (int16_t)(tmp32b >> 5);
}
return 0;
@ -1761,7 +1758,7 @@ int WebRtcIsacfix_EncodePitchLag(int16_t* PitchLagsQ7,
const int16_t *mean_val2Q10,*mean_val4Q10;
const int16_t *lower_limit, *upper_limit;
const uint16_t **cdf;
int16_t shft, tmp16a, tmp16b, tmp16c;
int16_t shft, tmp16b;
int32_t tmp32b;
int status = 0;
@ -1832,22 +1829,19 @@ int WebRtcIsacfix_EncodePitchLag(int16_t* PitchLagsQ7,
for (k=0; k<PITCH_SUBFRAMES; k++) {
tmp32a = WEBRTC_SPL_MUL_16_32_RSFT11(WebRtcIsacfix_kTransform[0][k], CQ11); // Q12
tmp16a = (int16_t)(tmp32a >> 5); // Q7.
PitchLagsQ7[k] = tmp16a;
PitchLagsQ7[k] = (int16_t)(tmp32a >> 5); // Q7.
}
CQ10 = mean_val2Q10[index[1]];
for (k=0; k<PITCH_SUBFRAMES; k++) {
tmp32b = WebRtcIsacfix_kTransform[1][k] * (int16_t)CQ10 >> 10;
tmp16c = (int16_t)(tmp32b >> 5); // Q7.
PitchLagsQ7[k] += tmp16c;
PitchLagsQ7[k] += (int16_t)(tmp32b >> 5); // Q7.
}
CQ10 = mean_val4Q10[index[3]];
for (k=0; k<PITCH_SUBFRAMES; k++) {
tmp32b = WebRtcIsacfix_kTransform[3][k] * (int16_t)CQ10 >> 10;
tmp16c = (int16_t)(tmp32b >> 5); // Q7.
PitchLagsQ7[k] += tmp16c;
PitchLagsQ7[k] += (int16_t)(tmp32b >> 5); // Q7.
}
/* entropy coding of quantization pitch lags */


@ -620,9 +620,9 @@ int16_t WebRtcIsacfix_DecoderInit(ISACFIX_MainStruct *ISAC_main_inst)
int16_t WebRtcIsacfix_UpdateBwEstimate1(ISACFIX_MainStruct *ISAC_main_inst,
const uint8_t* encoded,
int32_t packet_size,
uint16_t rtp_seq_number,
uint32_t arr_ts)
int32_t packet_size,
uint16_t rtp_seq_number,
uint32_t arr_ts)
{
ISACFIX_SubStruct *ISAC_inst;
Bitstr_dec streamdata;
@ -692,10 +692,10 @@ int16_t WebRtcIsacfix_UpdateBwEstimate1(ISACFIX_MainStruct *ISAC_main_inst,
int16_t WebRtcIsacfix_UpdateBwEstimate(ISACFIX_MainStruct *ISAC_main_inst,
const uint8_t* encoded,
int32_t packet_size,
uint16_t rtp_seq_number,
uint32_t send_ts,
uint32_t arr_ts)
int32_t packet_size,
uint16_t rtp_seq_number,
uint32_t send_ts,
uint32_t arr_ts)
{
ISACFIX_SubStruct *ISAC_inst;
Bitstr_dec streamdata;
@ -767,11 +767,11 @@ int16_t WebRtcIsacfix_UpdateBwEstimate(ISACFIX_MainStruct *ISAC_main_inst,
*/
int16_t WebRtcIsacfix_Decode(ISACFIX_MainStruct *ISAC_main_inst,
int16_t WebRtcIsacfix_Decode(ISACFIX_MainStruct* ISAC_main_inst,
const uint8_t* encoded,
int16_t len,
int16_t *decoded,
int16_t *speechType)
int16_t len,
int16_t* decoded,
int16_t* speechType)
{
ISACFIX_SubStruct *ISAC_inst;
/* number of samples (480 or 960), output from decoder */
@ -981,9 +981,8 @@ int16_t WebRtcIsacfix_DecodePlcNb(ISACFIX_MainStruct *ISAC_main_inst,
declen = 0;
while( noOfLostFrames > 0 )
{
ok = WebRtcIsacfix_DecodePlcImpl( outframeWB, &ISAC_inst->ISACdec_obj, &no_of_samples );
if(ok)
return -1;
WebRtcIsacfix_DecodePlcImpl(outframeWB, &ISAC_inst->ISACdec_obj,
&no_of_samples);
WebRtcIsacfix_SplitAndFilter2(outframeWB, &(outframeNB[k*240]), dummy, &ISAC_inst->ISACdec_obj.decimatorstr_obj);
@ -1029,7 +1028,7 @@ int16_t WebRtcIsacfix_DecodePlc(ISACFIX_MainStruct *ISAC_main_inst,
int16_t noOfLostFrames)
{
int16_t no_of_samples, declen, k, ok;
int16_t no_of_samples, declen, k;
int16_t outframe16[MAX_FRAMESAMPLES];
ISACFIX_SubStruct *ISAC_inst;
@ -1044,9 +1043,8 @@ int16_t WebRtcIsacfix_DecodePlc(ISACFIX_MainStruct *ISAC_main_inst,
declen = 0;
while( noOfLostFrames > 0 )
{
ok = WebRtcIsacfix_DecodePlcImpl( &(outframe16[k*480]), &ISAC_inst->ISACdec_obj, &no_of_samples );
if(ok)
return -1;
WebRtcIsacfix_DecodePlcImpl(&(outframe16[k*480]), &ISAC_inst->ISACdec_obj,
&no_of_samples);
declen += no_of_samples;
noOfLostFrames--;
k++;


@ -279,7 +279,8 @@ void WebRtcIsacfix_NormLatticeFilterAr(int16_t orderCoef,
ARfQ0vec[i] = (int16_t)WebRtcSpl_SatW32ToW16(tmp32); // Q0
}
for (i=orderCoef;i>0;i--) //get the state of f&g for the first input, for all orders
// Get the state of f & g for the first input, for all orders.
for (i = orderCoef; i > 0; i--)
{
tmp32 = (cthQ15[i - 1] * ARfQ0vec[0] - sthQ15[i - 1] * stateGQ0[i - 1] +
16384) >> 15;


@ -55,9 +55,11 @@ ORDER_COEF_LOOP: @ for(k = order_coef ; k > 0; k--)
smlabb r11, r7, r5, r12 @ sth_Q15[k - 1] * tmpAR + 16384
smlabb r10, r6, r5, r12 @ cth_Q15[k - 1] * tmpAR + 16384
smulbb r7, r7, r8 @ sth_Q15[k - 1] * ar_g_Q0[k - 1]
smlabb r11, r6, r8, r11 @ cth_Q15[k - 1]*ar_g_Q0[k - 1]+(sth_Q15[k - 1]*tmpAR+16384)
smlabb r11, r6, r8, r11 @ cth_Q15[k - 1] * ar_g_Q0[k - 1] +
@ (sth_Q15[k - 1] * tmpAR + 16384)
sub r10, r10, r7 @ cth_Q15[k - 1]*tmpAR+16384-(sth_Q15[k - 1]*ar_g_Q0[k - 1])
sub r10, r10, r7 @ cth_Q15[k - 1] * tmpAR + 16384 -
@ (sth_Q15[k - 1] * ar_g_Q0[k - 1])
ssat r11, #16, r11, asr #15
ssat r5, #16, r10, asr #15
strh r11, [r0], #-2 @ Output: ar_g_Q0[k]


@ -34,19 +34,6 @@ static const int16_t kIntrpCoef[PITCH_FRACS][PITCH_FRACORDER] = {
{ 271, -743, 1570, -3320, 12963, 7301, -2292, 953, -325}
};
// Function prototype for pitch filtering.
// TODO(Turaj): Add descriptions of input and output parameters.
void WebRtcIsacfix_PitchFilterCore(int loopNumber,
int16_t gain,
int index,
int16_t sign,
int16_t* inputState,
int16_t* outputBuf2,
const int16_t* coefficient,
int16_t* inputBuf,
int16_t* outputBuf,
int* index2);
static __inline int32_t CalcLrIntQ(int32_t fixVal,
int16_t qDomain) {
int32_t roundVal = 1 << (qDomain - 1);


@ -83,14 +83,14 @@ float IsacSpeedTest::EncodeABlock(int16_t* in_data, uint8_t* bit_stream,
return 1000.0 * clocks / CLOCKS_PER_SEC;
}
float IsacSpeedTest::DecodeABlock(const uint8_t* bit_stream, int encoded_bytes,
float IsacSpeedTest::DecodeABlock(const uint8_t* bit_stream,
int encoded_bytes,
int16_t* out_data) {
int value;
int16_t audio_type;
clock_t clocks = clock();
value = WebRtcIsacfix_Decode(ISACFIX_main_inst_,
bit_stream,
encoded_bytes, out_data, &audio_type);
value = WebRtcIsacfix_Decode(ISACFIX_main_inst_, bit_stream, encoded_bytes,
out_data, &audio_type);
clocks = clock() - clocks;
EXPECT_EQ(output_length_sample_, value);
return 1000.0 * clocks / CLOCKS_PER_SEC;


@ -582,8 +582,7 @@ int main(int argc, char* argv[]) {
totalsmpls += declen;
totalbits += 8 * stream_len;
kbps = ((double)FS) / ((double)cur_framesmpls) * 8.0 * stream_len /
1000.0; // kbits/s
kbps = ((double)FS) / ((double)cur_framesmpls) * 8.0 * stream_len / 1000.0;
fy = fopen("bit_rate.dat", "a");
fprintf(fy, "Frame %i = %0.14f\n", framecnt, kbps);
fclose(fy);


@ -132,12 +132,12 @@ int32_t WebRtcIsac_InitBandwidthEstimator(
/* Index - integer (range 0...23) indicating bottle neck & jitter as estimated by other side */
/* returns 0 if everything went fine, -1 otherwise */
int16_t WebRtcIsac_UpdateBandwidthEstimator(
BwEstimatorstr *bwest_str,
BwEstimatorstr* bwest_str,
const uint16_t rtp_number,
const int32_t frame_length,
const int32_t frame_length,
const uint32_t send_ts,
const uint32_t arr_ts,
const int32_t pksize
const int32_t pksize
/*, const uint16_t Index*/)
{
float weight = 0.0f;


@ -90,12 +90,12 @@ extern "C" {
/* Index - integer (range 0...23) indicating bottle neck & jitter as estimated by other side */
/* returns 0 if everything went fine, -1 otherwise */
int16_t WebRtcIsac_UpdateBandwidthEstimator(
BwEstimatorstr* bwest_str,
BwEstimatorstr* bwest_str,
const uint16_t rtp_number,
const int32_t frame_length,
const int32_t frame_length,
const uint32_t send_ts,
const uint32_t arr_ts,
const int32_t pksize);
const int32_t pksize);
/* Update receiving estimates. Used when we only receive BWE index, no iSAC data packet. */
int16_t WebRtcIsac_UpdateUplinkBwImpl(


@ -78,8 +78,8 @@ static const double kLpcCorrWindow[WINLEN] = {
double WebRtcIsac_LevDurb(double *a, double *k, double *r, int order)
{
double sum, alpha;
int m, m_h, i;
double sum, alpha;
int m, m_h, i;
alpha = 0; //warning -DH
a[0] = 1.0;
if (r[0] < LEVINSON_EPS) { /* if r[0] <= 0, set LPC coeff. to zero */


@ -114,10 +114,10 @@ int AudioEncoderOpus::NumChannels() const {
size_t AudioEncoderOpus::MaxEncodedBytes() const {
// Calculate the number of bytes we expect the encoder to produce,
// then multiply by two to give a wide margin for error.
int frame_size_ms = num_10ms_frames_per_packet_ * 10;
size_t bytes_per_millisecond =
static_cast<size_t>(bitrate_bps_ / (1000 * 8) + 1);
size_t approx_encoded_bytes = frame_size_ms * bytes_per_millisecond;
static_cast<size_t>(bitrate_bps_ / (1000 * 8) + 1);
size_t approx_encoded_bytes =
num_10ms_frames_per_packet_ * 10 * bytes_per_millisecond;
return 2 * approx_encoded_bytes;
}


@ -46,7 +46,7 @@ class OpusTest : public TestWithParam<::testing::tuple<int, int>> {
int EncodeDecode(WebRtcOpusEncInst* encoder,
const int16_t* input_audio,
const int input_samples,
int input_samples,
WebRtcOpusDecInst* decoder,
int16_t* output_audio,
int16_t* audio_type);
@ -98,7 +98,7 @@ void OpusTest::SetMaxPlaybackRate(WebRtcOpusEncInst* encoder,
int OpusTest::EncodeDecode(WebRtcOpusEncInst* encoder,
const int16_t* input_audio,
const int input_samples,
int input_samples,
WebRtcOpusDecInst* decoder,
int16_t* output_audio,
int16_t* audio_type) {
@ -165,7 +165,7 @@ void OpusTest::TestDtxEffect(bool dtx) {
EXPECT_EQ(0, opus_encoder_->in_dtx_mode);
EXPECT_EQ(0, opus_decoder_->in_dtx_mode);
EXPECT_EQ(0, audio_type); // Speech.
} else if (1 == encoded_bytes_) {
} else if (encoded_bytes_ == 1) {
EXPECT_EQ(1, opus_encoder_->in_dtx_mode);
EXPECT_EQ(1, opus_decoder_->in_dtx_mode);
EXPECT_EQ(2, audio_type); // Comfort noise.