Prepare to convert various types to size_t.

This makes some behaviorally-invariant changes to make certain code that
currently only works correctly with signed types work safely regardless of the
signedness of the types in question.  This is preparation for a future change
that will convert a variety of types to size_t.
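
As a hypothetical illustration (the function and names below are invented, not
taken from the change itself), this is the kind of rewrite involved: with a
signed count, "i == count - 1" is fine, but once the count becomes size_t the
subtraction wraps when the count is zero, so comparisons are rearranged so that
nothing is ever subtracted from a possibly-unsigned value.

  #include <cstddef>
  #include <cstdio>

  // Hypothetical sketch: "frames" may later become size_t. "frames - 1"
  // would wrap to a huge value when frames == 0, so the "last iteration"
  // test is written with addition on the index instead.
  void ProcessFrames(size_t frames) {
    for (size_t i = 0; i < frames; ++i) {
      // Old, signed-only form:  if (i == frames - 1)
      if (i + 1 == frames) {
        printf("last frame: %zu\n", i);
      }
    }
  }

  int main() {
    ProcessFrames(0);  // Safe: the loop body never runs; nothing wraps.
    ProcessFrames(3);
    return 0;
  }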

There are also some formatting changes (e.g. converting "enum hack" usage to
real consts) so that changing the types of those constants in the future is a
simple matter of replacing "int" with "size_t".
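
A minimal sketch of the "enum hack" conversion mentioned above (the constant
name here is invented for illustration):

  #include <cstddef>

  // Before (enum hack): the constant's type is pinned to int by the enum.
  //   enum { kMaxFrames = 4 };
  // After: a real typed constant, so switching the type later is a one-word
  // edit on this line.
  const int kMaxFrames = 4;  // Future: const size_t kMaxFrames = 4;

  int main() { return kMaxFrames == 4 ? 0 : 1; }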

BUG=none
R=andrew@webrtc.org, juberti@webrtc.org, kwiberg@webrtc.org
TBR=ajm

Review URL: https://codereview.webrtc.org/1174813003

Cr-Commit-Position: refs/heads/master@{#9413}
Author: Peter Kasting
Date: 2015-06-10 21:15:38 -07:00
parent 786dbdcc38
commit f045e4da43
42 changed files with 153 additions and 127 deletions

View File

@ -56,7 +56,7 @@ int AudioDecoder::DecodeRedundantInternal(const uint8_t* encoded,
bool AudioDecoder::HasDecodePlc() const { return false; }
int AudioDecoder::DecodePlc(int num_frames, int16_t* decoded) { return -1; }
int AudioDecoder::DecodePlc(int num_frames, int16_t* decoded) { return 0; }
int AudioDecoder::IncomingPacket(const uint8_t* payload,
size_t payload_len,

View File

@ -136,9 +136,9 @@ AudioEncoder::EncodedInfo AudioEncoderCng::EncodeInternal(
(frames_to_encode > 3 ? 3 : frames_to_encode);
if (frames_to_encode == 4)
blocks_in_first_vad_call = 2;
CHECK_GE(frames_to_encode, blocks_in_first_vad_call);
const int blocks_in_second_vad_call =
frames_to_encode - blocks_in_first_vad_call;
CHECK_GE(blocks_in_second_vad_call, 0);
// Check if all of the buffer is passive speech. Start with checking the first
// block.
@ -217,7 +217,7 @@ AudioEncoder::EncodedInfo AudioEncoderCng::EncodeActive(
info = speech_encoder_->Encode(
rtp_timestamps_.front(), &speech_buffer_[i * samples_per_10ms_frame],
samples_per_10ms_frame, max_encoded_bytes, encoded);
if (i == frames_to_encode - 1) {
if (i + 1 == frames_to_encode) {
CHECK_GT(info.encoded_bytes, 0u) << "Encoder didn't deliver data.";
} else {
CHECK_EQ(info.encoded_bytes, 0u) << "Encoder delivered data too early.";

View File

@ -88,13 +88,13 @@ AudioEncoder::EncodedInfo AudioEncoderPcm::EncodeInternal(
}
CHECK_EQ(speech_buffer_.size(), full_frame_samples_);
CHECK_GE(max_encoded_bytes, full_frame_samples_);
int16_t ret = EncodeCall(&speech_buffer_[0], full_frame_samples_, encoded);
CHECK_GE(ret, 0);
speech_buffer_.clear();
EncodedInfo info;
info.encoded_timestamp = first_timestamp_in_buffer_;
info.payload_type = payload_type_;
int16_t ret = EncodeCall(&speech_buffer_[0], full_frame_samples_, encoded);
CHECK_GE(ret, 0);
info.encoded_bytes = static_cast<size_t>(ret);
speech_buffer_.clear();
return info;
}

View File

@ -86,6 +86,10 @@ int main(int argc, char* argv[]) {
printf("G.711 version: %s\n\n", versionNumber);
/* Get frame length */
framelength = atoi(argv[1]);
if (framelength < 0) {
printf(" G.711: Invalid framelength %d.\n", framelength);
exit(1);
}
/* Get compression law */
strcpy(law, argv[2]);

View File

@ -24,7 +24,8 @@ const int kSampleRateHz = 16000;
} // namespace
bool AudioEncoderG722::Config::IsOk() const {
return (frame_size_ms % 10 == 0) && (num_channels >= 1);
return (frame_size_ms > 0) && (frame_size_ms % 10 == 0) &&
(num_channels >= 1);
}
AudioEncoderG722::EncoderState::EncoderState() {

View File

@ -83,6 +83,10 @@ int main(int argc, char* argv[])
/* Get frame length */
framelength = atoi(argv[1]);
if (framelength < 0) {
printf(" G.722: Invalid framelength %d.\n", framelength);
exit(1);
}
/* Get Input and Output files */
sscanf(argv[2], "%s", inname);

View File

@ -41,7 +41,7 @@ void WebRtcIlbcfix_CbMemEnergyCalc(
eSh_ptr = &energyShifts[1+base_size];
eW16_ptr = &energyW16[1+base_size];
for(j=0;j<range-1;j++) {
for (j = 0; j + 1 < range; j++) {
/* Calculate next energy by a +/-
operation on the edge samples */

View File

@ -227,12 +227,9 @@ void WebRtcIlbcfix_CbSearch(
inverseEnergy[indexNew+indexOffset], inverseEnergyShifts[indexNew+indexOffset],
&CritMax, &shTotMax, &bestIndex, &bestGain);
sInd=bestIndex-(int16_t)(CB_RESRANGE>>1);
sInd = ((CB_RESRANGE >> 1) > bestIndex) ?
0 : (bestIndex - (CB_RESRANGE >> 1));
eInd=sInd+CB_RESRANGE;
if (sInd<0) {
eInd-=sInd;
sInd=0;
}
if (eInd>=range) {
eInd=range-1;
sInd=eInd-CB_RESRANGE;
@ -247,9 +244,11 @@ void WebRtcIlbcfix_CbSearch(
interpSamplesFilt, cDot,
(int16_t)(sInd+20), (int16_t)(WEBRTC_SPL_MIN(39, (eInd+20))), scale);
i=20;
cDotPtr = &cDot[20 - sInd];
} else {
cDotPtr = cDot;
}
cDotPtr=&cDot[WEBRTC_SPL_MAX(0,(20-sInd))];
cb_vecPtr = cbvectors+lMem-20-i;
/* Calculate the cross correlations (main part of the filtered CB) */

View File

@ -41,7 +41,7 @@ void WebRtcIlbcfix_DecodeResidual(
int16_t *syntdenum /* (i) the decoded synthesis filter
coefficients */
) {
int16_t meml_gotten, Nfor, Nback, diff, start_pos;
int16_t meml_gotten, diff, start_pos;
int16_t subcount, subframe;
int16_t *reverseDecresidual = iLBCdec_inst->enh_buf; /* Reversed decoded data, used for decoding backwards in time (reuse memory in state) */
int16_t *memVec = iLBCdec_inst->prevResidual; /* Memory for codebook and filter state (reuse memory in state) */
@ -110,9 +110,7 @@ void WebRtcIlbcfix_DecodeResidual(
/* forward prediction of subframes */
Nfor = iLBCdec_inst->nsub-iLBC_encbits->startIdx-1;
if( Nfor > 0 ) {
if (iLBCdec_inst->nsub > iLBC_encbits->startIdx + 1) {
/* setup memory */
WebRtcSpl_MemSetW16(mem, 0, CB_MEML-STATE_LEN);
@ -121,6 +119,7 @@ void WebRtcIlbcfix_DecodeResidual(
/* loop over subframes to encode */
int16_t Nfor = iLBCdec_inst->nsub - iLBC_encbits->startIdx - 1;
for (subframe=0; subframe<Nfor; subframe++) {
/* construct decoded vector */
@ -143,9 +142,7 @@ void WebRtcIlbcfix_DecodeResidual(
/* backward prediction of subframes */
Nback = iLBC_encbits->startIdx-1;
if( Nback > 0 ){
if (iLBC_encbits->startIdx > 1) {
/* setup memory */
@ -160,6 +157,7 @@ void WebRtcIlbcfix_DecodeResidual(
/* loop over subframes to decode */
int16_t Nback = iLBC_encbits->startIdx - 1;
for (subframe=0; subframe<Nback; subframe++) {
/* construct decoded vector */

View File

@ -37,7 +37,7 @@ void WebRtcIlbcfix_DoThePlc(
IlbcDecoder *iLBCdec_inst
/* (i/o) decoder instance */
){
int16_t i, pick;
int16_t i;
int32_t cross, ener, cross_comp, ener_comp = 0;
int32_t measure, maxMeasure, energy;
int16_t max, crossSquareMax, crossSquare;
@ -234,22 +234,19 @@ void WebRtcIlbcfix_DoThePlc(
/* noise component - 52 < randlagFIX < 117 */
iLBCdec_inst->seed = (int16_t)(iLBCdec_inst->seed * 31821 + 13849);
randlag = 53 + (int16_t)(iLBCdec_inst->seed & 63);
pick = i - randlag;
if (pick < 0) {
randvec[i] = iLBCdec_inst->prevResidual[iLBCdec_inst->blockl+pick];
if (randlag > i) {
randvec[i] =
iLBCdec_inst->prevResidual[iLBCdec_inst->blockl + i - randlag];
} else {
randvec[i] = iLBCdec_inst->prevResidual[pick];
randvec[i] = iLBCdec_inst->prevResidual[i - randlag];
}
/* pitch repeatition component */
pick = i - use_lag;
if (pick < 0) {
PLCresidual[i] = iLBCdec_inst->prevResidual[iLBCdec_inst->blockl+pick];
if (use_lag > i) {
PLCresidual[i] =
iLBCdec_inst->prevResidual[iLBCdec_inst->blockl + i - use_lag];
} else {
PLCresidual[i] = PLCresidual[pick];
PLCresidual[i] = PLCresidual[i - use_lag];
}
/* Attinuate total gain for each 10 ms */

View File

@ -48,7 +48,7 @@ void WebRtcIlbcfix_EncodeImpl(
IlbcEncoder *iLBCenc_inst /* (i/o) the general encoder
state */
){
int n, meml_gotten, Nfor, Nback;
int n, meml_gotten, Nfor;
int16_t diff, start_pos;
int index;
int subcount, subframe;
@ -379,15 +379,14 @@ void WebRtcIlbcfix_EncodeImpl(
/* backward prediction of subframes */
Nback = iLBCbits_inst->startIdx-1;
if( Nback > 0 ){
if (iLBCbits_inst->startIdx > 1) {
/* create reverse order vectors
(The decresidual does not need to be copied since it is
contained in the same vector as the residual)
*/
int Nback = iLBCbits_inst->startIdx - 1;
WebRtcSpl_MemCpyReversedOrder(&reverseResidual[Nback*SUBL-1], residual, Nback*SUBL);
/* setup memory */
@ -425,11 +424,11 @@ void WebRtcIlbcfix_EncodeImpl(
if (iLBCenc_inst->section == 1)
{
start_count = 0;
end_count = WEBRTC_SPL_MAX (2 - Nfor, 0);
end_count = (Nfor >= 2) ? 0 : (2 - Nfor);
}
if (iLBCenc_inst->section == 2)
{
start_count = WEBRTC_SPL_MAX (2 - Nfor, 0);
start_count = (Nfor >= 2) ? 0 : (2 - Nfor);
end_count = Nback;
}
}

View File

@ -110,9 +110,8 @@ int WebRtcIlbcfix_EnhancerInterface( /* (o) Estimated lag in end of in[] */
for(iblock = 0; iblock<new_blocks; iblock++){
/* references */
i = 60 + iblock * ENH_BLOCKL_HALF;
target=downsampled+i;
regressor=downsampled+i-10;
target = downsampled + 60 + iblock * ENH_BLOCKL_HALF;
regressor = target - 10;
/* scaling */
max16=WebRtcSpl_MaxAbsValueW16(&regressor[-50],

View File

@ -165,6 +165,10 @@ int main(int argc, char* argv[])
fprintf(stderr, "--- Encoding block %i --- ",blockcount);
len = WebRtcIlbcfix_Encode(Enc_Inst, data, (int16_t)frameLen, encoded_data);
if (len < 0) {
fprintf(stderr, "Error encoding\n");
exit(0);
}
fprintf(stderr, "\r");
/* write byte file */
@ -202,6 +206,10 @@ int main(int argc, char* argv[])
if (pli==1) {
len=WebRtcIlbcfix_Decode(Dec_Inst, encoded_data,
(int16_t)len, decoded_data,&speechType);
if (len < 0) {
fprintf(stderr, "Error decoding\n");
exit(0);
}
} else {
len=WebRtcIlbcfix_DecodePlc(Dec_Inst, decoded_data, 1);
}

View File

@ -139,6 +139,10 @@ int main(int argc, char* argv[])
#else
len=WebRtcIlbcfix_Encode(Enc_Inst, data, (short)(mode<<3), encoded_data);
#endif
if (len < 0) {
fprintf(stderr, "Error encoding\n");
exit(0);
}
fprintf(stderr, "\r");
#ifdef JUNK_DATA
@ -176,6 +180,10 @@ int main(int argc, char* argv[])
if (pli==1) {
len=WebRtcIlbcfix_Decode(Dec_Inst, encoded_data, (int16_t)len, data,
&speechType);
if (len < 0) {
fprintf(stderr, "Error decoding\n");
exit(0);
}
} else {
len=WebRtcIlbcfix_DecodePlc(Dec_Inst, data, 1);
}

View File

@ -37,8 +37,8 @@ int WebRtcIsacfix_DecodeImpl(int16_t* signal_out16,
int16_t* current_framesamples);
int16_t WebRtcIsacfix_DecodePlcImpl(int16_t* decoded,
IsacFixDecoderInstance* ISACdec_obj,
int16_t* current_framesample );
IsacFixDecoderInstance* ISACdec_obj,
int16_t* current_framesample );
int WebRtcIsacfix_EncodeImpl(int16_t* in,
IsacFixEncoderInstance* ISACenc_obj,

View File

@ -309,7 +309,7 @@ int16_t WebRtcIsacfix_DecodePlcImpl(int16_t *signal_out16,
&((ISACdec_obj->plcstr_obj).prevPitchInvIn[FRAMESAMPLES_HALF - lag0]);
minCorr = WEBRTC_SPL_WORD32_MAX;
if ( (FRAMESAMPLES_HALF - 2*lag0 - 10) > 0 )
if ((FRAMESAMPLES_HALF - 10) > 2 * lag0)
{
minIdx = 11;
for( i = 0; i < 21; i++ )

View File

@ -279,13 +279,15 @@ void WebRtcIsacfix_NormLatticeFilterAr(int16_t orderCoef,
ARfQ0vec[i] = (int16_t)WebRtcSpl_SatW32ToW16(tmp32); // Q0
}
for (i=orderCoef-1;i>=0;i--) //get the state of f&g for the first input, for all orders
for (i=orderCoef;i>0;i--) //get the state of f&g for the first input, for all orders
{
tmp32 = (cthQ15[i] * ARfQ0vec[0] - sthQ15[i] * stateGQ0[i] + 16384) >> 15;
tmp32 = (cthQ15[i - 1] * ARfQ0vec[0] - sthQ15[i - 1] * stateGQ0[i - 1] +
16384) >> 15;
tmpAR = (int16_t)WebRtcSpl_SatW32ToW16(tmp32); // Q0
tmp32 = (sthQ15[i] * ARfQ0vec[0] + cthQ15[i] * stateGQ0[i] + 16384) >> 15;
ARgQ0vec[i+1] = (int16_t)WebRtcSpl_SatW32ToW16(tmp32); // Q0
tmp32 = (sthQ15[i - 1] * ARfQ0vec[0] + cthQ15[i - 1] * stateGQ0[i - 1] +
16384) >> 15;
ARgQ0vec[i] = (int16_t)WebRtcSpl_SatW32ToW16(tmp32); // Q0
ARfQ0vec[0] = tmpAR;
}
ARgQ0vec[0] = ARfQ0vec[0];

View File

@ -46,21 +46,21 @@ HALF_SUBFRAME_LOOP: @ for(n = 0; n < HALF_SUBFRAMELEN - 1; n++)
add r2, r9, asl #1 @ Restore r2 to &cth_Q15[order_coef]
add r3, r9, asl #1 @ Restore r3 to &sth_Q15[order_coef]
ORDER_COEF_LOOP: @ for(k = order_coef - 1 ; k >= 0; k--)
ORDER_COEF_LOOP: @ for(k = order_coef ; k > 0; k--)
ldrh r7, [r3, #-2]! @ sth_Q15[k]
ldrh r6, [r2, #-2]! @ cth_Q15[k]
ldrh r7, [r3, #-2]! @ sth_Q15[k - 1]
ldrh r6, [r2, #-2]! @ cth_Q15[k - 1]
ldrh r8, [r0, #-2] @ ar_g_Q0[k]
smlabb r11, r7, r5, r12 @ sth_Q15[k] * tmpAR + 16384
smlabb r10, r6, r5, r12 @ cth_Q15[k] * tmpAR + 16384
smulbb r7, r7, r8 @ sth_Q15[k] * ar_g_Q0[k]
smlabb r11, r6, r8, r11 @ cth_Q15[k]*ar_g_Q0[k]+(sth_Q15[k]*tmpAR+16384)
ldrh r8, [r0, #-2] @ ar_g_Q0[k - 1]
smlabb r11, r7, r5, r12 @ sth_Q15[k - 1] * tmpAR + 16384
smlabb r10, r6, r5, r12 @ cth_Q15[k - 1] * tmpAR + 16384
smulbb r7, r7, r8 @ sth_Q15[k - 1] * ar_g_Q0[k - 1]
smlabb r11, r6, r8, r11 @ cth_Q15[k - 1]*ar_g_Q0[k - 1]+(sth_Q15[k - 1]*tmpAR+16384)
sub r10, r10, r7 @ cth_Q15[k]*tmpAR+16384-(sth_Q15[k]*ar_g_Q0[k])
sub r10, r10, r7 @ cth_Q15[k - 1]*tmpAR+16384-(sth_Q15[k - 1]*ar_g_Q0[k - 1])
ssat r11, #16, r11, asr #15
ssat r5, #16, r10, asr #15
strh r11, [r0], #-2 @ Output: ar_g_Q0[k+1]
strh r11, [r0], #-2 @ Output: ar_g_Q0[k]
subs r9, #1
bgt ORDER_COEF_LOOP

View File

@ -35,11 +35,13 @@ void WebRtcIsacfix_FilterArLoop(int16_t* ar_g_Q0, // Input samples
int32_t tmp32_2 = 0;
tmpAR = ar_f_Q0[n + 1];
for (k = order_coef - 1; k >= 0; k--) {
tmp32 = (cth_Q15[k] * tmpAR - sth_Q15[k] * ar_g_Q0[k] + 16384) >> 15;
tmp32_2 = (sth_Q15[k] * tmpAR + cth_Q15[k] * ar_g_Q0[k] + 16384) >> 15;
for (k = order_coef; k > 0; k--) {
tmp32 = (cth_Q15[k - 1] * tmpAR - sth_Q15[k - 1] * ar_g_Q0[k - 1] +
16384) >> 15;
tmp32_2 = (sth_Q15[k - 1] * tmpAR + cth_Q15[k - 1] * ar_g_Q0[k - 1] +
16384) >> 15;
tmpAR = (int16_t)WebRtcSpl_SatW32ToW16(tmp32);
ar_g_Q0[k + 1] = (int16_t)WebRtcSpl_SatW32ToW16(tmp32_2);
ar_g_Q0[k] = (int16_t)WebRtcSpl_SatW32ToW16(tmp32_2);
}
ar_f_Q0[n + 1] = tmpAR;
ar_g_Q0[0] = tmpAR;

View File

@ -69,7 +69,6 @@ void WebRtcIsacfix_PitchFilter(int16_t* indatQQ, // Q10 if type is 1 or 4,
int16_t oldLagQ7;
int16_t oldGainQ12, lagdeltaQ7, curLagQ7, gaindeltaQ12, curGainQ12;
int indW32 = 0, frcQQ = 0;
int32_t tmpW32;
const int16_t* fracoeffQQ = NULL;
// Assumptions in ARM assembly for WebRtcIsacfix_PitchFilterCoreARM().
@ -123,8 +122,7 @@ void WebRtcIsacfix_PitchFilter(int16_t* indatQQ, // Q10 if type is 1 or 4,
curGainQ12 += gaindeltaQ12;
curLagQ7 += lagdeltaQ7;
indW32 = CalcLrIntQ(curLagQ7, 7);
tmpW32 = (indW32 << 7) - curLagQ7;
frcQQ = (tmpW32 >> 4) + 4;
frcQQ = ((indW32 << 7) + 64 - curLagQ7) >> 4;
if (frcQQ == PITCH_FRACS) {
frcQQ = 0;
@ -195,8 +193,7 @@ void WebRtcIsacfix_PitchFilterGains(const int16_t* indatQ0,
// Update parameters for each segment.
curLagQ7 += lagdeltaQ7;
indW16 = (int16_t)CalcLrIntQ(curLagQ7, 7);
tmpW16 = (indW16 << 7) - curLagQ7;
frcQQ = (tmpW16 >> 4) + 4;
frcQQ = ((indW16 << 7) + 64 - curLagQ7) >> 4;
if (frcQQ == PITCH_FRACS) {
frcQQ = 0;

View File

@ -65,18 +65,21 @@ float IsacSpeedTest::EncodeABlock(int16_t* in_data, uint8_t* bit_stream,
// ISAC takes 10 ms everycall
const int subblocks = block_duration_ms_ / 10;
const int subblock_length = 10 * input_sampling_khz_;
int value;
int value = 0;
clock_t clocks = clock();
size_t pointer = 0;
for (int idx = 0; idx < subblocks; idx++, pointer += subblock_length) {
value = WebRtcIsacfix_Encode(ISACFIX_main_inst_, &in_data[pointer],
bit_stream);
if (idx == subblocks - 1)
EXPECT_GT(value, 0);
else
EXPECT_EQ(0, value);
}
clocks = clock() - clocks;
EXPECT_GT(value, 0);
assert(value <= max_bytes);
*encoded_bytes = value;
assert(*encoded_bytes <= max_bytes);
return 1000.0 * clocks / CLOCKS_PER_SEC;
}

View File

@ -232,7 +232,7 @@ int main(int argc, char* argv[])
CodingMode = 0;
testNum = 0;
testCE = 0;
for (i = 1; i < argc-2;i++) {
for (i = 1; i + 2 < argc; i++) {
/* Instantaneous mode */
if (!strcmp ("-I", argv[i])) {
printf("\nInstantaneous BottleNeck\n");

View File

@ -185,7 +185,7 @@ int main(int argc, char* argv[]) {
char transCodingFileName[500];
int16_t totFileLoop = 0;
int16_t numFileLoop = 0;
for (i = 1; i < argc - 2; i++) {
for (i = 1; i + 2 < argc; i++) {
if (!strcmp("-LOOP", argv[i])) {
i++;
totFileLoop = (int16_t)atol(argv[i]);

View File

@ -106,6 +106,7 @@ int OpusTest::EncodeDecode(WebRtcOpusEncInst* encoder,
input_audio,
input_samples, kMaxBytes,
bitstream_);
EXPECT_GE(encoded_bytes_, 0);
return WebRtcOpus_Decode(decoder, bitstream_,
encoded_bytes_, output_audio,
audio_type);
@ -539,6 +540,7 @@ TEST_P(OpusTest, DISABLED_ON_IOS(OpusDurationEstimation)) {
speech_data_.GetNextBlock(),
kOpus10msFrameSamples, kMaxBytes,
bitstream_);
EXPECT_GE(encoded_bytes_, 0);
EXPECT_EQ(kOpus10msFrameSamples,
WebRtcOpus_DurationEst(opus_decoder_, bitstream_,
encoded_bytes_));
@ -548,6 +550,7 @@ TEST_P(OpusTest, DISABLED_ON_IOS(OpusDurationEstimation)) {
speech_data_.GetNextBlock(),
kOpus20msFrameSamples, kMaxBytes,
bitstream_);
EXPECT_GE(encoded_bytes_, 0);
EXPECT_EQ(kOpus20msFrameSamples,
WebRtcOpus_DurationEst(opus_decoder_, bitstream_,
encoded_bytes_));