Use size_t more consistently for packet/payload lengths.
See design doc at https://docs.google.com/a/chromium.org/document/d/1I6nmE9D_BmCY-IoV6MDPY2V6WYpEI-dg2apWXTfZyUI/edit?usp=sharing for more information.

This CL was reviewed and approved in pieces in the following CLs:
https://webrtc-codereview.appspot.com/24209004/
https://webrtc-codereview.appspot.com/24229004/
https://webrtc-codereview.appspot.com/24259004/
https://webrtc-codereview.appspot.com/25109004/
https://webrtc-codereview.appspot.com/26099004/
https://webrtc-codereview.appspot.com/27069004/
https://webrtc-codereview.appspot.com/27969004/
https://webrtc-codereview.appspot.com/27989004/
https://webrtc-codereview.appspot.com/29009004/
https://webrtc-codereview.appspot.com/30929004/
https://webrtc-codereview.appspot.com/30939004/
https://webrtc-codereview.appspot.com/31999004/
Committing as TBR to the original reviewers.

BUG=chromium:81439
TEST=none
TBR=pthatcher,henrik.lundin,tina.legrand,stefan,tkchin,glaznev,kjellander,perkj,mflodman,henrika,asapersson,niklas.enbom

Review URL: https://webrtc-codereview.appspot.com/23129004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@7726 4adac7df-926f-26a2-2b94-8c16560cd09d
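The pattern throughout the diff below is to accept byte counts as size_t while keeping the int32_t return values, casting back explicitly when reporting how many bytes were handled. A minimal illustrative sketch of that calling convention follows; the function name and error handling here are made up for illustration and are not taken from the WebRTC sources.

#include <cstddef>
#include <cstdint>
#include <limits>

// Hypothetical example, not WebRTC code: a write API in the style this
// commit moves toward. The payload length is a size_t; the return value
// stays int32_t so that -1 can still signal failure, with an explicit
// cast (guarded by a range check) when the byte count is reported back.
int32_t WriteFrame(const uint8_t* data, size_t length) {
  if (data == nullptr ||
      length > static_cast<size_t>(std::numeric_limits<int32_t>::max())) {
    return -1;  // Invalid buffer, or too large to report in an int32_t.
  }
  // ... write `length` bytes from `data` ...
  return static_cast<int32_t>(length);  // Number of bytes written.
}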
@@ -360,7 +360,7 @@ int32_t AviFile::GetAudioStreamInfo(WAVEFORMATEX& waveHeader)
 return 0;
 }

-int32_t AviFile::WriteAudio(const uint8_t* data, int32_t length)
+int32_t AviFile::WriteAudio(const uint8_t* data, size_t length)
 {
 _crit->Enter();
 size_t newBytesWritten = _bytesWritten;
@@ -410,7 +410,7 @@ int32_t AviFile::WriteAudio(const uint8_t* data, int32_t length)
 return static_cast<int32_t>(newBytesWritten);
 }

-int32_t AviFile::WriteVideo(const uint8_t* data, int32_t length)
+int32_t AviFile::WriteVideo(const uint8_t* data, size_t length)
 {
 _crit->Enter();
 size_t newBytesWritten = _bytesWritten;
@@ -482,7 +482,7 @@ int32_t AviFile::PrepareDataChunkHeaders()
 return 0;
 }

-int32_t AviFile::ReadMoviSubChunk(uint8_t* data, int32_t& length, uint32_t tag1,
+int32_t AviFile::ReadMoviSubChunk(uint8_t* data, size_t& length, uint32_t tag1,
 uint32_t tag2)
 {
 if (!_reading)
@@ -563,7 +563,7 @@ int32_t AviFile::ReadMoviSubChunk(uint8_t* data, int32_t& length, uint32_t tag1,
 _bytesRead += size;
 }

-if (static_cast<int32_t>(size) > length)
+if (size > length)
 {
 WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,
 "AviFile::ReadMoviSubChunk(): AVI read buffer too small!");
@@ -589,7 +589,7 @@ int32_t AviFile::ReadMoviSubChunk(uint8_t* data, int32_t& length, uint32_t tag1,
 return 0;
 }

-int32_t AviFile::ReadAudio(uint8_t* data, int32_t& length)
+int32_t AviFile::ReadAudio(uint8_t* data, size_t& length)
 {
 _crit->Enter();
 WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1, "AviFile::ReadAudio()");
@@ -616,7 +616,7 @@ int32_t AviFile::ReadAudio(uint8_t* data, int32_t& length)
 return ret;
 }

-int32_t AviFile::ReadVideo(uint8_t* data, int32_t& length)
+int32_t AviFile::ReadVideo(uint8_t* data, size_t& length)
 {
 WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1, "AviFile::ReadVideo()");

@@ -104,8 +104,8 @@ public:
 const WAVEFORMATEX& waveFormatHeader);
 int32_t Create(const char* fileName);

-int32_t WriteAudio(const uint8_t* data, int32_t length);
-int32_t WriteVideo(const uint8_t* data, int32_t length);
+int32_t WriteAudio(const uint8_t* data, size_t length);
+int32_t WriteVideo(const uint8_t* data, size_t length);

 int32_t GetVideoStreamInfo(AVISTREAMHEADER& videoStreamHeader,
 BITMAPINFOHEADER& bitmapInfo,
@@ -116,8 +116,8 @@ public:

 int32_t GetAudioStreamInfo(WAVEFORMATEX& waveHeader);

-int32_t ReadAudio(uint8_t* data, int32_t& length);
-int32_t ReadVideo(uint8_t* data, int32_t& length);
+int32_t ReadAudio(uint8_t* data, size_t& length);
+int32_t ReadVideo(uint8_t* data, size_t& length);

 int32_t Close();

@@ -145,7 +145,7 @@ private:

 int32_t PrepareDataChunkHeaders();

-int32_t ReadMoviSubChunk(uint8_t* data, int32_t& length, uint32_t tag1,
+int32_t ReadMoviSubChunk(uint8_t* data, size_t& length, uint32_t tag1,
 uint32_t tag2 = 0);

 int32_t WriteRIFF();
@@ -10,6 +10,7 @@

 #include <assert.h>

+#include "webrtc/base/format_macros.h"
 #include "webrtc/modules/media_file/source/media_file_impl.h"
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
 #include "webrtc/system_wrappers/interface/file_wrapper.h"
@@ -109,25 +110,25 @@ int32_t MediaFileImpl::Process()

 int32_t MediaFileImpl::PlayoutAVIVideoData(
 int8_t* buffer,
-uint32_t& dataLengthInBytes)
+size_t& dataLengthInBytes)
 {
 return PlayoutData( buffer, dataLengthInBytes, true);
 }

 int32_t MediaFileImpl::PlayoutAudioData(int8_t* buffer,
-uint32_t& dataLengthInBytes)
+size_t& dataLengthInBytes)
 {
 return PlayoutData( buffer, dataLengthInBytes, false);
 }

-int32_t MediaFileImpl::PlayoutData(int8_t* buffer, uint32_t& dataLengthInBytes,
+int32_t MediaFileImpl::PlayoutData(int8_t* buffer, size_t& dataLengthInBytes,
 bool video)
 {
 WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
-"MediaFileImpl::PlayoutData(buffer= 0x%x, bufLen= %ld)",
+"MediaFileImpl::PlayoutData(buffer= 0x%x, bufLen= %" PRIuS ")",
 buffer, dataLengthInBytes);

-const uint32_t bufferLengthInBytes = dataLengthInBytes;
+const size_t bufferLengthInBytes = dataLengthInBytes;
 dataLengthInBytes = 0;

 if(buffer == NULL || bufferLengthInBytes == 0)
@@ -185,7 +186,7 @@ int32_t MediaFileImpl::PlayoutData(int8_t* buffer, uint32_t& dataLengthInBytes,
 bufferLengthInBytes);
 if(bytesRead > 0)
 {
-dataLengthInBytes = bytesRead;
+dataLengthInBytes = static_cast<size_t>(bytesRead);
 return 0;
 }
 break;
@@ -216,7 +217,7 @@ int32_t MediaFileImpl::PlayoutData(int8_t* buffer, uint32_t& dataLengthInBytes,

 if( bytesRead > 0)
 {
-dataLengthInBytes =(uint32_t) bytesRead;
+dataLengthInBytes = static_cast<size_t>(bytesRead);
 }
 }
 HandlePlayCallbacks(bytesRead);
@@ -266,16 +267,16 @@ void MediaFileImpl::HandlePlayCallbacks(int32_t bytesRead)
 int32_t MediaFileImpl::PlayoutStereoData(
 int8_t* bufferLeft,
 int8_t* bufferRight,
-uint32_t& dataLengthInBytes)
+size_t& dataLengthInBytes)
 {
 WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
-"MediaFileImpl::PlayoutStereoData(Left = 0x%x, Right = 0x%x,\
-Len= %ld)",
+"MediaFileImpl::PlayoutStereoData(Left = 0x%x, Right = 0x%x,"
+" Len= %" PRIuS ")",
 bufferLeft,
 bufferRight,
 dataLengthInBytes);

-const uint32_t bufferLengthInBytes = dataLengthInBytes;
+const size_t bufferLengthInBytes = dataLengthInBytes;
 dataLengthInBytes = 0;

 if(bufferLeft == NULL || bufferRight == NULL || bufferLengthInBytes == 0)
@@ -328,7 +329,7 @@ int32_t MediaFileImpl::PlayoutStereoData(

 if(bytesRead > 0)
 {
-dataLengthInBytes = bytesRead;
+dataLengthInBytes = static_cast<size_t>(bytesRead);

 // Check if it's time for PlayNotification(..).
 _playoutPositionMs = _ptrFileUtilityObj->PlayoutPositionMs();
@@ -690,25 +691,25 @@ bool MediaFileImpl::IsPlaying()

 int32_t MediaFileImpl::IncomingAudioData(
 const int8_t* buffer,
-const uint32_t bufferLengthInBytes)
+const size_t bufferLengthInBytes)
 {
 return IncomingAudioVideoData( buffer, bufferLengthInBytes, false);
 }

 int32_t MediaFileImpl::IncomingAVIVideoData(
 const int8_t* buffer,
-const uint32_t bufferLengthInBytes)
+const size_t bufferLengthInBytes)
 {
 return IncomingAudioVideoData( buffer, bufferLengthInBytes, true);
 }

 int32_t MediaFileImpl::IncomingAudioVideoData(
 const int8_t* buffer,
-const uint32_t bufferLengthInBytes,
+const size_t bufferLengthInBytes,
 const bool video)
 {
 WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
-"MediaFile::IncomingData(buffer= 0x%x, bufLen= %hd",
+"MediaFile::IncomingData(buffer= 0x%x, bufLen= %" PRIuS,
 buffer, bufferLengthInBytes);

 if(buffer == NULL || bufferLengthInBytes == 0)
@@ -803,7 +804,7 @@ int32_t MediaFileImpl::IncomingAudioVideoData(
 {
 if(_ptrOutStream->Write(buffer, bufferLengthInBytes))
 {
-bytesWritten = bufferLengthInBytes;
+bytesWritten = static_cast<int32_t>(bufferLengthInBytes);
 }
 }
 }
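Several of the hunks above replace plain assignments like dataLengthInBytes = bytesRead with an explicit static_cast<size_t>(bytesRead) once bytesRead has been checked to be positive. A hedged sketch of that sign-aware conversion, with invented names rather than the actual MediaFileImpl code:

#include <cstddef>
#include <cstdint>

// Illustrative only: fold a signed read result into an out-parameter that
// is now size_t. The cast is well-defined because it happens only after
// the positive check; on error the out-parameter stays zero.
bool StoreReadResult(int32_t bytesRead, size_t* dataLengthInBytes) {
  *dataLengthInBytes = 0;
  if (bytesRead <= 0) {
    return false;  // Read error or end of data.
  }
  *dataLengthInBytes = static_cast<size_t>(bytesRead);
  return true;
}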
@@ -32,12 +32,12 @@ public:

 // MediaFile functions
 virtual int32_t PlayoutAudioData(int8_t* audioBuffer,
-uint32_t& dataLengthInBytes) OVERRIDE;
+size_t& dataLengthInBytes) OVERRIDE;
 virtual int32_t PlayoutAVIVideoData(int8_t* videoBuffer,
-uint32_t& dataLengthInBytes) OVERRIDE;
+size_t& dataLengthInBytes) OVERRIDE;
 virtual int32_t PlayoutStereoData(int8_t* audioBufferLeft,
 int8_t* audioBufferRight,
-uint32_t& dataLengthInBytes) OVERRIDE;
+size_t& dataLengthInBytes) OVERRIDE;
 virtual int32_t StartPlayingAudioFile(
 const char* fileName,
 const uint32_t notificationTimeMs = 0,
@@ -58,10 +58,10 @@ public:
 virtual int32_t StopPlaying() OVERRIDE;
 virtual bool IsPlaying() OVERRIDE;
 virtual int32_t PlayoutPositionMs(uint32_t& positionMs) const OVERRIDE;
-virtual int32_t IncomingAudioData(const int8_t* audioBuffer,
-const uint32_t bufferLength) OVERRIDE;
-virtual int32_t IncomingAVIVideoData(const int8_t* audioBuffer,
-const uint32_t bufferLength) OVERRIDE;
+virtual int32_t IncomingAudioData(const int8_t* audioBuffer,
+const size_t bufferLength) OVERRIDE;
+virtual int32_t IncomingAVIVideoData(const int8_t* audioBuffer,
+const size_t bufferLength) OVERRIDE;
 virtual int32_t StartRecordingAudioFile(
 const char* fileName,
 const FileFormats format,
@@ -157,14 +157,14 @@ private:
 // audioBuffer. As output parameter it indicates the number of bytes
 // written to audioBuffer. If video is true the data written is a video
 // frame otherwise it is an audio frame.
-int32_t PlayoutData(int8_t* dataBuffer, uint32_t& dataLengthInBytes,
+int32_t PlayoutData(int8_t* dataBuffer, size_t& dataLengthInBytes,
 bool video);

 // Write one frame, i.e. the bufferLength first bytes of audioBuffer,
 // to file. The frame is an audio frame if video is true otherwise it is an
 // audio frame.
-int32_t IncomingAudioVideoData(const int8_t* buffer,
-const uint32_t bufferLength,
+int32_t IncomingAudioVideoData(const int8_t* buffer,
+const size_t bufferLength,
 const bool video);

 // Open/creates file specified by fileName for writing (relative path is
@@ -49,9 +49,11 @@ TEST_F(MediaFileTest, DISABLED_ON_ANDROID(StartPlayingAudioFileWithoutError)) {

 TEST_F(MediaFileTest, WriteWavFile) {
 // Write file.
-static const int kHeaderSize = 44;
-static const int kPayloadSize = 320;
-webrtc::CodecInst codec = {0, "L16", 16000, kPayloadSize, 1};
+static const size_t kHeaderSize = 44;
+static const size_t kPayloadSize = 320;
+webrtc::CodecInst codec = {
+0, "L16", 16000, static_cast<int>(kPayloadSize), 1
+};
 std::string outfile = webrtc::test::OutputPath() + "wavtest.wav";
 ASSERT_EQ(0,
 media_file_->StartRecordingAudioFile(
@@ -78,8 +80,7 @@ TEST_F(MediaFileTest, WriteWavFile) {
 };
 COMPILE_ASSERT(sizeof(kExpectedHeader) == kHeaderSize, header_size);

-EXPECT_EQ(size_t(kHeaderSize + kPayloadSize),
-webrtc::test::GetFileSize(outfile));
+EXPECT_EQ(kHeaderSize + kPayloadSize, webrtc::test::GetFileSize(outfile));
 FILE* f = fopen(outfile.c_str(), "rb");
 ASSERT_TRUE(f);

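The unit-test hunk above keeps kPayloadSize as a size_t and narrows it with static_cast<int> only at the point where CodecInst still declares an int field. A small sketch of the same boundary cast, using a made-up struct in place of CodecInst:

#include <cstddef>

// Hypothetical stand-in for a legacy struct (like CodecInst) whose size
// field is declared as int for historical reasons.
struct LegacyParams {
  int packet_size;
};

static const size_t kPayloadSize = 320;  // Byte counts stay size_t.

// Narrow explicitly, and only, at the boundary where the int field is set.
LegacyParams MakeParams() {
  LegacyParams params;
  params.packet_size = static_cast<int>(kPayloadSize);
  return params;
}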
@@ -13,7 +13,9 @@
 #include <assert.h>
 #include <sys/stat.h>
 #include <sys/types.h>
+#include <limits>

+#include "webrtc/base/format_macros.h"
 #include "webrtc/common_audio/wav_header.h"
 #include "webrtc/common_types.h"
 #include "webrtc/engine_configurations.h"
@@ -234,7 +236,7 @@ int32_t ModuleFileUtility::InitAviWriting(

 int32_t ModuleFileUtility::WriteAviAudioData(
 const int8_t* buffer,
-uint32_t bufferLengthInBytes)
+size_t bufferLengthInBytes)
 {
 if( _aviOutFile != 0)
 {
@@ -251,7 +253,7 @@ int32_t ModuleFileUtility::WriteAviAudioData(

 int32_t ModuleFileUtility::WriteAviVideoData(
 const int8_t* buffer,
-uint32_t bufferLengthInBytes)
+size_t bufferLengthInBytes)
 {
 if( _aviOutFile != 0)
 {
@@ -370,7 +372,7 @@ int32_t ModuleFileUtility::InitAviReading(const char* filename, bool videoOnly,

 int32_t ModuleFileUtility::ReadAviAudioData(
 int8_t* outBuffer,
-const uint32_t bufferLengthInBytes)
+size_t bufferLengthInBytes)
 {
 if(_aviAudioInFile == 0)
 {
@@ -378,22 +380,20 @@ int32_t ModuleFileUtility::ReadAviAudioData(
 return -1;
 }

-int32_t length = bufferLengthInBytes;
-if(_aviAudioInFile->ReadAudio(
-reinterpret_cast<uint8_t*>(outBuffer),
-length) != 0)
+if(_aviAudioInFile->ReadAudio(reinterpret_cast<uint8_t*>(outBuffer),
+bufferLengthInBytes) != 0)
 {
 return -1;
 }
 else
 {
-return length;
+return static_cast<int32_t>(bufferLengthInBytes);
 }
 }

 int32_t ModuleFileUtility::ReadAviVideoData(
 int8_t* outBuffer,
-const uint32_t bufferLengthInBytes)
+size_t bufferLengthInBytes)
 {
 if(_aviVideoInFile == 0)
 {
@@ -401,14 +401,12 @@ int32_t ModuleFileUtility::ReadAviVideoData(
 return -1;
 }

-int32_t length = bufferLengthInBytes;
-if( _aviVideoInFile->ReadVideo(
-reinterpret_cast<uint8_t*>(outBuffer),
-length) != 0)
+if(_aviVideoInFile->ReadVideo(reinterpret_cast<uint8_t*>(outBuffer),
+bufferLengthInBytes) != 0)
 {
 return -1;
 } else {
-return length;
+return static_cast<int32_t>(bufferLengthInBytes);
 }
 }

@@ -774,14 +772,14 @@ int32_t ModuleFileUtility::InitWavReading(InStream& wav,
 int32_t ModuleFileUtility::ReadWavDataAsMono(
 InStream& wav,
 int8_t* outData,
-const uint32_t bufferSize)
+const size_t bufferSize)
 {
 WEBRTC_TRACE(
 kTraceStream,
 kTraceFile,
 _id,
-"ModuleFileUtility::ReadWavDataAsMono(wav= 0x%x, outData= 0x%d,\
-bufSize= %ld)",
+"ModuleFileUtility::ReadWavDataAsMono(wav= 0x%x, outData= 0x%d, "
+"bufSize= %" PRIuS ")",
 &wav,
 outData,
 bufferSize);
@@ -853,14 +851,14 @@ int32_t ModuleFileUtility::ReadWavDataAsStereo(
 InStream& wav,
 int8_t* outDataLeft,
 int8_t* outDataRight,
-const uint32_t bufferSize)
+const size_t bufferSize)
 {
 WEBRTC_TRACE(
 kTraceStream,
 kTraceFile,
 _id,
-"ModuleFileUtility::ReadWavDataAsStereo(wav= 0x%x, outLeft= 0x%x,\
-outRight= 0x%x, bufSize= %ld)",
+"ModuleFileUtility::ReadWavDataAsStereo(wav= 0x%x, outLeft= 0x%x, "
+"outRight= 0x%x, bufSize= %" PRIuS ")",
 &wav,
 outDataLeft,
 outDataRight,
@@ -1083,13 +1081,14 @@ int32_t ModuleFileUtility::InitWavWriting(OutStream& wav,

 int32_t ModuleFileUtility::WriteWavData(OutStream& out,
 const int8_t* buffer,
-const uint32_t dataLength)
+const size_t dataLength)
 {
 WEBRTC_TRACE(
 kTraceStream,
 kTraceFile,
 _id,
-"ModuleFileUtility::WriteWavData(out= 0x%x, buf= 0x%x, dataLen= %d)",
+"ModuleFileUtility::WriteWavData(out= 0x%x, buf= 0x%x, dataLen= %" PRIuS
+")",
 &out,
 buffer,
 dataLength);
@@ -1106,7 +1105,7 @@ int32_t ModuleFileUtility::WriteWavData(OutStream& out,
 return -1;
 }
 _bytesWritten += dataLength;
-return dataLength;
+return static_cast<int32_t>(dataLength);
 }


@@ -1192,14 +1191,14 @@ int32_t ModuleFileUtility::InitPreEncodedReading(InStream& in,
 int32_t ModuleFileUtility::ReadPreEncodedData(
 InStream& in,
 int8_t* outData,
-const uint32_t bufferSize)
+const size_t bufferSize)
 {
 WEBRTC_TRACE(
 kTraceStream,
 kTraceFile,
 _id,
-"ModuleFileUtility::ReadPreEncodedData(in= 0x%x, outData= 0x%x,\
-bufferSize= %d)",
+"ModuleFileUtility::ReadPreEncodedData(in= 0x%x, outData= 0x%x, "
+"bufferSize= %" PRIuS ")",
 &in,
 outData,
 bufferSize);
@@ -1259,14 +1258,14 @@ int32_t ModuleFileUtility::InitPreEncodedWriting(
 int32_t ModuleFileUtility::WritePreEncodedData(
 OutStream& out,
 const int8_t* buffer,
-const uint32_t dataLength)
+const size_t dataLength)
 {
 WEBRTC_TRACE(
 kTraceStream,
 kTraceFile,
 _id,
-"ModuleFileUtility::WritePreEncodedData(out= 0x%x, inData= 0x%x,\
-dataLen= %d)",
+"ModuleFileUtility::WritePreEncodedData(out= 0x%x, inData= 0x%x, "
+"dataLen= %" PRIuS ")",
 &out,
 buffer,
 dataLength);
@@ -1276,11 +1275,12 @@ int32_t ModuleFileUtility::WritePreEncodedData(
 WEBRTC_TRACE(kTraceError, kTraceFile, _id,"buffer NULL");
 }

-int32_t bytesWritten = 0;
+size_t bytesWritten = 0;
 // The first two bytes is the size of the frame.
 int16_t lengthBuf;
 lengthBuf = (int16_t)dataLength;
-if(!out.Write(&lengthBuf, 2))
+if(dataLength > static_cast<size_t>(std::numeric_limits<int16_t>::max()) ||
+!out.Write(&lengthBuf, 2))
 {
 return -1;
 }
@@ -1291,7 +1291,7 @@ int32_t ModuleFileUtility::WritePreEncodedData(
 return -1;
 }
 bytesWritten += dataLength;
-return bytesWritten;
+return static_cast<int32_t>(bytesWritten);
 }

 int32_t ModuleFileUtility::InitCompressedReading(
@@ -1495,14 +1495,14 @@ int32_t ModuleFileUtility::InitCompressedReading(

 int32_t ModuleFileUtility::ReadCompressedData(InStream& in,
 int8_t* outData,
-uint32_t bufferSize)
+size_t bufferSize)
 {
 WEBRTC_TRACE(
 kTraceStream,
 kTraceFile,
 _id,
-"ModuleFileUtility::ReadCompressedData(in=0x%x, outData=0x%x,\
-bytes=%ld)",
+"ModuleFileUtility::ReadCompressedData(in=0x%x, outData=0x%x, bytes=%"
+PRIuS ")",
 &in,
 outData,
 bufferSize);
@@ -1554,7 +1554,7 @@ int32_t ModuleFileUtility::ReadCompressedData(InStream& in,
 }
 if(mode != 15)
 {
-if(bufferSize < AMRmode2bytes[mode] + 1)
+if(bufferSize < static_cast<size_t>(AMRmode2bytes[mode] + 1))
 {
 WEBRTC_TRACE(
 kTraceError,
@@ -1612,7 +1612,7 @@ int32_t ModuleFileUtility::ReadCompressedData(InStream& in,
 }
 if(mode != 15)
 {
-if(bufferSize < AMRWBmode2bytes[mode] + 1)
+if(bufferSize < static_cast<size_t>(AMRWBmode2bytes[mode] + 1))
 {
 WEBRTC_TRACE(kTraceError, kTraceFile, _id,
 "output buffer is too short to read AMRWB\
@@ -1770,14 +1770,14 @@ int32_t ModuleFileUtility::InitCompressedWriting(
 int32_t ModuleFileUtility::WriteCompressedData(
 OutStream& out,
 const int8_t* buffer,
-const uint32_t dataLength)
+const size_t dataLength)
 {
 WEBRTC_TRACE(
 kTraceStream,
 kTraceFile,
 _id,
-"ModuleFileUtility::WriteCompressedData(out= 0x%x, buf= 0x%x,\
-dataLen= %d)",
+"ModuleFileUtility::WriteCompressedData(out= 0x%x, buf= 0x%x, "
+"dataLen= %" PRIuS ")",
 &out,
 buffer,
 dataLength);
@@ -1791,7 +1791,7 @@ int32_t ModuleFileUtility::WriteCompressedData(
 {
 return -1;
 }
-return dataLength;
+return static_cast<int32_t>(dataLength);
 }

 int32_t ModuleFileUtility::InitPCMReading(InStream& pcm,
@@ -1872,13 +1872,14 @@ int32_t ModuleFileUtility::InitPCMReading(InStream& pcm,

 int32_t ModuleFileUtility::ReadPCMData(InStream& pcm,
 int8_t* outData,
-uint32_t bufferSize)
+size_t bufferSize)
 {
 WEBRTC_TRACE(
 kTraceStream,
 kTraceFile,
 _id,
-"ModuleFileUtility::ReadPCMData(pcm= 0x%x, outData= 0x%x, bufSize= %d)",
+"ModuleFileUtility::ReadPCMData(pcm= 0x%x, outData= 0x%x, bufSize= %"
+PRIuS ")",
 &pcm,
 outData,
 bufferSize);
@@ -2006,13 +2007,14 @@ int32_t ModuleFileUtility::InitPCMWriting(OutStream& out, uint32_t freq)

 int32_t ModuleFileUtility::WritePCMData(OutStream& out,
 const int8_t* buffer,
-const uint32_t dataLength)
+const size_t dataLength)
 {
 WEBRTC_TRACE(
 kTraceStream,
 kTraceFile,
 _id,
-"ModuleFileUtility::WritePCMData(out= 0x%x, buf= 0x%x, dataLen= %d)",
+"ModuleFileUtility::WritePCMData(out= 0x%x, buf= 0x%x, dataLen= %" PRIuS
+")",
 &out,
 buffer,
 dataLength);
@@ -2028,7 +2030,7 @@ int32_t ModuleFileUtility::WritePCMData(OutStream& out,
 }

 _bytesWritten += dataLength;
-return dataLength;
+return static_cast<int32_t>(dataLength);
 }

 int32_t ModuleFileUtility::codec_info(CodecInst& codecInst)
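Two patterns recur in the hunks above: size_t values are logged with the PRIuS macro from webrtc/base/format_macros.h rather than %d or %ld, and a size_t is never narrowed into a smaller signed type without a range check (the std::numeric_limits<int16_t>::max() guard in WritePreEncodedData). A combined sketch follows; plain printf with %zu stands in for WEBRTC_TRACE with PRIuS, and the function and names are illustrative, not the actual ModuleFileUtility code.

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <limits>

// Illustrative only: write a frame whose length is stored in a 16-bit
// prefix, rejecting lengths that would not fit instead of truncating.
int32_t WriteLengthPrefixedFrame(const int8_t* buffer, size_t dataLength) {
  // %zu is the standard size_t conversion; WebRTC routes this through
  // WEBRTC_TRACE with the PRIuS macro so the format string also works on
  // toolchains without %zu support.
  std::printf("WriteLengthPrefixedFrame(buf=%p, dataLen=%zu)\n",
              static_cast<const void*>(buffer), dataLength);

  if (buffer == nullptr ||
      dataLength > static_cast<size_t>(std::numeric_limits<int16_t>::max())) {
    return -1;  // No buffer, or too large for the 16-bit length prefix.
  }
  const int16_t lengthPrefix = static_cast<int16_t>(dataLength);
  // ... write lengthPrefix, then dataLength bytes of buffer ...
  (void)lengthPrefix;  // Placeholder: actual I/O omitted in this sketch.
  return static_cast<int32_t>(dataLength);
}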
@@ -43,13 +43,13 @@ public:
 // audio with more channels (in which case the audio will be coverted to
 // mono).
 int32_t ReadAviAudioData(int8_t* outBuffer,
-const uint32_t bufferLengthInBytes);
+size_t bufferLengthInBytes);

 // Put one video frame into outBuffer. bufferLengthInBytes indicates the
 // size of outBuffer.
 // The return value is the number of bytes written to videoBuffer.
 int32_t ReadAviVideoData(int8_t* videoBuffer,
-const uint32_t bufferLengthInBytes);
+size_t bufferLengthInBytes);

 // Open/create the file specified by fileName for writing audio/video data
 // (relative path is allowed). codecInst specifies the encoding of the audio
@@ -66,7 +66,7 @@ public:
 // InitAviWriting(..) call.
 // Note: bufferLength must be exactly one frame.
 int32_t WriteAviAudioData(const int8_t* audioBuffer,
-uint32_t bufferLengthInBytes);
+size_t bufferLengthInBytes);


 // Write one video frame, i.e. the bufferLength first bytes of videoBuffer,
@@ -76,7 +76,7 @@ public:
 // InitAviWriting(..) call. The videoBuffer must contain exactly
 // one video frame.
 int32_t WriteAviVideoData(const int8_t* videoBuffer,
-uint32_t bufferLengthInBytes);
+size_t bufferLengthInBytes);

 // Stop recording to file or stream.
 int32_t CloseAviFile();
@@ -98,7 +98,7 @@ public:
 // audio with more channels (in which case the audio will be converted to
 // mono).
 int32_t ReadWavDataAsMono(InStream& stream, int8_t* audioBuffer,
-const uint32_t dataLengthInBytes);
+const size_t dataLengthInBytes);

 // Put 10-60ms, depending on codec frame size, of audio data from file into
 // audioBufferLeft and audioBufferRight. The buffers contain the left and
@@ -111,7 +111,7 @@ public:
 int32_t ReadWavDataAsStereo(InStream& wav,
 int8_t* audioBufferLeft,
 int8_t* audioBufferRight,
-const uint32_t bufferLength);
+const size_t bufferLength);

 // Prepare for recording audio to stream.
 // codecInst specifies the encoding of the audio data.
@@ -125,7 +125,7 @@ public:
 // The return value is the number of bytes written to audioBuffer.
 int32_t WriteWavData(OutStream& stream,
 const int8_t* audioBuffer,
-const uint32_t bufferLength);
+const size_t bufferLength);

 // Finalizes the WAV header so that it is correct if nothing more will be
 // written to stream.
@@ -148,7 +148,7 @@ public:
 // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
 // The return value is the number of bytes written to audioBuffer.
 int32_t ReadPCMData(InStream& stream, int8_t* audioBuffer,
-const uint32_t dataLengthInBytes);
+const size_t dataLengthInBytes);

 // Prepare for recording audio to stream.
 // freqInHz is the PCM sampling frequency.
@@ -161,7 +161,7 @@ public:
 // The return value is the number of bytes written to audioBuffer.
 int32_t WritePCMData(OutStream& stream,
 const int8_t* audioBuffer,
-uint32_t bufferLength);
+size_t bufferLength);

 // Prepare for playing audio from stream.
 // startPointMs and stopPointMs, unless zero, specify what part of the file
@@ -175,7 +175,7 @@ public:
 // The return value is the number of bytes written to audioBuffer.
 int32_t ReadCompressedData(InStream& stream,
 int8_t* audioBuffer,
-const uint32_t dataLengthInBytes);
+const size_t dataLengthInBytes);

 // Prepare for recording audio to stream.
 // codecInst specifies the encoding of the audio data.
@@ -189,7 +189,7 @@ public:
 // Note: bufferLength must be exactly one frame.
 int32_t WriteCompressedData(OutStream& stream,
 const int8_t* audioBuffer,
-const uint32_t bufferLength);
+const size_t bufferLength);

 // Prepare for playing audio from stream.
 // codecInst specifies the encoding of the audio data.
@@ -201,7 +201,7 @@ public:
 // The return value is the number of bytes written to audioBuffer.
 int32_t ReadPreEncodedData(InStream& stream,
 int8_t* audioBuffer,
-const uint32_t dataLengthInBytes);
+const size_t dataLengthInBytes);

 // Prepare for recording audio to stream.
 // codecInst specifies the encoding of the audio data.
@@ -215,7 +215,7 @@ public:
 // Note: bufferLength must be exactly one frame.
 int32_t WritePreEncodedData(OutStream& stream,
 const int8_t* inData,
-const uint32_t dataLengthInBytes);
+const size_t dataLengthInBytes);

 // Set durationMs to the size of the file (in ms) specified by fileName.
 // freqInHz specifies the sampling frequency of the file.
@@ -320,7 +320,7 @@ private:
 uint32_t _stopPointInMs;
 uint32_t _startPointInMs;
 uint32_t _playoutPositionMs;
-uint32_t _bytesWritten;
+size_t _bytesWritten;

 CodecInst codec_info_;
 MediaFileUtility_CodecType _codecId;