
They have all been replaced by AudioEncoder subclasses, accessed through ACMGenericCodecWrapper objects. After this change, the only subclass of ACMGenericCodec is ACMGenericCodecWrapper. (The two will be consolidated in a future CL.) This CL also deletes acm_opus_unittest.cc. This test file was already replaced by audio_encoder_opus_unittest.cc in r8244. BUG=4228 COAUTHOR=kwiberg@webrtc.org R=tina.legrand@webrtc.org Review URL: https://webrtc-codereview.appspot.com/40729004 Cr-Commit-Position: refs/heads/master@{#8457} git-svn-id: http://webrtc.googlecode.com/svn/trunk@8457 4adac7df-926f-26a2-2b94-8c16560cd09d
2011 lines
56 KiB
C++
2011 lines
56 KiB
C++
/*
|
|
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
|
*
|
|
* Use of this source code is governed by a BSD-style license
|
|
* that can be found in the LICENSE file in the root of the source
|
|
* tree. An additional intellectual property rights grant can be found
|
|
* in the file PATENTS. All contributing project authors may
|
|
* be found in the AUTHORS file in the root of the source tree.
|
|
*/
|
|
|
|
#include "webrtc/modules/media_file/source/media_file_utility.h"
|
|
|
|
#include <assert.h>
|
|
#include <sys/stat.h>
|
|
#include <sys/types.h>
|
|
#include <limits>
|
|
|
|
#include "webrtc/base/format_macros.h"
|
|
#include "webrtc/common_audio/wav_header.h"
|
|
#include "webrtc/common_types.h"
|
|
#include "webrtc/engine_configurations.h"
|
|
#include "webrtc/modules/interface/module_common_types.h"
|
|
#include "webrtc/system_wrappers/interface/file_wrapper.h"
|
|
#include "webrtc/system_wrappers/interface/trace.h"
|
|
|
|
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
|
|
#include "avi_file.h"
|
|
#endif
|
|
|
|
namespace {

// First 16 bytes of a canonical WAVE file: the RIFF chunk descriptor.
// ckID must be "RIFF", wave_ckID must be "WAVE", and ckSize is the chunk
// size (4 + n).
struct WAVE_RIFF_header {
  int8_t ckID[4];
  int32_t ckSize;
  int8_t wave_ckID[4];
};

// First 8 bytes of the format chunk. fmt_ckID should be "fmt " and
// fmt_ckSize is the chunk size (16, 18 or 40 bytes).
struct WAVE_CHUNK_header {
  int8_t fmt_ckID[4];
  int32_t fmt_ckSize;
};

}  // unnamed namespace
|
|
|
|
namespace webrtc {
|
|
// Constructs the utility in a pristine state: no codec selected (payload
// type -1) and no file currently being read or written.
ModuleFileUtility::ModuleFileUtility(const int32_t id)
    : _wavFormatObj(),
      _dataSize(0),
      _readSizeBytes(0),
      _id(id),
      _stopPointInMs(0),
      _startPointInMs(0),
      _playoutPositionMs(0),
      _bytesWritten(0),
      codec_info_(),
      _codecId(kCodecNoCodec),
      _bytesPerSample(0),
      _readPos(0),
      _reading(false),
      _writing(false),
      _tempData()
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
      ,
      _aviAudioInFile(0),
      _aviVideoInFile(0),
      _aviOutFile(0)
#endif
{
  WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
               "ModuleFileUtility::ModuleFileUtility()");
  // codec_info_ is value-initialized above; the memset keeps the original
  // defensive clearing before marking the payload type as unset.
  memset(&codec_info_, 0, sizeof(CodecInst));
  codec_info_.pltype = -1;
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
  memset(&_videoCodec, 0, sizeof(_videoCodec));
#endif
}
|
|
|
|
// Releases the AVI input files when video support is compiled in; the AVI
// output file (if any) is intentionally left to CloseAviFile().
ModuleFileUtility::~ModuleFileUtility()
{
  WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
               "ModuleFileUtility::~ModuleFileUtility()");
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
  delete _aviAudioInFile;
  delete _aviVideoInFile;
#endif
}
|
|
|
|
#ifdef WEBRTC_MODULE_UTILITY_VIDEO
|
|
int32_t ModuleFileUtility::InitAviWriting(
|
|
const char* filename,
|
|
const CodecInst& audioCodecInst,
|
|
const VideoCodec& videoCodecInst,
|
|
const bool videoOnly /*= false*/)
|
|
{
|
|
_writing = false;
|
|
|
|
delete _aviOutFile;
|
|
_aviOutFile = new AviFile( );
|
|
|
|
AVISTREAMHEADER videoStreamHeader;
|
|
videoStreamHeader.fccType = AviFile::MakeFourCc('v', 'i', 'd', 's');
|
|
|
|
#ifdef VIDEOCODEC_I420
|
|
if (strncmp(videoCodecInst.plName, "I420", 7) == 0)
|
|
{
|
|
videoStreamHeader.fccHandler = AviFile::MakeFourCc('I','4','2','0');
|
|
}
|
|
#endif
|
|
#ifdef VIDEOCODEC_VP8
|
|
if (strncmp(videoCodecInst.plName, "VP8", 7) == 0)
|
|
{
|
|
videoStreamHeader.fccHandler = AviFile::MakeFourCc('V','P','8','0');
|
|
}
|
|
#endif
|
|
if (videoStreamHeader.fccHandler == 0)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"InitAviWriting() Codec not supported");
|
|
|
|
return -1;
|
|
}
|
|
videoStreamHeader.dwScale = 1;
|
|
videoStreamHeader.dwRate = videoCodecInst.maxFramerate;
|
|
videoStreamHeader.dwSuggestedBufferSize = videoCodecInst.height *
|
|
(videoCodecInst.width >> 1) * 3;
|
|
videoStreamHeader.dwQuality = (uint32_t)-1;
|
|
videoStreamHeader.dwSampleSize = 0;
|
|
videoStreamHeader.rcFrame.top = 0;
|
|
videoStreamHeader.rcFrame.bottom = videoCodecInst.height;
|
|
videoStreamHeader.rcFrame.left = 0;
|
|
videoStreamHeader.rcFrame.right = videoCodecInst.width;
|
|
|
|
BITMAPINFOHEADER bitMapInfoHeader;
|
|
bitMapInfoHeader.biSize = sizeof(BITMAPINFOHEADER);
|
|
bitMapInfoHeader.biHeight = videoCodecInst.height;
|
|
bitMapInfoHeader.biWidth = videoCodecInst.width;
|
|
bitMapInfoHeader.biPlanes = 1;
|
|
bitMapInfoHeader.biBitCount = 12;
|
|
bitMapInfoHeader.biClrImportant = 0;
|
|
bitMapInfoHeader.biClrUsed = 0;
|
|
bitMapInfoHeader.biCompression = videoStreamHeader.fccHandler;
|
|
bitMapInfoHeader.biSizeImage = bitMapInfoHeader.biWidth *
|
|
bitMapInfoHeader.biHeight * bitMapInfoHeader.biBitCount / 8;
|
|
|
|
if (_aviOutFile->CreateVideoStream(
|
|
videoStreamHeader,
|
|
bitMapInfoHeader,
|
|
NULL,
|
|
0) != 0)
|
|
{
|
|
return -1;
|
|
}
|
|
|
|
if(!videoOnly)
|
|
{
|
|
AVISTREAMHEADER audioStreamHeader;
|
|
audioStreamHeader.fccType = AviFile::MakeFourCc('a', 'u', 'd', 's');
|
|
// fccHandler is the FOURCC of the codec for decoding the stream.
|
|
// It's an optional parameter that is not used by audio streams.
|
|
audioStreamHeader.fccHandler = 0;
|
|
audioStreamHeader.dwScale = 1;
|
|
|
|
WAVEFORMATEX waveFormatHeader;
|
|
waveFormatHeader.cbSize = 0;
|
|
waveFormatHeader.nChannels = 1;
|
|
|
|
if (strncmp(audioCodecInst.plname, "PCMU", 4) == 0)
|
|
{
|
|
audioStreamHeader.dwSampleSize = 1;
|
|
audioStreamHeader.dwRate = 8000;
|
|
audioStreamHeader.dwQuality = (uint32_t)-1;
|
|
audioStreamHeader.dwSuggestedBufferSize = 80;
|
|
|
|
waveFormatHeader.nAvgBytesPerSec = 8000;
|
|
waveFormatHeader.nSamplesPerSec = 8000;
|
|
waveFormatHeader.wBitsPerSample = 8;
|
|
waveFormatHeader.nBlockAlign = 1;
|
|
waveFormatHeader.wFormatTag = kWavFormatMuLaw;
|
|
|
|
} else if (strncmp(audioCodecInst.plname, "PCMA", 4) == 0)
|
|
{
|
|
audioStreamHeader.dwSampleSize = 1;
|
|
audioStreamHeader.dwRate = 8000;
|
|
audioStreamHeader.dwQuality = (uint32_t)-1;
|
|
audioStreamHeader.dwSuggestedBufferSize = 80;
|
|
|
|
waveFormatHeader.nAvgBytesPerSec = 8000;
|
|
waveFormatHeader.nSamplesPerSec = 8000;
|
|
waveFormatHeader.wBitsPerSample = 8;
|
|
waveFormatHeader.nBlockAlign = 1;
|
|
waveFormatHeader.wFormatTag = kWavFormatALaw;
|
|
|
|
} else if (strncmp(audioCodecInst.plname, "L16", 3) == 0)
|
|
{
|
|
audioStreamHeader.dwSampleSize = 2;
|
|
audioStreamHeader.dwRate = audioCodecInst.plfreq;
|
|
audioStreamHeader.dwQuality = (uint32_t)-1;
|
|
audioStreamHeader.dwSuggestedBufferSize =
|
|
(audioCodecInst.plfreq/100) * 2;
|
|
|
|
waveFormatHeader.nAvgBytesPerSec = audioCodecInst.plfreq * 2;
|
|
waveFormatHeader.nSamplesPerSec = audioCodecInst.plfreq;
|
|
waveFormatHeader.wBitsPerSample = 16;
|
|
waveFormatHeader.nBlockAlign = 2;
|
|
waveFormatHeader.wFormatTag = kWavFormatPcm;
|
|
} else
|
|
{
|
|
return -1;
|
|
}
|
|
|
|
if(_aviOutFile->CreateAudioStream(
|
|
audioStreamHeader,
|
|
waveFormatHeader) != 0)
|
|
{
|
|
return -1;
|
|
}
|
|
|
|
|
|
if( InitWavCodec(waveFormatHeader.nSamplesPerSec,
|
|
waveFormatHeader.nChannels,
|
|
waveFormatHeader.wBitsPerSample,
|
|
waveFormatHeader.wFormatTag) != 0)
|
|
{
|
|
return -1;
|
|
}
|
|
}
|
|
_aviOutFile->Create(filename);
|
|
_writing = true;
|
|
return 0;
|
|
}
|
|
|
|
int32_t ModuleFileUtility::WriteAviAudioData(
|
|
const int8_t* buffer,
|
|
size_t bufferLengthInBytes)
|
|
{
|
|
if( _aviOutFile != 0)
|
|
{
|
|
return _aviOutFile->WriteAudio(
|
|
reinterpret_cast<const uint8_t*>(buffer),
|
|
bufferLengthInBytes);
|
|
}
|
|
else
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id, "AVI file not initialized");
|
|
return -1;
|
|
}
|
|
}
|
|
|
|
int32_t ModuleFileUtility::WriteAviVideoData(
|
|
const int8_t* buffer,
|
|
size_t bufferLengthInBytes)
|
|
{
|
|
if( _aviOutFile != 0)
|
|
{
|
|
return _aviOutFile->WriteVideo(
|
|
reinterpret_cast<const uint8_t*>(buffer),
|
|
bufferLengthInBytes);
|
|
}
|
|
else
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id, "AVI file not initialized");
|
|
return -1;
|
|
}
|
|
}
|
|
|
|
|
|
int32_t ModuleFileUtility::CloseAviFile( )
|
|
{
|
|
if( _reading && _aviAudioInFile)
|
|
{
|
|
delete _aviAudioInFile;
|
|
_aviAudioInFile = 0;
|
|
}
|
|
|
|
if( _reading && _aviVideoInFile)
|
|
{
|
|
delete _aviVideoInFile;
|
|
_aviVideoInFile = 0;
|
|
}
|
|
|
|
if( _writing && _aviOutFile)
|
|
{
|
|
delete _aviOutFile;
|
|
_aviOutFile = 0;
|
|
}
|
|
return 0;
|
|
}
|
|
|
|
|
|
int32_t ModuleFileUtility::InitAviReading(const char* filename, bool videoOnly,
|
|
bool loop)
|
|
{
|
|
_reading = false;
|
|
delete _aviVideoInFile;
|
|
_aviVideoInFile = new AviFile( );
|
|
|
|
if ((_aviVideoInFile != 0) && _aviVideoInFile->Open(AviFile::AVI_VIDEO,
|
|
filename, loop) == -1)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
|
|
"Unable to open AVI file (video)");
|
|
return -1;
|
|
}
|
|
|
|
|
|
AVISTREAMHEADER videoInStreamHeader;
|
|
BITMAPINFOHEADER bitmapInfo;
|
|
char codecConfigParameters[AviFile::CODEC_CONFIG_LENGTH] = {};
|
|
int32_t configLength = 0;
|
|
if( _aviVideoInFile->GetVideoStreamInfo(videoInStreamHeader, bitmapInfo,
|
|
codecConfigParameters,
|
|
configLength) != 0)
|
|
{
|
|
return -1;
|
|
}
|
|
_videoCodec.width = static_cast<uint16_t>(
|
|
videoInStreamHeader.rcFrame.right);
|
|
_videoCodec.height = static_cast<uint16_t>(
|
|
videoInStreamHeader.rcFrame.bottom);
|
|
_videoCodec.maxFramerate = static_cast<uint8_t>(
|
|
videoInStreamHeader.dwRate);
|
|
|
|
const size_t plnameLen = sizeof(_videoCodec.plName) / sizeof(char);
|
|
if (bitmapInfo.biCompression == AviFile::MakeFourCc('I','4','2','0'))
|
|
{
|
|
strncpy(_videoCodec.plName, "I420", plnameLen);
|
|
_videoCodec.codecType = kVideoCodecI420;
|
|
}
|
|
else if (bitmapInfo.biCompression ==
|
|
AviFile::MakeFourCc('V', 'P', '8', '0'))
|
|
{
|
|
strncpy(_videoCodec.plName, "VP8", plnameLen);
|
|
_videoCodec.codecType = kVideoCodecVP8;
|
|
}
|
|
else
|
|
{
|
|
return -1;
|
|
}
|
|
|
|
if(!videoOnly)
|
|
{
|
|
delete _aviAudioInFile;
|
|
_aviAudioInFile = new AviFile();
|
|
|
|
if ( (_aviAudioInFile != 0) &&
|
|
_aviAudioInFile->Open(AviFile::AVI_AUDIO, filename, loop) == -1)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
|
|
"Unable to open AVI file (audio)");
|
|
return -1;
|
|
}
|
|
|
|
WAVEFORMATEX waveHeader;
|
|
if(_aviAudioInFile->GetAudioStreamInfo(waveHeader) != 0)
|
|
{
|
|
return -1;
|
|
}
|
|
if(InitWavCodec(waveHeader.nSamplesPerSec, waveHeader.nChannels,
|
|
waveHeader.wBitsPerSample, waveHeader.wFormatTag) != 0)
|
|
{
|
|
return -1;
|
|
}
|
|
}
|
|
_reading = true;
|
|
return 0;
|
|
}
|
|
|
|
int32_t ModuleFileUtility::ReadAviAudioData(
|
|
int8_t* outBuffer,
|
|
size_t bufferLengthInBytes)
|
|
{
|
|
if(_aviAudioInFile == 0)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "AVI file not opened.");
|
|
return -1;
|
|
}
|
|
|
|
if(_aviAudioInFile->ReadAudio(reinterpret_cast<uint8_t*>(outBuffer),
|
|
bufferLengthInBytes) != 0)
|
|
{
|
|
return -1;
|
|
}
|
|
else
|
|
{
|
|
return static_cast<int32_t>(bufferLengthInBytes);
|
|
}
|
|
}
|
|
|
|
int32_t ModuleFileUtility::ReadAviVideoData(
|
|
int8_t* outBuffer,
|
|
size_t bufferLengthInBytes)
|
|
{
|
|
if(_aviVideoInFile == 0)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "AVI file not opened.");
|
|
return -1;
|
|
}
|
|
|
|
if(_aviVideoInFile->ReadVideo(reinterpret_cast<uint8_t*>(outBuffer),
|
|
bufferLengthInBytes) != 0)
|
|
{
|
|
return -1;
|
|
} else {
|
|
return static_cast<int32_t>(bufferLengthInBytes);
|
|
}
|
|
}
|
|
|
|
// Copies the video codec settings of the currently-open AVI file into
// codecInst. Fails with -1 when no file is being read.
int32_t ModuleFileUtility::VideoCodecInst(VideoCodec& codecInst)
{
  WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
               "ModuleFileUtility::CodecInst(codecInst= 0x%x)", &codecInst);

  if (!_reading)
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "CodecInst: not currently reading audio file!");
    return -1;
  }
  memcpy(&codecInst, &_videoCodec, sizeof(VideoCodec));
  return 0;
}
|
|
#endif
|
|
|
|
// Parses the RIFF/WAVE header of the stream: validates the RIFF and WAVE
// tags, walks the chunk list until both the "fmt " and "data" chunks are
// found (skipping unknown chunks), converts the little-endian fields to
// host order byte by byte, validates the format, and computes the 10 ms
// read size in _readSizeBytes. Returns 0 on success, -1 on any failure.
int32_t ModuleFileUtility::ReadWavHeader(InStream& wav)
{
  WAVE_RIFF_header RIFFheaderObj;
  WAVE_CHUNK_header CHUNKheaderObj;
  // TODO (hellner): chunkId and leBytes seem unnecessary here.
  // "FOUR" pre-terminates chunkId at index 4, so copying four ID bytes
  // into [0..3] always leaves a valid C string for strcmp.
  char chunkId[6] = "FOUR";
  unsigned char leBytes[4];
  int32_t i, len;
  bool dataFound = false;
  bool fmtFound = false;
  int8_t dummyRead;

  _dataSize = 0;
  len = wav.Read(&RIFFheaderObj, sizeof(WAVE_RIFF_header));
  if (len != sizeof(WAVE_RIFF_header))
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "Not a wave file (too short)");
    return -1;
  }

  for (i = 0; i < 4; i++)
  {
    chunkId[i] = RIFFheaderObj.ckID[i];
  }
  if (strcmp(chunkId, "RIFF") != 0)
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "Not a wave file (does not have RIFF)");
    return -1;
  }
  for (i = 0; i < 4; i++)
  {
    chunkId[i] = RIFFheaderObj.wave_ckID[i];
  }
  if (strcmp(chunkId, "WAVE") != 0)
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "Not a wave file (does not have WAVE)");
    return -1;
  }

  len = wav.Read(&CHUNKheaderObj, sizeof(WAVE_CHUNK_header));

  // WAVE files are stored little endian; reassemble multi-byte fields
  // byte by byte so the parse also works on big-endian hosts.
  // TODO (hellner): little endian to system byte order should be done in
  // a subroutine.
  memcpy(leBytes, &CHUNKheaderObj.fmt_ckSize, 4);
  CHUNKheaderObj.fmt_ckSize =
      (int32_t)((uint32_t)leBytes[0] +
                (((uint32_t)leBytes[1]) << 8) +
                (((uint32_t)leBytes[2]) << 16) +
                (((uint32_t)leBytes[3]) << 24));

  memcpy(chunkId, CHUNKheaderObj.fmt_ckID, 4);

  while ((len == sizeof(WAVE_CHUNK_header)) && (!fmtFound || !dataFound))
  {
    if (strcmp(chunkId, "fmt ") == 0)
    {
      len = wav.Read(&_wavFormatObj, sizeof(WAVE_FMTINFO_header));

      memcpy(leBytes, &_wavFormatObj.formatTag, 2);
      _wavFormatObj.formatTag =
          (uint32_t)leBytes[0] + (((uint32_t)leBytes[1]) << 8);
      memcpy(leBytes, &_wavFormatObj.nChannels, 2);
      _wavFormatObj.nChannels =
          (int16_t)((uint32_t)leBytes[0] + (((uint32_t)leBytes[1]) << 8));
      memcpy(leBytes, &_wavFormatObj.nSamplesPerSec, 4);
      _wavFormatObj.nSamplesPerSec =
          (int32_t)((uint32_t)leBytes[0] +
                    (((uint32_t)leBytes[1]) << 8) +
                    (((uint32_t)leBytes[2]) << 16) +
                    (((uint32_t)leBytes[3]) << 24));
      memcpy(leBytes, &_wavFormatObj.nAvgBytesPerSec, 4);
      _wavFormatObj.nAvgBytesPerSec =
          (int32_t)((uint32_t)leBytes[0] +
                    (((uint32_t)leBytes[1]) << 8) +
                    (((uint32_t)leBytes[2]) << 16) +
                    (((uint32_t)leBytes[3]) << 24));
      memcpy(leBytes, &_wavFormatObj.nBlockAlign, 2);
      _wavFormatObj.nBlockAlign =
          (int16_t)((uint32_t)leBytes[0] + (((uint32_t)leBytes[1]) << 8));
      memcpy(leBytes, &_wavFormatObj.nBitsPerSample, 2);
      _wavFormatObj.nBitsPerSample =
          (int16_t)((uint32_t)leBytes[0] + (((uint32_t)leBytes[1]) << 8));

      // Skip any fmt-chunk extension bytes beyond the fields read above.
      for (i = 0;
           i < (CHUNKheaderObj.fmt_ckSize -
                (int32_t)sizeof(WAVE_FMTINFO_header));
           i++)
      {
        len = wav.Read(&dummyRead, 1);
        if (len != 1)
        {
          WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                       "File corrupted, reached EOF (reading fmt)");
          return -1;
        }
      }
      fmtFound = true;
    }
    else if (strcmp(chunkId, "data") == 0)
    {
      _dataSize = CHUNKheaderObj.fmt_ckSize;
      dataFound = true;
      break;
    }
    else
    {
      // Unknown chunk: skip its payload byte by byte.
      for (i = 0; i < (CHUNKheaderObj.fmt_ckSize); i++)
      {
        len = wav.Read(&dummyRead, 1);
        if (len != 1)
        {
          WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                       "File corrupted, reached EOF (reading other)");
          return -1;
        }
      }
    }

    // Advance to the next chunk header.
    len = wav.Read(&CHUNKheaderObj, sizeof(WAVE_CHUNK_header));

    memcpy(leBytes, &CHUNKheaderObj.fmt_ckSize, 4);
    CHUNKheaderObj.fmt_ckSize =
        (int32_t)((uint32_t)leBytes[0] +
                  (((uint32_t)leBytes[1]) << 8) +
                  (((uint32_t)leBytes[2]) << 16) +
                  (((uint32_t)leBytes[3]) << 24));

    memcpy(chunkId, CHUNKheaderObj.fmt_ckID, 4);
  }

  // At this point either a proper format chunk has been read or a data
  // chunk was come across. Validate the parsed format.
  if ((_wavFormatObj.formatTag != kWavFormatPcm) &&
      (_wavFormatObj.formatTag != kWavFormatALaw) &&
      (_wavFormatObj.formatTag != kWavFormatMuLaw))
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "Coding formatTag value=%d not supported!",
                 _wavFormatObj.formatTag);
    return -1;
  }
  if ((_wavFormatObj.nChannels < 1) ||
      (_wavFormatObj.nChannels > 2))
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "nChannels value=%d not supported!",
                 _wavFormatObj.nChannels);
    return -1;
  }

  if ((_wavFormatObj.nBitsPerSample != 8) &&
      (_wavFormatObj.nBitsPerSample != 16))
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "nBitsPerSample value=%d not supported!",
                 _wavFormatObj.nBitsPerSample);
    return -1;
  }

  // Calculate the number of bytes that 10 ms of audio data correspond to.
  if (_wavFormatObj.formatTag == kWavFormatPcm)
  {
    // TODO (hellner): integer division for 22050 and 11025 would yield
    //                 the same result as the else statement. Remove those
    //                 special cases?
    if (_wavFormatObj.nSamplesPerSec == 44100)
    {
      _readSizeBytes = 440 * _wavFormatObj.nChannels *
          (_wavFormatObj.nBitsPerSample / 8);
    }
    else if (_wavFormatObj.nSamplesPerSec == 22050)
    {
      _readSizeBytes = 220 * _wavFormatObj.nChannels *
          (_wavFormatObj.nBitsPerSample / 8);
    }
    else if (_wavFormatObj.nSamplesPerSec == 11025)
    {
      _readSizeBytes = 110 * _wavFormatObj.nChannels *
          (_wavFormatObj.nBitsPerSample / 8);
    }
    else
    {
      _readSizeBytes = (_wavFormatObj.nSamplesPerSec / 100) *
          _wavFormatObj.nChannels * (_wavFormatObj.nBitsPerSample / 8);
    }
  }
  else
  {
    _readSizeBytes = (_wavFormatObj.nSamplesPerSec / 100) *
        _wavFormatObj.nChannels * (_wavFormatObj.nBitsPerSample / 8);
  }
  return 0;
}
|
|
|
|
int32_t ModuleFileUtility::InitWavCodec(uint32_t samplesPerSec,
|
|
uint32_t channels,
|
|
uint32_t bitsPerSample,
|
|
uint32_t formatTag)
|
|
{
|
|
codec_info_.pltype = -1;
|
|
codec_info_.plfreq = samplesPerSec;
|
|
codec_info_.channels = channels;
|
|
codec_info_.rate = bitsPerSample * samplesPerSec;
|
|
|
|
// Calculate the packet size for 10ms frames
|
|
switch(formatTag)
|
|
{
|
|
case kWavFormatALaw:
|
|
strcpy(codec_info_.plname, "PCMA");
|
|
_codecId = kCodecPcma;
|
|
codec_info_.pltype = 8;
|
|
codec_info_.pacsize = codec_info_.plfreq / 100;
|
|
break;
|
|
case kWavFormatMuLaw:
|
|
strcpy(codec_info_.plname, "PCMU");
|
|
_codecId = kCodecPcmu;
|
|
codec_info_.pltype = 0;
|
|
codec_info_.pacsize = codec_info_.plfreq / 100;
|
|
break;
|
|
case kWavFormatPcm:
|
|
codec_info_.pacsize = (bitsPerSample * (codec_info_.plfreq / 100)) / 8;
|
|
if(samplesPerSec == 8000)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
_codecId = kCodecL16_8Khz;
|
|
}
|
|
else if(samplesPerSec == 16000)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
_codecId = kCodecL16_16kHz;
|
|
}
|
|
else if(samplesPerSec == 32000)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
_codecId = kCodecL16_32Khz;
|
|
}
|
|
// Set the packet size for "odd" sampling frequencies so that it
|
|
// properly corresponds to _readSizeBytes.
|
|
else if(samplesPerSec == 11025)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
_codecId = kCodecL16_16kHz;
|
|
codec_info_.pacsize = 110;
|
|
codec_info_.plfreq = 11000;
|
|
}
|
|
else if(samplesPerSec == 22050)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
_codecId = kCodecL16_16kHz;
|
|
codec_info_.pacsize = 220;
|
|
codec_info_.plfreq = 22000;
|
|
}
|
|
else if(samplesPerSec == 44100)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
_codecId = kCodecL16_16kHz;
|
|
codec_info_.pacsize = 440;
|
|
codec_info_.plfreq = 44000;
|
|
}
|
|
else if(samplesPerSec == 48000)
|
|
{
|
|
strcpy(codec_info_.plname, "L16");
|
|
_codecId = kCodecL16_16kHz;
|
|
codec_info_.pacsize = 480;
|
|
codec_info_.plfreq = 48000;
|
|
}
|
|
else
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"Unsupported PCM frequency!");
|
|
return -1;
|
|
}
|
|
break;
|
|
default:
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"unknown WAV format TAG!");
|
|
return -1;
|
|
break;
|
|
}
|
|
return 0;
|
|
}
|
|
|
|
// Initializes WAV playback on the stream: parses the header, skips forward
// to the start position (milliseconds) by consuming 10 ms frames, sets up
// the codec state, and records the playback window [start, stop].
// Returns 0 on success, -1 on failure.
int32_t ModuleFileUtility::InitWavReading(InStream& wav,
                                          const uint32_t start,
                                          const uint32_t stop)
{
  _reading = false;

  if (ReadWavHeader(wav) == -1)
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "failed to read WAV header!");
    return -1;
  }

  _playoutPositionMs = 0;
  _readPos = 0;

  if (start > 0)
  {
    uint8_t dummy[WAV_MAX_BUFFER_SIZE];
    int32_t readLength;
    if (_readSizeBytes > WAV_MAX_BUFFER_SIZE)
    {
      return -1;
    }
    // Discard full 10 ms frames until the start position is reached.
    while (_playoutPositionMs < start)
    {
      readLength = wav.Read(dummy, _readSizeBytes);
      if (readLength == _readSizeBytes)
      {
        _readPos += readLength;
        _playoutPositionMs += 10;
      }
      else  // Must have reached EOF before start position!
      {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "InitWavReading(), EOF before start position");
        return -1;
      }
    }
  }
  if (InitWavCodec(_wavFormatObj.nSamplesPerSec, _wavFormatObj.nChannels,
                   _wavFormatObj.nBitsPerSample,
                   _wavFormatObj.formatTag) != 0)
  {
    return -1;
  }
  _bytesPerSample = _wavFormatObj.nBitsPerSample / 8;

  _startPointInMs = start;
  _stopPointInMs = stop;
  _reading = true;
  return 0;
}
|
|
|
|
// Reads one 10 ms frame from the WAV stream and delivers it as mono. For a
// stereo file the frame is read into _tempData and downmixed (per-sample
// average of left and right, rounded to nearest). Returns the number of
// bytes written to outData, 0 on clean EOF, -1 on error.
int32_t ModuleFileUtility::ReadWavDataAsMono(
    InStream& wav,
    int8_t* outData,
    const size_t bufferSize)
{
  WEBRTC_TRACE(
      kTraceStream,
      kTraceFile,
      _id,
      "ModuleFileUtility::ReadWavDataAsMono(wav= 0x%x, outData= 0x%d, "
      "bufSize= %" PRIuS ")",
      &wav,
      outData,
      bufferSize);

  // The number of bytes that should be read from file.
  const uint32_t totalBytesNeeded = _readSizeBytes;
  // The number of bytes that will be written to outData (half the frame
  // when downmixing stereo).
  const uint32_t bytesRequested = (codec_info_.channels == 2) ?
      totalBytesNeeded >> 1 : totalBytesNeeded;
  if (bufferSize < bytesRequested)
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "ReadWavDataAsMono: output buffer is too short!");
    return -1;
  }
  if (outData == NULL)
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "ReadWavDataAsMono: output buffer NULL!");
    return -1;
  }

  if (!_reading)
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "ReadWavDataAsMono: no longer reading file.");
    return -1;
  }

  // Stereo frames land in _tempData for downmixing; mono goes straight to
  // the caller's buffer.
  int32_t bytesRead = ReadWavData(
      wav,
      (codec_info_.channels == 2) ? _tempData : (uint8_t*)outData,
      totalBytesNeeded);
  if (bytesRead == 0)
  {
    return 0;
  }
  if (bytesRead < 0)
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "ReadWavDataAsMono: failed to read data from WAV file.");
    return -1;
  }
  // Output data should be mono: downmix interleaved stereo in place.
  if (codec_info_.channels == 2)
  {
    for (uint32_t i = 0; i < bytesRequested / _bytesPerSample; i++)
    {
      // Sample value is the average of left and right buffer rounded to
      // closest integer value. Note samples can be either 1 or 2 byte.
      if (_bytesPerSample == 1)
      {
        _tempData[i] = ((_tempData[2 * i] + _tempData[(2 * i) + 1] +
                         1) >> 1);
      }
      else
      {
        int16_t* sampleData = (int16_t*) _tempData;
        sampleData[i] = ((sampleData[2 * i] + sampleData[(2 * i) + 1] +
                          1) >> 1);
      }
    }
    memcpy(outData, _tempData, bytesRequested);
  }
  return bytesRequested;
}
|
|
|
|
// Reads one 10 ms frame from a stereo WAV stream and de-interleaves it into
// separate left and right buffers. Samples may be 1 or 2 bytes wide.
// Returns the number of bytes written to each output buffer, -1 on error.
//
// Fix: the error messages previously named the wrong function
// ("ReadWavDataAsMono", "ReadWavData", "ReadWavStereoData"), which made the
// trace log misleading; they now all identify ReadWavDataAsStereo.
int32_t ModuleFileUtility::ReadWavDataAsStereo(
    InStream& wav,
    int8_t* outDataLeft,
    int8_t* outDataRight,
    const size_t bufferSize)
{
  WEBRTC_TRACE(
      kTraceStream,
      kTraceFile,
      _id,
      "ModuleFileUtility::ReadWavDataAsStereo(wav= 0x%x, outLeft= 0x%x, "
      "outRight= 0x%x, bufSize= %" PRIuS ")",
      &wav,
      outDataLeft,
      outDataRight,
      bufferSize);

  if ((outDataLeft == NULL) ||
      (outDataRight == NULL))
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "ReadWavDataAsStereo: an input buffer is NULL!");
    return -1;
  }
  if (codec_info_.channels != 2)
  {
    WEBRTC_TRACE(
        kTraceError,
        kTraceFile,
        _id,
        "ReadWavDataAsStereo: WAV file does not contain stereo data!");
    return -1;
  }
  if (!_reading)
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "ReadWavDataAsStereo: no longer reading file.");
    return -1;
  }

  // The number of bytes that should be read from file.
  const uint32_t totalBytesNeeded = _readSizeBytes;
  // The number of bytes that will be written to the left and the right
  // buffers (half the interleaved frame each).
  const uint32_t bytesRequested = totalBytesNeeded >> 1;
  if (bufferSize < bytesRequested)
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "ReadWavDataAsStereo: Output buffers are too short!");
    assert(false);
    return -1;
  }

  int32_t bytesRead = ReadWavData(wav, _tempData, totalBytesNeeded);
  if (bytesRead <= 0)
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "ReadWavDataAsStereo: failed to read data from WAV file.");
    return -1;
  }

  // Turn interleaved audio to left and right buffer. Note samples can be
  // either 1 or 2 bytes
  if (_bytesPerSample == 1)
  {
    for (uint32_t i = 0; i < bytesRequested; i++)
    {
      outDataLeft[i] = _tempData[2 * i];
      outDataRight[i] = _tempData[(2 * i) + 1];
    }
  }
  else if (_bytesPerSample == 2)
  {
    int16_t* sampleData = reinterpret_cast<int16_t*>(_tempData);
    int16_t* outLeft = reinterpret_cast<int16_t*>(outDataLeft);
    int16_t* outRight = reinterpret_cast<int16_t*>(outDataRight);

    // Bytes requested to samples requested.
    uint32_t sampleCount = bytesRequested >> 1;
    for (uint32_t i = 0; i < sampleCount; i++)
    {
      outLeft[i] = sampleData[2 * i];
      outRight[i] = sampleData[(2 * i) + 1];
    }
  }
  else
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "ReadWavDataAsStereo: unsupported sample size %d!",
                 _bytesPerSample);
    assert(false);
    return -1;
  }
  return bytesRequested;
}
|
|
|
|
// Low-level WAV frame read: fetches dataLengthInBytes from the stream into
// buffer, rewinding and re-initializing (looping) when fewer bytes than a
// full frame remain or when the stop position is reached. Returns the
// number of bytes read, 0 when the stream cannot be rewound (non-looping
// end of file), or -1 on error.
int32_t ModuleFileUtility::ReadWavData(
    InStream& wav,
    uint8_t* buffer,
    const uint32_t dataLengthInBytes)
{
  WEBRTC_TRACE(
      kTraceStream,
      kTraceFile,
      _id,
      "ModuleFileUtility::ReadWavData(wav= 0x%x, buffer= 0x%x, dataLen= %ld)",
      &wav,
      buffer,
      dataLengthInBytes);

  if (buffer == NULL)
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "ReadWavDataAsMono: output buffer NULL!");
    return -1;
  }

  // Make sure that a read won't return too few samples.
  // TODO (hellner): why not read the remaining bytes needed from the start
  //                 of the file?
  if ((_dataSize - _readPos) < (int32_t)dataLengthInBytes)
  {
    // Rewind() being -1 may be due to the file not supposed to be looped.
    if (wav.Rewind() == -1)
    {
      _reading = false;
      return 0;
    }
    if (InitWavReading(wav, _startPointInMs, _stopPointInMs) == -1)
    {
      _reading = false;
      return -1;
    }
  }

  int32_t bytesRead = wav.Read(buffer, dataLengthInBytes);
  if (bytesRead < 0)
  {
    _reading = false;
    return -1;
  }

  // This should never happen due to earlier sanity checks.
  // TODO (hellner): change to an assert and fail here since this should
  //                 never happen...
  if (bytesRead < (int32_t)dataLengthInBytes)
  {
    if ((wav.Rewind() == -1) ||
        (InitWavReading(wav, _startPointInMs, _stopPointInMs) == -1))
    {
      _reading = false;
      return -1;
    }
    bytesRead = wav.Read(buffer, dataLengthInBytes);
    if (bytesRead < (int32_t)dataLengthInBytes)
    {
      _reading = false;
      return -1;
    }
  }

  _readPos += bytesRead;

  // TODO (hellner): Why is dataLengthInBytes let dictate the number of bytes
  //                 to read when exactly 10ms should be read?!
  _playoutPositionMs += 10;
  if ((_stopPointInMs > 0) &&
      (_playoutPositionMs >= _stopPointInMs))
  {
    // Loop back to the start point; a failed rewind ends playback.
    if ((wav.Rewind() == -1) ||
        (InitWavReading(wav, _startPointInMs, _stopPointInMs) == -1))
    {
      _reading = false;
    }
  }
  return bytesRead;
}
|
|
|
|
// Prepares WAV writing for the given codec (PCMU, PCMA or L16): records
// the codec, writes a header with zero data length (patched later by
// UpdateWavHeader) and resets the byte counter. Returns 0 on success.
int32_t ModuleFileUtility::InitWavWriting(OutStream& wav,
                                          const CodecInst& codecInst)
{
  if (set_codec_info(codecInst) != 0)
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "codecInst identifies unsupported codec!");
    return -1;
  }
  _writing = false;
  // Treat an unset channel count as mono.
  uint32_t channels = (codecInst.channels == 0) ? 1 : codecInst.channels;

  if (STR_CASE_CMP(codecInst.plname, "PCMU") == 0)
  {
    _bytesPerSample = 1;
    if (WriteWavHeader(wav, 8000, _bytesPerSample, channels,
                       kWavFormatMuLaw, 0) == -1)
    {
      return -1;
    }
  }
  else if (STR_CASE_CMP(codecInst.plname, "PCMA") == 0)
  {
    _bytesPerSample = 1;
    if (WriteWavHeader(wav, 8000, _bytesPerSample, channels,
                       kWavFormatALaw, 0) == -1)
    {
      return -1;
    }
  }
  else if (STR_CASE_CMP(codecInst.plname, "L16") == 0)
  {
    _bytesPerSample = 2;
    if (WriteWavHeader(wav, codecInst.plfreq, _bytesPerSample, channels,
                       kWavFormatPcm, 0) == -1)
    {
      return -1;
    }
  }
  else
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "codecInst identifies unsupported codec for WAV file!");
    return -1;
  }
  _writing = true;
  _bytesWritten = 0;
  return 0;
}
|
|
|
|
// Appends dataLength raw bytes to the WAV output stream, tracking the
// total written so UpdateWavHeader() can patch the header later.
// Returns the number of bytes written, or -1 on error.
int32_t ModuleFileUtility::WriteWavData(OutStream& out,
                                        const int8_t* buffer,
                                        const size_t dataLength)
{
  WEBRTC_TRACE(
      kTraceStream,
      kTraceFile,
      _id,
      "ModuleFileUtility::WriteWavData(out= 0x%x, buf= 0x%x, dataLen= %" PRIuS
      ")",
      &out,
      buffer,
      dataLength);

  if (buffer == NULL)
  {
    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "WriteWavData: input buffer NULL!");
    return -1;
  }

  if (!out.Write(buffer, dataLength))
  {
    return -1;
  }
  _bytesWritten += dataLength;
  return static_cast<int32_t>(dataLength);
}
|
|
|
|
|
|
// Writes (or rewrites) a canonical WAV header to the stream, truncating
// lengthInBytes down to a whole number of 10 ms frames.
//
// Fixes: the return value of wav.Write() was ignored (a failed header
// write was reported as success, unlike WriteWavData which checks it), and
// freq < 100 would make frameSize zero and divide by zero below.
// Returns 0 on success, -1 on invalid frequency or write failure.
int32_t ModuleFileUtility::WriteWavHeader(
    OutStream& wav,
    const uint32_t freq,
    const uint32_t bytesPerSample,
    const uint32_t channels,
    const uint32_t format,
    const uint32_t lengthInBytes)
{
  // Frame size in bytes for 10 ms of audio.
  // TODO (hellner): 44.1 kHz has 440 samples frame size. Doesn't seem to
  //                 be taken into consideration here!
  const int32_t frameSize = (freq / 100) * channels;
  if (frameSize <= 0)
  {
    return -1;  // Guard the division below against freq < 100.
  }

  // Calculate the number of full frames that the wave file contain.
  const int32_t dataLengthInBytes = frameSize * (lengthInBytes / frameSize);

  uint8_t buf[kWavHeaderSize];
  webrtc::WriteWavHeader(buf, channels, freq, static_cast<WavFormat>(format),
                         bytesPerSample, dataLengthInBytes / bytesPerSample);
  // Propagate write failure instead of silently returning success.
  if (!wav.Write(buf, kWavHeaderSize))
  {
    return -1;
  }
  return 0;
}
|
|
|
|
// Rewinds the output stream and rewrites the WAV header with the actual
// number of bytes written. A no-op (returning 0) when the current codec is
// not a WAV codec. Returns the header-write result, or -1 if the rewind
// fails.
int32_t ModuleFileUtility::UpdateWavHeader(OutStream& wav)
{
  int32_t res = -1;
  if (wav.Rewind() == -1)
  {
    return -1;
  }
  // Treat an unset channel count as mono.
  uint32_t channels = (codec_info_.channels == 0) ? 1 : codec_info_.channels;

  if (STR_CASE_CMP(codec_info_.plname, "L16") == 0)
  {
    res = WriteWavHeader(wav, codec_info_.plfreq, 2, channels,
                         kWavFormatPcm, _bytesWritten);
  }
  else if (STR_CASE_CMP(codec_info_.plname, "PCMU") == 0)
  {
    res = WriteWavHeader(wav, 8000, 1, channels, kWavFormatMuLaw,
                         _bytesWritten);
  }
  else if (STR_CASE_CMP(codec_info_.plname, "PCMA") == 0)
  {
    res = WriteWavHeader(wav, 8000, 1, channels, kWavFormatALaw,
                         _bytesWritten);
  }
  else
  {
    // Allow calling this API even if not writing to a WAVE file.
    // TODO (hellner): why?!
    return 0;
  }
  return res;
}
|
|
|
|
|
|
// Prepares reading of a pre-encoded file. The file's first byte stores the
// codec identifier; it must match the codec described by |cinst|.
// Returns 0 on success, -1 if |cinst| is unrecognized or does not match
// the codec recorded in the file.
int32_t ModuleFileUtility::InitPreEncodedReading(InStream& in,
                                                 const CodecInst& cinst)
{
    // Consume the leading codec-identifier byte from the stream.
    uint8_t storedCodecId;
    in.Read(&storedCodecId, 1);

    const MediaFileUtility_CodecType fileCodec =
        static_cast<MediaFileUtility_CodecType>(storedCodecId);

    if(set_codec_info(cinst) != 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "Pre-encoded file send codec mismatch!");
        return -1;
    }
    if(fileCodec != _codecId)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "Pre-encoded file format codec mismatch!");
        return -1;
    }
    memcpy(&codec_info_, &cinst, sizeof(CodecInst));
    _reading = true;
    return 0;
}
|
|
|
|
// Reads one encoded frame from a pre-encoded file into |outData|.
// Each frame is stored as a two-byte little-endian length header followed
// by the payload. On EOF the file is rewound (skipping the one-byte codec
// identifier) and reading restarts from the first frame.
// Returns the number of payload bytes read, or -1 on error.
int32_t ModuleFileUtility::ReadPreEncodedData(
    InStream& in,
    int8_t* outData,
    const size_t bufferSize)
{
    WEBRTC_TRACE(
        kTraceStream,
        kTraceFile,
        _id,
        "ModuleFileUtility::ReadPreEncodedData(in= 0x%x, outData= 0x%x, "
        "bufferSize= %" PRIuS ")",
        &in,
        outData,
        bufferSize);

    if(outData == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "output buffer NULL");
        // Bug fix: previously only logged and fell through, dereferencing
        // the NULL buffer in in.Read() below.
        return -1;
    }

    uint32_t frameLen;
    uint8_t buf[64];
    // Each frame has a two byte header containing the frame length.
    int32_t res = in.Read(buf, 2);
    if(res != 2)
    {
        // Assume EOF: rewind and loop back to the first frame.
        if(!in.Rewind())
        {
            // The first byte is the codec identifier.
            in.Read(buf, 1);
            res = in.Read(buf, 2);
            // Bug fix: previously a truncated/empty file fell through here
            // and interpreted stale buffer bytes as a frame length.
            if(res != 2)
            {
                return -1;
            }
        }
        else
        {
            return -1;
        }
    }
    frameLen = buf[0] + buf[1] * 256;
    if(bufferSize < frameLen)
    {
        WEBRTC_TRACE(
            kTraceError,
            kTraceFile,
            _id,
            "buffer not large enough to read %d bytes of pre-encoded data!",
            frameLen);
        return -1;
    }
    return in.Read(outData, frameLen);
}
|
|
|
|
// Prepares writing of a pre-encoded file for the codec described by
// |codecInst|. Writes the one-byte codec identifier as the file's first
// byte and accounts for it in _bytesWritten.
// Returns 0 on success, -1 if the codec is not recognized.
int32_t ModuleFileUtility::InitPreEncodedWriting(
    OutStream& out,
    const CodecInst& codecInst)
{
    if(set_codec_info(codecInst) != 0)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "CodecInst not recognized!");
        return -1;
    }
    // The file starts with the codec identifier byte.
    out.Write(&_codecId, 1);
    _bytesWritten = 1;
    _writing = true;
    return 0;
}
|
|
|
|
// Writes one encoded frame to a pre-encoded file: a two-byte little-endian
// length header followed by |dataLength| bytes of payload.
// Returns the total number of bytes written (header + payload), or -1 if
// the buffer is NULL, the frame is too large for the 16-bit header, or a
// stream write fails.
int32_t ModuleFileUtility::WritePreEncodedData(
    OutStream& out,
    const int8_t* buffer,
    const size_t dataLength)
{
    WEBRTC_TRACE(
        kTraceStream,
        kTraceFile,
        _id,
        "ModuleFileUtility::WritePreEncodedData(out= 0x%x, inData= 0x%x, "
        "dataLen= %" PRIuS ")",
        &out,
        buffer,
        dataLength);

    if(buffer == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,"buffer NULL");
        // Bug fix: previously only logged; out.Write(NULL, ...) below would
        // dereference the NULL buffer.
        return -1;
    }

    // The first two bytes is the size of the frame. Frames larger than
    // INT16_MAX cannot be represented in the header.
    if(dataLength > static_cast<size_t>(std::numeric_limits<int16_t>::max()))
    {
        return -1;
    }
    int16_t lengthBuf = (int16_t)dataLength;
    if(!out.Write(&lengthBuf, 2))
    {
        return -1;
    }
    size_t bytesWritten = 2;

    if(!out.Write(buffer, dataLength))
    {
        return -1;
    }
    bytesWritten += dataLength;
    return static_cast<int32_t>(bytesWritten);
}
|
|
|
|
// Prepares reading of an iLBC "compressed" file. Parses the one-line magic
// header ("#!iLBC20\n" or "#!iLBC30\n") to determine the codec mode, fills
// in codec_info_, and skips ahead to the requested start position.
// in:    stream positioned at the beginning of the file.
// start: playout start position in ms; data before it is skipped.
// stop:  playout stop position in ms (0 = play to end of file).
// Returns 0 on success, -1 on malformed header, unsupported codec, or EOF
// reached before |start|.
int32_t ModuleFileUtility::InitCompressedReading(
    InStream& in,
    const uint32_t start,
    const uint32_t stop)
{
    WEBRTC_TRACE(
        kTraceDebug,
        kTraceFile,
        _id,
        "ModuleFileUtility::InitCompressedReading(in= 0x%x, start= %d,\
stop= %d)",
        &in,
        start,
        stop);

#if defined(WEBRTC_CODEC_ILBC)
    int16_t read_len = 0;
#endif
    // Reset playout state; _codecId stays kCodecNoCodec unless a known
    // magic header is found below.
    _codecId = kCodecNoCodec;
    _playoutPositionMs = 0;
    _reading = false;

    _startPointInMs = start;
    _stopPointInMs = stop;

    // Read the codec name: the first line of the file is a '\n'-terminated
    // magic string of at most 64 bytes.
    int32_t cnt = 0;
    char buf[64];
    do
    {
        in.Read(&buf[cnt++], 1);
    } while ((buf[cnt-1] != '\n') && (64 > cnt));

    if(cnt==64)
    {
        // No newline within 64 bytes: not a recognized compressed file.
        return -1;
    } else {
        // NUL-terminate for the strcmp() calls below.
        buf[cnt]=0;
    }

#ifdef WEBRTC_CODEC_ILBC
    if(!strcmp("#!iLBC20\n", buf))
    {
        // iLBC 20 ms mode: 160-sample packets, 38 bytes per frame.
        codec_info_.pltype = 102;
        strcpy(codec_info_.plname, "ilbc");
        codec_info_.plfreq = 8000;
        codec_info_.pacsize = 160;
        codec_info_.channels = 1;
        codec_info_.rate = 13300;
        _codecId = kCodecIlbc20Ms;

        if(_startPointInMs > 0)
        {
            // Skip whole 38-byte frames (20 ms each) up to the start point.
            while (_playoutPositionMs <= _startPointInMs)
            {
                read_len = in.Read(buf, 38);
                if(read_len == 38)
                {
                    _playoutPositionMs += 20;
                }
                else
                {
                    // EOF before the requested start position.
                    return -1;
                }
            }
        }
    }

    if(!strcmp("#!iLBC30\n", buf))
    {
        // iLBC 30 ms mode: 240-sample packets, 50 bytes per frame.
        codec_info_.pltype = 102;
        strcpy(codec_info_.plname, "ilbc");
        codec_info_.plfreq = 8000;
        codec_info_.pacsize = 240;
        codec_info_.channels = 1;
        codec_info_.rate = 13300;
        _codecId = kCodecIlbc30Ms;

        if(_startPointInMs > 0)
        {
            // Skip whole 50-byte frames up to the start point.
            // NOTE(review): position is advanced by 20 ms per 30 ms frame
            // here -- looks like a long-standing quirk; confirm before
            // relying on start-position accuracy for 30 ms files.
            while (_playoutPositionMs <= _startPointInMs)
            {
                read_len = in.Read(buf, 50);
                if(read_len == 50)
                {
                    _playoutPositionMs += 20;
                }
                else
                {
                    // EOF before the requested start position.
                    return -1;
                }
            }
        }
    }
#endif
    if(_codecId == kCodecNoCodec)
    {
        // Header did not match any compiled-in compressed codec.
        return -1;
    }
    _reading = true;
    return 0;
}
|
|
|
|
// Reads one iLBC frame from a compressed file opened with
// InitCompressedReading(). At EOF or at the configured stop point the file
// is rewound and re-initialized so playout loops.
// Returns the number of bytes placed in |outData|, or -1 on failure.
int32_t ModuleFileUtility::ReadCompressedData(InStream& in,
                                              int8_t* outData,
                                              size_t bufferSize)
{
    WEBRTC_TRACE(
        kTraceStream,
        kTraceFile,
        _id,
        "ModuleFileUtility::ReadCompressedData(in=0x%x, outData=0x%x, bytes=%"
        PRIuS ")",
        &in,
        outData,
        bufferSize);

    uint32_t bytesRead = 0;

    if(! _reading)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "not currently reading!");
        return -1;
    }

#ifdef WEBRTC_CODEC_ILBC
    if((_codecId == kCodecIlbc20Ms) ||
        (_codecId == kCodecIlbc30Ms))
    {
        // Frame sizes per RFC 3951: 38 bytes per 20 ms frame, 50 bytes per
        // 30 ms frame.
        uint32_t byteSize = 0;
        if(_codecId == kCodecIlbc30Ms)
        {
            byteSize = 50;
        }
        if(_codecId == kCodecIlbc20Ms)
        {
            byteSize = 38;
        }
        if(bufferSize < byteSize)
        {
            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                         "output buffer is too short to read ILBC compressed\
data.");
            assert(false);
            return -1;
        }

        bytesRead = in.Read(outData, byteSize);
        if(bytesRead != byteSize)
        {
            // Partial frame: assume EOF. Rewind, re-initialize (which also
            // skips back to the start point) and retry once.
            if(!in.Rewind())
            {
                InitCompressedReading(in, _startPointInMs, _stopPointInMs);
                bytesRead = in.Read(outData, byteSize);
                if(bytesRead != byteSize)
                {
                    _reading = false;
                    return -1;
                }
            }
            else
            {
                _reading = false;
                return -1;
            }
        }
    }
#endif
    if(bytesRead == 0)
    {
        // Reached only when no compiled-in codec branch handled _codecId.
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "ReadCompressedData() no bytes read, codec not supported");
        return -1;
    }

    // NOTE(review): position advances by 20 ms even for the 30 ms iLBC
    // mode -- looks like a long-standing quirk; confirm before relying on
    // playout position for 30 ms files.
    _playoutPositionMs += 20;
    if((_stopPointInMs > 0) &&
        (_playoutPositionMs >= _stopPointInMs))
    {
        // Passed the stop point: loop back to the start point.
        if(!in.Rewind())
        {
            InitCompressedReading(in, _startPointInMs, _stopPointInMs);
        }
        else
        {
            _reading = false;
        }
    }
    return bytesRead;
}
|
|
|
|
// Prepares writing of an iLBC compressed file: writes the magic header
// line matching the 20 ms or 30 ms mode selected by |codecInst|.pacsize.
// Returns 0 on success, -1 if the codec is not a supported iLBC mode (or
// iLBC support is not compiled in).
int32_t ModuleFileUtility::InitCompressedWriting(
    OutStream& out,
    const CodecInst& codecInst)
{
    WEBRTC_TRACE(kTraceDebug, kTraceFile, _id,
                 "ModuleFileUtility::InitCompressedWriting(out= 0x%x,\
codecName= %s)",
                 &out, codecInst.plname);

    _writing = false;

#ifdef WEBRTC_CODEC_ILBC
    if(STR_CASE_CMP(codecInst.plname, "ilbc") == 0)
    {
        if(codecInst.pacsize == 160)
        {
            // 160 samples @ 8 kHz = 20 ms frames.
            _codecId = kCodecIlbc20Ms;
            out.Write("#!iLBC20\n",9);
        }
        else if(codecInst.pacsize == 240)
        {
            // 240 samples @ 8 kHz = 30 ms frames.
            _codecId = kCodecIlbc30Ms;
            out.Write("#!iLBC30\n",9);
        }
        else
        {
            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                         "codecInst defines unsupported compression codec!");
            return -1;
        }
        memcpy(&codec_info_,&codecInst,sizeof(CodecInst));
        _writing = true;
        return 0;
    }
#endif

    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                 "codecInst defines unsupported compression codec!");
    return -1;
}
|
|
|
|
// Writes one encoded frame to a compressed file. Frames are stored
// back-to-back with no per-frame header (the codec mode was recorded by
// InitCompressedWriting()).
// Returns the number of bytes written, or -1 on NULL buffer / write error.
int32_t ModuleFileUtility::WriteCompressedData(
    OutStream& out,
    const int8_t* buffer,
    const size_t dataLength)
{
    WEBRTC_TRACE(
        kTraceStream,
        kTraceFile,
        _id,
        "ModuleFileUtility::WriteCompressedData(out= 0x%x, buf= 0x%x, "
        "dataLen= %" PRIuS ")",
        &out,
        buffer,
        dataLength);

    if(buffer == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,"buffer NULL");
        // Bug fix: previously only logged; out.Write(NULL, ...) below would
        // dereference the NULL buffer.
        return -1;
    }

    if(!out.Write(buffer, dataLength))
    {
        return -1;
    }
    return static_cast<int32_t>(dataLength);
}
|
|
|
|
// Prepares reading of a raw L16 PCM file at 8, 16 or 32 kHz.
// pcm:   stream positioned at the beginning of the file.
// start: playout start position in ms; audio before it is skipped.
// stop:  playout stop position in ms (0 = play to end of file).
// Returns 0 on success, -1 on unsupported frequency or EOF before |start|.
int32_t ModuleFileUtility::InitPCMReading(InStream& pcm,
                                          const uint32_t start,
                                          const uint32_t stop,
                                          uint32_t freq)
{
    WEBRTC_TRACE(
        kTraceInfo,
        kTraceFile,
        _id,
        "ModuleFileUtility::InitPCMReading(pcm= 0x%x, start=%d, stop=%d,\
freq=%d)",
        &pcm,
        start,
        stop,
        freq);

    int8_t dummy[320];
    int32_t read_len;

    _playoutPositionMs = 0;
    _startPointInMs = start;
    _stopPointInMs = stop;
    _reading = false;

    if(freq == 8000)
    {
        strcpy(codec_info_.plname, "L16");
        codec_info_.pltype = -1;
        codec_info_.plfreq = 8000;
        codec_info_.pacsize = 160;
        codec_info_.channels = 1;
        codec_info_.rate = 128000;
        _codecId = kCodecL16_8Khz;
    }
    else if(freq == 16000)
    {
        strcpy(codec_info_.plname, "L16");
        codec_info_.pltype = -1;
        codec_info_.plfreq = 16000;
        codec_info_.pacsize = 320;
        codec_info_.channels = 1;
        codec_info_.rate = 256000;
        _codecId = kCodecL16_16kHz;
    }
    else if(freq == 32000)
    {
        strcpy(codec_info_.plname, "L16");
        codec_info_.pltype = -1;
        codec_info_.plfreq = 32000;
        codec_info_.pacsize = 320;
        codec_info_.channels = 1;
        codec_info_.rate = 512000;
        _codecId = kCodecL16_32Khz;
    }
    else
    {
        // Bug fix: an unsupported rate previously fell through and computed
        // the read size from a stale codec_info_.plfreq.
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "InitPCMReading: unsupported sample rate!");
        return -1;
    }

    // Readsize for 10ms of audio data (2 bytes per sample).
    _readSizeBytes = 2 * codec_info_.plfreq / 100;
    if(_startPointInMs > 0)
    {
        // Skip whole 10 ms frames until the start point is reached.
        while (_playoutPositionMs < _startPointInMs)
        {
            read_len = pcm.Read(dummy, _readSizeBytes);
            if(read_len == _readSizeBytes)
            {
                _playoutPositionMs += 10;
            }
            else // Must have reached EOF before start position!
            {
                return -1;
            }
        }
    }
    _reading = true;
    return 0;
}
|
|
|
|
// Reads one 10 ms frame of L16 PCM audio into |outData|. At EOF or at the
// configured stop point the file is rewound and reading restarts from the
// start point, so playout loops.
// Returns the number of bytes read, or -1 on error / end of file.
int32_t ModuleFileUtility::ReadPCMData(InStream& pcm,
                                       int8_t* outData,
                                       size_t bufferSize)
{
    WEBRTC_TRACE(
        kTraceStream,
        kTraceFile,
        _id,
        "ModuleFileUtility::ReadPCMData(pcm= 0x%x, outData= 0x%x, bufSize= %"
        PRIuS ")",
        &pcm,
        outData,
        bufferSize);

    if(outData == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,"buffer NULL");
        // Bug fix: previously only logged and then dereferenced the NULL
        // buffer in pcm.Read() below.
        return -1;
    }

    // Readsize for 10ms of audio data (2 bytes per sample).
    uint32_t bytesRequested = 2 * codec_info_.plfreq / 100;
    if(bufferSize < bytesRequested)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "ReadPCMData: buffer not long enough for a 10ms frame.");
        assert(false);
        return -1;
    }

    uint32_t bytesRead = pcm.Read(outData, bytesRequested);
    if(bytesRead < bytesRequested)
    {
        // Short read: assume EOF. Rewind, re-initialize (skipping back to
        // the start point), and fill in the rest of this frame from there.
        if(pcm.Rewind() == -1)
        {
            _reading = false;
        }
        else
        {
            if(InitPCMReading(pcm, _startPointInMs, _stopPointInMs,
                              codec_info_.plfreq) == -1)
            {
                _reading = false;
            }
            else
            {
                int32_t rest = bytesRequested - bytesRead;
                int32_t len = pcm.Read(&(outData[bytesRead]), rest);
                if(len == rest)
                {
                    bytesRead += len;
                }
                else
                {
                    _reading = false;
                }
            }
            if(bytesRead <= 0)
            {
                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                             "ReadPCMData: Failed to rewind audio file.");
                return -1;
            }
        }
    }

    if(bytesRead <= 0)
    {
        WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
                     "ReadPCMData: end of file");
        return -1;
    }
    _playoutPositionMs += 10;
    if(_stopPointInMs && _playoutPositionMs >= _stopPointInMs)
    {
        // Reached the stop point: loop back to the start point.
        if(!pcm.Rewind())
        {
            if(InitPCMReading(pcm, _startPointInMs, _stopPointInMs,
                              codec_info_.plfreq) == -1)
            {
                _reading = false;
            }
        }
    }
    return bytesRead;
}
|
|
|
|
// Prepares writing of raw L16 PCM at 8, 16 or 32 kHz: fills in codec_info_
// for the selected rate and resets the byte counter.
// Note: |out| is unused here; data is written by subsequent WritePCMData()
// calls.
// Returns 0 on success, -1 for unsupported sample rates.
int32_t ModuleFileUtility::InitPCMWriting(OutStream& out, uint32_t freq)
{
    // Bug fix: reset the codec id first so a stale L16 value from a
    // previous session cannot make an unsupported |freq| pass the check
    // below.
    _codecId = kCodecNoCodec;

    if(freq == 8000)
    {
        strcpy(codec_info_.plname, "L16");
        codec_info_.pltype = -1;
        codec_info_.plfreq = 8000;
        codec_info_.pacsize = 160;
        codec_info_.channels = 1;
        codec_info_.rate = 128000;

        _codecId = kCodecL16_8Khz;
    }
    else if(freq == 16000)
    {
        strcpy(codec_info_.plname, "L16");
        codec_info_.pltype = -1;
        codec_info_.plfreq = 16000;
        codec_info_.pacsize = 320;
        codec_info_.channels = 1;
        codec_info_.rate = 256000;

        _codecId = kCodecL16_16kHz;
    }
    else if(freq == 32000)
    {
        strcpy(codec_info_.plname, "L16");
        codec_info_.pltype = -1;
        codec_info_.plfreq = 32000;
        codec_info_.pacsize = 320;
        codec_info_.channels = 1;
        codec_info_.rate = 512000;

        _codecId = kCodecL16_32Khz;
    }
    if((_codecId != kCodecL16_8Khz) &&
        (_codecId != kCodecL16_16kHz) &&
        (_codecId != kCodecL16_32Khz))
    {
        // Bug fix: the old message omitted the supported 32 kHz mode.
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "CodecInst is not 8, 16 or 32 kHz PCM!");
        return -1;
    }
    _writing = true;
    _bytesWritten = 0;
    return 0;
}
|
|
|
|
// Appends |dataLength| bytes of raw L16 PCM to the output stream and
// updates _bytesWritten (used later by UpdateWavHeader()).
// Returns the number of bytes written, or -1 on NULL buffer / write error.
int32_t ModuleFileUtility::WritePCMData(OutStream& out,
                                        const int8_t* buffer,
                                        const size_t dataLength)
{
    WEBRTC_TRACE(
        kTraceStream,
        kTraceFile,
        _id,
        "ModuleFileUtility::WritePCMData(out= 0x%x, buf= 0x%x, dataLen= %" PRIuS
        ")",
        &out,
        buffer,
        dataLength);

    if(buffer == NULL)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "buffer NULL");
        // Bug fix: previously only logged; out.Write(NULL, ...) below would
        // dereference the NULL buffer.
        return -1;
    }

    if(!out.Write(buffer, dataLength))
    {
        return -1;
    }

    _bytesWritten += dataLength;
    return static_cast<int32_t>(dataLength);
}
|
|
|
|
// Copies the codec configuration of the currently open file into
// |codecInst|.
// Returns 0 on success, -1 if no file is being read or written.
int32_t ModuleFileUtility::codec_info(CodecInst& codecInst)
{
    WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
                 "ModuleFileUtility::codec_info(codecInst= 0x%x)", &codecInst);

    // codec_info_ is only valid while a file session is active.
    const bool sessionActive = _reading || _writing;
    if(!sessionActive)
    {
        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
                     "CodecInst: not currently reading audio file!");
        return -1;
    }
    memcpy(&codecInst, &codec_info_, sizeof(CodecInst));
    return 0;
}
|
|
|
|
// Maps |codecInst| onto the internal MediaFileUtility_CodecType enum and,
// on success, stores a copy of it in codec_info_.
// The set of recognized codecs depends on the WEBRTC_CODEC_* build flags.
// Returns 0 if the codec is supported by this module, -1 otherwise.
int32_t ModuleFileUtility::set_codec_info(const CodecInst& codecInst)
{

    _codecId = kCodecNoCodec;
    if(STR_CASE_CMP(codecInst.plname, "PCMU") == 0)
    {
        _codecId = kCodecPcmu;
    }
    else if(STR_CASE_CMP(codecInst.plname, "PCMA") == 0)
    {
        _codecId = kCodecPcma;
    }
    else if(STR_CASE_CMP(codecInst.plname, "L16") == 0)
    {
        // Linear PCM: the sample rate selects the codec id. Other rates
        // leave _codecId at kCodecNoCodec and fail below.
        if(codecInst.plfreq == 8000)
        {
            _codecId = kCodecL16_8Khz;
        }
        else if(codecInst.plfreq == 16000)
        {
            _codecId = kCodecL16_16kHz;
        }
        else if(codecInst.plfreq == 32000)
        {
            _codecId = kCodecL16_32Khz;
        }
    }
#ifdef WEBRTC_CODEC_ILBC
    else if(STR_CASE_CMP(codecInst.plname, "ilbc") == 0)
    {
        // iLBC mode is distinguished by packet size:
        // 160 samples = 20 ms, 240 samples = 30 ms.
        if(codecInst.pacsize == 160)
        {
            _codecId = kCodecIlbc20Ms;
        }
        else if(codecInst.pacsize == 240)
        {
            _codecId = kCodecIlbc30Ms;
        }
    }
#endif
#if(defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
    else if(STR_CASE_CMP(codecInst.plname, "isac") == 0)
    {
        // iSAC: 16 kHz = wideband, 32 kHz = super-wideband.
        if(codecInst.plfreq == 16000)
        {
            _codecId = kCodecIsac;
        }
        else if(codecInst.plfreq == 32000)
        {
            _codecId = kCodecIsacSwb;
        }
    }
#endif
#ifdef WEBRTC_CODEC_G722
    else if(STR_CASE_CMP(codecInst.plname, "G722") == 0)
    {
        _codecId = kCodecG722;
    }
#endif
    if(_codecId == kCodecNoCodec)
    {
        return -1;
    }
    memcpy(&codec_info_, &codecInst, sizeof(CodecInst));
    return 0;
}
|
|
|
|
int32_t ModuleFileUtility::FileDurationMs(const char* fileName,
|
|
const FileFormats fileFormat,
|
|
const uint32_t freqInHz)
|
|
{
|
|
|
|
if(fileName == NULL)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id, "filename NULL");
|
|
return -1;
|
|
}
|
|
|
|
int32_t time_in_ms = -1;
|
|
struct stat file_size;
|
|
if(stat(fileName,&file_size) == -1)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"failed to retrieve file size with stat!");
|
|
return -1;
|
|
}
|
|
FileWrapper* inStreamObj = FileWrapper::Create();
|
|
if(inStreamObj == NULL)
|
|
{
|
|
WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
|
|
"failed to create InStream object!");
|
|
return -1;
|
|
}
|
|
if(inStreamObj->OpenFile(fileName, true) == -1)
|
|
{
|
|
delete inStreamObj;
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"failed to open file %s!", fileName);
|
|
return -1;
|
|
}
|
|
|
|
switch (fileFormat)
|
|
{
|
|
case kFileFormatWavFile:
|
|
{
|
|
if(ReadWavHeader(*inStreamObj) == -1)
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"failed to read WAV file header!");
|
|
return -1;
|
|
}
|
|
time_in_ms = ((file_size.st_size - 44) /
|
|
(_wavFormatObj.nAvgBytesPerSec/1000));
|
|
break;
|
|
}
|
|
case kFileFormatPcm16kHzFile:
|
|
{
|
|
// 16 samples per ms. 2 bytes per sample.
|
|
int32_t denominator = 16*2;
|
|
time_in_ms = (file_size.st_size)/denominator;
|
|
break;
|
|
}
|
|
case kFileFormatPcm8kHzFile:
|
|
{
|
|
// 8 samples per ms. 2 bytes per sample.
|
|
int32_t denominator = 8*2;
|
|
time_in_ms = (file_size.st_size)/denominator;
|
|
break;
|
|
}
|
|
case kFileFormatCompressedFile:
|
|
{
|
|
int32_t cnt = 0;
|
|
int32_t read_len = 0;
|
|
char buf[64];
|
|
do
|
|
{
|
|
read_len = inStreamObj->Read(&buf[cnt++], 1);
|
|
if(read_len != 1)
|
|
{
|
|
return -1;
|
|
}
|
|
} while ((buf[cnt-1] != '\n') && (64 > cnt));
|
|
|
|
if(cnt == 64)
|
|
{
|
|
return -1;
|
|
}
|
|
else
|
|
{
|
|
buf[cnt] = 0;
|
|
}
|
|
#ifdef WEBRTC_CODEC_ILBC
|
|
if(!strcmp("#!iLBC20\n", buf))
|
|
{
|
|
// 20 ms is 304 bits
|
|
time_in_ms = ((file_size.st_size)*160)/304;
|
|
break;
|
|
}
|
|
if(!strcmp("#!iLBC30\n", buf))
|
|
{
|
|
// 30 ms takes 400 bits.
|
|
// file size in bytes * 8 / 400 is the number of
|
|
// 30 ms frames in the file ->
|
|
// time_in_ms = file size * 8 / 400 * 30
|
|
time_in_ms = ((file_size.st_size)*240)/400;
|
|
break;
|
|
}
|
|
#endif
|
|
break;
|
|
}
|
|
case kFileFormatPreencodedFile:
|
|
{
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"cannot determine duration of Pre-Encoded file!");
|
|
break;
|
|
}
|
|
default:
|
|
WEBRTC_TRACE(kTraceError, kTraceFile, _id,
|
|
"unsupported file format %d!", fileFormat);
|
|
break;
|
|
}
|
|
inStreamObj->CloseFile();
|
|
delete inStreamObj;
|
|
return time_in_ms;
|
|
}
|
|
|
|
uint32_t ModuleFileUtility::PlayoutPositionMs()
|
|
{
|
|
WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
|
|
"ModuleFileUtility::PlayoutPosition()");
|
|
|
|
if(_reading)
|
|
{
|
|
return _playoutPositionMs;
|
|
}
|
|
else
|
|
{
|
|
return 0;
|
|
}
|
|
}
|
|
} // namespace webrtc
|