Use clang-format -style=chromium to correct the format in webrtc/modules/interface/module_common_types.h

R=andrew@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/2979004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@5036 4adac7df-926f-26a2-2b94-8c16560cd09d
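
For reference, a minimal sketch of the invocation the message above describes; the in-place flag (-i) is an assumption, not something the commit message states:

    clang-format -style=chromium -i webrtc/modules/interface/module_common_types.h
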
Author: xians@webrtc.org
Date: 2013-10-25 18:15:09 +00:00
Parent: e4e5683b41
Commit: c94abd313e

webrtc/modules/interface/module_common_types.h

@@ -21,20 +21,18 @@
 #include "webrtc/typedefs.h"
 
 #ifdef _WIN32
-#pragma warning(disable:4351)  // remove warning "new behavior: elements of array
-                               // 'array' will be default initialized"
+// Remove warning "new behavior: elements of array will be default initialized".
+#pragma warning(disable : 4351)
 #endif
 
 namespace webrtc {
 
-struct RTPHeaderExtension
-{
+struct RTPHeaderExtension {
   int32_t transmissionTimeOffset;
   uint32_t absoluteSendTime;
 };
 
-struct RTPHeader
-{
+struct RTPHeader {
   bool markerBit;
   uint8_t payloadType;
   uint16_t sequenceNumber;
@@ -48,24 +46,31 @@ struct RTPHeader
   RTPHeaderExtension extension;
 };
 
-struct RTPAudioHeader
-{
+struct RTPAudioHeader {
   uint8_t numEnergy;                  // number of valid entries in arrOfEnergy
   uint8_t arrOfEnergy[kRtpCsrcSize];  // one energy byte (0-9) per channel
   bool isCNG;                         // is this CNG
   uint8_t channel;                    // number of channels 2 = stereo
 };
 
-enum {kNoPictureId = -1};
-enum {kNoTl0PicIdx = -1};
-enum {kNoTemporalIdx = -1};
-enum {kNoKeyIdx = -1};
-enum {kNoSimulcastIdx = 0};
+enum {
+  kNoPictureId = -1
+};
+enum {
+  kNoTl0PicIdx = -1
+};
+enum {
+  kNoTemporalIdx = -1
+};
+enum {
+  kNoKeyIdx = -1
+};
+enum {
+  kNoSimulcastIdx = 0
+};
 
-struct RTPVideoHeaderVP8
-{
-  void InitRTPVideoHeaderVP8()
-  {
+struct RTPVideoHeaderVP8 {
+  void InitRTPVideoHeaderVP8() {
     nonReference = false;
     pictureId = kNoPictureId;
     tl0PicIdx = kNoTl0PicIdx;
@@ -89,19 +94,16 @@ struct RTPVideoHeaderVP8
   bool beginningOfPartition;  // True if this packet is the first
                               // in a VP8 partition. Otherwise false
 };
 
-union RTPVideoTypeHeader
-{
+union RTPVideoTypeHeader {
   RTPVideoHeaderVP8 VP8;
 };
 
-enum RtpVideoCodecTypes
-{
+enum RtpVideoCodecTypes {
   kRtpVideoNone,
   kRtpVideoGeneric,
   kRtpVideoVp8
 };
 
-struct RTPVideoHeader
-{
+struct RTPVideoHeader {
   uint16_t width;  // size
   uint16_t height;
@@ -111,76 +113,63 @@ struct RTPVideoHeader
   RtpVideoCodecTypes codec;
   RTPVideoTypeHeader codecHeader;
 };
 
-union RTPTypeHeader
-{
+union RTPTypeHeader {
   RTPAudioHeader Audio;
   RTPVideoHeader Video;
 };
 
-struct WebRtcRTPHeader
-{
+struct WebRtcRTPHeader {
   RTPHeader header;
   FrameType frameType;
   RTPTypeHeader type;
 };
 
-class RTPFragmentationHeader
-{
-public:
-  RTPFragmentationHeader() :
-      fragmentationVectorSize(0),
+class RTPFragmentationHeader {
+ public:
+  RTPFragmentationHeader()
+      : fragmentationVectorSize(0),
         fragmentationOffset(NULL),
         fragmentationLength(NULL),
         fragmentationTimeDiff(NULL),
-        fragmentationPlType(NULL)
-        {};
+        fragmentationPlType(NULL) {};
 
-  ~RTPFragmentationHeader()
-  {
-    delete [] fragmentationOffset;
-    delete [] fragmentationLength;
-    delete [] fragmentationTimeDiff;
-    delete [] fragmentationPlType;
+  ~RTPFragmentationHeader() {
+    delete[] fragmentationOffset;
+    delete[] fragmentationLength;
+    delete[] fragmentationTimeDiff;
+    delete[] fragmentationPlType;
   }
 
-  void CopyFrom(const RTPFragmentationHeader& src)
-  {
-    if(this == &src)
-    {
+  void CopyFrom(const RTPFragmentationHeader& src) {
+    if (this == &src) {
       return;
     }
 
-    if(src.fragmentationVectorSize != fragmentationVectorSize)
-    {
+    if (src.fragmentationVectorSize != fragmentationVectorSize) {
      // new size of vectors
      // delete old
-      delete [] fragmentationOffset;
+      delete[] fragmentationOffset;
       fragmentationOffset = NULL;
-      delete [] fragmentationLength;
+      delete[] fragmentationLength;
       fragmentationLength = NULL;
-      delete [] fragmentationTimeDiff;
+      delete[] fragmentationTimeDiff;
       fragmentationTimeDiff = NULL;
-      delete [] fragmentationPlType;
+      delete[] fragmentationPlType;
       fragmentationPlType = NULL;
 
-      if(src.fragmentationVectorSize > 0)
-      {
+      if (src.fragmentationVectorSize > 0) {
        // allocate new
-        if(src.fragmentationOffset)
-        {
+        if (src.fragmentationOffset) {
          fragmentationOffset = new uint32_t[src.fragmentationVectorSize];
        }
-        if(src.fragmentationLength)
-        {
+        if (src.fragmentationLength) {
          fragmentationLength = new uint32_t[src.fragmentationVectorSize];
        }
-        if(src.fragmentationTimeDiff)
-        {
+        if (src.fragmentationTimeDiff) {
          fragmentationTimeDiff = new uint16_t[src.fragmentationVectorSize];
        }
-        if(src.fragmentationPlType)
-        {
+        if (src.fragmentationPlType) {
          fragmentationPlType = new uint8_t[src.fragmentationVectorSize];
        }
      }
@@ -188,53 +177,47 @@ public:
       fragmentationVectorSize = src.fragmentationVectorSize;
     }
 
-    if(src.fragmentationVectorSize > 0)
-    {
+    if (src.fragmentationVectorSize > 0) {
       // copy values
-      if(src.fragmentationOffset)
-      {
+      if (src.fragmentationOffset) {
         memcpy(fragmentationOffset, src.fragmentationOffset,
                src.fragmentationVectorSize * sizeof(uint32_t));
       }
-      if(src.fragmentationLength)
-      {
+      if (src.fragmentationLength) {
         memcpy(fragmentationLength, src.fragmentationLength,
                src.fragmentationVectorSize * sizeof(uint32_t));
       }
-      if(src.fragmentationTimeDiff)
-      {
+      if (src.fragmentationTimeDiff) {
         memcpy(fragmentationTimeDiff, src.fragmentationTimeDiff,
                src.fragmentationVectorSize * sizeof(uint16_t));
      }
-      if(src.fragmentationPlType)
-      {
+      if (src.fragmentationPlType) {
        memcpy(fragmentationPlType, src.fragmentationPlType,
               src.fragmentationVectorSize * sizeof(uint8_t));
      }
    }
  }
 
-  void VerifyAndAllocateFragmentationHeader(const uint16_t size)
-  {
-    if(fragmentationVectorSize < size)
-    {
+  void VerifyAndAllocateFragmentationHeader(const uint16_t size) {
+    if (fragmentationVectorSize < size) {
       uint16_t oldVectorSize = fragmentationVectorSize;
       {
         // offset
         uint32_t* oldOffsets = fragmentationOffset;
         fragmentationOffset = new uint32_t[size];
-        memset(fragmentationOffset+oldVectorSize, 0,
-               sizeof(uint32_t)*(size-oldVectorSize));
+        memset(fragmentationOffset + oldVectorSize, 0,
+               sizeof(uint32_t) * (size - oldVectorSize));
         // copy old values
-        memcpy(fragmentationOffset,oldOffsets, sizeof(uint32_t) * oldVectorSize);
+        memcpy(fragmentationOffset, oldOffsets,
+               sizeof(uint32_t) * oldVectorSize);
         delete[] oldOffsets;
       }
       // length
       {
         uint32_t* oldLengths = fragmentationLength;
         fragmentationLength = new uint32_t[size];
-        memset(fragmentationLength+oldVectorSize, 0,
-               sizeof(uint32_t) * (size- oldVectorSize));
+        memset(fragmentationLength + oldVectorSize, 0,
+               sizeof(uint32_t) * (size - oldVectorSize));
         memcpy(fragmentationLength, oldLengths,
                sizeof(uint32_t) * oldVectorSize);
         delete[] oldLengths;
@@ -243,8 +226,8 @@ public:
       {
         uint16_t* oldTimeDiffs = fragmentationTimeDiff;
         fragmentationTimeDiff = new uint16_t[size];
-        memset(fragmentationTimeDiff+oldVectorSize, 0,
-               sizeof(uint16_t) * (size- oldVectorSize));
+        memset(fragmentationTimeDiff + oldVectorSize, 0,
+               sizeof(uint16_t) * (size - oldVectorSize));
         memcpy(fragmentationTimeDiff, oldTimeDiffs,
                sizeof(uint16_t) * oldVectorSize);
         delete[] oldTimeDiffs;
@@ -253,8 +236,8 @@ public:
       {
         uint8_t* oldTimePlTypes = fragmentationPlType;
         fragmentationPlType = new uint8_t[size];
-        memset(fragmentationPlType+oldVectorSize, 0,
-               sizeof(uint8_t) * (size- oldVectorSize));
+        memset(fragmentationPlType + oldVectorSize, 0,
+               sizeof(uint8_t) * (size - oldVectorSize));
         memcpy(fragmentationPlType, oldTimePlTypes,
                sizeof(uint8_t) * oldVectorSize);
         delete[] oldTimePlTypes;
@@ -270,12 +253,11 @@ public:
                                    // each fragmentation
   uint8_t* fragmentationPlType;    // Payload type of each fragmentation
 
-private:
+ private:
   DISALLOW_COPY_AND_ASSIGN(RTPFragmentationHeader);
 };
 
-struct RTCPVoIPMetric
-{
+struct RTCPVoIPMetric {
   // RFC 3611 4.7
   uint8_t lossRate;
   uint8_t discardRate;
@@ -327,11 +309,10 @@ class CallStatsObserver {
 };
 
 // class describing a complete, or parts of an encoded frame.
-class EncodedVideoData
-{
-public:
-  EncodedVideoData() :
-      payloadType(0),
+class EncodedVideoData {
+ public:
+  EncodedVideoData()
+      : payloadType(0),
         timeStamp(0),
         renderTimeMs(0),
         encodedWidth(0),
@@ -343,11 +324,9 @@ public:
         bufferSize(0),
         fragmentationHeader(),
         frameType(kVideoFrameDelta),
-        codec(kVideoCodecUnknown)
-        {};
+        codec(kVideoCodecUnknown) {};
 
-  EncodedVideoData(const EncodedVideoData& data)
-  {
+  EncodedVideoData(const EncodedVideoData& data) {
     payloadType = data.payloadType;
     timeStamp = data.timeStamp;
     renderTimeMs = data.renderTimeMs;
@@ -359,27 +338,20 @@ public:
     fragmentationHeader.CopyFrom(data.fragmentationHeader);
     frameType = data.frameType;
     codec = data.codec;
-    if (data.payloadSize > 0)
-    {
+    if (data.payloadSize > 0) {
       payloadData = new uint8_t[data.payloadSize];
       memcpy(payloadData, data.payloadData, data.payloadSize);
-    }
-    else
-    {
+    } else {
       payloadData = NULL;
     }
   }
 
-  ~EncodedVideoData()
-  {
-    delete [] payloadData;
+  ~EncodedVideoData() {
+    delete[] payloadData;
   };
 
-  EncodedVideoData& operator=(const EncodedVideoData& data)
-  {
-    if (this == &data)
-    {
+  EncodedVideoData& operator=(const EncodedVideoData& data) {
+    if (this == &data) {
       return *this;
     }
     payloadType = data.payloadType;
@@ -393,19 +365,16 @@ public:
     fragmentationHeader.CopyFrom(data.fragmentationHeader);
     frameType = data.frameType;
     codec = data.codec;
-    if (data.payloadSize > 0)
-    {
-      delete [] payloadData;
+    if (data.payloadSize > 0) {
+      delete[] payloadData;
       payloadData = new uint8_t[data.payloadSize];
       memcpy(payloadData, data.payloadData, data.payloadSize);
       bufferSize = data.payloadSize;
     }
     return *this;
   };
 
-  void VerifyAndAllocate( const uint32_t size)
-  {
-    if (bufferSize < size)
-    {
+  void VerifyAndAllocate(const uint32_t size) {
+    if (bufferSize < size) {
       uint8_t* oldPayload = payloadData;
       payloadData = new uint8_t[size];
       memcpy(payloadData, oldPayload, sizeof(uint8_t) * payloadSize);
@@ -435,8 +404,7 @@ struct VideoContentMetrics {
       : motion_magnitude(0.0f),
         spatial_pred_err(0.0f),
         spatial_pred_err_h(0.0f),
-        spatial_pred_err_v(0.0f) {
-  }
+        spatial_pred_err_v(0.0f) {}
 
   void Reset() {
     motion_magnitude = 0.0f;
@@ -459,40 +427,42 @@ struct VideoContentMetrics {
  *
  *
  *************************************************/
-class VideoFrame
-{
-public:
+class VideoFrame {
+ public:
   VideoFrame();
   ~VideoFrame();
   /**
-   * Verifies that current allocated buffer size is larger than or equal to the input size.
-   * If the current buffer size is smaller, a new allocation is made and the old buffer data
+   * Verifies that current allocated buffer size is larger than or equal to the
+   * input size.
+   * If the current buffer size is smaller, a new allocation is made and the old
+   * buffer data
    * is copied to the new buffer.
   * Buffer size is updated to minimumSize.
   */
   int32_t VerifyAndAllocate(const uint32_t minimumSize);
   /**
-   * Update length of data buffer in frame. Function verifies that new length is less or
+   * Update length of data buffer in frame. Function verifies that new length
+   * is less or
    * equal to allocated size.
   */
   int32_t SetLength(const uint32_t newLength);
   /*
   * Swap buffer and size data
   */
-  int32_t Swap(uint8_t*& newMemory,
-               uint32_t& newLength,
-               uint32_t& newSize);
+  int32_t Swap(uint8_t*& newMemory, uint32_t& newLength, uint32_t& newSize);
   /*
   * Swap buffer and size data
   */
   int32_t SwapFrame(VideoFrame& videoFrame);
   /**
-   * Copy buffer: If newLength is bigger than allocated size, a new buffer of size length
+   * Copy buffer: If newLength is bigger than allocated size, a new buffer of
+   * size length
    * is allocated.
   */
   int32_t CopyFrame(const VideoFrame& videoFrame);
   /**
-   * Copy buffer: If newLength is bigger than allocated size, a new buffer of size length
+   * Copy buffer: If newLength is bigger than allocated size, a new buffer of
+   * size length
    * is allocated.
   */
   int32_t CopyFrame(uint32_t length, const uint8_t* sourceBuffer);
@@ -503,56 +473,55 @@ public:
   /**
   * Set frame timestamp (90kHz)
   */
-  void SetTimeStamp(const uint32_t timeStamp) {_timeStamp = timeStamp;}
+  void SetTimeStamp(const uint32_t timeStamp) { _timeStamp = timeStamp; }
   /**
   * Get pointer to frame buffer
   */
-  uint8_t* Buffer() const {return _buffer;}
-  uint8_t*& Buffer() {return _buffer;}
+  uint8_t* Buffer() const { return _buffer; }
+  uint8_t*& Buffer() { return _buffer; }
   /**
   * Get allocated buffer size
   */
-  uint32_t Size() const {return _bufferSize;}
+  uint32_t Size() const { return _bufferSize; }
   /**
   * Get frame length
   */
-  uint32_t Length() const {return _bufferLength;}
+  uint32_t Length() const { return _bufferLength; }
   /**
   * Get frame timestamp (90kHz)
   */
-  uint32_t TimeStamp() const {return _timeStamp;}
+  uint32_t TimeStamp() const { return _timeStamp; }
   /**
   * Get frame width
   */
-  uint32_t Width() const {return _width;}
+  uint32_t Width() const { return _width; }
   /**
   * Get frame height
   */
-  uint32_t Height() const {return _height;}
+  uint32_t Height() const { return _height; }
   /**
   * Set frame width
   */
-  void SetWidth(const uint32_t width) {_width = width;}
+  void SetWidth(const uint32_t width) { _width = width; }
   /**
   * Set frame height
   */
-  void SetHeight(const uint32_t height) {_height = height;}
+  void SetHeight(const uint32_t height) { _height = height; }
   /**
   * Set render time in miliseconds
   */
-  void SetRenderTime(const int64_t renderTimeMs) {_renderTimeMs = renderTimeMs;}
+  void SetRenderTime(const int64_t renderTimeMs) {
+    _renderTimeMs = renderTimeMs;
+  }
   /**
   * Get render time in miliseconds
   */
-  int64_t RenderTimeMs() const {return _renderTimeMs;}
+  int64_t RenderTimeMs() const { return _renderTimeMs; }
 
-private:
-  void Set(uint8_t* buffer,
-           uint32_t size,
-           uint32_t length,
-           uint32_t timeStamp);
+ private:
+  void Set(uint8_t* buffer, uint32_t size, uint32_t length, uint32_t timeStamp);
 
   uint8_t* _buffer;      // Pointer to frame buffer
   uint32_t _bufferSize;  // Allocated buffer size
@@ -564,49 +533,35 @@ private:
 };  // end of VideoFrame class declaration
 
 // inline implementation of VideoFrame class:
-inline
-VideoFrame::VideoFrame():
-    _buffer(0),
+inline VideoFrame::VideoFrame()
+    : _buffer(0),
       _bufferSize(0),
       _bufferLength(0),
       _timeStamp(0),
       _width(0),
       _height(0),
-    _renderTimeMs(0)
-{
+      _renderTimeMs(0) {
   //
 }
 
-inline
-VideoFrame::~VideoFrame()
-{
-  if(_buffer)
-  {
-    delete [] _buffer;
+inline VideoFrame::~VideoFrame() {
+  if (_buffer) {
+    delete[] _buffer;
     _buffer = NULL;
   }
 }
 
-inline
-int32_t
-VideoFrame::VerifyAndAllocate(const uint32_t minimumSize)
-{
-  if (minimumSize < 1)
-  {
+inline int32_t VideoFrame::VerifyAndAllocate(const uint32_t minimumSize) {
+  if (minimumSize < 1) {
     return -1;
   }
-  if(minimumSize > _bufferSize)
-  {
+  if (minimumSize > _bufferSize) {
     // create buffer of sufficient size
     uint8_t* newBufferBuffer = new uint8_t[minimumSize];
-    if(_buffer)
-    {
+    if (_buffer) {
       // copy old data
       memcpy(newBufferBuffer, _buffer, _bufferSize);
-      delete [] _buffer;
-    }
-    else
-    {
+      delete[] _buffer;
+    } else {
       memset(newBufferBuffer, 0, minimumSize * sizeof(uint8_t));
     }
     _buffer = newBufferBuffer;
@@ -615,22 +570,15 @@ VideoFrame::VerifyAndAllocate(const uint32_t minimumSize)
   return 0;
 }
 
-inline
-int32_t
-VideoFrame::SetLength(const uint32_t newLength)
-{
-  if (newLength >_bufferSize )
-  { // can't accomodate new value
+inline int32_t VideoFrame::SetLength(const uint32_t newLength) {
+  if (newLength > _bufferSize) {  // can't accomodate new value
     return -1;
   }
   _bufferLength = newLength;
   return 0;
 }
 
-inline
-int32_t
-VideoFrame::SwapFrame(VideoFrame& videoFrame)
-{
+inline int32_t VideoFrame::SwapFrame(VideoFrame& videoFrame) {
   uint32_t tmpTimeStamp = _timeStamp;
   uint32_t tmpWidth = _width;
   uint32_t tmpHeight = _height;
@@ -646,13 +594,12 @@ VideoFrame::SwapFrame(VideoFrame& videoFrame)
   videoFrame._height = tmpHeight;
   videoFrame._renderTimeMs = tmpRenderTime;
 
-  return Swap(videoFrame._buffer, videoFrame._bufferLength, videoFrame._bufferSize);
+  return Swap(videoFrame._buffer, videoFrame._bufferLength,
+              videoFrame._bufferSize);
 }
 
-inline
-int32_t
-VideoFrame::Swap(uint8_t*& newMemory, uint32_t& newLength, uint32_t& newSize)
-{
+inline int32_t VideoFrame::Swap(uint8_t*& newMemory, uint32_t& newLength,
+                                uint32_t& newSize) {
   uint8_t* tmpBuffer = _buffer;
   uint32_t tmpLength = _bufferLength;
   uint32_t tmpSize = _bufferSize;
@@ -665,15 +612,11 @@ VideoFrame::Swap(uint8_t*& newMemory, uint32_t& newLength, uint32_t& newSize)
   return 0;
 }
 
-inline
-int32_t
-VideoFrame::CopyFrame(uint32_t length, const uint8_t* sourceBuffer)
-{
-  if (length > _bufferSize)
-  {
+inline int32_t VideoFrame::CopyFrame(uint32_t length,
+                                     const uint8_t* sourceBuffer) {
+  if (length > _bufferSize) {
     int32_t ret = VerifyAndAllocate(length);
-    if (ret < 0)
-    {
+    if (ret < 0) {
       return ret;
     }
   }
@@ -682,12 +625,8 @@ VideoFrame::CopyFrame(uint32_t length, const uint8_t* sourceBuffer)
   return 0;
 }
 
-inline
-int32_t
-VideoFrame::CopyFrame(const VideoFrame& videoFrame)
-{
-  if(CopyFrame(videoFrame.Length(), videoFrame.Buffer()) != 0)
-  {
+inline int32_t VideoFrame::CopyFrame(const VideoFrame& videoFrame) {
+  if (CopyFrame(videoFrame.Length(), videoFrame.Buffer()) != 0) {
     return -1;
   }
   _timeStamp = videoFrame._timeStamp;
@@ -697,10 +636,7 @@ VideoFrame::CopyFrame(const VideoFrame& videoFrame)
   return 0;
 }
 
-inline
-void
-VideoFrame::Free()
-{
+inline void VideoFrame::Free() {
   _timeStamp = 0;
   _bufferLength = 0;
   _bufferSize = 0;
@@ -708,14 +644,12 @@ VideoFrame::Free()
   _width = 0;
   _renderTimeMs = 0;
 
-  if(_buffer)
-  {
-    delete [] _buffer;
+  if (_buffer) {
+    delete[] _buffer;
     _buffer = NULL;
   }
 }
 
 /* This class holds up to 60 ms of super-wideband (32 kHz) stereo audio. It
  * allows for adding and subtracting frames while keeping track of the resulting
  * states.
@@ -730,7 +664,7 @@ VideoFrame::Free()
  * deciding the resulting state. To do this use the -operator.
  */
 class AudioFrame {
-public:
+ public:
   // Stereo, 32 kHz, 60 ms (2 * 32 * 60)
   static const int kMaxDataSizeSamples = 3840;
@@ -750,17 +684,11 @@ public:
   AudioFrame();
   virtual ~AudioFrame() {}
 
-  // |Interleaved_| is assumed to be unchanged with this UpdateFrame() method.
-  void UpdateFrame(
-      int id,
-      uint32_t timestamp,
-      const int16_t* data,
-      int samples_per_channel,
-      int sample_rate_hz,
-      SpeechType speech_type,
-      VADActivity vad_activity,
-      int num_channels = 1,
-      uint32_t energy = -1);
+  // |interleaved_| is not changed by this method.
+  void UpdateFrame(int id, uint32_t timestamp, const int16_t* data,
+                   int samples_per_channel, int sample_rate_hz,
+                   SpeechType speech_type, VADActivity vad_activity,
+                   int num_channels = 1, uint32_t energy = -1);
 
   AudioFrame& Append(const AudioFrame& rhs);
@@ -783,12 +711,11 @@ public:
   uint32_t energy_;
   bool interleaved_;
 
-private:
+ private:
   DISALLOW_COPY_AND_ASSIGN(AudioFrame);
 };
 
-inline
-AudioFrame::AudioFrame()
+inline AudioFrame::AudioFrame()
     : id_(-1),
       timestamp_(0),
       data_(),
@@ -800,16 +727,11 @@ AudioFrame::AudioFrame()
       energy_(0xffffffff),
       interleaved_(true) {}
 
-inline
-void AudioFrame::UpdateFrame(
-    int id,
-    uint32_t timestamp,
-    const int16_t* data,
-    int samples_per_channel,
-    int sample_rate_hz,
-    SpeechType speech_type,
-    VADActivity vad_activity,
-    int num_channels,
-    uint32_t energy) {
+inline void AudioFrame::UpdateFrame(int id, uint32_t timestamp,
+                                    const int16_t* data,
+                                    int samples_per_channel, int sample_rate_hz,
+                                    SpeechType speech_type,
+                                    VADActivity vad_activity, int num_channels,
+                                    uint32_t energy) {
   id_ = id;
   timestamp_ = timestamp;
@@ -822,7 +744,7 @@ void AudioFrame::UpdateFrame(
   const int length = samples_per_channel * num_channels;
   assert(length <= kMaxDataSizeSamples && length >= 0);
 
-  if(data != NULL) {
+  if (data != NULL) {
     memcpy(data_, data, sizeof(int16_t) * length);
   } else {
     memset(data_, 0, sizeof(int16_t) * length);
@@ -830,8 +752,7 @@ void AudioFrame::UpdateFrame(
 }
 
 inline void AudioFrame::CopyFrom(const AudioFrame& src) {
-  if(this == &src)
-    return;
+  if (this == &src) return;
 
   id_ = src.id_;
   timestamp_ = src.timestamp_;
@@ -848,63 +769,54 @@ inline void AudioFrame::CopyFrom(const AudioFrame& src) {
   memcpy(data_, src.data_, sizeof(int16_t) * length);
 }
 
-inline
-void AudioFrame::Mute() {
+inline void AudioFrame::Mute() {
   memset(data_, 0, samples_per_channel_ * num_channels_ * sizeof(int16_t));
 }
 
-inline
-AudioFrame& AudioFrame::operator>>=(const int rhs) {
+inline AudioFrame& AudioFrame::operator>>=(const int rhs) {
   assert((num_channels_ > 0) && (num_channels_ < 3));
-  if((num_channels_ > 2) || (num_channels_ < 1))
-    return *this;
+  if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
 
-  for(int i = 0; i < samples_per_channel_ * num_channels_; i++) {
+  for (int i = 0; i < samples_per_channel_ * num_channels_; i++) {
     data_[i] = static_cast<int16_t>(data_[i] >> rhs);
   }
   return *this;
 }
 
-inline
-AudioFrame& AudioFrame::Append(const AudioFrame& rhs) {
+inline AudioFrame& AudioFrame::Append(const AudioFrame& rhs) {
   // Sanity check
   assert((num_channels_ > 0) && (num_channels_ < 3));
   assert(interleaved_ == rhs.interleaved_);
-  if((num_channels_ > 2) || (num_channels_ < 1))
-    return *this;
-  if(num_channels_ != rhs.num_channels_)
-    return *this;
+  if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
+  if (num_channels_ != rhs.num_channels_) return *this;
 
-  if((vad_activity_ == kVadActive) || rhs.vad_activity_ == kVadActive) {
+  if ((vad_activity_ == kVadActive) || rhs.vad_activity_ == kVadActive) {
     vad_activity_ = kVadActive;
-  } else if(vad_activity_ == kVadUnknown || rhs.vad_activity_ == kVadUnknown) {
+  } else if (vad_activity_ == kVadUnknown || rhs.vad_activity_ == kVadUnknown) {
    vad_activity_ = kVadUnknown;
  }
-  if(speech_type_ != rhs.speech_type_) {
+  if (speech_type_ != rhs.speech_type_) {
    speech_type_ = kUndefined;
  }
 
  int offset = samples_per_channel_ * num_channels_;
-  for(int i = 0; i < rhs.samples_per_channel_ * rhs.num_channels_; i++) {
-    data_[offset+i] = rhs.data_[i];
+  for (int i = 0; i < rhs.samples_per_channel_ * rhs.num_channels_; i++) {
+    data_[offset + i] = rhs.data_[i];
  }
  samples_per_channel_ += rhs.samples_per_channel_;
  return *this;
 }
 
-inline
-AudioFrame& AudioFrame::operator+=(const AudioFrame& rhs) {
+inline AudioFrame& AudioFrame::operator+=(const AudioFrame& rhs) {
   // Sanity check
   assert((num_channels_ > 0) && (num_channels_ < 3));
   assert(interleaved_ == rhs.interleaved_);
-  if((num_channels_ > 2) || (num_channels_ < 1))
-    return *this;
-  if(num_channels_ != rhs.num_channels_)
-    return *this;
+  if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
+  if (num_channels_ != rhs.num_channels_) return *this;
 
   bool noPrevData = false;
-  if(samples_per_channel_ != rhs.samples_per_channel_) {
-    if(samples_per_channel_ == 0) {
+  if (samples_per_channel_ != rhs.samples_per_channel_) {
+    if (samples_per_channel_ == 0) {
       // special case we have no data to start with
       samples_per_channel_ = rhs.samples_per_channel_;
       noPrevData = true;
@@ -913,26 +825,25 @@ AudioFrame& AudioFrame::operator+=(const AudioFrame& rhs) {
     }
   }
 
-  if((vad_activity_ == kVadActive) || rhs.vad_activity_ == kVadActive) {
+  if ((vad_activity_ == kVadActive) || rhs.vad_activity_ == kVadActive) {
     vad_activity_ = kVadActive;
-  } else if(vad_activity_ == kVadUnknown || rhs.vad_activity_ == kVadUnknown) {
+  } else if (vad_activity_ == kVadUnknown || rhs.vad_activity_ == kVadUnknown) {
     vad_activity_ = kVadUnknown;
   }
 
-  if(speech_type_ != rhs.speech_type_)
-    speech_type_ = kUndefined;
+  if (speech_type_ != rhs.speech_type_) speech_type_ = kUndefined;
 
-  if(noPrevData) {
+  if (noPrevData) {
     memcpy(data_, rhs.data_,
            sizeof(int16_t) * rhs.samples_per_channel_ * num_channels_);
   } else {
     // IMPROVEMENT this can be done very fast in assembly
-    for(int i = 0; i < samples_per_channel_ * num_channels_; i++) {
-      int32_t wrapGuard = static_cast<int32_t>(data_[i]) +
-          static_cast<int32_t>(rhs.data_[i]);
-      if(wrapGuard < -32768) {
+    for (int i = 0; i < samples_per_channel_ * num_channels_; i++) {
+      int32_t wrapGuard =
+          static_cast<int32_t>(data_[i]) + static_cast<int32_t>(rhs.data_[i]);
+      if (wrapGuard < -32768) {
         data_[i] = -32768;
-      } else if(wrapGuard > 32767) {
+      } else if (wrapGuard > 32767) {
         data_[i] = 32767;
       } else {
         data_[i] = (int16_t)wrapGuard;
@@ -943,29 +854,27 @@ AudioFrame& AudioFrame::operator+=(const AudioFrame& rhs) {
   return *this;
 }
 
-inline
-AudioFrame& AudioFrame::operator-=(const AudioFrame& rhs) {
+inline AudioFrame& AudioFrame::operator-=(const AudioFrame& rhs) {
   // Sanity check
   assert((num_channels_ > 0) && (num_channels_ < 3));
   assert(interleaved_ == rhs.interleaved_);
-  if((num_channels_ > 2)|| (num_channels_ < 1))
-    return *this;
+  if ((num_channels_ > 2) || (num_channels_ < 1)) return *this;
 
-  if((samples_per_channel_ != rhs.samples_per_channel_) ||
+  if ((samples_per_channel_ != rhs.samples_per_channel_) ||
       (num_channels_ != rhs.num_channels_)) {
     return *this;
   }
-  if((vad_activity_ != kVadPassive) || rhs.vad_activity_ != kVadPassive) {
+  if ((vad_activity_ != kVadPassive) || rhs.vad_activity_ != kVadPassive) {
     vad_activity_ = kVadUnknown;
   }
   speech_type_ = kUndefined;
 
-  for(int i = 0; i < samples_per_channel_ * num_channels_; i++) {
-    int32_t wrapGuard = static_cast<int32_t>(data_[i]) -
-        static_cast<int32_t>(rhs.data_[i]);
-    if(wrapGuard < -32768) {
+  for (int i = 0; i < samples_per_channel_ * num_channels_; i++) {
+    int32_t wrapGuard =
+        static_cast<int32_t>(data_[i]) - static_cast<int32_t>(rhs.data_[i]);
+    if (wrapGuard < -32768) {
       data_[i] = -32768;
-    } else if(wrapGuard > 32767) {
+    } else if (wrapGuard > 32767) {
       data_[i] = 32767;
     } else {
       data_[i] = (int16_t)wrapGuard;
@@ -988,13 +897,13 @@ inline bool IsNewerTimestamp(uint32_t timestamp, uint32_t prev_timestamp) {
 inline uint16_t LatestSequenceNumber(uint16_t sequence_number1,
                                      uint16_t sequence_number2) {
-  return IsNewerSequenceNumber(sequence_number1, sequence_number2) ?
-      sequence_number1 : sequence_number2;
+  return IsNewerSequenceNumber(sequence_number1, sequence_number2)
+             ? sequence_number1
+             : sequence_number2;
 }
 
 inline uint32_t LatestTimestamp(uint32_t timestamp1, uint32_t timestamp2) {
-  return IsNewerTimestamp(timestamp1, timestamp2) ? timestamp1 :
-      timestamp2;
+  return IsNewerTimestamp(timestamp1, timestamp2) ? timestamp1 : timestamp2;
 }
 
 }  // namespace webrtc