Switching to I420VideoFrame
Review URL: https://webrtc-codereview.appspot.com/922004
git-svn-id: http://webrtc.googlecode.com/svn/trunk@2983 4adac7df-926f-26a2-2b94-8c16560cd09d
@@ -230,9 +230,6 @@ Benchmark::PerformNormalTest()
CodecSettings(_target->GetWidth(), _target->GetHeight(), _target->GetFrameRate(), _bitRate);
Setup();
EventWrapper* waitEvent = EventWrapper::Create();

_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_encoder->InitEncode(&_inst, 4, 1440);
CodecSpecific_InitBitrate();
_decoder->InitDecode(&_inst,1);
@@ -282,9 +279,7 @@ Benchmark::PerformNormalTest()
waitEvent->Wait(5);
}

_inputVideoBuffer.Free();
_encodedVideoBuffer.Free();
_decodedVideoBuffer.Free();

_encoder->Release();
_decoder->Release();

@@ -16,6 +16,7 @@
#include <sstream>
#include <vector>

#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "gtest/gtest.h"
#include "tick_util.h"
#include "testsupport/fileutils.h"
@@ -262,16 +263,13 @@ WebRtc_UWord32 VideoDecodeCompleteCallback::DecodedBytes()
}

WebRtc_Word32
VideoDecodeCompleteCallback::Decoded(VideoFrame& image)
VideoDecodeCompleteCallback::Decoded(I420VideoFrame& image)
{
_test.Decoded(image);
_decodedBytes += image.Length();
_decodedBytes += CalcBufferSize(kI420, image.width(), image.height());
if (_decodedFile != NULL)
{
if (fwrite(image.Buffer(), 1, image.Length(),
_decodedFile) != image.Length()) {
return -1;
}
return PrintI420VideoFrame(image, _decodedFile);
}
return 0;
}
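Note on the size bookkeeping in the hunk above: I420VideoFrame no longer exposes a contiguous Buffer()/Length(), so the decode callback derives the byte count from the frame dimensions and writes the file through PrintI420VideoFrame() instead of a raw fwrite(). A hedged sketch of the arithmetic involved, assuming the usual I420 plane layout; the helper name below is illustrative only and not part of the WebRTC API.

#include <stddef.h>

// Illustrative helper: the number of bytes a frame occupies in the I420
// layout assumed by this test, i.e. a full-resolution 8-bit Y plane plus
// U and V planes at half resolution in each dimension (rounded up for odd
// sizes). CalcBufferSize(kI420, width, height) is expected to return the
// same value.
static size_t I420FrameSize(int width, int height) {
  const size_t size_y = static_cast<size_t>(width) * height;
  const size_t size_uv =
      static_cast<size_t>((width + 1) / 2) * ((height + 1) / 2);
  return size_y + 2 * size_uv;
}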
@@ -300,14 +298,14 @@ NormalAsyncTest::Encoded(const EncodedImage& encodedImage)
}

void
NormalAsyncTest::Decoded(const VideoFrame& decodedImage)
NormalAsyncTest::Decoded(const I420VideoFrame& decodedImage)
{
_decodeCompleteTime = tGetTime();
_decFrameCnt++;
_totalDecodePipeTime += _decodeCompleteTime -
_decodeTimes[decodedImage.TimeStamp()];
_decodedWidth = decodedImage.Width();
_decodedHeight = decodedImage.Height();
_decodeTimes[decodedImage.timestamp()];
_decodedWidth = decodedImage.width();
_decodedHeight = decodedImage.height();
}

void
@@ -316,8 +314,6 @@ NormalAsyncTest::Perform()
_inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv";
CodecSettings(352, 288, 30, _bitRate);
Setup();
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
if(_encoder->InitEncode(&_inst, 1, 1440) < 0)
{
exit(EXIT_FAILURE);
@@ -410,17 +406,19 @@ NormalAsyncTest::Encode()
{
_lengthEncFrame = 0;
EXPECT_GT(fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile), 0u);
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
_inputVideoBuffer.SetTimeStamp((unsigned int)
_inputVideoBuffer.CreateFrame(_sizeY, _sourceBuffer,
_sizeUv, _sourceBuffer + _sizeY,
_sizeUv, _sourceBuffer + _sizeY + _sizeUv,
_width, _height,
_width, _halfWidth, _halfWidth);
_inputVideoBuffer.set_timestamp((unsigned int)
(_encFrameCnt * 9e4 / _inst.maxFramerate));
_inputVideoBuffer.SetWidth(_inst.width);
_inputVideoBuffer.SetHeight(_inst.height);
if (feof(_sourceFile) != 0)
{
return true;
}
_encodeCompleteTime = 0;
_encodeTimes[_inputVideoBuffer.TimeStamp()] = tGetTime();
_encodeTimes[_inputVideoBuffer.timestamp()] = tGetTime();
std::vector<VideoFrameType> frame_types(1, kDeltaFrame);

// check SLI queue
@@ -474,12 +472,12 @@ NormalAsyncTest::Encode()
if (_encodeCompleteTime > 0)
{
_totalEncodeTime += _encodeCompleteTime -
_encodeTimes[_inputVideoBuffer.TimeStamp()];
_encodeTimes[_inputVideoBuffer.timestamp()];
}
else
{
_totalEncodeTime += tGetTime() -
_encodeTimes[_inputVideoBuffer.TimeStamp()];
_encodeTimes[_inputVideoBuffer.timestamp()];
}
assert(ret >= 0);
return false;

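The Encode() paths above stop copying the whole source frame with CopyFrame()/SetTimeStamp() and instead hand CreateFrame() the three plane sizes, pointers and strides, then stamp the frame with set_timestamp(). A rough sketch of that wrapping step, using the same argument order as the CreateFrame() calls in this diff; the function name, header path and pointer types outside the diff are assumptions.

#include <stdint.h>

#include "common_video/interface/i420_video_frame.h"  // assumed header for I420VideoFrame at this revision

// Sketch: wrap one frame read from a planar I420 file into an I420VideoFrame,
// following the argument order used above: plane size + pointer for Y, U, V,
// then width/height, then the three strides. Pointer types are shown as
// uint8_t*; the tree at this revision mostly uses WebRtc_UWord8/unsigned char.
void WrapRawI420(uint8_t* raw, int width, int height, uint32_t timestamp,
                 webrtc::I420VideoFrame* frame) {
  const int half_width = (width + 1) / 2;
  const int half_height = (height + 1) / 2;
  const int size_y = width * height;
  const int size_uv = half_width * half_height;
  frame->CreateFrame(size_y, raw,
                     size_uv, raw + size_y,
                     size_uv, raw + size_y + size_uv,
                     width, height,
                     width, half_width, half_width);  // strides: Y, U, V
  frame->set_timestamp(timestamp);
}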
@@ -80,7 +80,7 @@ public:
virtual ~NormalAsyncTest() {};
virtual void Perform();
virtual void Encoded(const webrtc::EncodedImage& encodedImage);
virtual void Decoded(const webrtc::VideoFrame& decodedImage);
virtual void Decoded(const webrtc::I420VideoFrame& decodedImage);
virtual webrtc::CodecSpecificInfo*
CopyCodecSpecificInfo(
const webrtc::CodecSpecificInfo* codecSpecificInfo) const;
@@ -172,7 +172,7 @@ public:
_decodedBytes(0)
{}

virtual WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage);
virtual WebRtc_Word32 Decoded(webrtc::I420VideoFrame& decodedImage);
virtual WebRtc_Word32
ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId);
virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId);

@@ -14,6 +14,7 @@
#include <sstream>
#include <string.h>

#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "gtest/gtest.h"
#include "testsupport/fileutils.h"

@@ -22,7 +23,13 @@ NormalTest::NormalTest()
CodecTest("Normal Test 1", "A test of normal execution of the codec"),
_testNo(1),
_lengthEncFrame(0),
_appendNext(false)
_appendNext(false),
_width(0),
_halfWidth(0),
_height(0),
_halfHeight(0),
_sizeY(0),
_sizeUv(0)
{
}

@@ -33,7 +40,13 @@ CodecTest(name, description),
_requestKeyFrame(false),
_testNo(testNo),
_lengthEncFrame(0),
_appendNext(false)
_appendNext(false),
_width(0),
_halfWidth(0),
_height(0),
_halfHeight(0),
_sizeY(0),
_sizeUv(0)
{
}

@@ -108,12 +121,22 @@ NormalTest::Teardown()
void
NormalTest::Perform()
{
_width = 352;
_halfWidth = (_width + 1) / 2;
_height = 288;
_halfHeight = (_height + 1) / 2;
_sizeY = _width * _height;
_sizeUv = _halfWidth * _halfHeight;
_inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv";
CodecSettings(352, 288, 30, _bitRate);
CodecSettings(_width, _height, 30, _bitRate);
Setup();

_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_inputVideoBuffer.CreateEmptyFrame(_width, _height,
_width, _halfWidth, _halfWidth);
_inputVideoBuffer.CreateEmptyFrame(_width, _height,
_width, _halfWidth, _halfWidth);
_decodedVideoBuffer.CreateEmptyFrame(_width, _height,
_width, _halfWidth, _halfWidth);
_encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);

_encoder->InitEncode(&_inst, 1, 1460);
@@ -140,8 +163,7 @@ NormalTest::Perform()
fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
exit(EXIT_FAILURE);
}
if (fwrite(_decodedVideoBuffer.Buffer(), 1, decodeLength,
_decodedFile) != static_cast<unsigned int>(decodeLength)) {
if (PrintI420VideoFrame(_decodedVideoBuffer, _decodedFile) < 0) {
return;
}
CodecSpecific_InitBitrate();
@@ -157,8 +179,7 @@ NormalTest::Perform()
fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
exit(EXIT_FAILURE);
}
if (fwrite(_decodedVideoBuffer.Buffer(), 1, decodeLength,
_decodedFile) != static_cast<unsigned int>(decodeLength)) {
if (PrintI420VideoFrame(_decodedVideoBuffer, _decodedFile) < 0) {
return;
}
}
@@ -173,8 +194,6 @@ NormalTest::Perform()
(*_log) << "Average encode time: " << avgEncTime << " s" << std::endl;
(*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;

_inputVideoBuffer.Free();

_encoder->Release();
_decoder->Release();

@@ -190,8 +209,13 @@ NormalTest::Encode()
{
return true;
}
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
_inputVideoBuffer.SetTimeStamp(_framecnt);
_inputVideoBuffer.CreateFrame(_sizeY, _sourceBuffer,
_sizeUv, _sourceBuffer + _sizeY,
_sizeUv, _sourceBuffer + _sizeY +
_sizeUv,
_width, _height,
_width, _halfWidth, _halfWidth);
_inputVideoBuffer.set_timestamp(_framecnt);

// This multiple attempt ridiculousness is to accomodate VP7:
// 1. The wrapper can unilaterally reduce the framerate for low bitrates.
@@ -204,8 +228,8 @@ NormalTest::Encode()
{
starttime = clock()/(double)CLOCKS_PER_SEC;

_inputVideoBuffer.SetWidth(_inst.width);
_inputVideoBuffer.SetHeight(_inst.height);
_inputVideoBuffer.set_width(_inst.width);
_inputVideoBuffer.set_height(_inst.height);
//_lengthEncFrame = _encoder->Encode(_inputVideoBuffer, _encodedVideoBuffer, _frameInfo,
// _inst.frameRate, _requestKeyFrame && !(_framecnt%50));

@@ -40,6 +40,12 @@ protected:
unsigned int _testNo;
int _lengthEncFrame;
bool _appendNext;
int _width;
int _halfWidth;
int _height;
int _halfHeight;
int _sizeY;
int _sizeUv;
};

#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_TEST_H_

@@ -64,11 +64,11 @@ PacketLossTest::Encoded(const EncodedImage& encodedImage)
}

void
PacketLossTest::Decoded(const VideoFrame& decodedImage)
PacketLossTest::Decoded(const I420VideoFrame& decodedImage)
{
// check the frame queue if any frames have gone missing
assert(!_frameQueue.empty()); // decoded frame is not in the queue
while(_frameQueue.front() < decodedImage.TimeStamp())
while(_frameQueue.front() < decodedImage.timestamp())
{
// this frame is missing
// write previous decoded frame again (frame freeze)
@@ -84,20 +84,23 @@ PacketLossTest::Decoded(const VideoFrame& decodedImage)
_frameQueue.pop_front();
}
// Decoded frame is not in the queue.
assert(_frameQueue.front() == decodedImage.TimeStamp());
assert(_frameQueue.front() == decodedImage.timestamp());

// pop the current frame
_frameQueue.pop_front();

// save image for future freeze-frame
if (_lastFrameLength < decodedImage.Length())
unsigned int length = CalcBufferSize(kI420, decodedImage.width(),
decodedImage.height());
if (_lastFrameLength < length)
{
if (_lastFrame) delete [] _lastFrame;

_lastFrame = new WebRtc_UWord8[decodedImage.Length()];
_lastFrame = new WebRtc_UWord8[length];
}
memcpy(_lastFrame, decodedImage.Buffer(), decodedImage.Length());
_lastFrameLength = decodedImage.Length();
// TODO(mikhal): Can't the last frame be a I420VideoFrame?
ExtractBuffer(decodedImage, length, _lastFrame);
_lastFrameLength = length;

NormalAsyncTest::Decoded(decodedImage);
}

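With no contiguous Buffer() to memcpy from, the freeze-frame copy above sizes a scratch buffer with CalcBufferSize() and fills it with ExtractBuffer(). A hedged sketch of that pattern; the helper name and parameter types are illustrative and not from the tree, and plain uint8_t stands in for WebRtc_UWord8.

#include <stdint.h>

#include "common_video/libyuv/include/webrtc_libyuv.h"  // CalcBufferSize, ExtractBuffer

// Sketch of the freeze-frame bookkeeping after the switch: keep a flat copy
// of the last decoded frame so it can be rewritten when a frame goes missing.
void SaveFreezeFrame(const webrtc::I420VideoFrame& decoded,
                     uint8_t** last_frame, unsigned int* last_length) {
  unsigned int length = webrtc::CalcBufferSize(webrtc::kI420,
                                               decoded.width(),
                                               decoded.height());
  if (*last_length < length) {
    delete [] *last_frame;               // deleting NULL is a no-op
    *last_frame = new uint8_t[length];
  }
  webrtc::ExtractBuffer(decoded, length, *last_frame);  // packs Y, U, V contiguously
  *last_length = length;
}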
@@ -21,7 +21,7 @@ public:
PacketLossTest();
virtual ~PacketLossTest() {if(_lastFrame) {delete [] _lastFrame; _lastFrame = NULL;}}
virtual void Encoded(const webrtc::EncodedImage& encodedImage);
virtual void Decoded(const webrtc::VideoFrame& decodedImage);
virtual void Decoded(const webrtc::I420VideoFrame& decodedImage);
protected:
PacketLossTest(std::string name, std::string description);
PacketLossTest(std::string name,

@@ -49,11 +49,11 @@ protected:
WebRtc_UWord32 _bitRate;
unsigned int _lengthSourceFrame;
unsigned char* _sourceBuffer;
webrtc::VideoFrame _inputVideoBuffer;
webrtc::I420VideoFrame _inputVideoBuffer;
// TODO(mikhal): For now using VideoFrame for encodedBuffer, should use a
// designated class.
webrtc::VideoFrame _encodedVideoBuffer;
webrtc::VideoFrame _decodedVideoBuffer;
webrtc::I420VideoFrame _decodedVideoBuffer;
webrtc::VideoCodec _inst;
std::fstream* _log;
std::string _inname;

@@ -98,7 +98,8 @@ UnitTestEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
_encodedVideoBuffer->VerifyAndAllocate(encodedImage._size);
_encodedVideoBuffer->CopyFrame(encodedImage._size, encodedImage._buffer);
_encodedVideoBuffer->SetLength(encodedImage._length);
// _encodedVideoBuffer->SetFrameType(encodedImage._frameType);
// TODO(mikhal): Update frame type API.
// _encodedVideoBuffer->SetFrameType(encodedImage._frameType);
_encodedVideoBuffer->SetWidth(
(WebRtc_UWord16)encodedImage._encodedWidth);
_encodedVideoBuffer->SetHeight(
@@ -109,12 +110,9 @@ UnitTestEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
return 0;
}

WebRtc_Word32 UnitTestDecodeCompleteCallback::Decoded(VideoFrame& image)
WebRtc_Word32 UnitTestDecodeCompleteCallback::Decoded(I420VideoFrame& image)
{
_decodedVideoBuffer->CopyFrame(image.Length(), image.Buffer());
_decodedVideoBuffer->SetWidth(image.Width());
_decodedVideoBuffer->SetHeight(image.Height());
_decodedVideoBuffer->SetTimeStamp(image.TimeStamp());
_decodedVideoBuffer->CopyFrame(image);
_decodeComplete = true;
return 0;
}
@@ -155,7 +153,7 @@ UnitTest::WaitForEncodedFrame() const
{
if (_encodeCompleteCallback->EncodeComplete())
{
return _encodedVideoBuffer.Length();
return _encodedVideoBuffer.Length();
}
}
return 0;
@@ -169,7 +167,8 @@ UnitTest::WaitForDecodedFrame() const
{
if (_decodeCompleteCallback->DecodeComplete())
{
return _decodedVideoBuffer.Length();
return webrtc::CalcBufferSize(kI420, _decodedVideoBuffer.width(),
_decodedVideoBuffer.height());
}
}
return 0;
@@ -224,12 +223,16 @@ UnitTest::Setup()
_inst.codecSpecific.VP8.denoisingOn = true;

// Get input frame.
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame, _sourceFile)
== _lengthSourceFrame);
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _refFrame);
_inputVideoBuffer.SetWidth(_source->GetWidth());
_inputVideoBuffer.SetHeight(_source->GetHeight());
int size_y = _inst.width * _inst.height;
int size_uv = ((_inst.width + 1) / 2) * ((_inst.height + 1) / 2);
_inputVideoBuffer.CreateFrame(size_y, _refFrame,
size_uv, _refFrame + size_y,
size_uv, _refFrame + size_y + size_uv,
_inst.width, _inst.height,
_inst.width,
(_inst.width + 1) / 2, (_inst.width + 1) / 2);
rewind(_sourceFile);

// Get a reference encoded frame.
@@ -244,7 +247,9 @@ UnitTest::Setup()
memcpy(_refEncFrame, _encodedVideoBuffer.Buffer(), _refEncFrameLength);

// Get a reference decoded frame.
_decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_decodedVideoBuffer.CreateEmptyFrame(_inst.width, _inst.height, _inst.width,
(_inst.width + 1) / 2,
(_inst.width + 1) / 2);
EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
ASSERT_FALSE(SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK);

@@ -255,12 +260,15 @@ UnitTest::Setup()
if (i > 0)
{
// Insert yet another frame
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame,
_sourceFile) == _lengthSourceFrame);
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _refFrame);
_inputVideoBuffer.SetWidth(_source->GetWidth());
_inputVideoBuffer.SetHeight(_source->GetHeight());
_inputVideoBuffer.CreateFrame(size_y, _refFrame,
size_uv, _refFrame + size_y,
size_uv, _refFrame + size_y + size_uv,
_inst.width, _inst.height,
_inst.width,
(_inst.width + 1) / 2,
(_inst.width + 1) / 2);
_encoder->Encode(_inputVideoBuffer, NULL, NULL);
ASSERT_TRUE(WaitForEncodedFrame() > 0);
}
@@ -274,7 +282,7 @@ UnitTest::Setup()
}
rewind(_sourceFile);
EXPECT_TRUE(frameLength == _lengthSourceFrame);
memcpy(_refDecFrame, _decodedVideoBuffer.Buffer(), _lengthSourceFrame);
ExtractBuffer(_decodedVideoBuffer, _lengthSourceFrame, _refDecFrame);
}

void
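Setup() now sizes the decode target with CreateEmptyFrame() and reads the decoded pixels back with ExtractBuffer() rather than memcpy'ing from Buffer(). A small sketch of that allocate-then-extract flow under the same assumptions; the function and parameter names are placeholders (the test itself works on its _decodedVideoBuffer and _refDecFrame members) and error handling is left out.

#include <stdint.h>

#include "common_video/libyuv/include/webrtc_libyuv.h"  // ExtractBuffer

// Sketch: allocate a decode target of the negotiated size, then flatten the
// decoded planes into a raw reference buffer once decoding has finished.
void PrepareTargetAndExtract(int width, int height,
                             webrtc::I420VideoFrame* decode_target,
                             uint8_t* reference, int reference_size) {
  const int half_width = (width + 1) / 2;
  decode_target->CreateEmptyFrame(width, height,
                                  width, half_width, half_width);  // strides Y, U, V
  // ... the decoder fills *decode_target through its DecodedImageCallback ...
  webrtc::ExtractBuffer(*decode_target, reference_size, reference);
}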
@@ -342,7 +350,7 @@ UnitTest::Perform()
{
UnitTest::Setup();
int frameLength;
VideoFrame inputImage;
I420VideoFrame inputImage;
EncodedImage encodedImage;

//----- Encoder parameter tests -----
@@ -409,17 +417,20 @@ UnitTest::Perform()
EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);

//-- Encode() errors --

// inputVideoBuffer unallocated.
_inputVideoBuffer.Free();
inputImage.Free();
inputImage.ResetSize();
EXPECT_TRUE(_encoder->Encode(inputImage, NULL, NULL) ==
WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
_inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _refFrame);
_inputVideoBuffer.SetWidth(_source->GetWidth());
_inputVideoBuffer.SetHeight(_source->GetHeight());

int width = _source->GetWidth();
int half_width = (width + 1) / 2;
int height = _source->GetHeight();
int half_height = (height + 1) / 2;
int size_y = width * height;
int size_uv = half_width * half_height;
_inputVideoBuffer.CreateFrame(size_y, _refFrame,
size_uv, _refFrame + size_y,
size_uv, _refFrame + size_y + size_uv,
width, height,
width, half_width, half_width);
//----- Encoder stress tests -----

// Vary frame rate and I-frame request.
@@ -539,8 +550,12 @@ UnitTest::Perform()
_decoder->Decode(encodedImage, false, NULL);
frameLength = WaitForDecodedFrame();
}
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
_refDecFrame, _lengthSourceFrame) == true);
unsigned int length = CalcBufferSize(kI420, width, height);
scoped_array<uint8_t> decoded_buffer(new uint8_t[length]);
ExtractBuffer(_decodedVideoBuffer, _lengthSourceFrame,
decoded_buffer.get());
EXPECT_TRUE(CheckIfBitExact(decoded_buffer.get(), frameLength, _refDecFrame,
_lengthSourceFrame) == true);

// Reset then decode.
EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
@@ -551,8 +566,10 @@ UnitTest::Perform()
_decoder->Decode(encodedImage, false, NULL);
frameLength = WaitForDecodedFrame();
}
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
_refDecFrame, _lengthSourceFrame) == true);
ExtractBuffer(_decodedVideoBuffer, _lengthSourceFrame,
decoded_buffer.get());
EXPECT_TRUE(CheckIfBitExact(decoded_buffer.get(), frameLength,
_refDecFrame, _lengthSourceFrame) == true);

// Decode with other size, reset, then decode with original size again
// to verify that decoder is reset to a "fresh" state upon Reset().
@@ -565,20 +582,25 @@ UnitTest::Perform()
memcpy(&tempInst, &_inst, sizeof(VideoCodec));
tempInst.width /= 2;
tempInst.height /= 2;
int tmpHalfWidth = (tempInst.width + 1) / 2;
int tmpHalfHeight = (tempInst.height + 1) / 2;

int tmpSizeY = tempInst.width * tempInst.height;
int tmpSizeUv = tmpHalfWidth * tmpHalfHeight;

// Encode reduced (quarter) frame size.
EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
EXPECT_TRUE(_encoder->InitEncode(&tempInst, 1, 1440) ==
WEBRTC_VIDEO_CODEC_OK);
VideoFrame tempInput;
unsigned int tmpLength = _inputVideoBuffer.Length() / 4;
tempInput.CopyFrame(tmpLength, _inputVideoBuffer.Buffer());
tempInput.SetWidth(tempInst.width);
tempInput.SetHeight(tempInst.height);
webrtc::I420VideoFrame tempInput;
tempInput.CreateFrame(tmpSizeY, _inputVideoBuffer.buffer(kYPlane),
tmpSizeUv, _inputVideoBuffer.buffer(kUPlane),
tmpSizeUv, _inputVideoBuffer.buffer(kVPlane),
tempInst.width, tempInst.height,
tempInst.width, tmpHalfWidth, tmpHalfWidth);
_encoder->Encode(tempInput, NULL, NULL);
frameLength = WaitForEncodedFrame();
EXPECT_TRUE(frameLength > 0);
tempInput.Free();
// Reset then decode.
EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
frameLength = 0;
@@ -608,9 +630,11 @@ UnitTest::Perform()
}

// check that decoded frame matches with reference
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
_refDecFrame, _lengthSourceFrame) == true);

unsigned int length = CalcBufferSize(kI420, width, height);
scoped_array<uint8_t> decoded_buffer(new uint8_t[length]);
ExtractBuffer(_decodedVideoBuffer, length, decoded_buffer.get());
EXPECT_TRUE(CheckIfBitExact(decoded_buffer.get(), length,
_refDecFrame, _lengthSourceFrame) == true);
}

// Release then decode.
@@ -624,8 +648,9 @@ UnitTest::Perform()
_decoder->Decode(encodedImage, false, NULL);
frameLength = WaitForDecodedFrame();
}
EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.Buffer(), frameLength,
_refDecFrame, _lengthSourceFrame) == true);
ExtractBuffer(_decodedVideoBuffer, length, decoded_buffer.get());
EXPECT_TRUE(CheckIfBitExact(decoded_buffer.get(), frameLength,
_refDecFrame, _lengthSourceFrame) == true);
_encodedVideoBuffer.SetLength(0);

delete [] tmpBuf;
@@ -644,19 +669,24 @@ UnitTest::Perform()
frames = 0;
int frameDelay = 0;
int encTimeStamp;
_decodedVideoBuffer.SetTimeStamp(0);
_decodedVideoBuffer.set_timestamp(0);
while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) ==
_lengthSourceFrame)
{
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
_inputVideoBuffer.SetTimeStamp(frames);
_inputVideoBuffer.CreateFrame(size_y, _sourceBuffer,
size_uv, _sourceBuffer + size_y,
size_uv, _sourceBuffer + size_y + size_uv,
width, height,
width, half_width, half_width);

_inputVideoBuffer.set_timestamp(frames);
ASSERT_TRUE(_encoder->Encode(_inputVideoBuffer, NULL, NULL) ==
WEBRTC_VIDEO_CODEC_OK);
frameLength = WaitForEncodedFrame();
//ASSERT_TRUE(frameLength);
EXPECT_TRUE(frameLength > 0);
encTimeStamp = _encodedVideoBuffer.TimeStamp();
EXPECT_TRUE(_inputVideoBuffer.TimeStamp() ==
EXPECT_TRUE(_inputVideoBuffer.timestamp() ==
static_cast<unsigned>(encTimeStamp));

frameLength = Decode();
@@ -670,7 +700,7 @@ UnitTest::Perform()
{
encTimeStamp = 0;
}
EXPECT_TRUE(_decodedVideoBuffer.TimeStamp() ==
EXPECT_TRUE(_decodedVideoBuffer.timestamp() ==
static_cast<unsigned>(encTimeStamp));
frames++;
}
@@ -678,7 +708,6 @@ UnitTest::Perform()
rewind(_sourceFile);

RateControlTests();
inputImage.Free();

Teardown();
}
@@ -719,13 +748,22 @@ UnitTest::RateControlTests()
{
CodecSpecific_SetBitrate(_bitRate, _inst.maxFramerate);
}

int width = _source->GetWidth();
int half_width = (width + 1) / 2;
int height = _source->GetHeight();
int half_height = (height + 1) / 2;
int size_y = width * height;
int size_uv = half_width * half_height;
while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) ==
_lengthSourceFrame)
{
_inputVideoBuffer.CopyFrame(_lengthSourceFrame, _sourceBuffer);
_inputVideoBuffer.SetTimeStamp(_inputVideoBuffer.TimeStamp() +
static_cast<WebRtc_UWord32>(9e4 /
_inputVideoBuffer.CreateFrame(size_y, _sourceBuffer,
size_uv, _sourceBuffer + size_y,
size_uv, _sourceBuffer + size_y +
size_uv,
width, height,
width, half_width, half_width);
_inputVideoBuffer.set_timestamp(static_cast<WebRtc_UWord32>(9e4 /
static_cast<float>(_inst.maxFramerate)));
ASSERT_EQ(_encoder->Encode(_inputVideoBuffer, NULL, NULL),
WEBRTC_VIDEO_CODEC_OK);

@@ -94,12 +94,12 @@ private:
class UnitTestDecodeCompleteCallback : public webrtc::DecodedImageCallback
{
public:
UnitTestDecodeCompleteCallback(webrtc::VideoFrame* buffer) :
UnitTestDecodeCompleteCallback(webrtc::I420VideoFrame* buffer) :
_decodedVideoBuffer(buffer), _decodeComplete(false) {}
WebRtc_Word32 Decoded(webrtc::VideoFrame& image);
WebRtc_Word32 Decoded(webrtc::I420VideoFrame& image);
bool DecodeComplete();
private:
webrtc::VideoFrame* _decodedVideoBuffer;
webrtc::I420VideoFrame* _decodedVideoBuffer;
bool _decodeComplete;
};