Switching to I420VideoFrame

Review URL: https://webrtc-codereview.appspot.com/922004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@2983 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: mikhal@webrtc.org
Date:   2012-10-24 18:33:04 +00:00
Parent: 6392657643
Commit: 9fedff7c17

152 changed files with 2076 additions and 1862 deletions
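Every hunk below repeats the same migration: the old VideoFrame held a frame as one flat buffer addressed through Buffer() and _frameLength, while I420VideoFrame stores the Y, U and V planes separately. The tests therefore now read raw I420 data into a scratch buffer, wrap it per plane with CreateFrame(), access planes through buffer(kYPlane) and friends, and write frames out through PrintI420VideoFrame(). A minimal sketch of the pattern, assembled from the hunks below (all identifiers are taken from the diff; outputFile is a placeholder FILE* for illustration):

    // Read one I420 frame into a scratch buffer instead of _videoFrame.Buffer().
    scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
    ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length,
                                   _sourceFile));
    // Wrap the buffer as three planes. _size_y = _width * _height and
    // _size_uv = _half_width * (_height / 2); the last three arguments are
    // the Y, U and V strides.
    _videoFrame.CreateFrame(_size_y, video_buffer.get(),
                            _size_uv, video_buffer.get() + _size_y,
                            _size_uv, video_buffer.get() + _size_y + _size_uv,
                            _width, _height,
                            _width, _half_width, _half_width);
    // Per-plane access replaces pointer arithmetic on the flat buffer.
    uint8_t* y_plane = _videoFrame.buffer(kYPlane);  // was _videoFrame.Buffer()
    // Writing a frame to disk goes through a helper instead of fwrite().
    if (PrintI420VideoFrame(_videoFrame, outputFile) < 0)
        return;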

View File

@@ -18,9 +18,16 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
     WebRtc_UWord32 frameNum = 0;
     WebRtc_Word32 brightnessWarning = 0;
     WebRtc_UWord32 warningCount = 0;
-    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) ==
-           _frameLength)
+    scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
+    while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
+           _frame_length)
     {
+        _videoFrame.CreateFrame(_size_y, video_buffer.get(),
+                                _size_uv, video_buffer.get() + _size_y,
+                                _size_uv, video_buffer.get() + _size_y +
+                                _size_uv,
+                                _width, _height,
+                                _width, _half_width, _half_width);
         frameNum++;
         VideoProcessingModule::FrameStats stats;
         ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
@@ -42,15 +49,21 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
     rewind(_sourceFile);
     frameNum = 0;
     warningCount = 0;
-    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) ==
-           _frameLength &&
+    while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
+           _frame_length &&
            frameNum < 300)
     {
+        _videoFrame.CreateFrame(_size_y, video_buffer.get(),
+                                _size_uv, video_buffer.get() + _size_y,
+                                _size_uv, video_buffer.get() + _size_y +
+                                _size_uv,
+                                _width, _height,
+                                _width, _half_width, _half_width);
         frameNum++;
-        WebRtc_UWord8* frame = _videoFrame.Buffer();
+        WebRtc_UWord8* frame = _videoFrame.buffer(kYPlane);
         WebRtc_UWord32 yTmp = 0;
-        for (WebRtc_UWord32 yIdx = 0; yIdx < _width * _height; yIdx++)
+        for (int yIdx = 0; yIdx < _width * _height; yIdx++)
        {
            yTmp = frame[yIdx] << 1;
            if (yTmp > 255)
@@ -80,17 +93,23 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
     rewind(_sourceFile);
     frameNum = 0;
     warningCount = 0;
-    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength &&
-           frameNum < 300)
+    while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
+           _frame_length && frameNum < 300)
     {
+        _videoFrame.CreateFrame(_size_y, video_buffer.get(),
+                                _size_uv, video_buffer.get() + _size_y,
+                                _size_uv, video_buffer.get() + _size_y +
+                                _size_uv,
+                                _width, _height,
+                                _width, _half_width, _half_width);
         frameNum++;
-        WebRtc_UWord8* frame = _videoFrame.Buffer();
+        WebRtc_UWord8* y_plane = _videoFrame.buffer(kYPlane);
         WebRtc_Word32 yTmp = 0;
-        for (WebRtc_UWord32 yIdx = 0; yIdx < _width * _height; yIdx++)
+        for (int yIdx = 0; yIdx < _width * _height; yIdx++)
         {
-            yTmp = frame[yIdx] >> 1;
-            frame[yIdx] = static_cast<WebRtc_UWord8>(yTmp);
+            yTmp = y_plane[yIdx] >> 1;
+            y_plane[yIdx] = static_cast<WebRtc_UWord8>(yTmp);
         }
         VideoProcessingModule::FrameStats stats;

View File

@@ -39,15 +39,22 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
     ASSERT_TRUE(modFile != NULL) << "Could not open output file.\n";
     WebRtc_UWord32 frameNum = 0;
-    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
+    scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
+    while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
+           _frame_length)
     {
+        _videoFrame.CreateFrame(_size_y, video_buffer.get(),
+                                _size_uv, video_buffer.get() + _size_y,
+                                _size_uv, video_buffer.get() + _size_y +
+                                _size_uv,
+                                _width, _height,
+                                _width, _half_width, _half_width);
         frameNum++;
         t0 = TickTime::Now();
         ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&_videoFrame));
         t1 = TickTime::Now();
         accTicks += t1 - t0;
-        if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
-                   modFile) != _frameLength) {
+        if (PrintI420VideoFrame(_videoFrame, modFile) < 0) {
             return;
         }
     }
@@ -76,44 +83,70 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
     rewind(modFile);
     ASSERT_EQ(refLen, testLen) << "File lengths differ.";
-    VideoFrame refVideoFrame;
-    refVideoFrame.VerifyAndAllocate(_frameLength);
-    refVideoFrame.SetWidth(_width);
-    refVideoFrame.SetHeight(_height);
+    I420VideoFrame refVideoFrame;
     // Compare frame-by-frame.
-    while (fread(_videoFrame.Buffer(), 1, _frameLength, modFile) == _frameLength)
+    scoped_array<uint8_t> ref_buffer(new uint8_t[_frame_length]);
+    while (fread(video_buffer.get(), 1, _frame_length, modFile) ==
+           _frame_length)
     {
-        ASSERT_EQ(_frameLength, fread(refVideoFrame.Buffer(), 1, _frameLength, refFile));
-        EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), refVideoFrame.Buffer(), _frameLength));
+        _videoFrame.CreateFrame(_size_y, video_buffer.get(),
+                                _size_uv, video_buffer.get() + _size_y,
+                                _size_uv, video_buffer.get() + _size_y +
+                                _size_uv,
+                                _width, _height,
+                                _width, _half_width, _half_width);
+        ASSERT_EQ(_frame_length, fread(ref_buffer.get(), 1, _frame_length,
+                                       refFile));
+        refVideoFrame.CreateFrame(_size_y, ref_buffer.get(),
+                                  _size_uv, ref_buffer.get() + _size_y,
+                                  _size_uv, ref_buffer.get() + _size_y +
+                                  _size_uv,
+                                  _width, _height,
+                                  _width, _half_width, _half_width);
+        EXPECT_EQ(0, memcmp(_videoFrame.buffer(kYPlane),
+                            refVideoFrame.buffer(kYPlane),
+                            _size_y));
+        EXPECT_EQ(0, memcmp(_videoFrame.buffer(kUPlane),
+                            refVideoFrame.buffer(kUPlane),
+                            _size_uv));
+        EXPECT_EQ(0, memcmp(_videoFrame.buffer(kVPlane),
+                            refVideoFrame.buffer(kVPlane),
+                            _size_uv));
     }
     ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";

     // Verify that all color pixels are enhanced, and no luminance values are
     // altered.
-    WebRtc_UWord8 *testFrame = new WebRtc_UWord8[_frameLength];
+    scoped_array<uint8_t> testFrame(new WebRtc_UWord8[_frame_length]);
     // Use value 128 as probe value, since we know that this will be changed
     // in the enhancement.
-    memset(testFrame, 128, _frameLength);
-    VideoFrame testVideoFrame;
-    testVideoFrame.CopyFrame(_frameLength, testFrame);
-    testVideoFrame.SetWidth(_width);
-    testVideoFrame.SetHeight(_height);
+    memset(testFrame.get(), 128, _frame_length);
+    I420VideoFrame testVideoFrame;
+    testVideoFrame.CreateFrame(_size_y, testFrame.get(),
+                               _size_uv, testFrame.get() + _size_y,
+                               _size_uv, testFrame.get() + _size_y + _size_uv,
+                               _width, _height,
+                               _width, _half_width, _half_width);
     ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&testVideoFrame));
-    EXPECT_EQ(0, memcmp(testVideoFrame.Buffer(), testFrame, _width * _height))
+    EXPECT_EQ(0, memcmp(testVideoFrame.buffer(kYPlane), testFrame.get(),
+                        _size_y))
         << "Function is modifying the luminance.";
-    EXPECT_NE(0, memcmp(testVideoFrame.Buffer() + _width * _height,
-              &testFrame[_width * _height], _width * _height / 2)) <<
-              "Function is not modifying all chrominance pixels";
+    EXPECT_NE(0, memcmp(testVideoFrame.buffer(kUPlane),
+                        testFrame.get() + _size_y, _size_uv)) <<
+                        "Function is not modifying all chrominance pixels";
+    EXPECT_NE(0, memcmp(testVideoFrame.buffer(kVPlane),
+                        testFrame.get() + _size_y + _size_uv, _size_uv)) <<
+                        "Function is not modifying all chrominance pixels";

     ASSERT_EQ(0, fclose(refFile));
     ASSERT_EQ(0, fclose(modFile));
-    delete [] testFrame;
 }
 } // namespace webrtc

View File

@@ -23,9 +23,16 @@ TEST_F(VideoProcessingModuleTest, ContentAnalysis)
     _ca_c.Initialize(_width,_height);
     _ca_sse.Initialize(_width,_height);
-    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile)
-           == _frameLength)
+    scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
+    while (fread(video_buffer.get(), 1, _frame_length, _sourceFile)
+           == _frame_length)
     {
+        _videoFrame.CreateFrame(_size_y, video_buffer.get(),
+                                _size_uv, video_buffer.get() + _size_y,
+                                _size_uv, video_buffer.get() + _size_y +
+                                _size_uv,
+                                _width, _height,
+                                _width, _half_width, _half_width);
         _cM_c = _ca_c.ComputeContentMetrics(_videoFrame);
         _cM_SSE = _ca_sse.ComputeContentMetrics(_videoFrame);

View File

@@ -11,6 +11,7 @@
 #include <cstdio>
 #include <cstdlib>
+#include "common_video/libyuv/include/webrtc_libyuv.h"
 #include "modules/video_processing/main/interface/video_processing.h"
 #include "modules/video_processing/main/test/unit_test/unit_test.h"
 #include "system_wrappers/interface/tick_util.h"
@@ -42,6 +43,7 @@ TEST_F(VideoProcessingModuleTest, Deflickering)
         "Could not open output file: " << output_file << "\n";
     printf("\nRun time [us / frame]:\n");
+    scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
     for (WebRtc_UWord32 runIdx = 0; runIdx < NumRuns; runIdx++)
     {
         TickTime t0;
@@ -50,10 +52,17 @@ TEST_F(VideoProcessingModuleTest, Deflickering)
         WebRtc_UWord32 timeStamp = 1;
         frameNum = 0;
-        while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
+        while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
+               _frame_length)
         {
             frameNum++;
-            _videoFrame.SetTimeStamp(timeStamp);
+            _videoFrame.CreateFrame(_size_y, video_buffer.get(),
+                                    _size_uv, video_buffer.get() + _size_y,
+                                    _size_uv, video_buffer.get() + _size_y +
+                                    _size_uv,
+                                    _width, _height,
+                                    _width, _half_width, _half_width);
+            _videoFrame.set_timestamp(timeStamp);
             t0 = TickTime::Now();
             VideoProcessingModule::FrameStats stats;
@@ -64,8 +73,7 @@ TEST_F(VideoProcessingModuleTest, Deflickering)
             if (runIdx == 0)
             {
-                if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
-                           deflickerFile) != _frameLength) {
+                if (PrintI420VideoFrame(_videoFrame, deflickerFile) < 0) {
                     return;
                 }
             }

View File

@@ -11,6 +11,7 @@
 #include <cstdio>
 #include <cstdlib>
+#include "common_video/libyuv/include/webrtc_libyuv.h"
 #include "modules/video_processing/main/interface/video_processing.h"
 #include "modules/video_processing/main/test/unit_test/unit_test.h"
 #include "system_wrappers/interface/tick_util.h"
@@ -47,21 +48,27 @@ TEST_F(VideoProcessingModuleTest, Denoising)
         WebRtc_Word32 modifiedPixels = 0;

         frameNum = 0;
-        while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
+        scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
+        while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
+               _frame_length)
         {
+            _videoFrame.CreateFrame(_size_y, video_buffer.get(),
+                                    _size_uv, video_buffer.get() + _size_y,
+                                    _size_uv,
+                                    video_buffer.get() + _size_y + _size_uv,
+                                    _width, _height,
+                                    _width, _half_width, _half_width);
             frameNum++;
-            WebRtc_UWord8* sourceBuffer = _videoFrame.Buffer();
+            WebRtc_UWord8* sourceBuffer = _videoFrame.buffer(kYPlane);

             // Add noise to a part in video stream
             // Random noise
             // TODO: investigate the effectiveness of this test.

             //for(WebRtc_UWord32 ir = 0; ir < _frameLength; ir++)
             //    sourceBuffer[ir] = 128
-            for (WebRtc_UWord32 ir = 0; ir < _height; ir++)
+            for (int ir = 0; ir < _height; ir++)
             {
                 WebRtc_UWord32 ik = ir * _width;
-                for (WebRtc_UWord32 ic = 0; ic < _width; ic++)
+                for (int ic = 0; ic < _width; ic++)
                 {
                     WebRtc_UWord8 r = rand() % 16;
                     r -= 8;
@@ -92,8 +99,7 @@ TEST_F(VideoProcessingModuleTest, Denoising)
             if (runIdx == 0)
             {
-                if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
-                           noiseFile) != _frameLength) {
+                if (PrintI420VideoFrame(_videoFrame, noiseFile) < 0) {
                     return;
                 }
             }
@@ -105,8 +111,7 @@ TEST_F(VideoProcessingModuleTest, Denoising)
            if (runIdx == 0)
            {
-                if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
-                           denoiseFile) != _frameLength) {
+                if (PrintI420VideoFrame(_videoFrame, denoiseFile) < 0) {
                    return;
                }
            }

View File

@@ -23,16 +23,21 @@ namespace webrtc {
 // quality when the resampled frame is scaled back up/down to the
 // original/source size. |expected_psnr| is set to be ~0.1/0.05dB lower than
 // actual PSNR verified under the same conditions.
-void TestSize(const VideoFrame& sourceFrame, int target_width,
+void TestSize(const I420VideoFrame& sourceFrame, int target_width,
               int target_height, int mode, double expected_psnr,
               VideoProcessingModule* vpm);
+bool CompareFrames(const webrtc::I420VideoFrame& frame1,
+                   const webrtc::I420VideoFrame& frame2);

 VideoProcessingModuleTest::VideoProcessingModuleTest() :
   _vpm(NULL),
   _sourceFile(NULL),
   _width(352),
+  _half_width(_width / 2),
   _height(288),
-  _frameLength(CalcBufferSize(kI420, 352, 288))
+  _size_y(_width * _height),
+  _size_uv(_half_width * _height /2),
+  _frame_length(CalcBufferSize(kI420, _width, _height))
 {
 }
@@ -41,9 +46,8 @@ void VideoProcessingModuleTest::SetUp()
   _vpm = VideoProcessingModule::Create(0);
   ASSERT_TRUE(_vpm != NULL);

-  ASSERT_EQ(0, _videoFrame.VerifyAndAllocate(_frameLength));
-  _videoFrame.SetWidth(_width);
-  _videoFrame.SetHeight(_height);
+  ASSERT_EQ(0, _videoFrame.CreateEmptyFrame(_width, _height, _width,
+                                            _half_width, _half_width));

   const std::string video_file =
       webrtc::test::ResourcePath("foreman_cif", "yuv");
@@ -70,9 +74,9 @@ TEST_F(VideoProcessingModuleTest, HandleNullBuffer)
   VideoProcessingModule::FrameStats stats;
   ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
   // Video frame with unallocated buffer.
-  VideoFrame videoFrame;
-  videoFrame.SetWidth(_width);
-  videoFrame.SetHeight(_height);
+  I420VideoFrame videoFrame;
+  videoFrame.set_width(_width);
+  videoFrame.set_height(_height);

   EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, videoFrame));
@@ -88,12 +92,15 @@ TEST_F(VideoProcessingModuleTest, HandleNullBuffer)
 TEST_F(VideoProcessingModuleTest, HandleBadStats)
 {
   VideoProcessingModule::FrameStats stats;
-  ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
-                                _sourceFile));
-  _videoFrame.SetWidth(_width);
-  _videoFrame.SetHeight(_height);
+  scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
+  ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length,
+                                 _sourceFile));
+  _videoFrame.CreateFrame(_size_y, video_buffer.get(),
+                          _size_uv, video_buffer.get() + _size_y,
+                          _size_uv, video_buffer.get() + _size_y + _size_uv,
+                          _width, _height,
+                          _width, _half_width, _half_width);

   EXPECT_EQ(-1, _vpm->Deflickering(&_videoFrame, &stats));
   EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats));
@@ -104,21 +111,9 @@ TEST_F(VideoProcessingModuleTest, HandleBadSize)
   VideoProcessingModule::FrameStats stats;
   ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));

-  // Bad width
-  _videoFrame.SetWidth(0);
-  EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, _videoFrame));
-  EXPECT_EQ(-1, _vpm->ColorEnhancement(&_videoFrame));
-  EXPECT_EQ(-1, _vpm->Deflickering(&_videoFrame, &stats));
-  EXPECT_EQ(-1, _vpm->Denoising(&_videoFrame));
-  EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats));
   // Bad height
-  _videoFrame.SetWidth(_width);
-  _videoFrame.SetHeight(0);
+  _videoFrame.ResetSize();
+  _videoFrame.set_width(_width);
+  _videoFrame.set_height(0);
   EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, _videoFrame));
   EXPECT_EQ(-1, _vpm->ColorEnhancement(&_videoFrame));
@@ -132,58 +127,73 @@ TEST_F(VideoProcessingModuleTest, HandleBadSize)
   EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->SetTargetResolution(0,0,0));
   EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->SetMaxFrameRate(0));

-  VideoFrame *outFrame = NULL;
+  I420VideoFrame *outFrame = NULL;
   EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->PreprocessFrame(_videoFrame,
                                                        &outFrame));
 }

 TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset)
 {
-  VideoFrame videoFrame2;
+  I420VideoFrame videoFrame2;
   VideoProcessingModule::FrameStats stats;
-  ASSERT_EQ(0, videoFrame2.VerifyAndAllocate(_frameLength));
-  videoFrame2.SetWidth(_width);
-  videoFrame2.SetHeight(_height);
   // Only testing non-static functions here.
-  ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
+  scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
+  ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length,
                                  _sourceFile));
+  ASSERT_EQ(0, _videoFrame.CreateFrame(_size_y, video_buffer.get(),
+               _size_uv, video_buffer.get() + _size_y,
+               _size_uv, video_buffer.get() + _size_y + _size_uv,
+               _width, _height,
+               _width, _half_width, _half_width));
   ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
-  memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
+  ASSERT_EQ(0, videoFrame2.CopyFrame(_videoFrame));
   ASSERT_EQ(0, _vpm->Deflickering(&_videoFrame, &stats));
   _vpm->Reset();
   // Retrieve frame stats again in case Deflickering() has zeroed them.
   ASSERT_EQ(0, _vpm->GetFrameStats(&stats, videoFrame2));
   ASSERT_EQ(0, _vpm->Deflickering(&videoFrame2, &stats));
-  EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(),
-                      _frameLength));
+  EXPECT_TRUE(CompareFrames(_videoFrame, videoFrame2));

-  ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
-                                _sourceFile));
-  memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
+  ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length,
+                                 _sourceFile));
+  _videoFrame.CreateFrame(_size_y, video_buffer.get(),
+                          _size_uv, video_buffer.get() + _size_y,
+                          _size_uv, video_buffer.get() + _size_y + _size_uv,
+                          _width, _height,
+                          _width, _half_width, _half_width);
+  videoFrame2.CopyFrame(_videoFrame);
+  EXPECT_TRUE(CompareFrames(_videoFrame, videoFrame2));
   ASSERT_GE(_vpm->Denoising(&_videoFrame), 0);
   _vpm->Reset();
   ASSERT_GE(_vpm->Denoising(&videoFrame2), 0);
-  EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(),
-                      _frameLength));
+  EXPECT_TRUE(CompareFrames(_videoFrame, videoFrame2));

-  ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
-                                _sourceFile));
-  memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
+  ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length,
+                                 _sourceFile));
+  _videoFrame.CreateFrame(_size_y, video_buffer.get(),
+                          _size_uv, video_buffer.get() + _size_y,
+                          _size_uv, video_buffer.get() + _size_y + _size_uv,
+                          _width, _height,
+                          _width, _half_width, _half_width);
   ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
+  videoFrame2.CopyFrame(_videoFrame);
   ASSERT_EQ(0, _vpm->BrightnessDetection(_videoFrame, stats));
   _vpm->Reset();
   ASSERT_EQ(0, _vpm->BrightnessDetection(videoFrame2, stats));
-  EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(),
-                      _frameLength));
+  EXPECT_TRUE(CompareFrames(_videoFrame, videoFrame2));
 }

 TEST_F(VideoProcessingModuleTest, FrameStats)
 {
   VideoProcessingModule::FrameStats stats;
-  ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
-                                _sourceFile));
+  scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
+  ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length,
+                                 _sourceFile));
+  _videoFrame.CreateFrame(_size_y, video_buffer.get(),
+                          _size_uv, video_buffer.get() + _size_y,
+                          _size_uv, video_buffer.get() + _size_y + _size_uv,
+                          _width, _height,
+                          _width, _half_width, _half_width);

   EXPECT_FALSE(_vpm->ValidFrameStats(stats));
   EXPECT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
@@ -214,7 +224,7 @@ TEST_F(VideoProcessingModuleTest, PreprocessorLogic)
   // Disable spatial sampling
   _vpm->SetInputFrameResampleMode(kNoRescaling);
   ASSERT_EQ(VPM_OK, _vpm->SetTargetResolution(100, 100, 30));
-  VideoFrame *outFrame = NULL;
+  I420VideoFrame *outFrame = NULL;
   ASSERT_EQ(VPM_OK, _vpm->PreprocessFrame(_videoFrame, &outFrame));
   // No rescaling=> output frame = NULL
   ASSERT_TRUE(outFrame == NULL);
@@ -230,9 +240,6 @@ TEST_F(VideoProcessingModuleTest, Resampler)
   TickTime t0;
   TickTime t1;
   TickInterval accTicks;
-  WebRtc_Word32 height = 288;
-  WebRtc_Word32 width = 352;
-  WebRtc_Word32 lengthSourceFrame = width*height*3/2;

   rewind(_sourceFile);
   ASSERT_TRUE(_sourceFile != NULL) <<
@@ -244,12 +251,15 @@ TEST_F(VideoProcessingModuleTest, Resampler)
   _vpm->EnableTemporalDecimation(false);

   // Reading test frame
-  VideoFrame sourceFrame;
-  ASSERT_EQ(0, sourceFrame.VerifyAndAllocate(lengthSourceFrame));
-  EXPECT_GT(fread(sourceFrame.Buffer(), 1, lengthSourceFrame, _sourceFile), 0u);
-  ASSERT_EQ(0, sourceFrame.SetLength(lengthSourceFrame));
-  sourceFrame.SetHeight(height);
-  sourceFrame.SetWidth(width);
+  I420VideoFrame sourceFrame;
+  scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
+  ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length,
+                                 _sourceFile));
+  sourceFrame.CreateFrame(_size_y, video_buffer.get(),
+                          _size_uv, video_buffer.get() + _size_y,
+                          _size_uv, video_buffer.get() + _size_y + _size_uv,
+                          _width, _height,
+                          _width, _half_width, _half_width);

   for (WebRtc_UWord32 runIdx = 0; runIdx < NumRuns; runIdx++)
   {
@@ -282,8 +292,6 @@ TEST_F(VideoProcessingModuleTest, Resampler)
     avgRuntime += accTicks.Microseconds();
   }

-  sourceFrame.Free();
-
   printf("\nAverage run time = %d us / frame\n",
          //static_cast<int>(avgRuntime / frameNum / NumRuns));
          static_cast<int>(avgRuntime));
@@ -292,12 +300,12 @@ TEST_F(VideoProcessingModuleTest, Resampler)
          static_cast<int>(minRuntime));
 }

-void TestSize(const VideoFrame& source_frame, int target_width,
+void TestSize(const I420VideoFrame& source_frame, int target_width,
               int target_height, int mode, double expected_psnr,
               VideoProcessingModule* vpm) {
-  int source_width = source_frame.Width();
-  int source_height = source_frame.Height();
-  VideoFrame* out_frame = NULL;
+  int source_width = source_frame.width();
+  int source_height = source_frame.height();
+  I420VideoFrame* out_frame = NULL;

   ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(target_width, target_height, 30));
   ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(source_frame, &out_frame));
@@ -309,12 +317,6 @@ void TestSize(const VideoFrame& source_frame, int target_width,
   // (3) write out the processed frame for viewing.
   if (target_width != static_cast<int>(source_width) ||
       target_height != static_cast<int>(source_height)) {
-    int target_half_width = (target_width + 1) >> 1;
-    int target_half_height = (target_height + 1) >> 1;
-    int required_size_resampled = target_width * target_height +
-        2 * (target_half_width * target_half_height);
-    ASSERT_EQ(required_size_resampled, static_cast<int>(out_frame->Length()));
-
     // Write the processed frame to file for visual inspection.
     std::ostringstream filename;
     filename << webrtc::test::OutputPath() << "Resampler_"<< mode << "_" <<
@@ -323,15 +325,14 @@ void TestSize(const VideoFrame& source_frame, int target_width,
     std::cout << "Watch " << filename.str() << " and verify that it is okay."
               << std::endl;
     FILE* stand_alone_file = fopen(filename.str().c_str(), "wb");
-    if (fwrite(out_frame->Buffer(), 1,
-               out_frame->Length(), stand_alone_file) != out_frame->Length()) {
+    if (PrintI420VideoFrame(*out_frame, stand_alone_file) < 0) {
       fprintf(stderr, "Failed to write frame for scaling to width/height: "
              " %d %d \n", target_width, target_height);
       return;
     }
     fclose(stand_alone_file);

-    VideoFrame resampled_source_frame;
+    I420VideoFrame resampled_source_frame;
     resampled_source_frame.CopyFrame(*out_frame);

     // Scale |resampled_source_frame| back to original/source size.
@@ -349,24 +350,36 @@ void TestSize(const VideoFrame& source_frame, int target_width,
     std::cout << "Watch " << filename2.str() << " and verify that it is okay."
               << std::endl;
     stand_alone_file = fopen(filename2.str().c_str(), "wb");
-    if (fwrite(out_frame->Buffer(), 1,
-               out_frame->Length(), stand_alone_file) != out_frame->Length()) {
+    if (PrintI420VideoFrame(*out_frame, stand_alone_file) < 0) {
       fprintf(stderr, "Failed to write frame for scaling to width/height "
-             "%d %d \n", source_width, source_height);
+              "%d %d \n", source_width, source_height);
       return;
     }
     fclose(stand_alone_file);

     // Compute the PSNR and check expectation.
-    double psnr = I420PSNR(source_frame.Buffer(), out_frame->Buffer(),
-                           source_width, source_height);
+    double psnr = I420PSNR(&source_frame, out_frame);
     EXPECT_GT(psnr, expected_psnr);
     printf("PSNR: %f. PSNR is between source of size %d %d, and a modified "
            "source which is scaled down/up to: %d %d, and back to source size \n",
            psnr, source_width, source_height, target_width, target_height);
-
-    resampled_source_frame.Free();
   }
 }

+bool CompareFrames(const webrtc::I420VideoFrame& frame1,
+                   const webrtc::I420VideoFrame& frame2) {
+  for (int plane = 0; plane < webrtc::kNumOfPlanes; plane ++) {
+    webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane);
+    int allocated_size1 = frame1.allocated_size(plane_type);
+    int allocated_size2 = frame2.allocated_size(plane_type);
+    if (allocated_size1 != allocated_size2)
+      return false;
+    const uint8_t* plane_buffer1 = frame1.buffer(plane_type);
+    const uint8_t* plane_buffer2 = frame2.buffer(plane_type);
+    if (memcmp(plane_buffer1, plane_buffer2, allocated_size1))
+      return false;
+  }
+  return true;
+}
 } // namespace webrtc

View File

@@ -36,10 +36,13 @@ protected:
   }

   VideoProcessingModule* _vpm;
   FILE* _sourceFile;
-  VideoFrame _videoFrame;
-  const WebRtc_UWord32 _width;
-  const WebRtc_UWord32 _height;
-  const WebRtc_UWord32 _frameLength;
+  I420VideoFrame _videoFrame;
+  const int _width;
+  const int _half_width;
+  const int _height;
+  const int _size_y;
+  const int _size_uv;
+  const unsigned int _frame_length;
 };

 } // namespace webrtc