Switching to I420VideoFrame

Review URL: https://webrtc-codereview.appspot.com/922004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@2983 4adac7df-926f-26a2-2b94-8c16560cd09d
Author: mikhal@webrtc.org
Date:   2012-10-24 18:33:04 +00:00
Commit: 9fedff7c17 (parent 6392657643)
152 changed files with 2076 additions and 1862 deletions

View File

@ -18,6 +18,7 @@
#ifndef WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_H
#define WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_H
#include "common_video/interface/i420_video_frame.h"
#include "module.h"
#include "module_common_types.h"
#include "video_processing_defines.h"
@ -120,7 +121,7 @@ public:
\return 0 on success, -1 on failure.
*/
static WebRtc_Word32 GetFrameStats(FrameStats* stats,
const VideoFrame& frame);
const I420VideoFrame& frame);
/**
Checks the validity of a FrameStats struct. Currently, valid implies only
@ -148,7 +149,7 @@ public:
\param[in,out] frame
Pointer to the video frame.
*/
static WebRtc_Word32 ColorEnhancement(VideoFrame* frame);
static WebRtc_Word32 ColorEnhancement(I420VideoFrame* frame);
/**
Increases/decreases the luminance value.
@ -162,7 +163,7 @@ public:
\return 0 on success, -1 on failure.
*/
static WebRtc_Word32 Brighten(VideoFrame* frame, int delta);
static WebRtc_Word32 Brighten(I420VideoFrame* frame, int delta);
/**
Detects and removes camera flicker from a video stream. Every frame from
@ -179,7 +180,7 @@ public:
\return 0 on success, -1 on failure.
*/
virtual WebRtc_Word32 Deflickering(VideoFrame* frame,
virtual WebRtc_Word32 Deflickering(I420VideoFrame* frame,
FrameStats* stats) = 0;
/**
@ -191,7 +192,7 @@ public:
\return The number of modified pixels on success, -1 on failure.
*/
virtual WebRtc_Word32 Denoising(VideoFrame* frame) = 0;
virtual WebRtc_Word32 Denoising(I420VideoFrame* frame) = 0;
/**
Detects if a video frame is excessively bright or dark. Returns a
@ -206,7 +207,7 @@ public:
\return A member of BrightnessWarning on success, -1 on error
*/
virtual WebRtc_Word32 BrightnessDetection(const VideoFrame& frame,
virtual WebRtc_Word32 BrightnessDetection(const I420VideoFrame& frame,
const FrameStats& stats) = 0;
/**
@ -283,8 +284,8 @@ public:
\return VPM_OK on success, a negative value on error (see error codes)
*/
virtual WebRtc_Word32 PreprocessFrame(const VideoFrame& frame,
VideoFrame** processedFrame) = 0;
virtual WebRtc_Word32 PreprocessFrame(const I420VideoFrame& frame,
I420VideoFrame** processedFrame) = 0;
/**
Return content metrics for the last processed frame

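Taken together, these signature changes mean callers now hand the module I420VideoFrame objects throughout. A minimal caller-side sketch (illustrative only; it assumes the signatures above and an already-populated frame, with error handling abbreviated):

#include "common_video/interface/i420_video_frame.h"
#include "modules/video_processing/main/interface/video_processing.h"

int AnalyzeAndClean(webrtc::VideoProcessingModule* vpm,
                    webrtc::I420VideoFrame* frame) {
  webrtc::VideoProcessingModule::FrameStats stats;
  // Static helpers and member functions alike now take I420VideoFrame.
  if (webrtc::VideoProcessingModule::GetFrameStats(&stats, *frame) != 0)
    return -1;
  if (vpm->Deflickering(frame, &stats) != 0)
    return -1;
  // Deflickering may invalidate the stats; refresh before reusing them
  // (the unit tests below do the same).
  if (webrtc::VideoProcessingModule::GetFrameStats(&stats, *frame) != 0)
    return -1;
  return vpm->BrightnessDetection(*frame, stats) >= 0 ? 0 : -1;
}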
View File

@ -17,21 +17,21 @@
namespace webrtc {
namespace VideoProcessing {
WebRtc_Word32 Brighten(VideoFrame* frame, int delta) {
WebRtc_Word32 Brighten(I420VideoFrame* frame, int delta) {
assert(frame);
if (frame->Buffer() == NULL) {
if (frame->IsZeroSize()) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
"Null frame pointer");
"zero size frame");
return VPM_PARAMETER_ERROR;
}
if (frame->Width() <= 0 || frame->Height() <= 0) {
if (frame->width() <= 0 || frame->height() <= 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
"Invalid frame size");
return VPM_PARAMETER_ERROR;
}
int numPixels = frame->Width() * frame->Height();
int numPixels = frame->width() * frame->height();
int lookUp[256];
for (int i = 0; i < 256; i++) {
@ -39,7 +39,7 @@ WebRtc_Word32 Brighten(VideoFrame* frame, int delta) {
lookUp[i] = ((((val < 0) ? 0 : val) > 255) ? 255 : val);
}
WebRtc_UWord8* tempPtr = frame->Buffer();
WebRtc_UWord8* tempPtr = frame->buffer(kYPlane);
for (int i = 0; i < numPixels; i++) {
*tempPtr = static_cast<WebRtc_UWord8>(lookUp[*tempPtr]);

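Brighten now walks only the Y plane through a lookup table. A small standalone sketch of the same idea, with an explicit two-sided clamp (the one-line ternary in the context above returns val, not 0, when val < 0, so spelling the clamp out is safer):

#include <algorithm>
#include <stdint.h>

// Apply |delta| to every luma sample via a precomputed lookup table.
void BrightenYPlane(uint8_t* y_plane, int num_pixels, int delta) {
  uint8_t look_up[256];
  for (int i = 0; i < 256; ++i) {
    // Clamp on both sides; i + delta can be negative or exceed 255.
    look_up[i] = static_cast<uint8_t>(std::min(255, std::max(0, i + delta)));
  }
  for (int i = 0; i < num_pixels; ++i)
    y_plane[i] = look_up[y_plane[i]];
}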
View File

@ -17,7 +17,7 @@
namespace webrtc {
namespace VideoProcessing {
WebRtc_Word32 Brighten(VideoFrame* frame, int delta);
WebRtc_Word32 Brighten(I420VideoFrame* frame, int delta);
} // namespace VideoProcessing
} // namespace webrtc

View File

@ -41,25 +41,18 @@ VPMBrightnessDetection::Reset()
}
WebRtc_Word32
VPMBrightnessDetection::ProcessFrame(const VideoFrame& frame,
VPMBrightnessDetection::ProcessFrame(const I420VideoFrame& frame,
const VideoProcessingModule::FrameStats&
stats)
{
if (frame.Buffer() == NULL)
if (frame.IsZeroSize())
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Null frame pointer");
return VPM_PARAMETER_ERROR;
}
int width = frame.Width();
int height = frame.Height();
if (width == 0 || height == 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Invalid frame size");
return VPM_PARAMETER_ERROR;
}
int width = frame.width();
int height = frame.height();
if (!VideoProcessingModule::ValidFrameStats(stats))
{
@ -93,11 +86,11 @@ VPMBrightnessDetection::ProcessFrame(const VideoFrame& frame,
if (stats.mean < 90 || stats.mean > 170)
{
// Standard deviation of Y
const uint8_t* buffer = frame.buffer(kYPlane);
float stdY = 0;
uint8_t* buffer = frame.Buffer();
for (int h = 0; h < height; h += (1 << stats.subSamplHeight))
{
WebRtc_UWord32 row = h*width;
int row = h*width;
for (int w = 0; w < width; w += (1 << stats.subSamplWidth))
{
stdY += (buffer[w + row] - stats.mean) * (buffer[w + row] -

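The standard deviation of Y is now computed straight off the const Y-plane pointer with power-of-two subsampling. The same computation as a standalone sketch (names are assumed, and the stride is taken as width, matching the loops above):

#include <math.h>
#include <stdint.h>

// Subsampled standard deviation of luma around a precomputed mean.
float SubsampledStdDevY(const uint8_t* y_plane, int width, int height,
                        int sub_h, int sub_w, float mean) {
  float acc = 0;
  int count = 0;
  for (int h = 0; h < height; h += (1 << sub_h)) {
    int row = h * width;
    for (int w = 0; w < width; w += (1 << sub_w)) {
      float d = y_plane[row + w] - mean;
      acc += d * d;
      count++;
    }
  }
  return (count > 0) ? sqrtf(acc / count) : 0.0f;
}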
View File

@ -29,7 +29,7 @@ public:
void Reset();
WebRtc_Word32 ProcessFrame(const VideoFrame& frame,
WebRtc_Word32 ProcessFrame(const I420VideoFrame& frame,
const VideoProcessingModule::FrameStats& stats);
private:

View File

@ -18,38 +18,35 @@ namespace webrtc {
namespace VideoProcessing
{
WebRtc_Word32
ColorEnhancement(VideoFrame* frame)
ColorEnhancement(I420VideoFrame* frame)
{
assert(frame);
// pointers to U and V color pixels
WebRtc_UWord8* ptrU;
WebRtc_UWord8* ptrV;
WebRtc_UWord8 tempChroma;
const unsigned int size_y = frame->Width() * frame->Height();
const unsigned int size_uv = ((frame->Width() + 1) / 2) *
((frame->Height() + 1 ) / 2);
if (frame->Buffer() == NULL)
if (frame->IsZeroSize())
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing,
-1, "Null frame pointer");
return VPM_GENERAL_ERROR;
}
if (frame->Width() == 0 || frame->Height() == 0)
if (frame->width() == 0 || frame->height() == 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing,
-1, "Invalid frame size");
return VPM_GENERAL_ERROR;
}
// set pointers to first U and V pixels (skip luminance)
ptrU = frame->Buffer() + size_y;
ptrV = ptrU + size_uv;
ptrU = frame->buffer(kUPlane);
ptrV = frame->buffer(kVPlane);
int size_uv = ((frame->width() + 1) / 2) * ((frame->height() + 1) / 2);
// loop through all chrominance pixels and modify color
for (unsigned int ix = 0; ix < size_uv; ix++)
for (int ix = 0; ix < size_uv; ix++)
{
tempChroma = colorTable[*ptrU][*ptrV];
*ptrV = colorTable[*ptrV][*ptrU];

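ColorEnhancement now addresses U and V through their own plane pointers, so the per-plane size follows I420 4:2:0 subsampling directly (odd dimensions round up):

int size_y  = width * height;                          // luma samples
int size_uv = ((width + 1) / 2) * ((height + 1) / 2);  // each chroma plane
// e.g. CIF 352x288: size_y = 101376, size_uv = 176 * 144 = 25344,
// so a full frame is 101376 + 2 * 25344 = 152064 bytes.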
View File

@ -21,7 +21,7 @@ namespace webrtc {
namespace VideoProcessing
{
WebRtc_Word32 ColorEnhancement(VideoFrame* frame);
WebRtc_Word32 ColorEnhancement(I420VideoFrame* frame);
}
} //namespace

View File

@ -56,25 +56,23 @@ VPMContentAnalysis::~VPMContentAnalysis()
VideoContentMetrics*
VPMContentAnalysis::ComputeContentMetrics(const VideoFrame& inputFrame)
VPMContentAnalysis::ComputeContentMetrics(const I420VideoFrame& inputFrame)
{
if (inputFrame.Buffer() == NULL)
if (inputFrame.IsZeroSize())
{
return NULL;
}
// Init if needed (native dimension change)
if (_width != static_cast<int>(inputFrame.Width()) ||
_height != static_cast<int>(inputFrame.Height()))
if (_width != inputFrame.width() || _height != inputFrame.height())
{
if (VPM_OK != Initialize(static_cast<int>(inputFrame.Width()),
static_cast<int>(inputFrame.Height())))
if (VPM_OK != Initialize(inputFrame.width(), inputFrame.height()))
{
return NULL;
}
}
_origFrame = inputFrame.Buffer();
// Only interested in the Y plane.
_origFrame = inputFrame.buffer(kYPlane);
// compute spatial metrics: 3 spatial prediction errors
(this->*ComputeSpatialMetrics)();

View File

@ -11,6 +11,7 @@
#ifndef VPM_CONTENT_ANALYSIS_H
#define VPM_CONTENT_ANALYSIS_H
#include "common_video/interface/i420_video_frame.h"
#include "typedefs.h"
#include "module_common_types.h"
#include "video_processing_defines.h"
@ -35,7 +36,8 @@ public:
// Input: new frame
// Return value: pointer to structure containing content Analysis
// metrics or NULL value upon error
VideoContentMetrics* ComputeContentMetrics(const VideoFrame& inputFrame);
VideoContentMetrics* ComputeContentMetrics(const I420VideoFrame&
inputFrame);
// Release all allocated memory
// Output: 0 if OK, negative value upon error

View File

@ -89,7 +89,7 @@ VPMDeflickering::Reset()
}
WebRtc_Word32
VPMDeflickering::ProcessFrame(VideoFrame* frame,
VPMDeflickering::ProcessFrame(I420VideoFrame* frame,
VideoProcessingModule::FrameStats* stats)
{
assert(frame);
@ -103,10 +103,10 @@ VPMDeflickering::ProcessFrame(VideoFrame* frame,
WebRtc_UWord16 tmpUW16;
WebRtc_UWord32 tmpUW32;
int width = frame->Width();
int height = frame->Height();
int width = frame->width();
int height = frame->height();
if (frame->Buffer() == NULL)
if (frame->IsZeroSize())
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Null frame pointer");
@ -114,7 +114,7 @@ VPMDeflickering::ProcessFrame(VideoFrame* frame,
}
// Stricter height check due to subsampling size calculation below.
if (width == 0 || height < 2)
if (height < 2)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Invalid frame size");
@ -128,7 +128,7 @@ VPMDeflickering::ProcessFrame(VideoFrame* frame,
return VPM_GENERAL_ERROR;
}
if (PreDetection(frame->TimeStamp(), *stats) == -1)
if (PreDetection(frame->timestamp(), *stats) == -1)
{
return VPM_GENERAL_ERROR;
}
@ -154,7 +154,7 @@ VPMDeflickering::ProcessFrame(VideoFrame* frame,
for (int i = 0; i < height; i += kDownsamplingFactor)
{
memcpy(ySorted + sortRowIdx * width,
frame->Buffer() + i * width, width);
frame->buffer(kYPlane) + i * width, width);
sortRowIdx++;
}
@ -258,7 +258,7 @@ VPMDeflickering::ProcessFrame(VideoFrame* frame,
}
// Map to the output frame.
uint8_t* buffer = frame->Buffer();
uint8_t* buffer = frame->buffer(kYPlane);
for (WebRtc_UWord32 i = 0; i < ySize; i++)
{
buffer[i] = mapUW8[buffer[i]];

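Deflickering now reads its rows from the Y plane. The row-subsampling copy the hunk above modifies, factored into a sketch (note that indexing by i * width assumes the Y stride equals the width, which holds for the frames these tests create):

#include <string.h>
#include <stdint.h>

// Gather every |downsampling_factor|-th luma row into |y_sorted| for the
// histogram work.
void GatherSubsampledRows(const uint8_t* y_plane, int width, int height,
                          int downsampling_factor, uint8_t* y_sorted) {
  int sort_row_idx = 0;
  for (int i = 0; i < height; i += downsampling_factor) {
    memcpy(y_sorted + sort_row_idx * width, y_plane + i * width, width);
    sort_row_idx++;
  }
}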
View File

@ -32,7 +32,7 @@ public:
void Reset();
WebRtc_Word32 ProcessFrame(VideoFrame* frame,
WebRtc_Word32 ProcessFrame(I420VideoFrame* frame,
VideoProcessingModule::FrameStats* stats);
private:
WebRtc_Word32 PreDetection(WebRtc_UWord32 timestamp,

View File

@ -72,7 +72,7 @@ VPMDenoising::Reset()
}
WebRtc_Word32
VPMDenoising::ProcessFrame(VideoFrame* frame)
VPMDenoising::ProcessFrame(I420VideoFrame* frame)
{
assert(frame);
WebRtc_Word32 thevar;
@ -84,21 +84,15 @@ VPMDenoising::ProcessFrame(VideoFrame* frame)
WebRtc_UWord32 tmp;
WebRtc_Word32 numPixelsChanged = 0;
if (frame->Buffer() == NULL)
if (frame->IsZeroSize())
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Null frame pointer");
"zero size frame");
return VPM_GENERAL_ERROR;
}
int width = frame->Width();
int height = frame->Height();
if (width == 0 || height == 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id,
"Invalid frame size");
return VPM_GENERAL_ERROR;
}
int width = frame->width();
int height = frame->height();
/* Size of luminance component */
const WebRtc_UWord32 ysize = height * width;
@ -127,7 +121,7 @@ VPMDenoising::ProcessFrame(VideoFrame* frame)
}
/* Apply de-noising on each pixel, but update variance sub-sampled */
uint8_t* buffer = frame->Buffer();
uint8_t* buffer = frame->buffer(kYPlane);
for (int i = 0; i < height; i++)
{ // Collect over height
k = i * width;

View File

@ -29,7 +29,7 @@ public:
void Reset();
WebRtc_Word32 ProcessFrame(VideoFrame* frame);
WebRtc_Word32 ProcessFrame(I420VideoFrame* frame);
private:
WebRtc_Word32 _id;

View File

@ -32,7 +32,6 @@ VPMFramePreprocessor::~VPMFramePreprocessor()
delete _spatialResampler;
delete _ca;
delete _vd;
_resampledFrame.Free(); // is this needed?
}
WebRtc_Word32
@ -136,10 +135,10 @@ VPMFramePreprocessor::DecimatedHeight() const
WebRtc_Word32
VPMFramePreprocessor::PreprocessFrame(const VideoFrame& frame,
VideoFrame** processedFrame)
VPMFramePreprocessor::PreprocessFrame(const I420VideoFrame& frame,
I420VideoFrame** processedFrame)
{
if (frame.Buffer() == NULL || frame.Height() == 0 || frame.Width() == 0)
if (frame.IsZeroSize())
{
return VPM_PARAMETER_ERROR;
}
@ -157,9 +156,9 @@ VPMFramePreprocessor::PreprocessFrame(const VideoFrame& frame,
// Note that we must make a copy of it.
// We are not allowed to resample the input frame.
*processedFrame = NULL;
if (_spatialResampler->ApplyResample(frame.Width(), frame.Height())) {
if (_spatialResampler->ApplyResample(frame.width(), frame.height())) {
WebRtc_Word32 ret = _spatialResampler->ResampleFrame(frame,
_resampledFrame);
&_resampledFrame);
if (ret != VPM_OK)
return ret;
*processedFrame = &_resampledFrame;

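Worth spelling out the contract here: PreprocessFrame first sets *processedFrame to NULL and only points it at the internal _resampledFrame when resampling actually happens, so the caller must keep using its input otherwise. A caller-side sketch (hypothetical names):

webrtc::I420VideoFrame* processed = NULL;
if (preprocessor.PreprocessFrame(input_frame, &processed) != VPM_OK)
  return;  // Frame dropped by decimation, or a parameter error.
// NULL means the input already matched the target resolution. When
// non-NULL, |processed| aliases the preprocessor's internal
// _resampledFrame and is only valid until the next PreprocessFrame().
const webrtc::I420VideoFrame& to_encode =
    (processed != NULL) ? *processed : input_frame;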
View File

@ -62,8 +62,8 @@ public:
WebRtc_UWord32 DecimatedHeight() const;
//Preprocess output:
WebRtc_Word32 PreprocessFrame(const VideoFrame& frame,
VideoFrame** processedFrame);
WebRtc_Word32 PreprocessFrame(const I420VideoFrame& frame,
I420VideoFrame** processedFrame);
VideoContentMetrics* ContentMetrics() const;
private:
@ -74,7 +74,7 @@ private:
WebRtc_Word32 _id;
VideoContentMetrics* _contentMetrics;
WebRtc_UWord32 _maxFrameRate;
VideoFrame _resampledFrame;
I420VideoFrame _resampledFrame;
VPMSpatialResampler* _spatialResampler;
VPMContentAnalysis* _ca;
VPMVideoDecimator* _vd;

View File

@ -62,32 +62,32 @@ VPMSimpleSpatialResampler::Reset()
}
WebRtc_Word32
VPMSimpleSpatialResampler::ResampleFrame(const VideoFrame& inFrame,
VideoFrame& outFrame)
VPMSimpleSpatialResampler::ResampleFrame(const I420VideoFrame& inFrame,
I420VideoFrame* outFrame)
{
if (_resamplingMode == kNoRescaling)
return outFrame.CopyFrame(inFrame);
return outFrame->CopyFrame(inFrame);
// Check if re-sampling is needed
if ((inFrame.Width() == (WebRtc_UWord32)_targetWidth) &&
(inFrame.Height() == (WebRtc_UWord32)_targetHeight)) {
return outFrame.CopyFrame(inFrame);
if ((inFrame.width() == _targetWidth) &&
(inFrame.height() == _targetHeight)) {
return outFrame->CopyFrame(inFrame);
}
// Setting scaler
// TODO(mikhal/marpan): Should we allow for setting the filter mode in
// _scale.Set() with |_resamplingMode|?
int retVal = 0;
retVal = _scaler.Set(inFrame.Width(), inFrame.Height(),
retVal = _scaler.Set(inFrame.width(), inFrame.height(),
_targetWidth, _targetHeight, kI420, kI420, kScaleBox);
if (retVal < 0)
return retVal;
// Setting time parameters to the output frame - all the rest will be
// set by the scaler.
outFrame.SetTimeStamp(inFrame.TimeStamp());
outFrame.SetRenderTime(inFrame.RenderTimeMs());
outFrame->set_timestamp(inFrame.timestamp());
outFrame->set_render_time_ms(inFrame.render_time_ms());
retVal = _scaler.Scale(inFrame, &outFrame);
retVal = _scaler.Scale(inFrame, outFrame);
if (retVal == 0)
return VPM_OK;
else

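The out-frame is now passed by pointer, and only the time parameters are copied by hand since the scaler fills in the geometry. A sketch of that pairing, assuming _scaler is the libyuv-backed webrtc::Scaler suggested by the Set()/Scale() calls above (the class name is an assumption, not confirmed by this diff):

int ResampleSketch(const webrtc::I420VideoFrame& in, int target_width,
                   int target_height, webrtc::Scaler* scaler,
                   webrtc::I420VideoFrame* out) {
  if (scaler->Set(in.width(), in.height(), target_width, target_height,
                  webrtc::kI420, webrtc::kI420, webrtc::kScaleBox) < 0)
    return VPM_GENERAL_ERROR;
  // Only timestamps are propagated by hand; Scale() sets width/height.
  out->set_timestamp(in.timestamp());
  out->set_render_time_ms(in.render_time_ms());
  return (scaler->Scale(in, out) == 0) ? VPM_OK : VPM_GENERAL_ERROR;
}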
View File

@ -34,8 +34,8 @@ public:
virtual void SetInputFrameResampleMode(VideoFrameResampling
resamplingMode) = 0;
virtual void Reset() = 0;
virtual WebRtc_Word32 ResampleFrame(const VideoFrame& inFrame,
VideoFrame& outFrame) = 0;
virtual WebRtc_Word32 ResampleFrame(const I420VideoFrame& inFrame,
I420VideoFrame* outFrame) = 0;
virtual WebRtc_Word32 TargetWidth() = 0;
virtual WebRtc_Word32 TargetHeight() = 0;
virtual bool ApplyResample(WebRtc_Word32 width, WebRtc_Word32 height) = 0;
@ -50,8 +50,8 @@ public:
WebRtc_Word32 height);
virtual void SetInputFrameResampleMode(VideoFrameResampling resamplingMode);
virtual void Reset();
virtual WebRtc_Word32 ResampleFrame(const VideoFrame& inFrame,
VideoFrame& outFrame);
virtual WebRtc_Word32 ResampleFrame(const I420VideoFrame& inFrame,
I420VideoFrame* outFrame);
virtual WebRtc_Word32 TargetWidth();
virtual WebRtc_Word32 TargetHeight();
virtual bool ApplyResample(WebRtc_Word32 width, WebRtc_Word32 height);

View File

@ -8,6 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "video_processing_impl.h"
#include "critical_section_wrapper.h"
#include "trace.h"
@ -115,29 +116,22 @@ VideoProcessingModuleImpl::Reset()
WebRtc_Word32
VideoProcessingModule::GetFrameStats(FrameStats* stats,
const VideoFrame& frame)
const I420VideoFrame& frame)
{
if (frame.Buffer() == NULL)
if (frame.IsZeroSize())
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
"Null frame pointer");
"zero size frame");
return VPM_PARAMETER_ERROR;
}
int width = frame.Width();
int height = frame.Height();
if (width == 0 || height == 0)
{
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
"Invalid frame size");
return VPM_PARAMETER_ERROR;
}
int width = frame.width();
int height = frame.height();
ClearFrameStats(stats); // The histogram needs to be zeroed out.
SetSubSampling(stats, width, height);
uint8_t* buffer = frame.Buffer();
const uint8_t* buffer = frame.buffer(kYPlane);
// Compute histogram and sum of frame
for (int i = 0; i < height; i += (1 << stats->subSamplHeight))
{
@ -182,33 +176,34 @@ VideoProcessingModule::ClearFrameStats(FrameStats* stats)
}
WebRtc_Word32
VideoProcessingModule::ColorEnhancement(VideoFrame* frame)
VideoProcessingModule::ColorEnhancement(I420VideoFrame* frame)
{
return VideoProcessing::ColorEnhancement(frame);
}
WebRtc_Word32
VideoProcessingModule::Brighten(VideoFrame* frame, int delta)
VideoProcessingModule::Brighten(I420VideoFrame* frame, int delta)
{
return VideoProcessing::Brighten(frame, delta);
}
WebRtc_Word32
VideoProcessingModuleImpl::Deflickering(VideoFrame* frame, FrameStats* stats)
VideoProcessingModuleImpl::Deflickering(I420VideoFrame* frame,
FrameStats* stats)
{
CriticalSectionScoped mutex(&_mutex);
return _deflickering.ProcessFrame(frame, stats);
}
WebRtc_Word32
VideoProcessingModuleImpl::Denoising(VideoFrame* frame)
VideoProcessingModuleImpl::Denoising(I420VideoFrame* frame)
{
CriticalSectionScoped mutex(&_mutex);
return _denoising.ProcessFrame(frame);
}
WebRtc_Word32
VideoProcessingModuleImpl::BrightnessDetection(const VideoFrame& frame,
VideoProcessingModuleImpl::BrightnessDetection(const I420VideoFrame& frame,
const FrameStats& stats)
{
CriticalSectionScoped mutex(&_mutex);
@ -273,8 +268,8 @@ VideoProcessingModuleImpl::DecimatedHeight() const
}
WebRtc_Word32
VideoProcessingModuleImpl::PreprocessFrame(const VideoFrame& frame,
VideoFrame **processedFrame)
VideoProcessingModuleImpl::PreprocessFrame(const I420VideoFrame& frame,
I420VideoFrame **processedFrame)
{
CriticalSectionScoped mutex(&_mutex);
return _framePreProcessor.PreprocessFrame(frame, processedFrame);

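GetFrameStats now reads the const Y-plane pointer with power-of-two subsampling strides. A condensed sketch of the accumulation (subSamplWidth/subSamplHeight appear in the hunks above; hist, numPixels, and mean are assumed FrameStats fields, per the "histogram needs to be zeroed out" comment):

const uint8_t* buffer = frame.buffer(webrtc::kYPlane);
uint32_t sum = 0;
uint32_t num_pixels = 0;
for (int i = 0; i < height; i += (1 << stats->subSamplHeight)) {
  int row = i * width;
  for (int j = 0; j < width; j += (1 << stats->subSamplWidth)) {
    stats->hist[buffer[row + j]]++;  // 256-bin luma histogram.
    sum += buffer[row + j];
    num_pixels++;
  }
}
stats->numPixels = num_pixels;
if (num_pixels > 0)
  stats->mean = sum / num_pixels;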
View File

@ -36,12 +36,12 @@ public:
virtual void Reset();
virtual WebRtc_Word32 Deflickering(VideoFrame* frame,
virtual WebRtc_Word32 Deflickering(I420VideoFrame* frame,
FrameStats* stats);
virtual WebRtc_Word32 Denoising(VideoFrame* frame);
virtual WebRtc_Word32 Denoising(I420VideoFrame* frame);
virtual WebRtc_Word32 BrightnessDetection(const VideoFrame& frame,
virtual WebRtc_Word32 BrightnessDetection(const I420VideoFrame& frame,
const FrameStats& stats);
//Frame pre-processor functions
@ -72,8 +72,8 @@ public:
// Pre-process incoming frame: Sample when needed and compute content
// metrics when enabled.
// If no resampling takes place - processedFrame is set to NULL.
virtual WebRtc_Word32 PreprocessFrame(const VideoFrame& frame,
VideoFrame** processedFrame);
virtual WebRtc_Word32 PreprocessFrame(const I420VideoFrame& frame,
I420VideoFrame** processedFrame);
virtual VideoContentMetrics* ContentMetrics() const;
private:

View File

@ -18,9 +18,16 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
WebRtc_UWord32 frameNum = 0;
WebRtc_Word32 brightnessWarning = 0;
WebRtc_UWord32 warningCount = 0;
while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) ==
_frameLength)
scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
_frame_length)
{
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
frameNum++;
VideoProcessingModule::FrameStats stats;
ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
@ -42,15 +49,21 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
rewind(_sourceFile);
frameNum = 0;
warningCount = 0;
while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) ==
_frameLength &&
while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
_frame_length &&
frameNum < 300)
{
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
frameNum++;
WebRtc_UWord8* frame = _videoFrame.Buffer();
WebRtc_UWord8* frame = _videoFrame.buffer(kYPlane);
WebRtc_UWord32 yTmp = 0;
for (WebRtc_UWord32 yIdx = 0; yIdx < _width * _height; yIdx++)
for (int yIdx = 0; yIdx < _width * _height; yIdx++)
{
yTmp = frame[yIdx] << 1;
if (yTmp > 255)
@ -80,17 +93,23 @@ TEST_F(VideoProcessingModuleTest, BrightnessDetection)
rewind(_sourceFile);
frameNum = 0;
warningCount = 0;
while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength &&
frameNum < 300)
while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
_frame_length && frameNum < 300)
{
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
frameNum++;
WebRtc_UWord8* frame = _videoFrame.Buffer();
WebRtc_UWord8* y_plane = _videoFrame.buffer(kYPlane);
WebRtc_Word32 yTmp = 0;
for (WebRtc_UWord32 yIdx = 0; yIdx < _width * _height; yIdx++)
for (int yIdx = 0; yIdx < _width * _height; yIdx++)
{
yTmp = frame[yIdx] >> 1;
frame[yIdx] = static_cast<WebRtc_UWord8>(yTmp);
yTmp = y_plane[yIdx] >> 1;
y_plane[yIdx] = static_cast<WebRtc_UWord8>(yTmp);
}
VideoProcessingModule::FrameStats stats;

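The tests repeat one idiom throughout: fread an entire I420 frame into a flat buffer, then wrap it with CreateFrame using the Y/U/V offsets and strides. Factored into a hypothetical helper (not part of this commit) it reads:

#include <stdint.h>
#include "common_video/interface/i420_video_frame.h"

// Wrap a contiguous I420 buffer laid out as Y, then U, then V.
// Assumes stride_y == width and chroma strides of (width + 1) / 2.
int WrapI420Buffer(const uint8_t* data, int width, int height,
                   webrtc::I420VideoFrame* frame) {
  const int half_width = (width + 1) / 2;
  const int size_y = width * height;
  const int size_uv = half_width * ((height + 1) / 2);
  return frame->CreateFrame(size_y, data,
                            size_uv, data + size_y,
                            size_uv, data + size_y + size_uv,
                            width, height,
                            width, half_width, half_width);
}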
View File

@ -39,15 +39,22 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
ASSERT_TRUE(modFile != NULL) << "Could not open output file.\n";
WebRtc_UWord32 frameNum = 0;
while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
_frame_length)
{
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
frameNum++;
t0 = TickTime::Now();
ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&_videoFrame));
t1 = TickTime::Now();
accTicks += t1 - t0;
if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
modFile) != _frameLength) {
if (PrintI420VideoFrame(_videoFrame, modFile) < 0) {
return;
}
}
@ -76,44 +83,70 @@ TEST_F(VideoProcessingModuleTest, ColorEnhancement)
rewind(modFile);
ASSERT_EQ(refLen, testLen) << "File lengths differ.";
VideoFrame refVideoFrame;
refVideoFrame.VerifyAndAllocate(_frameLength);
refVideoFrame.SetWidth(_width);
refVideoFrame.SetHeight(_height);
I420VideoFrame refVideoFrame;
// Compare frame-by-frame.
while (fread(_videoFrame.Buffer(), 1, _frameLength, modFile) == _frameLength)
scoped_array<uint8_t> ref_buffer(new uint8_t[_frame_length]);
while (fread(video_buffer.get(), 1, _frame_length, modFile) ==
_frame_length)
{
ASSERT_EQ(_frameLength, fread(refVideoFrame.Buffer(), 1, _frameLength, refFile));
EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), refVideoFrame.Buffer(), _frameLength));
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
ASSERT_EQ(_frame_length, fread(ref_buffer.get(), 1, _frame_length,
refFile));
refVideoFrame.CreateFrame(_size_y, ref_buffer.get(),
_size_uv, ref_buffer.get() + _size_y,
_size_uv, ref_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
EXPECT_EQ(0, memcmp(_videoFrame.buffer(kYPlane),
refVideoFrame.buffer(kYPlane),
_size_y));
EXPECT_EQ(0, memcmp(_videoFrame.buffer(kUPlane),
refVideoFrame.buffer(kUPlane),
_size_uv));
EXPECT_EQ(0, memcmp(_videoFrame.buffer(kVPlane),
refVideoFrame.buffer(kVPlane),
_size_uv));
}
ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
// Verify that all color pixels are enhanced, and no luminance values are
// altered.
WebRtc_UWord8 *testFrame = new WebRtc_UWord8[_frameLength];
scoped_array<uint8_t> testFrame(new WebRtc_UWord8[_frame_length]);
// Use value 128 as probe value, since we know that this will be changed
// in the enhancement.
memset(testFrame, 128, _frameLength);
memset(testFrame.get(), 128, _frame_length);
I420VideoFrame testVideoFrame;
testVideoFrame.CreateFrame(_size_y, testFrame.get(),
_size_uv, testFrame.get() + _size_y,
_size_uv, testFrame.get() + _size_y + _size_uv,
_width, _height,
_width, _half_width, _half_width);
VideoFrame testVideoFrame;
testVideoFrame.CopyFrame(_frameLength, testFrame);
testVideoFrame.SetWidth(_width);
testVideoFrame.SetHeight(_height);
ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&testVideoFrame));
EXPECT_EQ(0, memcmp(testVideoFrame.Buffer(), testFrame, _width * _height))
EXPECT_EQ(0, memcmp(testVideoFrame.buffer(kYPlane), testFrame.get(),
_size_y))
<< "Function is modifying the luminance.";
EXPECT_NE(0, memcmp(testVideoFrame.Buffer() + _width * _height,
&testFrame[_width * _height], _width * _height / 2)) <<
"Function is not modifying all chrominance pixels";
EXPECT_NE(0, memcmp(testVideoFrame.buffer(kUPlane),
testFrame.get() + _size_y, _size_uv)) <<
"Function is not modifying all chrominance pixels";
EXPECT_NE(0, memcmp(testVideoFrame.buffer(kVPlane),
testFrame.get() + _size_y + _size_uv, _size_uv)) <<
"Function is not modifying all chrominance pixels";
ASSERT_EQ(0, fclose(refFile));
ASSERT_EQ(0, fclose(modFile));
delete [] testFrame;
}
} // namespace webrtc

View File

@ -23,9 +23,16 @@ TEST_F(VideoProcessingModuleTest, ContentAnalysis)
_ca_c.Initialize(_width,_height);
_ca_sse.Initialize(_width,_height);
while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile)
== _frameLength)
scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
while (fread(video_buffer.get(), 1, _frame_length, _sourceFile)
== _frame_length)
{
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
_cM_c = _ca_c.ComputeContentMetrics(_videoFrame);
_cM_SSE = _ca_sse.ComputeContentMetrics(_videoFrame);

View File

@ -11,6 +11,7 @@
#include <cstdio>
#include <cstdlib>
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_processing/main/interface/video_processing.h"
#include "modules/video_processing/main/test/unit_test/unit_test.h"
#include "system_wrappers/interface/tick_util.h"
@ -42,6 +43,7 @@ TEST_F(VideoProcessingModuleTest, Deflickering)
"Could not open output file: " << output_file << "\n";
printf("\nRun time [us / frame]:\n");
scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
for (WebRtc_UWord32 runIdx = 0; runIdx < NumRuns; runIdx++)
{
TickTime t0;
@ -50,10 +52,17 @@ TEST_F(VideoProcessingModuleTest, Deflickering)
WebRtc_UWord32 timeStamp = 1;
frameNum = 0;
while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
_frame_length)
{
frameNum++;
_videoFrame.SetTimeStamp(timeStamp);
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y +
_size_uv,
_width, _height,
_width, _half_width, _half_width);
_videoFrame.set_timestamp(timeStamp);
t0 = TickTime::Now();
VideoProcessingModule::FrameStats stats;
@ -64,8 +73,7 @@ TEST_F(VideoProcessingModuleTest, Deflickering)
if (runIdx == 0)
{
if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
deflickerFile) != _frameLength) {
if (PrintI420VideoFrame(_videoFrame, deflickerFile) < 0) {
return;
}
}

View File

@ -11,6 +11,7 @@
#include <cstdio>
#include <cstdlib>
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_processing/main/interface/video_processing.h"
#include "modules/video_processing/main/test/unit_test/unit_test.h"
#include "system_wrappers/interface/tick_util.h"
@ -47,21 +48,27 @@ TEST_F(VideoProcessingModuleTest, Denoising)
WebRtc_Word32 modifiedPixels = 0;
frameNum = 0;
while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
while (fread(video_buffer.get(), 1, _frame_length, _sourceFile) ==
_frame_length)
{
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv,
video_buffer.get() + _size_y + _size_uv,
_width, _height,
_width, _half_width, _half_width);
frameNum++;
WebRtc_UWord8* sourceBuffer = _videoFrame.Buffer();
WebRtc_UWord8* sourceBuffer = _videoFrame.buffer(kYPlane);
// Add noise to a part in video stream
// Random noise
// TODO: investigate the effectiveness of this test.
//for(WebRtc_UWord32 ir = 0; ir < _frameLength; ir++)
// sourceBuffer[ir] = 128
for (WebRtc_UWord32 ir = 0; ir < _height; ir++)
for (int ir = 0; ir < _height; ir++)
{
WebRtc_UWord32 ik = ir * _width;
for (WebRtc_UWord32 ic = 0; ic < _width; ic++)
for (int ic = 0; ic < _width; ic++)
{
WebRtc_UWord8 r = rand() % 16;
r -= 8;
@ -92,8 +99,7 @@ TEST_F(VideoProcessingModuleTest, Denoising)
if (runIdx == 0)
{
if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
noiseFile) != _frameLength) {
if (PrintI420VideoFrame(_videoFrame, noiseFile) < 0) {
return;
}
}
@ -105,8 +111,7 @@ TEST_F(VideoProcessingModuleTest, Denoising)
if (runIdx == 0)
{
if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
denoiseFile) != _frameLength) {
if (PrintI420VideoFrame(_videoFrame, denoiseFile) < 0) {
return;
}
}

View File

@ -23,16 +23,21 @@ namespace webrtc {
// quality when the resampled frame is scaled back up/down to the
// original/source size. |expected_psnr| is set to be ~0.1/0.05dB lower than
// actual PSNR verified under the same conditions.
void TestSize(const VideoFrame& sourceFrame, int target_width,
void TestSize(const I420VideoFrame& sourceFrame, int target_width,
int target_height, int mode, double expected_psnr,
VideoProcessingModule* vpm);
bool CompareFrames(const webrtc::I420VideoFrame& frame1,
const webrtc::I420VideoFrame& frame2);
VideoProcessingModuleTest::VideoProcessingModuleTest() :
_vpm(NULL),
_sourceFile(NULL),
_width(352),
_half_width(_width / 2),
_height(288),
_frameLength(CalcBufferSize(kI420, 352, 288))
_size_y(_width * _height),
_size_uv(_half_width * _height /2),
_frame_length(CalcBufferSize(kI420, _width, _height))
{
}
@ -41,9 +46,8 @@ void VideoProcessingModuleTest::SetUp()
_vpm = VideoProcessingModule::Create(0);
ASSERT_TRUE(_vpm != NULL);
ASSERT_EQ(0, _videoFrame.VerifyAndAllocate(_frameLength));
_videoFrame.SetWidth(_width);
_videoFrame.SetHeight(_height);
ASSERT_EQ(0, _videoFrame.CreateEmptyFrame(_width, _height, _width,
_half_width, _half_width));
const std::string video_file =
webrtc::test::ResourcePath("foreman_cif", "yuv");
@ -70,9 +74,9 @@ TEST_F(VideoProcessingModuleTest, HandleNullBuffer)
VideoProcessingModule::FrameStats stats;
ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
// Video frame with unallocated buffer.
VideoFrame videoFrame;
videoFrame.SetWidth(_width);
videoFrame.SetHeight(_height);
I420VideoFrame videoFrame;
videoFrame.set_width(_width);
videoFrame.set_height(_height);
EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, videoFrame));
@ -88,12 +92,15 @@ TEST_F(VideoProcessingModuleTest, HandleNullBuffer)
TEST_F(VideoProcessingModuleTest, HandleBadStats)
{
VideoProcessingModule::FrameStats stats;
scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length,
_sourceFile));
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y + _size_uv,
_width, _height,
_width, _half_width, _half_width);
ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
_sourceFile));
_videoFrame.SetWidth(_width);
_videoFrame.SetHeight(_height);
EXPECT_EQ(-1, _vpm->Deflickering(&_videoFrame, &stats));
EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats));
@ -104,21 +111,9 @@ TEST_F(VideoProcessingModuleTest, HandleBadSize)
VideoProcessingModule::FrameStats stats;
ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
// Bad width
_videoFrame.SetWidth(0);
EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, _videoFrame));
EXPECT_EQ(-1, _vpm->ColorEnhancement(&_videoFrame));
EXPECT_EQ(-1, _vpm->Deflickering(&_videoFrame, &stats));
EXPECT_EQ(-1, _vpm->Denoising(&_videoFrame));
EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats));
// Bad height
_videoFrame.SetWidth(_width);
_videoFrame.SetHeight(0);
_videoFrame.ResetSize();
_videoFrame.set_width(_width);
_videoFrame.set_height(0);
EXPECT_EQ(-3, _vpm->GetFrameStats(&stats, _videoFrame));
EXPECT_EQ(-1, _vpm->ColorEnhancement(&_videoFrame));
@ -132,58 +127,73 @@ TEST_F(VideoProcessingModuleTest, HandleBadSize)
EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->SetTargetResolution(0,0,0));
EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->SetMaxFrameRate(0));
VideoFrame *outFrame = NULL;
I420VideoFrame *outFrame = NULL;
EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->PreprocessFrame(_videoFrame,
&outFrame));
}
TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset)
{
VideoFrame videoFrame2;
I420VideoFrame videoFrame2;
VideoProcessingModule::FrameStats stats;
ASSERT_EQ(0, videoFrame2.VerifyAndAllocate(_frameLength));
videoFrame2.SetWidth(_width);
videoFrame2.SetHeight(_height);
// Only testing non-static functions here.
ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length,
_sourceFile));
ASSERT_EQ(0, _videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y + _size_uv,
_width, _height,
_width, _half_width, _half_width));
ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
ASSERT_EQ(0, videoFrame2.CopyFrame(_videoFrame));
ASSERT_EQ(0, _vpm->Deflickering(&_videoFrame, &stats));
_vpm->Reset();
// Retrieve frame stats again in case Deflickering() has zeroed them.
ASSERT_EQ(0, _vpm->GetFrameStats(&stats, videoFrame2));
ASSERT_EQ(0, _vpm->Deflickering(&videoFrame2, &stats));
EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(),
_frameLength));
EXPECT_TRUE(CompareFrames(_videoFrame, videoFrame2));
ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
_sourceFile));
memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length,
_sourceFile));
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y + _size_uv,
_width, _height,
_width, _half_width, _half_width);
videoFrame2.CopyFrame(_videoFrame);
EXPECT_TRUE(CompareFrames(_videoFrame, videoFrame2));
ASSERT_GE(_vpm->Denoising(&_videoFrame), 0);
_vpm->Reset();
ASSERT_GE(_vpm->Denoising(&videoFrame2), 0);
EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(),
_frameLength));
EXPECT_TRUE(CompareFrames(_videoFrame, videoFrame2));
ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
_sourceFile));
ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length,
_sourceFile));
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y + _size_uv,
_width, _height,
_width, _half_width, _half_width);
ASSERT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
videoFrame2.CopyFrame(_videoFrame);
ASSERT_EQ(0, _vpm->BrightnessDetection(_videoFrame, stats));
_vpm->Reset();
ASSERT_EQ(0, _vpm->BrightnessDetection(videoFrame2, stats));
EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(),
_frameLength));
EXPECT_TRUE(CompareFrames(_videoFrame, videoFrame2));
}
TEST_F(VideoProcessingModuleTest, FrameStats)
{
VideoProcessingModule::FrameStats stats;
ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
_sourceFile));
scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length,
_sourceFile));
_videoFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y + _size_uv,
_width, _height,
_width, _half_width, _half_width);
EXPECT_FALSE(_vpm->ValidFrameStats(stats));
EXPECT_EQ(0, _vpm->GetFrameStats(&stats, _videoFrame));
@ -214,7 +224,7 @@ TEST_F(VideoProcessingModuleTest, PreprocessorLogic)
// Disable spatial sampling
_vpm->SetInputFrameResampleMode(kNoRescaling);
ASSERT_EQ(VPM_OK, _vpm->SetTargetResolution(100, 100, 30));
VideoFrame *outFrame = NULL;
I420VideoFrame *outFrame = NULL;
ASSERT_EQ(VPM_OK, _vpm->PreprocessFrame(_videoFrame, &outFrame));
// No rescaling=> output frame = NULL
ASSERT_TRUE(outFrame == NULL);
@ -230,9 +240,6 @@ TEST_F(VideoProcessingModuleTest, Resampler)
TickTime t0;
TickTime t1;
TickInterval accTicks;
WebRtc_Word32 height = 288;
WebRtc_Word32 width = 352;
WebRtc_Word32 lengthSourceFrame = width*height*3/2;
rewind(_sourceFile);
ASSERT_TRUE(_sourceFile != NULL) <<
@ -244,12 +251,15 @@ TEST_F(VideoProcessingModuleTest, Resampler)
_vpm->EnableTemporalDecimation(false);
// Reading test frame
VideoFrame sourceFrame;
ASSERT_EQ(0, sourceFrame.VerifyAndAllocate(lengthSourceFrame));
EXPECT_GT(fread(sourceFrame.Buffer(), 1, lengthSourceFrame, _sourceFile), 0u);
ASSERT_EQ(0, sourceFrame.SetLength(lengthSourceFrame));
sourceFrame.SetHeight(height);
sourceFrame.SetWidth(width);
I420VideoFrame sourceFrame;
scoped_array<uint8_t> video_buffer(new uint8_t[_frame_length]);
ASSERT_EQ(_frame_length, fread(video_buffer.get(), 1, _frame_length,
_sourceFile));
sourceFrame.CreateFrame(_size_y, video_buffer.get(),
_size_uv, video_buffer.get() + _size_y,
_size_uv, video_buffer.get() + _size_y + _size_uv,
_width, _height,
_width, _half_width, _half_width);
for (WebRtc_UWord32 runIdx = 0; runIdx < NumRuns; runIdx++)
{
@ -282,8 +292,6 @@ TEST_F(VideoProcessingModuleTest, Resampler)
avgRuntime += accTicks.Microseconds();
}
sourceFrame.Free();
printf("\nAverage run time = %d us / frame\n",
//static_cast<int>(avgRuntime / frameNum / NumRuns));
static_cast<int>(avgRuntime));
@ -292,12 +300,12 @@ TEST_F(VideoProcessingModuleTest, Resampler)
static_cast<int>(minRuntime));
}
void TestSize(const VideoFrame& source_frame, int target_width,
void TestSize(const I420VideoFrame& source_frame, int target_width,
int target_height, int mode, double expected_psnr,
VideoProcessingModule* vpm) {
int source_width = source_frame.Width();
int source_height = source_frame.Height();
VideoFrame* out_frame = NULL;
int source_width = source_frame.width();
int source_height = source_frame.height();
I420VideoFrame* out_frame = NULL;
ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(target_width, target_height, 30));
ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(source_frame, &out_frame));
@ -309,12 +317,6 @@ void TestSize(const VideoFrame& source_frame, int target_width,
// (3) write out the processed frame for viewing.
if (target_width != static_cast<int>(source_width) ||
target_height != static_cast<int>(source_height)) {
int target_half_width = (target_width + 1) >> 1;
int target_half_height = (target_height + 1) >> 1;
int required_size_resampled = target_width * target_height +
2 * (target_half_width * target_half_height);
ASSERT_EQ(required_size_resampled, static_cast<int>(out_frame->Length()));
// Write the processed frame to file for visual inspection.
std::ostringstream filename;
filename << webrtc::test::OutputPath() << "Resampler_"<< mode << "_" <<
@ -323,15 +325,14 @@ void TestSize(const VideoFrame& source_frame, int target_width,
std::cout << "Watch " << filename.str() << " and verify that it is okay."
<< std::endl;
FILE* stand_alone_file = fopen(filename.str().c_str(), "wb");
if (fwrite(out_frame->Buffer(), 1,
out_frame->Length(), stand_alone_file) != out_frame->Length()) {
if (PrintI420VideoFrame(*out_frame, stand_alone_file) < 0) {
fprintf(stderr, "Failed to write frame for scaling to width/height: "
" %d %d \n", target_width, target_height);
return;
}
fclose(stand_alone_file);
VideoFrame resampled_source_frame;
I420VideoFrame resampled_source_frame;
resampled_source_frame.CopyFrame(*out_frame);
// Scale |resampled_source_frame| back to original/source size.
@ -349,24 +350,36 @@ void TestSize(const VideoFrame& source_frame, int target_width,
std::cout << "Watch " << filename2.str() << " and verify that it is okay."
<< std::endl;
stand_alone_file = fopen(filename2.str().c_str(), "wb");
if (fwrite(out_frame->Buffer(), 1,
out_frame->Length(), stand_alone_file) != out_frame->Length()) {
if (PrintI420VideoFrame(*out_frame, stand_alone_file) < 0) {
fprintf(stderr, "Failed to write frame for scaling to width/height "
"%d %d \n", source_width, source_height);
"%d %d \n", source_width, source_height);
return;
}
fclose(stand_alone_file);
// Compute the PSNR and check expectation.
double psnr = I420PSNR(source_frame.Buffer(), out_frame->Buffer(),
source_width, source_height);
double psnr = I420PSNR(&source_frame, out_frame);
EXPECT_GT(psnr, expected_psnr);
printf("PSNR: %f. PSNR is between source of size %d %d, and a modified "
"source which is scaled down/up to: %d %d, and back to source size \n",
psnr, source_width, source_height, target_width, target_height);
resampled_source_frame.Free();
}
}
bool CompareFrames(const webrtc::I420VideoFrame& frame1,
const webrtc::I420VideoFrame& frame2) {
for (int plane = 0; plane < webrtc::kNumOfPlanes; plane ++) {
webrtc::PlaneType plane_type = static_cast<webrtc::PlaneType>(plane);
int allocated_size1 = frame1.allocated_size(plane_type);
int allocated_size2 = frame2.allocated_size(plane_type);
if (allocated_size1 != allocated_size2)
return false;
const uint8_t* plane_buffer1 = frame1.buffer(plane_type);
const uint8_t* plane_buffer2 = frame2.buffer(plane_type);
if (memcmp(plane_buffer1, plane_buffer2, allocated_size1))
return false;
}
return true;
}
} // namespace webrtc

View File

@ -36,10 +36,13 @@ protected:
}
VideoProcessingModule* _vpm;
FILE* _sourceFile;
VideoFrame _videoFrame;
const WebRtc_UWord32 _width;
const WebRtc_UWord32 _height;
const WebRtc_UWord32 _frameLength;
I420VideoFrame _videoFrame;
const int _width;
const int _half_width;
const int _height;
const int _size_y;
const int _size_uv;
const unsigned int _frame_length;
};
} // namespace webrtc
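
For the CIF input these fixture constants work out as follows; the last line matches CalcBufferSize(kI420, 352, 288):

// _width        = 352, _height = 288
// _half_width   = 352 / 2                 = 176
// _size_y       = 352 * 288               = 101376
// _size_uv      = 176 * (288 / 2)         = 25344
// _frame_length = _size_y + 2 * _size_uv  = 152064  (== 352 * 288 * 3 / 2)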