Refactor scaling.

Introduce new methods I420Buffer::CropAndScaleFrom and
I420Buffer::ScaleFrom, plus a convenience overload of CropAndScaleFrom
that center-crops to the destination aspect ratio. Use them for almost
all scaling needs.

Delete the Scaler class and the cricket::VideoFrame::Stretch* methods.
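
As a rough migration sketch (the |source_buffer|, |dst_width| and
|dst_height| names below are placeholders, not part of this change):

    // Replaces Scaler::Set() + Scaler::Scale(): allocate a destination
    // I420Buffer at the target size, then crop/scale into it.
    rtc::scoped_refptr<webrtc::I420Buffer> scaled(
        new rtc::RefCountedObject<webrtc::I420Buffer>(dst_width, dst_height));
    // Center crop to the destination aspect ratio, then scale:
    scaled->CropAndScaleFrom(source_buffer);
    // Or scale the full frame without cropping:
    scaled->ScaleFrom(source_buffer);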

BUG=webrtc:5682
R=pbos@webrtc.org, perkj@webrtc.org, stefan@webrtc.org

Review URL: https://codereview.webrtc.org/2020593002 .

Cr-Commit-Position: refs/heads/master@{#13110}
Author: Niels Möller
Date:   2016-06-13 13:06:01 +02:00
Commit: 718a763d59 (parent: be99ab9356)

33 changed files with 396 additions and 1030 deletions

View File

@@ -670,7 +670,7 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
   VideoFrame input_frame = frame;
   if (scale_) {
     // Check framerate before spatial resolution change.
-    quality_scaler_.OnEncodeFrame(frame);
+    quality_scaler_.OnEncodeFrame(frame.width(), frame.height());
     const webrtc::QualityScaler::Resolution scaled_resolution =
         quality_scaler_.GetScaledResolution();
     if (scaled_resolution.width != frame.width() ||
@@ -684,7 +684,8 @@ int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
             webrtc::kVideoRotation_0));
         input_frame.set_video_frame_buffer(scaled_buffer);
       } else {
-        input_frame = quality_scaler_.GetScaledFrame(frame);
+        input_frame.set_video_frame_buffer(
+            quality_scaler_.GetScaledBuffer(frame.video_frame_buffer()));
       }
     }
   }
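
The QualityScaler calls above reflect its updated interface, which now
takes frame dimensions and operates on buffers instead of whole frames.
A minimal sketch of the new call pattern, using the names from this
hunk:

    // Sketch only; |frame| and |input_frame| are webrtc::VideoFrames,
    // as in the hunk above.
    quality_scaler_.OnEncodeFrame(frame.width(), frame.height());
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled =
        quality_scaler_.GetScaledBuffer(frame.video_frame_buffer());
    input_frame.set_video_frame_buffer(scaled);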

View File

@@ -13,7 +13,6 @@
 #include "webrtc/api/java/jni/native_handle_impl.h"
 #include "webrtc/api/java/jni/surfacetexturehelper_jni.h"
 #include "third_party/libyuv/include/libyuv/convert.h"
-#include "third_party/libyuv/include/libyuv/scale.h"
 #include "webrtc/base/bind.h"
 
 namespace webrtc_jni {
@@ -223,25 +222,11 @@ void AndroidVideoCapturerJni::OnMemoryBufferFrame(void* video_frame,
       crop_width, crop_height, static_cast<libyuv::RotationMode>(
           capturer_->apply_rotation() ? rotation : 0));
-  if (adapted_width != rotated_width || adapted_height != rotated_height) {
-    rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled =
-        post_scale_pool_.CreateBuffer(adapted_width, adapted_height);
-    // TODO(nisse): This should be done by some Scale method in
-    // I420Buffer, but we can't do that right now, since
-    // I420BufferPool uses a wrapper object.
-    if (libyuv::I420Scale(buffer->DataY(), buffer->StrideY(),
-                          buffer->DataU(), buffer->StrideU(),
-                          buffer->DataV(), buffer->StrideV(),
-                          rotated_width, rotated_height,
-                          scaled->MutableDataY(), scaled->StrideY(),
-                          scaled->MutableDataU(), scaled->StrideU(),
-                          scaled->MutableDataV(), scaled->StrideV(),
-                          adapted_width, adapted_height,
-                          libyuv::kFilterBox) < 0) {
-      LOG(LS_WARNING) << "I420Scale failed";
-      return;
-    }
-    buffer = scaled;
+  if (adapted_width != buffer->width() || adapted_height != buffer->height()) {
+    rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
+        post_scale_pool_.CreateBuffer(adapted_width, adapted_height));
+    scaled_buffer->ScaleFrom(buffer);
+    buffer = scaled_buffer;
   }
   // TODO(nisse): Use microsecond time instead.
   capturer_->OnFrame(cricket::WebRtcVideoFrame(

View File

@@ -34,9 +34,7 @@ source_set("common_video") {
     "include/incoming_video_stream.h",
     "include/video_frame_buffer.h",
     "incoming_video_stream.cc",
-    "libyuv/include/scaler.h",
     "libyuv/include/webrtc_libyuv.h",
-    "libyuv/scaler.cc",
     "libyuv/webrtc_libyuv.cc",
     "video_frame.cc",
     "video_frame_buffer.cc",
@@ -94,7 +92,6 @@ if (rtc_include_tests) {
       "i420_buffer_pool_unittest.cc",
       "i420_video_frame_unittest.cc",
       "libyuv/libyuv_unittest.cc",
-      "libyuv/scaler_unittest.cc",
     ]
 
     configs += [ "..:common_config" ]

View File

@@ -69,9 +69,7 @@
         'include/i420_buffer_pool.h',
         'include/incoming_video_stream.h',
         'include/video_frame_buffer.h',
-        'libyuv/include/scaler.h',
         'libyuv/include/webrtc_libyuv.h',
-        'libyuv/scaler.cc',
         'libyuv/webrtc_libyuv.cc',
         'video_frame_buffer.cc',
        'video_render_frames.cc',

View File

@@ -27,7 +27,6 @@
         'i420_buffer_pool_unittest.cc',
         'i420_video_frame_unittest.cc',
         'libyuv/libyuv_unittest.cc',
-        'libyuv/scaler_unittest.cc',
       ],
       # Disable warnings to enable Win64 build, issue 1323.
       'msvs_disabled_warnings': [

View File

@@ -19,7 +19,70 @@
 namespace webrtc {
 
-int ExpectedSize(int plane_stride, int image_height, PlaneType type);
+namespace {
+
+int ExpectedSize(int plane_stride, int image_height, PlaneType type) {
+  if (type == kYPlane)
+    return plane_stride * image_height;
+  return plane_stride * ((image_height + 1) / 2);
+}
+
+rtc::scoped_refptr<I420Buffer> CreateGradient(int width, int height) {
+  rtc::scoped_refptr<I420Buffer> buffer(
+      new rtc::RefCountedObject<I420Buffer>(width, height));
+  // Initialize with gradient, Y = 128(x/w + y/h), U = 256 x/w, V = 256 y/h.
+  for (int x = 0; x < width; x++) {
+    for (int y = 0; y < height; y++) {
+      buffer->MutableDataY()[x + y * width] =
+          128 * (x * height + y * width) / (width * height);
+    }
+  }
+  int chroma_width = (width + 1) / 2;
+  int chroma_height = (height + 1) / 2;
+  for (int x = 0; x < chroma_width; x++) {
+    for (int y = 0; y < chroma_height; y++) {
+      buffer->MutableDataU()[x + y * chroma_width] =
+          255 * x / (chroma_width - 1);
+      buffer->MutableDataV()[x + y * chroma_width] =
+          255 * y / (chroma_height - 1);
+    }
+  }
+  return buffer;
+}
+
+// The offsets and sizes describe the rectangle extracted from the
+// original (gradient) frame, in relative coordinates where the
+// original frame corresponds to the unit square, 0.0 <= x, y < 1.0.
+void CheckCrop(webrtc::VideoFrameBuffer* frame,
+               double offset_x,
+               double offset_y,
+               double rel_width,
+               double rel_height) {
+  int width = frame->width();
+  int height = frame->height();
+  // Check that pixel values in the corners match the gradient used
+  // for initialization.
+  for (int i = 0; i < 2; i++) {
+    for (int j = 0; j < 2; j++) {
+      // Pixel coordinates of the corner.
+      int x = i * (width - 1);
+      int y = j * (height - 1);
+      // Relative coordinates, range 0.0 - 1.0 correspond to the
+      // size of the uncropped input frame.
+      double orig_x = offset_x + i * rel_width;
+      double orig_y = offset_y + j * rel_height;
+
+      EXPECT_NEAR(frame->DataY()[x + y * frame->StrideY()] / 256.0,
+                  (orig_x + orig_y) / 2, 0.02);
+      EXPECT_NEAR(frame->DataU()[x / 2 + (y / 2) * frame->StrideU()] / 256.0,
+                  orig_x, 0.02);
+      EXPECT_NEAR(frame->DataV()[x / 2 + (y / 2) * frame->StrideV()] / 256.0,
+                  orig_y, 0.02);
+    }
+  }
+}
+
+}  // namespace
 
 TEST(TestVideoFrame, InitialValues) {
   VideoFrame frame;
@@ -241,4 +304,70 @@ TEST(TestI420FrameBuffer, Copy) {
   EXPECT_TRUE(test::FrameBufsEqual(buf1, buf2));
 }
 
+TEST(TestI420FrameBuffer, Scale) {
+  rtc::scoped_refptr<I420Buffer> buf = CreateGradient(200, 100);
+
+  // Pure scaling, no cropping.
+  rtc::scoped_refptr<I420Buffer> scaled_buffer(
+      new rtc::RefCountedObject<I420Buffer>(150, 75));
+
+  scaled_buffer->ScaleFrom(buf);
+  CheckCrop(scaled_buffer, 0.0, 0.0, 1.0, 1.0);
+}
+
+TEST(TestI420FrameBuffer, CropXCenter) {
+  rtc::scoped_refptr<I420Buffer> buf = CreateGradient(200, 100);
+
+  // Pure center cropping, no scaling.
+  rtc::scoped_refptr<I420Buffer> scaled_buffer(
+      new rtc::RefCountedObject<I420Buffer>(100, 100));
+
+  scaled_buffer->CropAndScaleFrom(buf, 50, 0, 100, 100);
+  CheckCrop(scaled_buffer, 0.25, 0.0, 0.5, 1.0);
+}
+
+TEST(TestI420FrameBuffer, CropXNotCenter) {
+  rtc::scoped_refptr<I420Buffer> buf = CreateGradient(200, 100);
+
+  // Non-center cropping, no scaling.
+  rtc::scoped_refptr<I420Buffer> scaled_buffer(
+      new rtc::RefCountedObject<I420Buffer>(100, 100));
+
+  scaled_buffer->CropAndScaleFrom(buf, 25, 0, 100, 100);
+  CheckCrop(scaled_buffer, 0.125, 0.0, 0.5, 1.0);
+}
+
+TEST(TestI420FrameBuffer, CropYCenter) {
+  rtc::scoped_refptr<I420Buffer> buf = CreateGradient(100, 200);
+
+  // Pure center cropping, no scaling.
+  rtc::scoped_refptr<I420Buffer> scaled_buffer(
+      new rtc::RefCountedObject<I420Buffer>(100, 100));
+
+  scaled_buffer->CropAndScaleFrom(buf, 0, 50, 100, 100);
+  CheckCrop(scaled_buffer, 0.0, 0.25, 1.0, 0.5);
+}
+
+TEST(TestI420FrameBuffer, CropYNotCenter) {
+  rtc::scoped_refptr<I420Buffer> buf = CreateGradient(100, 200);
+
+  // Non-center cropping, no scaling.
+  rtc::scoped_refptr<I420Buffer> scaled_buffer(
+      new rtc::RefCountedObject<I420Buffer>(100, 100));
+
+  scaled_buffer->CropAndScaleFrom(buf, 0, 25, 100, 100);
+  CheckCrop(scaled_buffer, 0.0, 0.125, 1.0, 0.5);
+}
+
+TEST(TestI420FrameBuffer, CropAndScale16x9) {
+  rtc::scoped_refptr<I420Buffer> buf = CreateGradient(640, 480);
+
+  // Center crop to 640 x 360 (16/9 aspect), then scale down by 2.
+  rtc::scoped_refptr<I420Buffer> scaled_buffer(
+      new rtc::RefCountedObject<I420Buffer>(320, 180));
+
+  scaled_buffer->CropAndScaleFrom(buf);
+  CheckCrop(scaled_buffer, 0.0, 0.125, 1.0, 0.75);
+}
+
 }  // namespace webrtc

View File

@@ -119,6 +119,21 @@ class I420Buffer : public VideoFrameBuffer {
   static rtc::scoped_refptr<I420Buffer> Copy(
       const rtc::scoped_refptr<VideoFrameBuffer>& buffer);
 
+  // Scale the cropped area of |src| to the size of |this| buffer, and
+  // write the result into |this|.
+  void CropAndScaleFrom(const rtc::scoped_refptr<VideoFrameBuffer>& src,
+                        int offset_x,
+                        int offset_y,
+                        int crop_width,
+                        int crop_height);
+
+  // The common case of a center crop, when needed to adjust the
+  // aspect ratio without distorting the image.
+  void CropAndScaleFrom(const rtc::scoped_refptr<VideoFrameBuffer>& src);
+
+  // Scale all of |src| to the size of |this| buffer, with no cropping.
+  void ScaleFrom(const rtc::scoped_refptr<VideoFrameBuffer>& src);
+
  protected:
   ~I420Buffer() override;
@@ -196,13 +211,6 @@ class WrappedI420Buffer : public webrtc::VideoFrameBuffer {
   rtc::Callback0<void> no_longer_used_cb_;
 };
 
-// Helper function to crop |buffer| without making a deep copy. May only be
-// used for non-native frames.
-rtc::scoped_refptr<VideoFrameBuffer> ShallowCenterCrop(
-    const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
-    int cropped_width,
-    int cropped_height);
-
 }  // namespace webrtc
 
 #endif  // WEBRTC_COMMON_VIDEO_INCLUDE_VIDEO_FRAME_BUFFER_H_
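
A usage sketch for the three entry points declared above (the |src|
buffer and the sizes are illustrative only):

    rtc::scoped_refptr<webrtc::I420Buffer> dst(
        new rtc::RefCountedObject<webrtc::I420Buffer>(320, 180));
    dst->ScaleFrom(src);                          // whole frame, no crop
    dst->CropAndScaleFrom(src);                   // center crop, keep aspect
    dst->CropAndScaleFrom(src, 50, 0, 100, 100);  // explicit crop rectangle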

View File

@@ -1,71 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* Interface to the LibYuv scaling functionality
*/
#ifndef WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_SCALER_H_
#define WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_SCALER_H_
#include "webrtc/common_video/include/i420_buffer_pool.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
#include "webrtc/typedefs.h"
#include "webrtc/video_frame.h"
namespace webrtc {
// Supported scaling types
// Note: Must have the same values as libyuv::FilterMode.
enum ScaleMethod {
kScalePoint, // no interpolation
kFilterLinear,
kScaleBilinear,
kScaleBox
};
class Scaler {
public:
Scaler();
~Scaler();
// Set interpolation properties:
//
// Return value: 0 - OK
// -1 - parameter error
int Set(int src_width, int src_height,
int dst_width, int dst_height,
VideoType src_video_type, VideoType dst_video_type,
ScaleMethod method);
// Scale frame
// Memory is allocated by this object and recycled using |buffer_pool_|.
// Return value: 0 - OK,
// -1 - parameter error
// -2 - scaler not set
int Scale(const VideoFrame& src_frame, VideoFrame* dst_frame);
private:
// Determine if the VideoTypes are currently supported.
bool SupportedVideoType(VideoType src_video_type,
VideoType dst_video_type);
ScaleMethod method_;
int src_width_;
int src_height_;
int dst_width_;
int dst_height_;
bool set_;
I420BufferPool buffer_pool_;
};
} // namespace webrtc
#endif // WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_SCALER_H_

View File

@@ -69,13 +69,17 @@ size_t CalcBufferSize(VideoType type, int width, int height);
 // Return value: 0 if OK, < 0 otherwise.
 int PrintVideoFrame(const VideoFrame& frame, FILE* file);
 
-// Extract buffer from VideoFrame (consecutive planes, no stride)
+// Extract buffer from VideoFrame or VideoFrameBuffer (consecutive
+// planes, no stride)
 // Input:
 //   - frame       : Reference to video frame.
 //   - size        : pointer to the size of the allocated buffer. If size is
 //                   insufficient, an error will be returned.
 //   - buffer      : Pointer to buffer
 // Return value: length of buffer if OK, < 0 otherwise.
+int ExtractBuffer(const rtc::scoped_refptr<VideoFrameBuffer>& input_frame,
+                  size_t size,
+                  uint8_t* buffer);
 int ExtractBuffer(const VideoFrame& input_frame, size_t size, uint8_t* buffer);
 
 // Convert To I420
 // Input:

View File

@@ -1,116 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "webrtc/common_video/libyuv/include/scaler.h"
#include <algorithm>
// NOTE(ajm): Path provided by gyp.
#include "libyuv.h" // NOLINT
namespace webrtc {
Scaler::Scaler()
: method_(kScaleBox),
src_width_(0),
src_height_(0),
dst_width_(0),
dst_height_(0),
set_(false) {}
Scaler::~Scaler() {}
int Scaler::Set(int src_width, int src_height,
int dst_width, int dst_height,
VideoType src_video_type, VideoType dst_video_type,
ScaleMethod method) {
set_ = false;
if (src_width < 1 || src_height < 1 || dst_width < 1 || dst_height < 1)
return -1;
if (!SupportedVideoType(src_video_type, dst_video_type))
return -1;
src_width_ = src_width;
src_height_ = src_height;
dst_width_ = dst_width;
dst_height_ = dst_height;
method_ = method;
set_ = true;
return 0;
}
// TODO(nisse): Should work with VideoFrameBuffer instead.
int Scaler::Scale(const VideoFrame& src_frame, VideoFrame* dst_frame) {
assert(dst_frame);
if (src_frame.IsZeroSize())
return -1;
if (!set_)
return -2;
// Making sure that destination frame is of sufficient size.
dst_frame->set_video_frame_buffer(
buffer_pool_.CreateBuffer(dst_width_, dst_height_));
// We want to preserve aspect ratio instead of stretching the frame.
// Therefore, we need to crop the source frame. Calculate the largest center
// aligned region of the source frame that can be used.
const int cropped_src_width =
std::min(src_width_, dst_width_ * src_height_ / dst_height_);
const int cropped_src_height =
std::min(src_height_, dst_height_ * src_width_ / dst_width_);
// Make sure the offsets are even to avoid rounding errors for the U/V planes.
const int src_offset_x = ((src_width_ - cropped_src_width) / 2) & ~1;
const int src_offset_y = ((src_height_ - cropped_src_height) / 2) & ~1;
const uint8_t* y_ptr =
src_frame.video_frame_buffer()->DataY() +
src_offset_y * src_frame.video_frame_buffer()->StrideY() +
src_offset_x;
const uint8_t* u_ptr =
src_frame.video_frame_buffer()->DataU() +
src_offset_y / 2 * src_frame.video_frame_buffer()->StrideU() +
src_offset_x / 2;
const uint8_t* v_ptr =
src_frame.video_frame_buffer()->DataV() +
src_offset_y / 2 * src_frame.video_frame_buffer()->StrideV() +
src_offset_x / 2;
return libyuv::I420Scale(
y_ptr,
src_frame.video_frame_buffer()->StrideY(),
u_ptr,
src_frame.video_frame_buffer()->StrideU(),
v_ptr,
src_frame.video_frame_buffer()->StrideV(),
cropped_src_width, cropped_src_height,
dst_frame->video_frame_buffer()->MutableDataY(),
dst_frame->video_frame_buffer()->StrideY(),
dst_frame->video_frame_buffer()->MutableDataU(),
dst_frame->video_frame_buffer()->StrideU(),
dst_frame->video_frame_buffer()->MutableDataV(),
dst_frame->video_frame_buffer()->StrideV(),
dst_width_, dst_height_,
libyuv::FilterMode(method_));
}
bool Scaler::SupportedVideoType(VideoType src_video_type,
VideoType dst_video_type) {
if (src_video_type != dst_video_type)
return false;
if ((src_video_type == kI420) || (src_video_type == kIYUV) ||
(src_video_type == kYV12))
return true;
return false;
}
} // namespace webrtc

View File

@@ -1,399 +0,0 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <math.h>
#include <string.h>
#include <memory>
#include "testing/gtest/include/gtest/gtest.h"
#include "webrtc/base/timeutils.h"
#include "webrtc/common_video/libyuv/include/scaler.h"
#include "webrtc/test/testsupport/fileutils.h"
namespace webrtc {
class TestScaler : public ::testing::Test {
protected:
TestScaler();
virtual void SetUp();
virtual void TearDown();
void ScaleSequence(ScaleMethod method,
FILE* source_file, std::string out_name,
int src_width, int src_height,
int dst_width, int dst_height);
// Computes the sequence average PSNR between an input sequence in
// |input_file| and an output sequence with filename |out_name|. |width| and
// |height| are the frame sizes of both sequences.
double ComputeAvgSequencePSNR(FILE* input_file, std::string out_name,
int width, int height);
Scaler test_scaler_;
FILE* source_file_;
VideoFrame test_frame_;
const int width_;
const int half_width_;
const int height_;
const int half_height_;
const int size_y_;
const int size_uv_;
const size_t frame_length_;
};
TestScaler::TestScaler()
: source_file_(NULL),
width_(352),
half_width_(width_ / 2),
height_(288),
half_height_(height_ / 2),
size_y_(width_ * height_),
size_uv_(half_width_ * half_height_),
frame_length_(CalcBufferSize(kI420, width_, height_)) {
}
void TestScaler::SetUp() {
const std::string input_file_name =
webrtc::test::ResourcePath("foreman_cif", "yuv");
source_file_ = fopen(input_file_name.c_str(), "rb");
ASSERT_TRUE(source_file_ != NULL) << "Cannot read file: "<<
input_file_name << "\n";
test_frame_.CreateEmptyFrame(width_, height_,
width_, half_width_, half_width_);
}
void TestScaler::TearDown() {
if (source_file_ != NULL) {
ASSERT_EQ(0, fclose(source_file_));
}
source_file_ = NULL;
}
TEST_F(TestScaler, ScaleWithoutSettingValues) {
EXPECT_EQ(-2, test_scaler_.Scale(test_frame_, &test_frame_));
}
TEST_F(TestScaler, ScaleBadInitialValues) {
EXPECT_EQ(-1, test_scaler_.Set(0, 288, 352, 288, kI420, kI420, kScalePoint));
EXPECT_EQ(-1, test_scaler_.Set(704, 0, 352, 288, kI420, kI420, kScaleBox));
EXPECT_EQ(-1, test_scaler_.Set(704, 576, 352, 0, kI420, kI420,
kScaleBilinear));
EXPECT_EQ(-1, test_scaler_.Set(704, 576, 0, 288, kI420, kI420, kScalePoint));
}
TEST_F(TestScaler, ScaleSendingNullSourcePointer) {
VideoFrame null_src_frame;
EXPECT_EQ(-1, test_scaler_.Scale(null_src_frame, &test_frame_));
}
TEST_F(TestScaler, ScaleSendingBufferTooSmall) {
// Sending a buffer which is too small (should reallocate and update size)
EXPECT_EQ(0, test_scaler_.Set(width_, height_,
half_width_, half_height_,
kI420, kI420,
kScalePoint));
VideoFrame test_frame2;
std::unique_ptr<uint8_t[]> orig_buffer(new uint8_t[frame_length_]);
EXPECT_GT(fread(orig_buffer.get(), 1, frame_length_, source_file_), 0U);
test_frame_.CreateFrame(orig_buffer.get(),
orig_buffer.get() + size_y_,
orig_buffer.get() + size_y_ + size_uv_,
width_, height_,
width_, half_width_, half_width_,
kVideoRotation_0);
EXPECT_EQ(0, test_scaler_.Scale(test_frame_, &test_frame2));
EXPECT_GT(width_ * height_, test_frame2.allocated_size(kYPlane));
EXPECT_GT(size_uv_, test_frame2.allocated_size(kUPlane));
EXPECT_GT(size_uv_, test_frame2.allocated_size(kVPlane));
EXPECT_EQ(half_width_, test_frame2.width());
EXPECT_EQ(half_height_, test_frame2.height());
}
// TODO(mikhal): Converge the test into one function that accepts the method.
#if defined(WEBRTC_ANDROID)
#define MAYBE_PointScaleTest DISABLED_PointScaleTest
#else
#define MAYBE_PointScaleTest PointScaleTest
#endif
TEST_F(TestScaler, MAYBE_PointScaleTest) {
double avg_psnr;
FILE* source_file2;
ScaleMethod method = kScalePoint;
std::string out_name = webrtc::test::OutputPath() +
"LibYuvTest_PointScale_176_144.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
half_width_, half_height_);
// Upsample back up and check PSNR.
source_file2 = fopen(out_name.c_str(), "rb");
out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_352_288_"
"upfrom_176_144.yuv";
ScaleSequence(method,
source_file2, out_name,
176, 144,
352, 288);
avg_psnr = ComputeAvgSequencePSNR(source_file_, out_name, width_, height_);
printf("PSNR for scaling from: %d %d, down/up to: %d %d, and back to "
"original size: %f \n", width_, height_, 176, 144, avg_psnr);
// Average PSNR for lower bound in assert is ~0.1dB lower than the actual
// average PSNR under same conditions.
ASSERT_GT(avg_psnr, 27.9);
ASSERT_EQ(0, fclose(source_file2));
out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_320_240.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
320, 240);
out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_704_576.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
width_ * 2, height_ * 2);
out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_300_200.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
300, 200);
out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_400_300.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
400, 300);
// Down-sample to odd size frame and scale back up.
out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_282_231.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
282, 231);
source_file2 = fopen(out_name.c_str(), "rb");
out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_352_288_"
"upfrom_282_231.yuv";
ScaleSequence(method,
source_file2, out_name,
282, 231,
352, 288);
avg_psnr = ComputeAvgSequencePSNR(source_file_, out_name, width_, height_);
printf("PSNR for scaling from: %d %d, down/up to: %d %d, and back to "
"original size: %f \n", width_, height_, 282, 231, avg_psnr);
// Average PSNR for lower bound in assert is ~0.1dB lower than the actual
// average PSNR under same conditions.
ASSERT_GT(avg_psnr, 25.8);
ASSERT_EQ(0, fclose(source_file2));
}
#if defined(WEBRTC_ANDROID)
#define MAYBE_BilinearScaleTest DISABLED_BiLinearScaleTest
#else
#define MAYBE_BilinearScaleTest BiLinearScaleTest
#endif
TEST_F(TestScaler, MAYBE_BiLinearScaleTest) {
double avg_psnr;
FILE* source_file2;
ScaleMethod method = kScaleBilinear;
std::string out_name = webrtc::test::OutputPath() +
"LibYuvTest_BilinearScale_176_144.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
width_ / 2, height_ / 2);
// Up-sample back up and check PSNR.
source_file2 = fopen(out_name.c_str(), "rb");
out_name = webrtc::test::OutputPath() + "LibYuvTest_BilinearScale_352_288_"
"upfrom_176_144.yuv";
ScaleSequence(method,
source_file2, out_name,
176, 144,
352, 288);
avg_psnr = ComputeAvgSequencePSNR(source_file_, out_name, width_, height_);
printf("PSNR for scaling from: %d %d, down/up to: %d %d, and back to "
"original size: %f \n", width_, height_, 176, 144, avg_psnr);
// Average PSNR for lower bound in assert is ~0.1dB lower than the actual
// average PSNR under same conditions.
ASSERT_GT(avg_psnr, 27.5);
ComputeAvgSequencePSNR(source_file_, out_name, width_, height_);
ASSERT_EQ(0, fclose(source_file2));
out_name = webrtc::test::OutputPath() +
"LibYuvTest_BilinearScale_320_240.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
320, 240);
out_name = webrtc::test::OutputPath() +
"LibYuvTest_BilinearScale_704_576.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
width_ * 2, height_ * 2);
out_name = webrtc::test::OutputPath() +
"LibYuvTest_BilinearScale_300_200.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
300, 200);
out_name = webrtc::test::OutputPath() +
"LibYuvTest_BilinearScale_400_300.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
400, 300);
}
#if defined(WEBRTC_ANDROID)
#define MAYBE_BoxScaleTest DISABLED_BoxScaleTest
#else
#define MAYBE_BoxScaleTest BoxScaleTest
#endif
TEST_F(TestScaler, MAYBE_BoxScaleTest) {
double avg_psnr;
FILE* source_file2;
ScaleMethod method = kScaleBox;
std::string out_name = webrtc::test::OutputPath() +
"LibYuvTest_BoxScale_176_144.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
width_ / 2, height_ / 2);
// Up-sample back up and check PSNR.
source_file2 = fopen(out_name.c_str(), "rb");
out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_352_288_"
"upfrom_176_144.yuv";
ScaleSequence(method,
source_file2, out_name,
176, 144,
352, 288);
avg_psnr = ComputeAvgSequencePSNR(source_file_, out_name, width_, height_);
printf("PSNR for scaling from: %d %d, down/up to: %d %d, and back to "
"original size: %f \n", width_, height_, 176, 144, avg_psnr);
// Average PSNR for lower bound in assert is ~0.1dB lower than the actual
// average PSNR under same conditions.
ASSERT_GT(avg_psnr, 27.5);
ASSERT_EQ(0, fclose(source_file2));
out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_320_240.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
320, 240);
out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_704_576.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
width_ * 2, height_ * 2);
out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_300_200.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
300, 200);
out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_400_300.yuv";
ScaleSequence(method,
source_file_, out_name,
width_, height_,
400, 300);
}
double TestScaler::ComputeAvgSequencePSNR(FILE* input_file,
std::string out_name,
int width, int height) {
FILE* output_file;
output_file = fopen(out_name.c_str(), "rb");
assert(output_file != NULL);
rewind(input_file);
rewind(output_file);
size_t required_size = CalcBufferSize(kI420, width, height);
uint8_t* input_buffer = new uint8_t[required_size];
uint8_t* output_buffer = new uint8_t[required_size];
int frame_count = 0;
double avg_psnr = 0;
VideoFrame in_frame, out_frame;
const int half_width = (width + 1) / 2;
in_frame.CreateEmptyFrame(width, height, width, half_width, half_width);
out_frame.CreateEmptyFrame(width, height, width, half_width, half_width);
while (feof(input_file) == 0) {
if (fread(input_buffer, 1, required_size, input_file) != required_size) {
break;
}
if (fread(output_buffer, 1, required_size, output_file) != required_size) {
break;
}
frame_count++;
EXPECT_EQ(0, ConvertToI420(kI420, input_buffer, 0, 0, width, height,
required_size, kVideoRotation_0, &in_frame));
EXPECT_EQ(0, ConvertToI420(kI420, output_buffer, 0, 0, width, height,
required_size, kVideoRotation_0, &out_frame));
double psnr = I420PSNR(&in_frame, &out_frame);
avg_psnr += psnr;
}
avg_psnr = avg_psnr / frame_count;
assert(0 == fclose(output_file));
delete [] input_buffer;
delete [] output_buffer;
return avg_psnr;
}
// TODO(mikhal): Move part to a separate scale test.
void TestScaler::ScaleSequence(ScaleMethod method,
FILE* source_file, std::string out_name,
int src_width, int src_height,
int dst_width, int dst_height) {
FILE* output_file;
EXPECT_EQ(0, test_scaler_.Set(src_width, src_height,
dst_width, dst_height,
kI420, kI420, method));
output_file = fopen(out_name.c_str(), "wb");
ASSERT_TRUE(output_file != NULL);
rewind(source_file);
VideoFrame input_frame;
VideoFrame output_frame;
int64_t start_clock, total_clock;
total_clock = 0;
int frame_count = 0;
size_t src_required_size = CalcBufferSize(kI420, src_width, src_height);
std::unique_ptr<uint8_t[]> frame_buffer(new uint8_t[src_required_size]);
int size_y = src_width * src_height;
int size_uv = ((src_width + 1) / 2) * ((src_height + 1) / 2);
// Running through entire sequence.
while (feof(source_file) == 0) {
if (fread(frame_buffer.get(), 1, src_required_size, source_file) !=
src_required_size)
break;
input_frame.CreateFrame(frame_buffer.get(),
frame_buffer.get() + size_y,
frame_buffer.get() + size_y + size_uv,
src_width, src_height,
src_width, (src_width + 1) / 2,
(src_width + 1) / 2,
kVideoRotation_0);
start_clock = rtc::TimeMillis();
EXPECT_EQ(0, test_scaler_.Scale(input_frame, &output_frame));
total_clock += rtc::TimeMillis() - start_clock;
if (PrintVideoFrame(output_frame, output_file) < 0) {
return;
}
frame_count++;
}
if (frame_count) {
printf("Scaling[%d %d] => [%d %d]: ",
src_width, src_height, dst_width, dst_height);
printf("Average time per frame[ms]: %.2lf\n",
(static_cast<double>(total_clock) / frame_count));
}
ASSERT_EQ(0, fclose(output_file));
}
} // namespace webrtc

View File

@@ -130,27 +130,28 @@ int PrintVideoFrame(const VideoFrame& frame, FILE* file) {
   return 0;
 }
 
-int ExtractBuffer(const VideoFrame& input_frame, size_t size, uint8_t* buffer) {
+int ExtractBuffer(const rtc::scoped_refptr<VideoFrameBuffer>& input_frame,
+                  size_t size,
+                  uint8_t* buffer) {
   assert(buffer);
-  if (input_frame.IsZeroSize())
+  if (!input_frame)
     return -1;
-  size_t length =
-      CalcBufferSize(kI420, input_frame.width(), input_frame.height());
+  int width = input_frame->width();
+  int height = input_frame->height();
+  size_t length = CalcBufferSize(kI420, width, height);
   if (size < length) {
     return -1;
   }
-  int width = input_frame.video_frame_buffer()->width();
-  int height = input_frame.video_frame_buffer()->height();
   int chroma_width = (width + 1) / 2;
   int chroma_height = (height + 1) / 2;
-  libyuv::I420Copy(input_frame.video_frame_buffer()->DataY(),
-                   input_frame.video_frame_buffer()->StrideY(),
-                   input_frame.video_frame_buffer()->DataU(),
-                   input_frame.video_frame_buffer()->StrideU(),
-                   input_frame.video_frame_buffer()->DataV(),
-                   input_frame.video_frame_buffer()->StrideV(),
+  libyuv::I420Copy(input_frame->DataY(),
+                   input_frame->StrideY(),
+                   input_frame->DataU(),
+                   input_frame->StrideU(),
+                   input_frame->DataV(),
+                   input_frame->StrideV(),
                    buffer, width,
                    buffer + width*height, chroma_width,
                    buffer + width*height + chroma_width*chroma_height,
@@ -160,6 +161,9 @@ int ExtractBuffer(const VideoFrame& input_frame, size_t size, uint8_t* buffer) {
   return static_cast<int>(length);
 }
 
+int ExtractBuffer(const VideoFrame& input_frame, size_t size, uint8_t* buffer) {
+  return ExtractBuffer(input_frame.video_frame_buffer(), size, buffer);
+}
+
 int ConvertNV12ToRGB565(const uint8_t* src_frame,
                         uint8_t* dst_frame,

View File

@@ -23,12 +23,6 @@
 // to optimized bitstream readers. See avcodec_decode_video2.
 const size_t EncodedImage::kBufferPaddingBytesH264 = 8;
 
-int ExpectedSize(int plane_stride, int image_height, PlaneType type) {
-  if (type == kYPlane)
-    return plane_stride * image_height;
-  return plane_stride * ((image_height + 1) / 2);
-}
-
 VideoFrame::VideoFrame()
     : video_frame_buffer_(nullptr),
       timestamp_(0),

View File

@@ -8,11 +8,14 @@
  * be found in the AUTHORS file in the root of the source tree.
  */
 
+#include <algorithm>
+
 #include "webrtc/common_video/include/video_frame_buffer.h"
 
 #include "webrtc/base/checks.h"
 #include "webrtc/base/keep_ref_until_done.h"
 #include "libyuv/convert.h"
+#include "libyuv/scale.h"
 
 // Aligning pointer to 64 bytes for improved performance, e.g. use SIMD.
 static const int kBufferAlignment = 64;
@@ -208,6 +211,60 @@ void I420Buffer::SetToBlack() {
                            0, 128, 128) == 0);
 }
 
+void I420Buffer::CropAndScaleFrom(
+    const rtc::scoped_refptr<VideoFrameBuffer>& src,
+    int offset_x,
+    int offset_y,
+    int crop_width,
+    int crop_height) {
+  RTC_CHECK_LE(crop_width, src->width());
+  RTC_CHECK_LE(crop_height, src->height());
+  RTC_CHECK_LE(crop_width + offset_x, src->width());
+  RTC_CHECK_LE(crop_height + offset_y, src->height());
+  RTC_CHECK_GE(offset_x, 0);
+  RTC_CHECK_GE(offset_y, 0);
+
+  // Make sure offset is even so that u/v plane becomes aligned.
+  const int uv_offset_x = offset_x / 2;
+  const int uv_offset_y = offset_y / 2;
+  offset_x = uv_offset_x * 2;
+  offset_y = uv_offset_y * 2;
+
+  const uint8_t* y_plane =
+      src->DataY() + src->StrideY() * offset_y + offset_x;
+  const uint8_t* u_plane =
+      src->DataU() + src->StrideU() * uv_offset_y + uv_offset_x;
+  const uint8_t* v_plane =
+      src->DataV() + src->StrideV() * uv_offset_y + uv_offset_x;
+  int res = libyuv::I420Scale(y_plane, src->StrideY(),
+                              u_plane, src->StrideU(),
+                              v_plane, src->StrideV(),
+                              crop_width, crop_height,
+                              MutableDataY(), StrideY(),
+                              MutableDataU(), StrideU(),
+                              MutableDataV(), StrideV(),
+                              width(), height(), libyuv::kFilterBox);
+
+  RTC_DCHECK_EQ(res, 0);
+}
+
+void I420Buffer::CropAndScaleFrom(
+    const rtc::scoped_refptr<VideoFrameBuffer>& src) {
+  const int crop_width =
+      std::min(src->width(), width() * src->height() / height());
+  const int crop_height =
+      std::min(src->height(), height() * src->width() / width());
+
+  CropAndScaleFrom(
+      src,
+      (src->width() - crop_width) / 2, (src->height() - crop_height) / 2,
+      crop_width, crop_height);
+}
+
+void I420Buffer::ScaleFrom(const rtc::scoped_refptr<VideoFrameBuffer>& src) {
+  CropAndScaleFrom(src, 0, 0, src->width(), src->height());
+}
+
 NativeHandleBuffer::NativeHandleBuffer(void* native_handle,
                                        int width,
                                        int height)
@@ -316,35 +373,4 @@ rtc::scoped_refptr<VideoFrameBuffer> WrappedI420Buffer::NativeToI420Buffer() {
   return nullptr;
 }
 
-rtc::scoped_refptr<VideoFrameBuffer> ShallowCenterCrop(
-    const rtc::scoped_refptr<VideoFrameBuffer>& buffer,
-    int cropped_width,
-    int cropped_height) {
-  RTC_CHECK(buffer->native_handle() == nullptr);
-  RTC_CHECK_LE(cropped_width, buffer->width());
-  RTC_CHECK_LE(cropped_height, buffer->height());
-  if (buffer->width() == cropped_width && buffer->height() == cropped_height)
-    return buffer;
-
-  // Center crop to |cropped_width| x |cropped_height|.
-  // Make sure offset is even so that u/v plane becomes aligned.
-  const int uv_offset_x = (buffer->width() - cropped_width) / 4;
-  const int uv_offset_y = (buffer->height() - cropped_height) / 4;
-  const int offset_x = uv_offset_x * 2;
-  const int offset_y = uv_offset_y * 2;
-
-  const uint8_t* y_plane = buffer->DataY() +
-                           buffer->StrideY() * offset_y + offset_x;
-  const uint8_t* u_plane = buffer->DataU() +
-                           buffer->StrideU() * uv_offset_y + uv_offset_x;
-  const uint8_t* v_plane = buffer->DataV() +
-                           buffer->StrideV() * uv_offset_y + uv_offset_x;
-  return new rtc::RefCountedObject<WrappedI420Buffer>(
-      cropped_width, cropped_height,
-      y_plane, buffer->StrideY(),
-      u_plane, buffer->StrideU(),
-      v_plane, buffer->StrideV(),
-      rtc::KeepRefUntilDone(buffer));
-}
-
 }  // namespace webrtc
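
To make the aspect-ratio arithmetic in the convenience overload
concrete: for a 640x480 source and a 320x180 destination, crop_width =
min(640, 320 * 480 / 180) = 640 and crop_height = min(480, 180 * 640 /
320) = 360, so the overload center-crops to a 640x360 region (offset_y
= 60) before scaling - exactly the behavior the CropAndScale16x9 test
added above verifies.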

View File

@@ -22,32 +22,6 @@
 namespace cricket {
 
-// Round to 2 pixels because Chroma channels are half size.
-#define ROUNDTO2(v) (v & ~1)
-
-bool VideoFrame::CopyToPlanes(uint8_t* dst_y,
-                              uint8_t* dst_u,
-                              uint8_t* dst_v,
-                              int32_t dst_pitch_y,
-                              int32_t dst_pitch_u,
-                              int32_t dst_pitch_v) const {
-  const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer =
-      video_frame_buffer();
-  if (!buffer) {
-    LOG(LS_ERROR) << "NULL video buffer.";
-    return false;
-  }
-  int32_t src_width = width();
-  int32_t src_height = height();
-  return libyuv::I420Copy(buffer->DataY(), buffer->StrideY(),
-                          buffer->DataU(), buffer->StrideU(),
-                          buffer->DataV(), buffer->StrideV(),
-                          dst_y, dst_pitch_y,
-                          dst_u, dst_pitch_u,
-                          dst_v, dst_pitch_v,
-                          src_width, src_height) == 0;
-}
-
 size_t VideoFrame::ConvertToRgbBuffer(uint32_t to_fourcc,
                                       uint8_t* buffer,
                                       size_t size,
@@ -69,85 +43,6 @@ size_t VideoFrame::ConvertToRgbBuffer(uint32_t to_fourcc,
   return needed;
 }
 
-// TODO(fbarchard): Handle odd width/height with rounding.
-// TODO(nisse): If method is kept, switch to using int instead of
-// size_t and int32_t.
-void VideoFrame::StretchToPlanes(uint8_t* dst_y,
-                                 uint8_t* dst_u,
-                                 uint8_t* dst_v,
-                                 int32_t dst_pitch_y,
-                                 int32_t dst_pitch_u,
-                                 int32_t dst_pitch_v,
-                                 size_t dst_width,
-                                 size_t dst_height,
-                                 bool interpolate,
-                                 bool vert_crop) const {
-  if (!video_frame_buffer()) {
-    LOG(LS_ERROR) << "NULL frame buffer.";
-    return;
-  }
-
-  size_t src_width = width();
-  size_t src_height = height();
-  if (dst_width == src_width && dst_height == src_height) {
-    CopyToPlanes(dst_y, dst_u, dst_v, dst_pitch_y, dst_pitch_u, dst_pitch_v);
-    return;
-  }
-
-  const uint8_t* src_y = video_frame_buffer()->DataY();
-  const uint8_t* src_u = video_frame_buffer()->DataU();
-  const uint8_t* src_v = video_frame_buffer()->DataV();
-
-  if (vert_crop) {
-    // Adjust the input width:height ratio to be the same as the output ratio.
-    if (src_width * dst_height > src_height * dst_width) {
-      // Reduce the input width, but keep size/position aligned for YuvScaler
-      src_width = ROUNDTO2(src_height * dst_width / dst_height);
-      int32_t iwidth_offset = ROUNDTO2((width() - src_width) / 2);
-      src_y += iwidth_offset;
-      src_u += iwidth_offset / 2;
-      src_v += iwidth_offset / 2;
-    } else if (src_width * dst_height < src_height * dst_width) {
-      // Reduce the input height.
-      src_height = src_width * dst_height / dst_width;
-      int32_t iheight_offset =
-          static_cast<int32_t>((height() - src_height) >> 2);
-      iheight_offset <<= 1;  // Ensure that iheight_offset is even.
-      src_y += iheight_offset * video_frame_buffer()->StrideY();
-      src_u += iheight_offset / 2 * video_frame_buffer()->StrideU();
-      src_v += iheight_offset / 2 * video_frame_buffer()->StrideV();
-    }
-  }
-
-  // Scale to the output I420 frame.
-  libyuv::Scale(src_y, src_u, src_v, video_frame_buffer()->StrideY(),
-                video_frame_buffer()->StrideU(),
-                video_frame_buffer()->StrideV(),
-                static_cast<int>(src_width), static_cast<int>(src_height),
-                dst_y, dst_u, dst_v, dst_pitch_y, dst_pitch_u, dst_pitch_v,
-                static_cast<int>(dst_width), static_cast<int>(dst_height),
-                interpolate);
-}
-
-void VideoFrame::StretchToFrame(VideoFrame* dst,
-                                bool interpolate, bool vert_crop) const {
-  if (!dst) {
-    LOG(LS_ERROR) << "NULL dst pointer.";
-    return;
-  }
-
-  StretchToPlanes(dst->video_frame_buffer()->MutableDataY(),
-                  dst->video_frame_buffer()->MutableDataU(),
-                  dst->video_frame_buffer()->MutableDataV(),
-                  dst->video_frame_buffer()->StrideY(),
-                  dst->video_frame_buffer()->StrideU(),
-                  dst->video_frame_buffer()->StrideV(),
-                  dst->width(), dst->height(),
-                  interpolate, vert_crop);
-  dst->SetTimeStamp(GetTimeStamp());
-  // Stretched frame should have the same rotation as the source.
-  dst->set_rotation(rotation());
-}
-
 static const size_t kMaxSampleSize = 1000000000u;
 // Returns whether a sample is valid.
 bool VideoFrame::Validate(uint32_t fourcc,

View File

@@ -69,28 +69,6 @@ class VideoFrame {
                                  size_t size,
                                  int stride_rgb) const;
 
-  // Writes the frame into the given planes, stretched to the given width and
-  // height. The parameter "interpolate" controls whether to interpolate or just
-  // take the nearest-point. The parameter "crop" controls whether to crop this
-  // frame to the aspect ratio of the given dimensions before stretching.
-  virtual void StretchToPlanes(uint8_t* y,
-                               uint8_t* u,
-                               uint8_t* v,
-                               int32_t pitchY,
-                               int32_t pitchU,
-                               int32_t pitchV,
-                               size_t width,
-                               size_t height,
-                               bool interpolate,
-                               bool crop) const;
-
-  // Writes the frame into the target VideoFrame, stretched to the size of that
-  // frame. The parameter "interpolate" controls whether to interpolate or just
-  // take the nearest-point. The parameter "crop" controls whether to crop this
-  // frame to the aspect ratio of the target frame before stretching.
-  virtual void StretchToFrame(VideoFrame *target, bool interpolate,
-                              bool crop) const;
-
   // Tests if sample is valid. Returns true if valid.
   static bool Validate(uint32_t fourcc,
                        int w,
@@ -99,17 +77,6 @@ class VideoFrame {
                        size_t sample_size);
 
  protected:
-  // Writes the frame into the given planes, stretched to the given width and
-  // height. The parameter "interpolate" controls whether to interpolate or just
-  // take the nearest-point. The parameter "crop" controls whether to crop this
-  // frame to the aspect ratio of the given dimensions before stretching.
-  virtual bool CopyToPlanes(uint8_t* dst_y,
-                            uint8_t* dst_u,
-                            uint8_t* dst_v,
-                            int32_t dst_pitch_y,
-                            int32_t dst_pitch_u,
-                            int32_t dst_pitch_v) const;
-
   // Creates an empty frame.
   virtual VideoFrame* CreateEmptyFrame(int w,
                                        int h,

View File

@@ -1811,33 +1811,6 @@ class VideoFrameTest : public testing::Test {
     EXPECT_EQ(const_source->video_frame_buffer(), target->video_frame_buffer());
   }
 
-  void StretchToFrame() {
-    // Create the source frame as a black frame.
-    rtc::scoped_refptr<webrtc::I420Buffer> buffer(
-        new rtc::RefCountedObject<webrtc::I420Buffer>(kWidth * 2, kHeight * 2));
-    buffer->SetToBlack();
-    T source(buffer, 0, webrtc::kVideoRotation_0);
-    EXPECT_TRUE(IsSize(source, kWidth * 2, kHeight * 2));
-
-    // Create the target frame by loading from a file.
-    T target1;
-    ASSERT_TRUE(LoadFrameNoRepeat(&target1));
-    EXPECT_FALSE(IsBlack(target1));
-
-    // Stretch and check if the stretched target is black.
-    source.StretchToFrame(&target1, true, false);
-    EXPECT_TRUE(IsBlack(target1));
-
-    // Crop and stretch and check if the stretched target is black.
-    T target2;
-    ASSERT_TRUE(LoadFrameNoRepeat(&target2));
-    source.StretchToFrame(&target2, true, true);
-    EXPECT_TRUE(IsBlack(target2));
-    EXPECT_EQ(source.GetTimeStamp(), target2.GetTimeStamp());
-  }
-
   int repeat_;
 };

View File

@@ -39,13 +39,12 @@ VideoFrame* VideoFrameFactory::CreateAliasedFrame(
     std::swap(output_width, output_height);
   }
 
-  std::unique_ptr<VideoFrame> output_frame(new WebRtcVideoFrame(
-      pool_.CreateBuffer(output_width, output_height),
-      cropped_input_frame->rotation(),
-      cropped_input_frame->timestamp_us()));
-  cropped_input_frame->StretchToFrame(output_frame.get(), true, true);
-  return output_frame.release();
+  rtc::scoped_refptr<webrtc::I420Buffer> scaled_buffer(
+      pool_.CreateBuffer(output_width, output_height));
+  scaled_buffer->CropAndScaleFrom(cropped_input_frame->video_frame_buffer());
+  return new WebRtcVideoFrame(scaled_buffer, cropped_input_frame->rotation(),
+                              cropped_input_frame->timestamp_us());
 }
 
 }  // namespace cricket

View File

@@ -245,7 +245,6 @@ TEST_WEBRTCVIDEOFRAME(ConvertFromUYVYBufferInverted)
 // TEST_WEBRTCVIDEOFRAME(ConvertToI422Buffer)
 // TEST_WEBRTCVIDEOFRAME(ConstructARGBBlackWhitePixel)
 
-TEST_WEBRTCVIDEOFRAME(StretchToFrame)
 TEST_WEBRTCVIDEOFRAME(Copy)
 TEST_WEBRTCVIDEOFRAME(CopyIsRef)

View File

@@ -145,15 +145,16 @@ struct FrameEncodeParams {
 // We receive I420Frames as input, but we need to feed CVPixelBuffers into the
 // encoder. This performs the copy and format conversion.
 // TODO(tkchin): See if encoder will accept i420 frames and compare performance.
-bool CopyVideoFrameToPixelBuffer(const webrtc::VideoFrame& frame,
-                                 CVPixelBufferRef pixel_buffer) {
+bool CopyVideoFrameToPixelBuffer(
+    const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& frame,
+    CVPixelBufferRef pixel_buffer) {
   RTC_DCHECK(pixel_buffer);
   RTC_DCHECK(CVPixelBufferGetPixelFormatType(pixel_buffer) ==
              kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
   RTC_DCHECK(CVPixelBufferGetHeightOfPlane(pixel_buffer, 0) ==
-             static_cast<size_t>(frame.height()));
+             static_cast<size_t>(frame->height()));
   RTC_DCHECK(CVPixelBufferGetWidthOfPlane(pixel_buffer, 0) ==
-             static_cast<size_t>(frame.width()));
+             static_cast<size_t>(frame->width()));
 
   CVReturn cvRet = CVPixelBufferLockBaseAddress(pixel_buffer, 0);
   if (cvRet != kCVReturnSuccess) {
@@ -168,14 +169,11 @@ bool CopyVideoFrameToPixelBuffer(const webrtc::VideoFrame& frame,
   int dst_stride_uv = CVPixelBufferGetBytesPerRowOfPlane(pixel_buffer, 1);
   // Convert I420 to NV12.
   int ret = libyuv::I420ToNV12(
-      frame.video_frame_buffer()->DataY(),
-      frame.video_frame_buffer()->StrideY(),
-      frame.video_frame_buffer()->DataU(),
-      frame.video_frame_buffer()->StrideU(),
-      frame.video_frame_buffer()->DataV(),
-      frame.video_frame_buffer()->StrideV(),
+      frame->DataY(), frame->StrideY(),
+      frame->DataU(), frame->StrideU(),
+      frame->DataV(), frame->StrideV(),
       dst_y, dst_stride_y, dst_uv, dst_stride_uv,
-      frame.width(), frame.height());
+      frame->width(), frame->height());
   CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);
   if (ret) {
     LOG(LS_ERROR) << "Error converting I420 VideoFrame to NV12 :" << ret;
@@ -247,11 +245,12 @@ int H264VideoToolboxEncoder::InitEncode(const VideoCodec* codec_settings,
   return ResetCompressionSession();
 }
 
-const VideoFrame& H264VideoToolboxEncoder::GetScaledFrameOnEncode(
-    const VideoFrame& frame) {
+rtc::scoped_refptr<VideoFrameBuffer>
+H264VideoToolboxEncoder::GetScaledBufferOnEncode(
+    const rtc::scoped_refptr<VideoFrameBuffer>& frame) {
   rtc::CritScope lock(&quality_scaler_crit_);
-  quality_scaler_.OnEncodeFrame(frame);
-  return quality_scaler_.GetScaledFrame(frame);
+  quality_scaler_.OnEncodeFrame(frame->width(), frame->height());
+  return quality_scaler_.GetScaledBuffer(frame);
 }
 
 int H264VideoToolboxEncoder::Encode(
@@ -270,11 +269,12 @@ int H264VideoToolboxEncoder::Encode(
   }
 #endif
   bool is_keyframe_required = false;
-  const VideoFrame& input_image = GetScaledFrameOnEncode(frame);
+  rtc::scoped_refptr<VideoFrameBuffer> input_image(
+      GetScaledBufferOnEncode(frame.video_frame_buffer()));
 
-  if (input_image.width() != width_ || input_image.height() != height_) {
-    width_ = input_image.width();
-    height_ = input_image.height();
+  if (input_image->width() != width_ || input_image->height() != height_) {
+    width_ = input_image->width();
+    height_ = input_image->height();
     int ret = ResetCompressionSession();
     if (ret < 0)
       return ret;
@@ -327,7 +327,7 @@ int H264VideoToolboxEncoder::Encode(
   }
 
   CMTime presentation_time_stamp =
-      CMTimeMake(input_image.render_time_ms(), 1000);
+      CMTimeMake(frame.render_time_ms(), 1000);
   CFDictionaryRef frame_properties = nullptr;
   if (is_keyframe_required) {
     CFTypeRef keys[] = {kVTEncodeFrameOptionKey_ForceKeyFrame};
@@ -336,8 +336,8 @@ int H264VideoToolboxEncoder::Encode(
   }
   std::unique_ptr<internal::FrameEncodeParams> encode_params;
   encode_params.reset(new internal::FrameEncodeParams(
-      this, codec_specific_info, width_, height_, input_image.render_time_ms(),
-      input_image.timestamp(), input_image.rotation()));
+      this, codec_specific_info, width_, height_, frame.render_time_ms(),
+      frame.timestamp(), frame.rotation()));
 
   // Update the bitrate if needed.
   SetBitrateBps(bitrate_adjuster_.GetAdjustedBitrateBps());

View File

@@ -70,7 +70,8 @@ class H264VideoToolboxEncoder : public H264Encoder {
   int ResetCompressionSession();
   void ConfigureCompressionSession();
   void DestroyCompressionSession();
-  const VideoFrame& GetScaledFrameOnEncode(const VideoFrame& frame);
+  rtc::scoped_refptr<VideoFrameBuffer> GetScaledBufferOnEncode(
+      const rtc::scoped_refptr<VideoFrameBuffer>& frame);
   void SetBitrateBps(uint32_t bitrate_bps);
   void SetEncoderBitrateBps(uint32_t bitrate_bps);

View File

@@ -66,8 +66,7 @@ VideoProcessorImpl::VideoProcessorImpl(webrtc::VideoEncoder* encoder,
       num_dropped_frames_(0),
       num_spatial_resizes_(0),
       last_encoder_frame_width_(0),
-      last_encoder_frame_height_(0),
-      scaler_() {
+      last_encoder_frame_height_(0) {
   assert(encoder);
   assert(decoder);
   assert(frame_reader);
@@ -335,23 +334,16 @@ void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
   // upsample back to original size: needed for PSNR and SSIM computations.
   if (image.width() != config_.codec_settings->width ||
       image.height() != config_.codec_settings->height) {
-    VideoFrame up_image;
-    int ret_val = scaler_.Set(
-        image.width(), image.height(), config_.codec_settings->width,
-        config_.codec_settings->height, kI420, kI420, kScaleBilinear);
-    assert(ret_val >= 0);
-    if (ret_val < 0) {
-      fprintf(stderr, "Failed to set scalar for frame: %d, return code: %d\n",
-              frame_number, ret_val);
-    }
-    ret_val = scaler_.Scale(image, &up_image);
-    assert(ret_val >= 0);
-    if (ret_val < 0) {
-      fprintf(stderr, "Failed to scale frame: %d, return code: %d\n",
-              frame_number, ret_val);
-    }
+    rtc::scoped_refptr<I420Buffer> up_image(
+        new rtc::RefCountedObject<I420Buffer>(config_.codec_settings->width,
+                                              config_.codec_settings->height));
+
+    // Should be the same aspect ratio, no cropping needed.
+    up_image->ScaleFrom(image.video_frame_buffer());
+
     // TODO(mikhal): Extracting the buffer for now - need to update test.
-    size_t length = CalcBufferSize(kI420, up_image.width(), up_image.height());
+    size_t length =
+        CalcBufferSize(kI420, up_image->width(), up_image->height());
     std::unique_ptr<uint8_t[]> image_buffer(new uint8_t[length]);
     int extracted_length = ExtractBuffer(up_image, length, image_buffer.get());
     assert(extracted_length > 0);

View File

@@ -14,7 +14,6 @@
 #include <string>
 
 #include "webrtc/base/checks.h"
-#include "webrtc/common_video/libyuv/include/scaler.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/modules/video_coding/include/video_codec_interface.h"
 #include "webrtc/modules/video_coding/codecs/test/packet_manipulator.h"
@@ -219,7 +218,6 @@ class VideoProcessorImpl : public VideoProcessor {
   int num_spatial_resizes_;
   int last_encoder_frame_width_;
   int last_encoder_frame_height_;
-  Scaler scaler_;
 
   // Statistics
   double bit_rate_factor_;  // multiply frame length with this to get bit rate

View File

@@ -525,8 +525,8 @@ class VideoProcessorIntegrationTest : public testing::Test {
     EXPECT_GT(psnr_result.min, quality_metrics.minimum_min_psnr);
     EXPECT_GT(ssim_result.average, quality_metrics.minimum_avg_ssim);
     EXPECT_GT(ssim_result.min, quality_metrics.minimum_min_ssim);
-    if (!remove(config_.output_filename.c_str())) {
-      fprintf(stderr, "Failed to remove temporary file!");
+    if (remove(config_.output_filename.c_str()) < 0) {
+      fprintf(stderr, "Failed to remove temporary file!\n");
     }
   }
 };

View File

@@ -729,40 +729,40 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
   if (encoded_complete_callback_ == NULL)
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;

-  if (quality_scaler_enabled_)
-    quality_scaler_.OnEncodeFrame(frame);
-  const VideoFrame& input_image =
-      quality_scaler_enabled_ ? quality_scaler_.GetScaledFrame(frame) : frame;
-  if (quality_scaler_enabled_ && (input_image.width() != codec_.width ||
-                                  input_image.height() != codec_.height)) {
-    int ret = UpdateCodecFrameSize(input_image);
-    if (ret < 0)
-      return ret;
+  rtc::scoped_refptr<VideoFrameBuffer> input_image = frame.video_frame_buffer();
+
+  if (quality_scaler_enabled_) {
+    quality_scaler_.OnEncodeFrame(frame.width(), frame.height());
+    input_image = quality_scaler_.GetScaledBuffer(input_image);
+
+    if (input_image->width() != codec_.width ||
+        input_image->height() != codec_.height) {
+      int ret =
+          UpdateCodecFrameSize(input_image->width(), input_image->height());
+      if (ret < 0)
+        return ret;
+    }
   }

   // Since we are extracting raw pointers from |input_image| to
   // |raw_images_[0]|, the resolution of these frames must match. Note that
   // |input_image| might be scaled from |frame|. In that case, the resolution of
   // |raw_images_[0]| should have been updated in UpdateCodecFrameSize.
-  RTC_DCHECK_EQ(input_image.width(), static_cast<int>(raw_images_[0].d_w));
-  RTC_DCHECK_EQ(input_image.height(), static_cast<int>(raw_images_[0].d_h));
+  RTC_DCHECK_EQ(input_image->width(), static_cast<int>(raw_images_[0].d_w));
+  RTC_DCHECK_EQ(input_image->height(), static_cast<int>(raw_images_[0].d_h));

   // Image in vpx_image_t format.
   // Input image is const. VP8's raw image is not defined as const.
   raw_images_[0].planes[VPX_PLANE_Y] =
-      const_cast<uint8_t*>(input_image.video_frame_buffer()->DataY());
+      const_cast<uint8_t*>(input_image->DataY());
   raw_images_[0].planes[VPX_PLANE_U] =
-      const_cast<uint8_t*>(input_image.video_frame_buffer()->DataU());
+      const_cast<uint8_t*>(input_image->DataU());
   raw_images_[0].planes[VPX_PLANE_V] =
-      const_cast<uint8_t*>(input_image.video_frame_buffer()->DataV());
-  raw_images_[0].stride[VPX_PLANE_Y] =
-      input_image.video_frame_buffer()->StrideY();
-  raw_images_[0].stride[VPX_PLANE_U] =
-      input_image.video_frame_buffer()->StrideU();
-  raw_images_[0].stride[VPX_PLANE_V] =
-      input_image.video_frame_buffer()->StrideV();
+      const_cast<uint8_t*>(input_image->DataV());
+  raw_images_[0].stride[VPX_PLANE_Y] = input_image->StrideY();
+  raw_images_[0].stride[VPX_PLANE_U] = input_image->StrideU();
+  raw_images_[0].stride[VPX_PLANE_V] = input_image->StrideV();

   for (size_t i = 1; i < encoders_.size(); ++i) {
     // Scale the image down a number of times by downsampling factor

@@ -781,7 +781,7 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
   }
   vpx_enc_frame_flags_t flags[kMaxSimulcastStreams];
   for (size_t i = 0; i < encoders_.size(); ++i) {
-    int ret = temporal_layers_[i]->EncodeFlags(input_image.timestamp());
+    int ret = temporal_layers_[i]->EncodeFlags(frame.timestamp());
     if (ret < 0) {
       // Drop this frame.
       return WEBRTC_VIDEO_CODEC_OK;

@@ -833,11 +833,11 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
       rps_.ReceivedRPSI(codec_specific_info->codecSpecific.VP8.pictureIdRPSI);
     }
     if (codec_specific_info->codecSpecific.VP8.hasReceivedSLI) {
-      sendRefresh = rps_.ReceivedSLI(input_image.timestamp());
+      sendRefresh = rps_.ReceivedSLI(frame.timestamp());
     }
     for (size_t i = 0; i < encoders_.size(); ++i) {
       flags[i] = rps_.EncodeFlags(picture_id_[i], sendRefresh,
-                                  input_image.timestamp());
+                                  frame.timestamp());
     }
   } else {
     if (codec_specific_info->codecSpecific.VP8.hasReceivedRPSI) {

@@ -905,17 +905,18 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
   if (error)
     return WEBRTC_VIDEO_CODEC_ERROR;
   timestamp_ += duration;
-  return GetEncodedPartitions(input_image, only_predict_from_key_frame);
+  // Examines frame timestamps only.
+  return GetEncodedPartitions(frame, only_predict_from_key_frame);
 }

 // TODO(pbos): Make sure this works for properly for >1 encoders.
-int VP8EncoderImpl::UpdateCodecFrameSize(const VideoFrame& input_image) {
-  codec_.width = input_image.width();
-  codec_.height = input_image.height();
+int VP8EncoderImpl::UpdateCodecFrameSize(int width, int height) {
+  codec_.width = width;
+  codec_.height = height;
   if (codec_.numberOfSimulcastStreams <= 1) {
     // For now scaling is only used for single-layer streams.
-    codec_.simulcastStream[0].width = input_image.width();
-    codec_.simulcastStream[0].height = input_image.height();
+    codec_.simulcastStream[0].width = width;
+    codec_.simulcastStream[0].height = height;
   }
   // Update the cpu_speed setting for resolution change.
   vpx_codec_control(&(encoders_[0]), VP8E_SET_CPUUSED,
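
Condensed, the scaling path in Encode() now works on buffers rather than frames; the sketch below paraphrases the diff above (codec reconfiguration and error handling omitted, so it is illustrative rather than drop-in code):

    // Pixels travel as a refcounted buffer; |frame| is only consulted for
    // metadata such as timestamps and the pre-scaling dimensions.
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> input_image =
        frame.video_frame_buffer();
    if (quality_scaler_enabled_) {
      quality_scaler_.OnEncodeFrame(frame.width(), frame.height());
      input_image = quality_scaler_.GetScaledBuffer(input_image);
      // If the scaler picked a new resolution, the codec configuration is
      // refreshed from the buffer's dimensions before encoding proceeds.
    }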


@@ -75,7 +75,7 @@ class VP8EncoderImpl : public VP8Encoder {
   int InitAndSetControlSettings();

   // Update frame size for codec.
-  int UpdateCodecFrameSize(const VideoFrame& input_image);
+  int UpdateCodecFrameSize(int width, int height);

   void PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
                              const vpx_codec_cx_pkt& pkt,


@@ -11,6 +11,8 @@
 #ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_MOVING_AVERAGE_H_
 #define WEBRTC_MODULES_VIDEO_CODING_UTILITY_MOVING_AVERAGE_H_

+#include <stddef.h>
+
 #include <list>

 #include "webrtc/typedefs.h"


@@ -7,6 +7,7 @@
  * in the file PATENTS. All contributing project authors may
  * be found in the AUTHORS file in the root of the source tree.
  */

 #include "webrtc/modules/video_coding/utility/quality_scaler.h"

 namespace webrtc {

@@ -94,11 +95,11 @@ void QualityScaler::ReportDroppedFrame() {
   framedrop_percent_.AddSample(100);
 }

-void QualityScaler::OnEncodeFrame(const VideoFrame& frame) {
+void QualityScaler::OnEncodeFrame(int width, int height) {
   // Should be set through InitEncode -> Should be set by now.
-  assert(low_qp_threshold_ >= 0);
-  assert(num_samples_upscale_ > 0);
-  assert(num_samples_downscale_ > 0);
+  RTC_DCHECK_GE(low_qp_threshold_, 0);
+  RTC_DCHECK_GT(num_samples_upscale_, 0u);
+  RTC_DCHECK_GT(num_samples_downscale_, 0u);

   // Update scale factor.
   int avg_drop = 0;

@@ -113,38 +114,31 @@ void QualityScaler::OnEncodeFrame(const VideoFrame& frame) {
       avg_qp <= low_qp_threshold_) {
     AdjustScale(true);
   }
-  UpdateTargetResolution(frame.width(), frame.height());
+  UpdateTargetResolution(width, height);
 }

 QualityScaler::Resolution QualityScaler::GetScaledResolution() const {
   return res_;
 }

-const VideoFrame& QualityScaler::GetScaledFrame(const VideoFrame& frame) {
+rtc::scoped_refptr<VideoFrameBuffer> QualityScaler::GetScaledBuffer(
+    const rtc::scoped_refptr<VideoFrameBuffer>& frame) {
   Resolution res = GetScaledResolution();
-  if (res.width == frame.width())
+  int src_width = frame->width();
+  int src_height = frame->height();
+  if (res.width == src_width && res.height == src_height)
     return frame;
-  scaler_.Set(frame.width(), frame.height(), res.width, res.height, kI420,
-              kI420, kScaleBox);
-  if (scaler_.Scale(frame, &scaled_frame_) != 0)
-    return frame;
-  // TODO(perkj): Refactor the scaler to not own |scaled_frame|. VideoFrame are
-  // just thin wrappers so instead the scaler should return a
-  // rtc::scoped_refptr<VideoFrameBuffer> and a new VideoFrame be created with
-  // the meta data from |frame|. That way we would not have to set all these
-  // meta data.
-  scaled_frame_.set_ntp_time_ms(frame.ntp_time_ms());
-  scaled_frame_.set_timestamp(frame.timestamp());
-  scaled_frame_.set_render_time_ms(frame.render_time_ms());
-  scaled_frame_.set_rotation(frame.rotation());
-  return scaled_frame_;
+  rtc::scoped_refptr<I420Buffer> scaled_buffer =
+      pool_.CreateBuffer(res.width, res.height);
+  scaled_buffer->ScaleFrom(frame);
+  return scaled_buffer;
 }

 void QualityScaler::UpdateTargetResolution(int frame_width, int frame_height) {
-  assert(downscale_shift_ >= 0);
+  RTC_DCHECK_GE(downscale_shift_, 0);
   int shifts_performed = 0;
   for (int shift = downscale_shift_;
        shift > 0 && (frame_width / 2 >= kMinDownscaleDimension) &&
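
GetScaledBuffer is now cheap in the steady state: when the target resolution equals the source it returns the input refptr untouched, and otherwise it fills a pooled destination via the new I420Buffer::ScaleFrom. The same pattern in isolation (dimensions arbitrary; a sketch against the APIs this change introduces, not code lifted from the diff):

    #include "webrtc/common_video/include/i420_buffer_pool.h"

    webrtc::I420BufferPool pool;

    rtc::scoped_refptr<webrtc::VideoFrameBuffer> ScaleTo(
        int width, int height,
        const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& src) {
      if (src->width() == width && src->height() == height)
        return src;  // Zero-copy: hand the original buffer back.
      rtc::scoped_refptr<webrtc::I420Buffer> dst = pool.CreateBuffer(width, height);
      dst->ScaleFrom(src);  // Scale |src| into the pooled |dst|.
      return dst;
    }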


@@ -11,7 +11,7 @@
 #ifndef WEBRTC_MODULES_VIDEO_CODING_UTILITY_QUALITY_SCALER_H_
 #define WEBRTC_MODULES_VIDEO_CODING_UTILITY_QUALITY_SCALER_H_

-#include "webrtc/common_video/libyuv/include/scaler.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
 #include "webrtc/modules/video_coding/utility/moving_average.h"

 namespace webrtc {

@@ -32,9 +32,10 @@ class QualityScaler {
   void ReportFramerate(int framerate);
   void ReportQP(int qp);
   void ReportDroppedFrame();
-  void OnEncodeFrame(const VideoFrame& frame);
+  void OnEncodeFrame(int width, int height);
   Resolution GetScaledResolution() const;
-  const VideoFrame& GetScaledFrame(const VideoFrame& frame);
+  rtc::scoped_refptr<VideoFrameBuffer> GetScaledBuffer(
+      const rtc::scoped_refptr<VideoFrameBuffer>& frame);
   int downscale_shift() const { return downscale_shift_; }

   // QP is obtained from VP8-bitstream for HW, so the QP corresponds to the

@@ -52,8 +53,7 @@ class QualityScaler {
   void ClearSamples();
   void UpdateSampleCounts();

-  Scaler scaler_;
-  VideoFrame scaled_frame_;
+  I420BufferPool pool_;

   size_t num_samples_downscale_;
   size_t num_samples_upscale_;
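
Replacing the Scaler plus cached VideoFrame with an I420BufferPool also changes the ownership story: each call hands out a refcounted buffer, and the pool can recycle an allocation once all external references are gone. A small sketch of that reuse behaviour (my reading of I420BufferPool semantics, not part of this diff):

    webrtc::I420BufferPool pool;
    rtc::scoped_refptr<webrtc::I420Buffer> a = pool.CreateBuffer(320, 180);
    a = nullptr;  // Drop the only external reference.
    // The next same-size request may be served from the recycled allocation,
    // which is why GetScaledBuffer can run per frame without heap churn.
    rtc::scoped_refptr<webrtc::I420Buffer> b = pool.CreateBuffer(320, 180);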


@@ -16,7 +16,6 @@ namespace webrtc {
 namespace {
 static const int kNumSeconds = 10;
 static const int kWidth = 1920;
-static const int kHalfWidth = kWidth / 2;
 static const int kHeight = 1080;
 static const int kFramerate = 30;
 static const int kLowQp = 15;

@@ -43,14 +42,14 @@ class QualityScalerTest : public ::testing::Test {
   };

   QualityScalerTest() {
-    input_frame_.CreateEmptyFrame(kWidth, kHeight, kWidth, kHalfWidth,
-                                  kHalfWidth);
+    input_frame_ = rtc::scoped_refptr<VideoFrameBuffer>(
+        new rtc::RefCountedObject<I420Buffer>(kWidth, kHeight));
     qs_.Init(kLowQpThreshold, kHighQp, 0, 0, 0, kFramerate);
-    qs_.OnEncodeFrame(input_frame_);
+    qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
   }

   bool TriggerScale(ScaleDirection scale_direction) {
-    qs_.OnEncodeFrame(input_frame_);
+    qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
     int initial_width = qs_.GetScaledResolution().width;
     for (int i = 0; i < kFramerate * kNumSeconds; ++i) {
       switch (scale_direction) {

@@ -67,7 +66,7 @@ class QualityScalerTest : public ::testing::Test {
           qs_.ReportQP(kHighQp + 1);
           break;
       }
-      qs_.OnEncodeFrame(input_frame_);
+      qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
       if (qs_.GetScaledResolution().width != initial_width)
         return true;
     }

@@ -76,16 +75,17 @@ class QualityScalerTest : public ::testing::Test {
   }

   void ExpectOriginalFrame() {
-    EXPECT_EQ(&input_frame_, &qs_.GetScaledFrame(input_frame_))
+    EXPECT_EQ(input_frame_, qs_.GetScaledBuffer(input_frame_))
         << "Using scaled frame instead of original input.";
   }

   void ExpectScaleUsingReportedResolution() {
-    qs_.OnEncodeFrame(input_frame_);
+    qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
     QualityScaler::Resolution res = qs_.GetScaledResolution();
-    const VideoFrame& scaled_frame = qs_.GetScaledFrame(input_frame_);
-    EXPECT_EQ(res.width, scaled_frame.width());
-    EXPECT_EQ(res.height, scaled_frame.height());
+    rtc::scoped_refptr<VideoFrameBuffer> scaled_frame =
+        qs_.GetScaledBuffer(input_frame_);
+    EXPECT_EQ(res.width, scaled_frame->width());
+    EXPECT_EQ(res.height, scaled_frame->height());
   }

   void ContinuouslyDownscalesByHalfDimensionsAndBackUp();

@@ -98,7 +98,7 @@ class QualityScalerTest : public ::testing::Test {
                         int end_height);

   QualityScaler qs_;
-  VideoFrame input_frame_;
+  rtc::scoped_refptr<VideoFrameBuffer> input_frame_;
 };

 TEST_F(QualityScalerTest, UsesOriginalFrameInitially) {
@@ -106,34 +106,34 @@ TEST_F(QualityScalerTest, UsesOriginalFrameInitially) {
 }

 TEST_F(QualityScalerTest, ReportsOriginalResolutionInitially) {
-  qs_.OnEncodeFrame(input_frame_);
+  qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
   QualityScaler::Resolution res = qs_.GetScaledResolution();
-  EXPECT_EQ(input_frame_.width(), res.width);
-  EXPECT_EQ(input_frame_.height(), res.height);
+  EXPECT_EQ(input_frame_->width(), res.width);
+  EXPECT_EQ(input_frame_->height(), res.height);
 }

 TEST_F(QualityScalerTest, DownscalesAfterContinuousFramedrop) {
   EXPECT_TRUE(TriggerScale(kScaleDown)) << "No downscale within " << kNumSeconds
                                         << " seconds.";
   QualityScaler::Resolution res = qs_.GetScaledResolution();
-  EXPECT_LT(res.width, input_frame_.width());
-  EXPECT_LT(res.height, input_frame_.height());
+  EXPECT_LT(res.width, input_frame_->width());
+  EXPECT_LT(res.height, input_frame_->height());
 }

 TEST_F(QualityScalerTest, KeepsScaleAtHighQp) {
   EXPECT_FALSE(TriggerScale(kKeepScaleAtHighQp))
       << "Downscale at high threshold which should keep scale.";
   QualityScaler::Resolution res = qs_.GetScaledResolution();
-  EXPECT_EQ(res.width, input_frame_.width());
-  EXPECT_EQ(res.height, input_frame_.height());
+  EXPECT_EQ(res.width, input_frame_->width());
+  EXPECT_EQ(res.height, input_frame_->height());
 }

 TEST_F(QualityScalerTest, DownscalesAboveHighQp) {
   EXPECT_TRUE(TriggerScale(kScaleDownAboveHighQp))
       << "No downscale within " << kNumSeconds << " seconds.";
   QualityScaler::Resolution res = qs_.GetScaledResolution();
-  EXPECT_LT(res.width, input_frame_.width());
-  EXPECT_LT(res.height, input_frame_.height());
+  EXPECT_LT(res.width, input_frame_->width());
+  EXPECT_LT(res.height, input_frame_->height());
 }

 TEST_F(QualityScalerTest, DownscalesAfterTwoThirdsFramedrop) {

@@ -141,8 +141,8 @@ TEST_F(QualityScalerTest, DownscalesAfterTwoThirdsFramedrop) {
     qs_.ReportQP(kNormalQp);
     qs_.ReportDroppedFrame();
     qs_.ReportDroppedFrame();
-    qs_.OnEncodeFrame(input_frame_);
-    if (qs_.GetScaledResolution().width < input_frame_.width())
+    qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
+    if (qs_.GetScaledResolution().width < input_frame_->width())
       return;
   }

@@ -152,8 +152,8 @@ TEST_F(QualityScalerTest, DownscalesAfterTwoThirdsFramedrop) {
 TEST_F(QualityScalerTest, DoesNotDownscaleOnNormalQp) {
   for (int i = 0; i < kFramerate * kNumSeconds; ++i) {
     qs_.ReportQP(kNormalQp);
-    qs_.OnEncodeFrame(input_frame_);
-    ASSERT_EQ(input_frame_.width(), qs_.GetScaledResolution().width)
+    qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
+    ASSERT_EQ(input_frame_->width(), qs_.GetScaledResolution().width)
         << "Unexpected scale on half framedrop.";
   }
 }

@@ -161,33 +161,33 @@ TEST_F(QualityScalerTest, DoesNotDownscaleOnNormalQp) {
 TEST_F(QualityScalerTest, DoesNotDownscaleAfterHalfFramedrop) {
   for (int i = 0; i < kFramerate * kNumSeconds / 2; ++i) {
     qs_.ReportQP(kNormalQp);
-    qs_.OnEncodeFrame(input_frame_);
-    ASSERT_EQ(input_frame_.width(), qs_.GetScaledResolution().width)
+    qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
+    ASSERT_EQ(input_frame_->width(), qs_.GetScaledResolution().width)
         << "Unexpected scale on half framedrop.";

     qs_.ReportDroppedFrame();
-    qs_.OnEncodeFrame(input_frame_);
-    ASSERT_EQ(input_frame_.width(), qs_.GetScaledResolution().width)
+    qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
+    ASSERT_EQ(input_frame_->width(), qs_.GetScaledResolution().width)
         << "Unexpected scale on half framedrop.";
   }
 }

 void QualityScalerTest::ContinuouslyDownscalesByHalfDimensionsAndBackUp() {
-  const int initial_min_dimension = input_frame_.width() < input_frame_.height()
-                                        ? input_frame_.width()
-                                        : input_frame_.height();
+  const int initial_min_dimension =
+      input_frame_->width() < input_frame_->height() ? input_frame_->width()
+                                                     : input_frame_->height();
   int min_dimension = initial_min_dimension;
   int current_shift = 0;
   // Drop all frames to force-trigger downscaling.
   while (min_dimension >= 2 * kMinDownscaleDimension) {
     EXPECT_TRUE(TriggerScale(kScaleDown)) << "No downscale within "
                                           << kNumSeconds << " seconds.";
-    qs_.OnEncodeFrame(input_frame_);
+    qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
     QualityScaler::Resolution res = qs_.GetScaledResolution();
     min_dimension = res.width < res.height ? res.width : res.height;
     ++current_shift;
-    ASSERT_EQ(input_frame_.width() >> current_shift, res.width);
-    ASSERT_EQ(input_frame_.height() >> current_shift, res.height);
+    ASSERT_EQ(input_frame_->width() >> current_shift, res.width);
+    ASSERT_EQ(input_frame_->height() >> current_shift, res.height);
     ExpectScaleUsingReportedResolution();
   }

@@ -195,12 +195,12 @@ void QualityScalerTest::ContinuouslyDownscalesByHalfDimensionsAndBackUp() {
   while (min_dimension < initial_min_dimension) {
     EXPECT_TRUE(TriggerScale(kScaleUp)) << "No upscale within " << kNumSeconds
                                         << " seconds.";
-    qs_.OnEncodeFrame(input_frame_);
+    qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
     QualityScaler::Resolution res = qs_.GetScaledResolution();
     min_dimension = res.width < res.height ? res.width : res.height;
     --current_shift;
-    ASSERT_EQ(input_frame_.width() >> current_shift, res.width);
-    ASSERT_EQ(input_frame_.height() >> current_shift, res.height);
+    ASSERT_EQ(input_frame_->width() >> current_shift, res.width);
+    ASSERT_EQ(input_frame_->height() >> current_shift, res.height);
     ExpectScaleUsingReportedResolution();
   }
@@ -218,21 +218,20 @@ TEST_F(QualityScalerTest, ContinuouslyDownscalesByHalfDimensionsAndBackUp) {
 TEST_F(QualityScalerTest,
        ContinuouslyDownscalesOddResolutionsByHalfDimensionsAndBackUp) {
   const int kOddWidth = 517;
-  const int kHalfOddWidth = (kOddWidth + 1) / 2;
   const int kOddHeight = 1239;
-  input_frame_.CreateEmptyFrame(kOddWidth, kOddHeight, kOddWidth, kHalfOddWidth,
-                                kHalfOddWidth);
+  input_frame_ = rtc::scoped_refptr<VideoFrameBuffer>(
+      new rtc::RefCountedObject<I420Buffer>(kOddWidth, kOddHeight));
   ContinuouslyDownscalesByHalfDimensionsAndBackUp();
 }

 void QualityScalerTest::DoesNotDownscaleFrameDimensions(int width, int height) {
-  input_frame_.CreateEmptyFrame(width, height, width, (width + 1) / 2,
-                                (width + 1) / 2);
+  input_frame_ = rtc::scoped_refptr<VideoFrameBuffer>(
+      new rtc::RefCountedObject<I420Buffer>(width, height));
   for (int i = 0; i < kFramerate * kNumSeconds; ++i) {
     qs_.ReportDroppedFrame();
-    qs_.OnEncodeFrame(input_frame_);
-    ASSERT_EQ(input_frame_.width(), qs_.GetScaledResolution().width)
+    qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
+    ASSERT_EQ(input_frame_->width(), qs_.GetScaledResolution().width)
         << "Unexpected scale of minimal-size frame.";
   }
 }

@@ -263,11 +262,11 @@ TEST_F(QualityScalerTest, DownscaleToVgaOnLowInitialBitrate) {
   static const int kWidth720p = 1280;
   static const int kHeight720p = 720;
   static const int kInitialBitrateKbps = 300;
-  input_frame_.CreateEmptyFrame(kWidth720p, kHeight720p, kWidth720p,
-                                kWidth720p / 2, kWidth720p / 2);
+  input_frame_ = rtc::scoped_refptr<VideoFrameBuffer>(
+      new rtc::RefCountedObject<I420Buffer>(kWidth720p, kHeight720p));
   qs_.Init(kLowQpThreshold, kDisabledBadQpThreshold, kInitialBitrateKbps,
            kWidth720p, kHeight720p, kFramerate);
-  qs_.OnEncodeFrame(input_frame_);
+  qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
   int init_width = qs_.GetScaledResolution().width;
   int init_height = qs_.GetScaledResolution().height;
   EXPECT_EQ(640, init_width);

@@ -278,11 +277,11 @@ TEST_F(QualityScalerTest, DownscaleToQvgaOnLowerInitialBitrate) {
   static const int kWidth720p = 1280;
   static const int kHeight720p = 720;
   static const int kInitialBitrateKbps = 200;
-  input_frame_.CreateEmptyFrame(kWidth720p, kHeight720p, kWidth720p,
-                                kWidth720p / 2, kWidth720p / 2);
+  input_frame_ = rtc::scoped_refptr<VideoFrameBuffer>(
+      new rtc::RefCountedObject<I420Buffer>(kWidth720p, kHeight720p));
   qs_.Init(kLowQpThreshold, kDisabledBadQpThreshold, kInitialBitrateKbps,
            kWidth720p, kHeight720p, kFramerate);
-  qs_.OnEncodeFrame(input_frame_);
+  qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
   int init_width = qs_.GetScaledResolution().width;
   int init_height = qs_.GetScaledResolution().height;
   EXPECT_EQ(320, init_width);

@@ -291,13 +290,13 @@ TEST_F(QualityScalerTest, DownscaleToQvgaOnLowerInitialBitrate) {
 TEST_F(QualityScalerTest, DownscaleAfterMeasuredSecondsThenSlowerBackUp) {
   qs_.Init(kLowQpThreshold, kHighQp, 0, kWidth, kHeight, kFramerate);
-  qs_.OnEncodeFrame(input_frame_);
+  qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
   QualityScaler::Resolution initial_res = qs_.GetScaledResolution();

   // Should not downscale if less than kMeasureSecondsDownscale seconds passed.
   for (int i = 0; i < kFramerate * kMeasureSecondsDownscale - 1; ++i) {
     qs_.ReportQP(kHighQp + 1);
-    qs_.OnEncodeFrame(input_frame_);
+    qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
   }
   EXPECT_EQ(initial_res.width, qs_.GetScaledResolution().width);
   EXPECT_EQ(initial_res.height, qs_.GetScaledResolution().height);

@@ -305,7 +304,7 @@ TEST_F(QualityScalerTest, DownscaleAfterMeasuredSecondsThenSlowerBackUp) {
   // Should downscale if more than kMeasureSecondsDownscale seconds passed (add
   // last frame).
   qs_.ReportQP(kHighQp + 1);
-  qs_.OnEncodeFrame(input_frame_);
+  qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
   EXPECT_GT(initial_res.width, qs_.GetScaledResolution().width);
   EXPECT_GT(initial_res.height, qs_.GetScaledResolution().height);

@@ -313,7 +312,7 @@ TEST_F(QualityScalerTest, DownscaleAfterMeasuredSecondsThenSlowerBackUp) {
   // we saw issues initially (have already gone down).
   for (int i = 0; i < kFramerate * kMeasureSecondsUpscale - 1; ++i) {
     qs_.ReportQP(kLowQp);
-    qs_.OnEncodeFrame(input_frame_);
+    qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
   }
   EXPECT_GT(initial_res.width, qs_.GetScaledResolution().width);
   EXPECT_GT(initial_res.height, qs_.GetScaledResolution().height);

@@ -321,7 +320,7 @@ TEST_F(QualityScalerTest, DownscaleAfterMeasuredSecondsThenSlowerBackUp) {
   // Should upscale (back to initial) if kMeasureSecondsUpscale seconds passed
   // (add last frame).
   qs_.ReportQP(kLowQp);
-  qs_.OnEncodeFrame(input_frame_);
+  qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
   EXPECT_EQ(initial_res.width, qs_.GetScaledResolution().width);
   EXPECT_EQ(initial_res.height, qs_.GetScaledResolution().height);
 }

@@ -329,13 +328,13 @@ TEST_F(QualityScalerTest, DownscaleAfterMeasuredSecondsThenSlowerBackUp) {
 TEST_F(QualityScalerTest, UpscaleQuicklyInitiallyAfterMeasuredSeconds) {
   qs_.Init(kLowQpThreshold, kHighQp, kLowInitialBitrateKbps, kWidth, kHeight,
            kFramerate);
-  qs_.OnEncodeFrame(input_frame_);
+  qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
   QualityScaler::Resolution initial_res = qs_.GetScaledResolution();

   // Should not upscale if less than kMeasureSecondsFastUpscale seconds passed.
   for (int i = 0; i < kFramerate * kMeasureSecondsFastUpscale - 1; ++i) {
     qs_.ReportQP(kLowQp);
-    qs_.OnEncodeFrame(input_frame_);
+    qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
   }
   EXPECT_EQ(initial_res.width, qs_.GetScaledResolution().width);
   EXPECT_EQ(initial_res.height, qs_.GetScaledResolution().height);

@@ -343,7 +342,7 @@ TEST_F(QualityScalerTest, UpscaleQuicklyInitiallyAfterMeasuredSeconds) {
   // Should upscale if kMeasureSecondsFastUpscale seconds passed (add last
   // frame).
   qs_.ReportQP(kLowQp);
-  qs_.OnEncodeFrame(input_frame_);
+  qs_.OnEncodeFrame(input_frame_->width(), input_frame_->height());
   EXPECT_LT(initial_res.width, qs_.GetScaledResolution().width);
   EXPECT_LT(initial_res.height, qs_.GetScaledResolution().height);
 }

@@ -354,8 +353,8 @@ void QualityScalerTest::DownscaleEndsAt(int input_width,
                                         int end_height) {
   // Create a frame with 2x expected end width/height to verify that we can
   // scale down to expected end width/height.
-  input_frame_.CreateEmptyFrame(input_width, input_height, input_width,
-                                (input_width + 1) / 2, (input_width + 1) / 2);
+  input_frame_ = rtc::scoped_refptr<VideoFrameBuffer>(
+      new rtc::RefCountedObject<I420Buffer>(input_width, input_height));
   int last_width = input_width;
   int last_height = input_height;
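
The tests construct buffers through rtc::RefCountedObject because I420Buffer implements the VideoFrameBuffer interface but leaves AddRef()/Release() to a wrapper; wrapping is what makes a scoped_refptr possible. A helper distilling the pattern used throughout the fixture above (the function name is mine):

    rtc::scoped_refptr<webrtc::VideoFrameBuffer> MakeTestBuffer(int width,
                                                                int height) {
      // RefCountedObject<T> derives from T and supplies the AddRef()/Release()
      // pair that the abstract VideoFrameBuffer interface declares.
      return rtc::scoped_refptr<webrtc::VideoFrameBuffer>(
          new rtc::RefCountedObject<webrtc::I420Buffer>(width, height));
    }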


@@ -13,10 +13,7 @@
 namespace webrtc {

 VPMSimpleSpatialResampler::VPMSimpleSpatialResampler()
-    : resampling_mode_(kFastRescaling),
-      target_width_(0),
-      target_height_(0),
-      scaler_() {}
+    : resampling_mode_(kFastRescaling), target_width_(0), target_height_(0) {}

 VPMSimpleSpatialResampler::~VPMSimpleSpatialResampler() {}

@@ -56,26 +53,17 @@ int32_t VPMSimpleSpatialResampler::ResampleFrame(const VideoFrame& inFrame,
     return VPM_OK;
   }

-  // Setting scaler
-  // TODO(mikhal/marpan): Should we allow for setting the filter mode in
-  // _scale.Set() with |resampling_mode_|?
-  int ret_val = 0;
-  ret_val = scaler_.Set(inFrame.width(), inFrame.height(), target_width_,
-                        target_height_, kI420, kI420, kScaleBox);
-  if (ret_val < 0)
-    return ret_val;
-
-  ret_val = scaler_.Scale(inFrame, outFrame);
+  rtc::scoped_refptr<I420Buffer> scaled_buffer(
+      buffer_pool_.CreateBuffer(target_width_, target_height_));
+
+  scaled_buffer->CropAndScaleFrom(inFrame.video_frame_buffer());
+
+  outFrame->set_video_frame_buffer(scaled_buffer);

   // Setting time parameters to the output frame.
-  // Timestamp will be reset in Scale call above, so we should set it after.
   outFrame->set_timestamp(inFrame.timestamp());
   outFrame->set_render_time_ms(inFrame.render_time_ms());

-  if (ret_val == 0)
-    return VPM_OK;
-  else
-    return VPM_SCALE_ERROR;
+  return VPM_OK;
 }

 int32_t VPMSimpleSpatialResampler::TargetHeight() {
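
ResampleFrame now leans on CropAndScaleFrom for aspect handling: when the source and target aspect ratios differ, the single-argument overload crops the source centrally to the destination's aspect ratio before scaling, instead of distorting the picture. Worked numbers for one case (my illustration of the semantics, not code from this change):

    // 1280x720 (16:9) into 640x480 (4:3): a plain scale would distort, so the
    // source is first center-cropped to the target aspect at full height:
    //   crop_width = 720 * 4 / 3 = 960, offset_x = (1280 - 960) / 2 = 160.
    rtc::scoped_refptr<webrtc::I420Buffer> dst =
        buffer_pool.CreateBuffer(640, 480);
    dst->CropAndScaleFrom(src);  // Crops to 960x720 centered, then scales.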


@@ -16,8 +16,8 @@
 #include "webrtc/modules/include/module_common_types.h"
 #include "webrtc/modules/video_processing/include/video_processing_defines.h"

-#include "webrtc/common_video/libyuv/include/scaler.h"
-#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
+#include "webrtc/common_video/include/i420_buffer_pool.h"
+#include "webrtc/video_frame.h"

 namespace webrtc {

@@ -52,7 +52,7 @@ class VPMSimpleSpatialResampler : public VPMSpatialResampler {
   VideoFrameResampling resampling_mode_;
   int32_t target_width_;
   int32_t target_height_;
-  Scaler scaler_;
+  I420BufferPool buffer_pool_;
 };

 }  // namespace webrtc


@@ -8,7 +8,6 @@
  * be found in the AUTHORS file in the root of the source tree.
  */

-#include "webrtc/common_video/libyuv/include/scaler.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/modules/video_processing/video_denoiser.h"