Update video_coding/codecs to new VideoFrameBuffer interface

This is a follow-up cleanup for CL
https://codereview.webrtc.org/2847383002/.

Bug: webrtc:7632
Change-Id: I47861d779968f2fee94db9c017102a8e87e67fb7
Reviewed-on: https://chromium-review.googlesource.com/524163
Reviewed-by: Rasmus Brandt <brandtr@webrtc.org>
Reviewed-by: Niels Moller <nisse@webrtc.org>
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#18477}
Magnus Jedvert authored on 2017-06-07 11:50:57 +02:00, committed by Commit Bot
Parent 9932e255ea, commit 20ebf4ede8
9 changed files with 76 additions and 80 deletions
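
The hunks below all make the same move: call sites stop poking at the old VideoFrameBuffer accessors (native_handle(), NativeToI420Buffer(), and raw DataY()/StrideY() on the abstract buffer) and instead ask the buffer for its type() and convert it with ToI420(). A minimal sketch of the target usage, assuming the 2017 include path; the helper names are illustrative, not code from this CL:

#include "webrtc/api/video/video_frame_buffer.h"  // Include path is an assumption.

namespace {

// Native (texture-backed) buffers are now detected via type() rather than a
// non-null native_handle().
bool IsNativeBuffer(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer) {
  return buffer->type() == webrtc::VideoFrameBuffer::Type::kNative;
}

// ToI420() replaces NativeToI420Buffer(): a buffer that is already I420 should
// return itself, while a native buffer performs a conversion.
rtc::scoped_refptr<webrtc::I420BufferInterface> AsI420(
    const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer) {
  return buffer->ToI420();
}

}  // namespace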


@@ -335,28 +335,25 @@ int SimulcastEncoderAdapter::Encode(
     // TODO(perkj): ensure that works going forward, and figure out how this
     // affects webrtc:5683.
     if ((dst_width == src_width && dst_height == src_height) ||
-        input_image.video_frame_buffer()->native_handle()) {
+        input_image.video_frame_buffer()->type() ==
+            VideoFrameBuffer::Type::kNative) {
       int ret = streaminfos_[stream_idx].encoder->Encode(
           input_image, codec_specific_info, &stream_frame_types);
       if (ret != WEBRTC_VIDEO_CODEC_OK) {
         return ret;
       }
     } else {
-      // Aligning stride values based on width.
       rtc::scoped_refptr<I420Buffer> dst_buffer =
-          I420Buffer::Create(dst_width, dst_height, dst_width,
-                             (dst_width + 1) / 2, (dst_width + 1) / 2);
-      libyuv::I420Scale(input_image.video_frame_buffer()->DataY(),
-                        input_image.video_frame_buffer()->StrideY(),
-                        input_image.video_frame_buffer()->DataU(),
-                        input_image.video_frame_buffer()->StrideU(),
-                        input_image.video_frame_buffer()->DataV(),
-                        input_image.video_frame_buffer()->StrideV(),
-                        src_width, src_height,
-                        dst_buffer->MutableDataY(), dst_buffer->StrideY(),
-                        dst_buffer->MutableDataU(), dst_buffer->StrideU(),
-                        dst_buffer->MutableDataV(), dst_buffer->StrideV(),
-                        dst_width, dst_height,
+          I420Buffer::Create(dst_width, dst_height);
+      rtc::scoped_refptr<I420BufferInterface> src_buffer =
+          input_image.video_frame_buffer()->ToI420();
+      libyuv::I420Scale(src_buffer->DataY(), src_buffer->StrideY(),
+                        src_buffer->DataU(), src_buffer->StrideU(),
+                        src_buffer->DataV(), src_buffer->StrideV(), src_width,
+                        src_height, dst_buffer->MutableDataY(),
+                        dst_buffer->StrideY(), dst_buffer->MutableDataU(),
+                        dst_buffer->StrideU(), dst_buffer->MutableDataV(),
+                        dst_buffer->StrideV(), dst_width, dst_height,
                         libyuv::kFilterBilinear);
       int ret = streaminfos_[stream_idx].encoder->Encode(
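
Written out stand-alone, the new else branch follows a simple pattern: convert whatever buffer arrives to I420, allocate a destination I420Buffer with default strides, and let libyuv do the scaling. A sketch under those assumptions (ScaleToI420 is a made-up helper and the include paths are approximate; this is not the adapter's actual code):

#include "libyuv/scale.h"
#include "webrtc/api/video/i420_buffer.h"  // Include path is an assumption.

// Convert |input| to I420 and scale it to dst_width x dst_height.
// I420Buffer::Create(width, height) picks the default strides (width for Y,
// (width + 1) / 2 for U and V), so no explicit stride arguments are needed.
rtc::scoped_refptr<webrtc::I420Buffer> ScaleToI420(
    const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& input,
    int dst_width,
    int dst_height) {
  rtc::scoped_refptr<webrtc::I420BufferInterface> src = input->ToI420();
  rtc::scoped_refptr<webrtc::I420Buffer> dst =
      webrtc::I420Buffer::Create(dst_width, dst_height);
  libyuv::I420Scale(src->DataY(), src->StrideY(), src->DataU(), src->StrideU(),
                    src->DataV(), src->StrideV(), src->width(), src->height(),
                    dst->MutableDataY(), dst->StrideY(), dst->MutableDataU(),
                    dst->StrideU(), dst->MutableDataV(), dst->StrideV(),
                    dst_width, dst_height, libyuv::kFilterBilinear);
  return dst;
}

The ToI420() call is the interesting part: when the source is already an I420 buffer it is expected to be essentially free, and native buffers never reach this branch at all because of the type() check above.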


@@ -717,14 +717,22 @@ TEST_F(TestSimulcastEncoderAdapterFake,
 }
 
 // TODO(nisse): Reuse definition in webrtc/test/fake_texture_handle.h.
-class FakeNativeHandleBuffer : public NativeHandleBuffer {
+class FakeNativeBuffer : public VideoFrameBuffer {
  public:
-  FakeNativeHandleBuffer(void* native_handle, int width, int height)
-      : NativeHandleBuffer(native_handle, width, height) {}
-  rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override {
+  FakeNativeBuffer(int width, int height) : width_(width), height_(height) {}
+
+  Type type() const override { return Type::kNative; }
+  int width() const override { return width_; }
+  int height() const override { return height_; }
+
+  rtc::scoped_refptr<I420BufferInterface> ToI420() override {
     RTC_NOTREACHED();
     return nullptr;
   }
+
+ private:
+  const int width_;
+  const int height_;
 };
 
 TEST_F(TestSimulcastEncoderAdapterFake,

@@ -743,7 +751,7 @@ TEST_F(TestSimulcastEncoderAdapterFake,
   EXPECT_TRUE(adapter_->SupportsNativeHandle());
   rtc::scoped_refptr<VideoFrameBuffer> buffer(
-      new rtc::RefCountedObject<FakeNativeHandleBuffer>(this, 1280, 720));
+      new rtc::RefCountedObject<FakeNativeBuffer>(1280, 720));
   VideoFrame input_frame(buffer, 100, 1000, kVideoRotation_180);
   // Expect calls with the given video frame verbatim, since it's a texture
   // frame and can't otherwise be modified/resized.

@@ -766,9 +774,8 @@ TEST_F(TestSimulcastEncoderAdapterFake, TestFailureReturnCodesFromEncodeCalls) {
       .WillOnce(Return(WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE));
   // Send a fake frame and assert the return is software fallback.
-  int half_width = (kDefaultWidth + 1) / 2;
-  rtc::scoped_refptr<I420Buffer> input_buffer = I420Buffer::Create(
-      kDefaultWidth, kDefaultHeight, kDefaultWidth, half_width, half_width);
+  rtc::scoped_refptr<I420Buffer> input_buffer =
+      I420Buffer::Create(kDefaultWidth, kDefaultHeight);
   input_buffer->InitializeData();
   VideoFrame input_frame(input_buffer, 0, 0, webrtc::kVideoRotation_0);
   std::vector<FrameType> frame_types(3, kVideoFrameKey);
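
FakeNativeBuffer above deliberately trips RTC_NOTREACHED() in ToI420(): the adapter is expected to hand kNative frames to the wrapped encoders untouched, so the conversion must never run in these tests. A production kNative buffer, by contrast, would implement ToI420() for real. A hypothetical sketch for comparison, not part of this CL (PlatformTextureBuffer and its behavior are invented):

#include "webrtc/api/video/i420_buffer.h"         // Include paths are
#include "webrtc/api/video/video_frame_buffer.h"  // assumptions.

// Invented texture-backed buffer. A real implementation would keep the
// platform handle and read the texture back in ToI420(); this sketch just
// returns an initialized I420 buffer of the right size.
class PlatformTextureBuffer : public webrtc::VideoFrameBuffer {
 public:
  PlatformTextureBuffer(int width, int height)
      : width_(width), height_(height) {}

  Type type() const override { return Type::kNative; }
  int width() const override { return width_; }
  int height() const override { return height_; }

  rtc::scoped_refptr<webrtc::I420BufferInterface> ToI420() override {
    rtc::scoped_refptr<webrtc::I420Buffer> buffer =
        webrtc::I420Buffer::Create(width_, height_);
    buffer->InitializeData();  // Stand-in for the actual texture readback.
    return buffer;
  }

 private:
  const int width_;
  const int height_;
};

As in the test, instances would be created through rtc::RefCountedObject<PlatformTextureBuffer> so that scoped_refptr reference counting works.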


@@ -129,14 +129,16 @@ class Vp8TestDecodedImageCallback : public DecodedImageCallback {
  public:
   Vp8TestDecodedImageCallback() : decoded_frames_(0) {}
   int32_t Decoded(VideoFrame& decoded_image) override {
+    rtc::scoped_refptr<I420BufferInterface> i420_buffer =
+        decoded_image.video_frame_buffer()->ToI420();
     for (int i = 0; i < decoded_image.width(); ++i) {
-      EXPECT_NEAR(kColorY, decoded_image.video_frame_buffer()->DataY()[i], 1);
+      EXPECT_NEAR(kColorY, i420_buffer->DataY()[i], 1);
     }
     // TODO(mikhal): Verify the difference between U,V and the original.
-    for (int i = 0; i < ((decoded_image.width() + 1) / 2); ++i) {
-      EXPECT_NEAR(kColorU, decoded_image.video_frame_buffer()->DataU()[i], 4);
-      EXPECT_NEAR(kColorV, decoded_image.video_frame_buffer()->DataV()[i], 4);
+    for (int i = 0; i < i420_buffer->ChromaWidth(); ++i) {
+      EXPECT_NEAR(kColorU, i420_buffer->DataU()[i], 4);
+      EXPECT_NEAR(kColorV, i420_buffer->DataV()[i], 4);
     }
     decoded_frames_++;
     return 0;

@@ -178,21 +180,14 @@ class TestVp8Simulcast : public ::testing::Test {
   // Fills in an I420Buffer from |plane_colors|.
   static void CreateImage(const rtc::scoped_refptr<I420Buffer>& buffer,
                           int plane_colors[kNumOfPlanes]) {
-    int width = buffer->width();
-    int height = buffer->height();
-    int chroma_width = (width + 1) / 2;
-    int chroma_height = (height + 1) / 2;
-
-    SetPlane(buffer->MutableDataY(), plane_colors[0],
-             width, height, buffer->StrideY());
-
-    SetPlane(buffer->MutableDataU(), plane_colors[1],
-             chroma_width, chroma_height,
-             buffer->StrideU());
-
-    SetPlane(buffer->MutableDataV(), plane_colors[2],
-             chroma_width, chroma_height,
-             buffer->StrideV());
+    SetPlane(buffer->MutableDataY(), plane_colors[0], buffer->width(),
+             buffer->height(), buffer->StrideY());
+
+    SetPlane(buffer->MutableDataU(), plane_colors[1], buffer->ChromaWidth(),
+             buffer->ChromaHeight(), buffer->StrideU());
+
+    SetPlane(buffer->MutableDataV(), plane_colors[2], buffer->ChromaWidth(),
+             buffer->ChromaHeight(), buffer->StrideV());
   }
 
   static void DefaultSettings(VideoCodec* settings,

@@ -260,9 +255,7 @@ class TestVp8Simulcast : public ::testing::Test {
     SetUpRateAllocator();
     EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));
     EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1));
-    int half_width = (kDefaultWidth + 1) / 2;
-    input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight,
-                                       kDefaultWidth, half_width, half_width);
+    input_buffer_ = I420Buffer::Create(kDefaultWidth, kDefaultHeight);
     input_buffer_->InitializeData();
     input_frame_.reset(
         new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));

@@ -513,9 +506,7 @@ class TestVp8Simulcast : public ::testing::Test {
       settings_.simulcastStream[i].height = settings_.height;
     }
     // Setting input image to new resolution.
-    int half_width = (settings_.width + 1) / 2;
-    input_buffer_ = I420Buffer::Create(settings_.width, settings_.height,
-                                       settings_.width, half_width, half_width);
+    input_buffer_ = I420Buffer::Create(settings_.width, settings_.height);
     input_buffer_->InitializeData();
     input_frame_.reset(
         new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));

@@ -556,9 +547,7 @@ class TestVp8Simulcast : public ::testing::Test {
     SetRates(settings_.startBitrate, 30);
     ExpectStreams(kVideoFrameKey, 1);
     // Resize |input_frame_| to the new resolution.
-    half_width = (settings_.width + 1) / 2;
-    input_buffer_ = I420Buffer::Create(settings_.width, settings_.height,
-                                       settings_.width, half_width, half_width);
+    input_buffer_ = I420Buffer::Create(settings_.width, settings_.height);
     input_buffer_->InitializeData();
     input_frame_.reset(
         new VideoFrame(input_buffer_, 0, 0, webrtc::kVideoRotation_0));
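
The changes in this fixture repeat two simplifications: I420Buffer::Create(width, height) now chooses its own default strides, and ChromaWidth()/ChromaHeight() replace the hand-rolled (width + 1) / 2 arithmetic. A small helper in the same spirit, purely illustrative (FillI420 is not part of this CL, and the include path is an assumption):

#include <stdint.h>
#include <string.h>

#include "webrtc/api/video/i420_buffer.h"  // Include path is an assumption.

// Fill every plane of |buffer| with a constant value, using the buffer's own
// ChromaWidth()/ChromaHeight() instead of recomputing (width + 1) / 2.
void FillI420(const rtc::scoped_refptr<webrtc::I420Buffer>& buffer,
              uint8_t y, uint8_t u, uint8_t v) {
  for (int row = 0; row < buffer->height(); ++row) {
    memset(buffer->MutableDataY() + row * buffer->StrideY(), y,
           buffer->width());
  }
  for (int row = 0; row < buffer->ChromaHeight(); ++row) {
    memset(buffer->MutableDataU() + row * buffer->StrideU(), u,
           buffer->ChromaWidth());
    memset(buffer->MutableDataV() + row * buffer->StrideV(), v,
           buffer->ChromaWidth());
  }
}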


@@ -668,7 +668,8 @@ int VP8EncoderImpl::Encode(const VideoFrame& frame,
   if (encoded_complete_callback_ == NULL)
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
-  rtc::scoped_refptr<VideoFrameBuffer> input_image = frame.video_frame_buffer();
+  rtc::scoped_refptr<I420BufferInterface> input_image =
+      frame.video_frame_buffer()->ToI420();
   // Since we are extracting raw pointers from |input_image| to
   // |raw_images_[0]|, the resolution of these frames must match.
   RTC_DCHECK_EQ(input_image->width(), raw_images_[0].d_w);
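
For context on why Encode() now converts up front: the rest of the function extracts raw plane pointers from |input_image| into raw_images_[0] (a libvpx vpx_image_t), and libvpx consumes plain I420 planes, so any native buffer must be converted before its data can be handed over. A hedged sketch of that kind of wiring using libvpx's public vpx_image_t fields (illustrative only, not the exact code in vp8_impl.cc; the WebRTC include path is an assumption):

#include <stdint.h>

#include "vpx/vpx_image.h"
#include "webrtc/api/video/video_frame_buffer.h"  // Include path is an assumption.

// Point an already-configured vpx_image_t at the planes of an I420 buffer.
// |buffer| must outlive any libvpx call that reads from |image|.
void WrapI420InVpxImage(const webrtc::I420BufferInterface& buffer,
                        vpx_image_t* image) {
  image->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(buffer.DataY());
  image->planes[VPX_PLANE_U] = const_cast<uint8_t*>(buffer.DataU());
  image->planes[VPX_PLANE_V] = const_cast<uint8_t*>(buffer.DataV());
  image->stride[VPX_PLANE_Y] = buffer.StrideY();
  image->stride[VPX_PLANE_U] = buffer.StrideU();
  image->stride[VPX_PLANE_V] = buffer.StrideV();
}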