diff --git a/api/video/BUILD.gn b/api/video/BUILD.gn
index b65fa18faa..6a5576ce3d 100644
--- a/api/video/BUILD.gn
+++ b/api/video/BUILD.gn
@@ -47,6 +47,22 @@ rtc_source_set("video_frame_i420") {
   ]
 }
 
+rtc_source_set("video_frame_i010") {
+  visibility = [ "*" ]
+  sources = [
+    "i010_buffer.cc",
+    "i010_buffer.h",
+  ]
+  deps = [
+    ":video_frame",
+    ":video_frame_i420",
+    "../../rtc_base:checks",
+    "../../rtc_base:rtc_base",
+    "../../rtc_base/memory:aligned_malloc",
+    "//third_party/libyuv",
+  ]
+}
+
 rtc_source_set("encoded_frame") {
   visibility = [ "*" ]
   sources = [
diff --git a/api/video/i010_buffer.cc b/api/video/i010_buffer.cc
new file mode 100644
index 0000000000..adb5a5e95a
--- /dev/null
+++ b/api/video/i010_buffer.cc
@@ -0,0 +1,237 @@
+/*
+ *  Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#include "api/video/i010_buffer.h"
+
+#include <algorithm>
+#include <utility>
+
+#include "api/video/i420_buffer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/refcountedobject.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/planar_functions.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
+
+// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD.
+static const int kBufferAlignment = 64;
+static const int kBytesPerPixel = 2;
+
+namespace webrtc {
+
+namespace {
+
+int I010DataSize(int height, int stride_y, int stride_u, int stride_v) {
+  return kBytesPerPixel *
+         (stride_y * height + (stride_u + stride_v) * ((height + 1) / 2));
+}
+
+}  // namespace
+
+I010Buffer::I010Buffer(int width,
+                       int height,
+                       int stride_y,
+                       int stride_u,
+                       int stride_v)
+    : width_(width),
+      height_(height),
+      stride_y_(stride_y),
+      stride_u_(stride_u),
+      stride_v_(stride_v),
+      data_(static_cast<uint16_t*>(
+          AlignedMalloc(I010DataSize(height, stride_y, stride_u, stride_v),
+                        kBufferAlignment))) {
+  RTC_DCHECK_GT(width, 0);
+  RTC_DCHECK_GT(height, 0);
+  RTC_DCHECK_GE(stride_y, width);
+  RTC_DCHECK_GE(stride_u, (width + 1) / 2);
+  RTC_DCHECK_GE(stride_v, (width + 1) / 2);
+}
+
+I010Buffer::~I010Buffer() {}
+
+// static
+rtc::scoped_refptr<I010Buffer> I010Buffer::Create(int width, int height) {
+  return new rtc::RefCountedObject<I010Buffer>(
+      width, height, width, (width + 1) / 2, (width + 1) / 2);
+}
+
+// static
+rtc::scoped_refptr<I010Buffer> I010Buffer::Copy(
+    const I010BufferInterface& source) {
+  const int width = source.width();
+  const int height = source.height();
+  rtc::scoped_refptr<I010Buffer> buffer = Create(width, height);
+  RTC_CHECK_EQ(
+      0, libyuv::I010Copy(
+             source.DataY(), source.StrideY(), source.DataU(), source.StrideU(),
+             source.DataV(), source.StrideV(), buffer->MutableDataY(),
+             buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
+             buffer->MutableDataV(), buffer->StrideV(), width, height));
+  return buffer;
+}
+
+// static
+rtc::scoped_refptr<I010Buffer> I010Buffer::Copy(
+    const I420BufferInterface& source) {
+  const int width = source.width();
+  const int height = source.height();
+  rtc::scoped_refptr<I010Buffer> buffer = Create(width, height);
+  RTC_CHECK_EQ(
+      0, libyuv::I420ToI010(
+             source.DataY(), source.StrideY(), source.DataU(), source.StrideU(),
+             source.DataV(), source.StrideV(), buffer->MutableDataY(),
+             buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
+             buffer->MutableDataV(), buffer->StrideV(), width, height));
+  return buffer;
+}
+
+// static
+rtc::scoped_refptr<I010Buffer> I010Buffer::Rotate(
+    const I010BufferInterface& src,
+    VideoRotation rotation) {
+  if (rotation == webrtc::kVideoRotation_0)
+    return Copy(src);
+
+  RTC_CHECK(src.DataY());
+  RTC_CHECK(src.DataU());
+  RTC_CHECK(src.DataV());
+  int rotated_width = src.width();
+  int rotated_height = src.height();
+  if (rotation == webrtc::kVideoRotation_90 ||
+      rotation == webrtc::kVideoRotation_270) {
+    std::swap(rotated_width, rotated_height);
+  }
+
+  rtc::scoped_refptr<I010Buffer> buffer =
+      Create(rotated_width, rotated_height);
+  // TODO(emircan): Remove this when there is libyuv::I010Rotate().
+  for (int x = 0; x < src.width(); x++) {
+    for (int y = 0; y < src.height(); y++) {
+      int dest_x = x;
+      int dest_y = y;
+      switch (rotation) {
+        // This case is covered by the early return.
+        case webrtc::kVideoRotation_0:
+          RTC_NOTREACHED();
+          break;
+        case webrtc::kVideoRotation_90:
+          dest_x = src.height() - y - 1;
+          dest_y = x;
+          break;
+        case webrtc::kVideoRotation_180:
+          dest_x = src.width() - x - 1;
+          dest_y = src.height() - y - 1;
+          break;
+        case webrtc::kVideoRotation_270:
+          dest_x = y;
+          dest_y = src.width() - x - 1;
+          break;
+      }
+      buffer->MutableDataY()[dest_x + buffer->StrideY() * dest_y] =
+          src.DataY()[x + src.StrideY() * y];
+      dest_x /= 2;
+      dest_y /= 2;
+      int src_x = x / 2;
+      int src_y = y / 2;
+      buffer->MutableDataU()[dest_x + buffer->StrideU() * dest_y] =
+          src.DataU()[src_x + src.StrideU() * src_y];
+      buffer->MutableDataV()[dest_x + buffer->StrideV() * dest_y] =
+          src.DataV()[src_x + src.StrideV() * src_y];
+    }
+  }
+  return buffer;
+}
+
+rtc::scoped_refptr<I420BufferInterface> I010Buffer::ToI420() {
+  rtc::scoped_refptr<I420Buffer> i420_buffer =
+      I420Buffer::Create(width(), height());
+  libyuv::I010ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(),
+                     i420_buffer->MutableDataY(), i420_buffer->StrideY(),
+                     i420_buffer->MutableDataU(), i420_buffer->StrideU(),
+                     i420_buffer->MutableDataV(), i420_buffer->StrideV(),
+                     width(), height());
+  return i420_buffer;
+}
+
+int I010Buffer::width() const {
+  return width_;
+}
+
+int I010Buffer::height() const {
+  return height_;
+}
+
+const uint16_t* I010Buffer::DataY() const {
+  return data_.get();
+}
+const uint16_t* I010Buffer::DataU() const {
+  return data_.get() + stride_y_ * height_;
+}
+const uint16_t* I010Buffer::DataV() const {
+  return data_.get() + stride_y_ * height_ + stride_u_ * ((height_ + 1) / 2);
+}
+
+int I010Buffer::StrideY() const {
+  return stride_y_;
+}
+int I010Buffer::StrideU() const {
+  return stride_u_;
+}
+int I010Buffer::StrideV() const {
+  return stride_v_;
+}
+
+uint16_t* I010Buffer::MutableDataY() {
+  return const_cast<uint16_t*>(DataY());
+}
+uint16_t* I010Buffer::MutableDataU() {
+  return const_cast<uint16_t*>(DataU());
+}
+uint16_t* I010Buffer::MutableDataV() {
+  return const_cast<uint16_t*>(DataV());
+}
+
+void I010Buffer::CropAndScaleFrom(const I010BufferInterface& src,
+                                  int offset_x,
+                                  int offset_y,
+                                  int crop_width,
+                                  int crop_height) {
+  RTC_CHECK_LE(crop_width, src.width());
+  RTC_CHECK_LE(crop_height, src.height());
+  RTC_CHECK_LE(crop_width + offset_x, src.width());
+  RTC_CHECK_LE(crop_height + offset_y, src.height());
+  RTC_CHECK_GE(offset_x, 0);
+  RTC_CHECK_GE(offset_y, 0);
+
+  // Make sure offset is even so that u/v plane becomes aligned.
+  const int uv_offset_x = offset_x / 2;
+  const int uv_offset_y = offset_y / 2;
+  offset_x = uv_offset_x * 2;
+  offset_y = uv_offset_y * 2;
+
+  const uint16_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
+  const uint16_t* u_plane =
+      src.DataU() + src.StrideU() * uv_offset_y + uv_offset_x;
+  const uint16_t* v_plane =
+      src.DataV() + src.StrideV() * uv_offset_y + uv_offset_x;
+  int res = libyuv::I420Scale_16(
+      y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane, src.StrideV(),
+      crop_width, crop_height, MutableDataY(), StrideY(), MutableDataU(),
+      StrideU(), MutableDataV(), StrideV(), width(), height(),
+      libyuv::kFilterBox);
+
+  RTC_DCHECK_EQ(res, 0);
+}
+
+void I010Buffer::ScaleFrom(const I010BufferInterface& src) {
+  CropAndScaleFrom(src, 0, 0, src.width(), src.height());
+}
+
+}  // namespace webrtc
diff --git a/api/video/i010_buffer.h b/api/video/i010_buffer.h
new file mode 100644
index 0000000000..1208b31852
--- /dev/null
+++ b/api/video/i010_buffer.h
@@ -0,0 +1,81 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_I010_BUFFER_H_
+#define API_VIDEO_I010_BUFFER_H_
+
+#include <stdint.h>
+
+#include "api/video/video_frame_buffer.h"
+#include "api/video/video_rotation.h"
+#include "rtc_base/memory/aligned_malloc.h"
+
+namespace webrtc {
+
+// Plain I010 buffer in standard memory.
+class I010Buffer : public I010BufferInterface {
+ public:
+  // Create a new buffer.
+  static rtc::scoped_refptr<I010Buffer> Create(int width, int height);
+
+  // Create a new buffer and copy the pixel data.
+  static rtc::scoped_refptr<I010Buffer> Copy(const I010BufferInterface& buffer);
+
+  // Convert and put I420 buffer into a new buffer.
+  static rtc::scoped_refptr<I010Buffer> Copy(const I420BufferInterface& buffer);
+
+  // Return a rotated copy of |src|.
+  static rtc::scoped_refptr<I010Buffer> Rotate(const I010BufferInterface& src,
+                                               VideoRotation rotation);
+
+  // VideoFrameBuffer implementation.
+  rtc::scoped_refptr<I420BufferInterface> ToI420() override;
+
+  // PlanarYuv16BBuffer implementation.
+  int width() const override;
+  int height() const override;
+  const uint16_t* DataY() const override;
+  const uint16_t* DataU() const override;
+  const uint16_t* DataV() const override;
+  int StrideY() const override;
+  int StrideU() const override;
+  int StrideV() const override;
+
+  uint16_t* MutableDataY();
+  uint16_t* MutableDataU();
+  uint16_t* MutableDataV();
+
+  // Scale the cropped area of |src| to the size of |this| buffer, and
+  // write the result into |this|.
+  void CropAndScaleFrom(const I010BufferInterface& src,
+                        int offset_x,
+                        int offset_y,
+                        int crop_width,
+                        int crop_height);
+
+  // Scale all of |src| to the size of |this| buffer, with no cropping.
+  void ScaleFrom(const I010BufferInterface& src);
+
+ protected:
+  I010Buffer(int width, int height, int stride_y, int stride_u, int stride_v);
+  ~I010Buffer() override;
+
+ private:
+  const int width_;
+  const int height_;
+  const int stride_y_;
+  const int stride_u_;
+  const int stride_v_;
+  const std::unique_ptr<uint16_t, AlignedFreeDeleter> data_;
+};
+
+}  // namespace webrtc
+
+#endif  // API_VIDEO_I010_BUFFER_H_
diff --git a/api/video/video_frame_buffer.cc b/api/video/video_frame_buffer.cc
index 867f249fe6..41276bec27 100644
--- a/api/video/video_frame_buffer.cc
+++ b/api/video/video_frame_buffer.cc
@@ -45,6 +45,16 @@ const I444BufferInterface* VideoFrameBuffer::GetI444() const {
   return static_cast<const I444BufferInterface*>(this);
 }
 
+I010BufferInterface* VideoFrameBuffer::GetI010() {
+  RTC_CHECK(type() == Type::kI010);
+  return static_cast<I010BufferInterface*>(this);
+}
+
+const I010BufferInterface* VideoFrameBuffer::GetI010() const {
+  RTC_CHECK(type() == Type::kI010);
+  return static_cast<const I010BufferInterface*>(this);
+}
+
 VideoFrameBuffer::Type I420BufferInterface::type() const {
   return Type::kI420;
 }
@@ -77,4 +87,16 @@ int I444BufferInterface::ChromaHeight() const {
   return height();
 }
 
+VideoFrameBuffer::Type I010BufferInterface::type() const {
+  return Type::kI010;
+}
+
+int I010BufferInterface::ChromaWidth() const {
+  return (width() + 1) / 2;
+}
+
+int I010BufferInterface::ChromaHeight() const {
+  return (height() + 1) / 2;
+}
+
 }  // namespace webrtc
diff --git a/api/video/video_frame_buffer.h b/api/video/video_frame_buffer.h
index 2be7e0bb9f..1e8169ac57 100644
--- a/api/video/video_frame_buffer.h
+++ b/api/video/video_frame_buffer.h
@@ -21,6 +21,7 @@ namespace webrtc {
 class I420BufferInterface;
 class I420ABufferInterface;
 class I444BufferInterface;
+class I010BufferInterface;
 
 // Base class for frame buffers of different types of pixel format and storage.
 // The tag in type() indicates how the data is represented, and each type is
@@ -47,6 +48,7 @@ class VideoFrameBuffer : public rtc::RefCountInterface {
     kI420,
     kI420A,
     kI444,
+    kI010,
   };
 
   // This function specifies in what pixel format the data is stored in.
@@ -73,24 +75,21 @@ class VideoFrameBuffer : public rtc::RefCountInterface {
   const I420ABufferInterface* GetI420A() const;
   I444BufferInterface* GetI444();
   const I444BufferInterface* GetI444() const;
+  I010BufferInterface* GetI010();
+  const I010BufferInterface* GetI010() const;
 
  protected:
   ~VideoFrameBuffer() override {}
 };
 
-// This interface represents Type::kI420 and Type::kI444.
+// This interface represents planar formats.
 class PlanarYuvBuffer : public VideoFrameBuffer {
  public:
   virtual int ChromaWidth() const = 0;
   virtual int ChromaHeight() const = 0;
 
-  // Returns pointer to the pixel data for a given plane. The memory is owned by
-  // the VideoFrameBuffer object and must not be freed by the caller.
-  virtual const uint8_t* DataY() const = 0;
-  virtual const uint8_t* DataU() const = 0;
-  virtual const uint8_t* DataV() const = 0;
-
-  // Returns the number of bytes between successive rows for a given plane.
+  // Returns the number of steps (in terms of the Data*() return type) between
+  // successive rows for a given plane.
   virtual int StrideY() const = 0;
   virtual int StrideU() const = 0;
   virtual int StrideV() const = 0;
@@ -99,7 +98,21 @@ class PlanarYuvBuffer : public VideoFrameBuffer {
   ~PlanarYuvBuffer() override {}
 };
 
-class I420BufferInterface : public PlanarYuvBuffer {
+// This interface represents 8-bit color depth formats: Type::kI420,
+// Type::kI420A and Type::kI444.
+class PlanarYuv8Buffer : public PlanarYuvBuffer { + public: + // Returns pointer to the pixel data for a given plane. The memory is owned by + // the VideoFrameBuffer object and must not be freed by the caller. + virtual const uint8_t* DataY() const = 0; + virtual const uint8_t* DataU() const = 0; + virtual const uint8_t* DataV() const = 0; + + protected: + ~PlanarYuv8Buffer() override {} +}; + +class I420BufferInterface : public PlanarYuv8Buffer { public: Type type() const override; @@ -122,7 +135,7 @@ class I420ABufferInterface : public I420BufferInterface { ~I420ABufferInterface() override {} }; -class I444BufferInterface : public PlanarYuvBuffer { +class I444BufferInterface : public PlanarYuv8Buffer { public: Type type() const final; @@ -133,6 +146,32 @@ class I444BufferInterface : public PlanarYuvBuffer { ~I444BufferInterface() override {} }; +// This interface represents 8-bit to 16-bit color depth formats: Type::kI010. +class PlanarYuv16BBuffer : public PlanarYuvBuffer { + public: + // Returns pointer to the pixel data for a given plane. The memory is owned by + // the VideoFrameBuffer object and must not be freed by the caller. + virtual const uint16_t* DataY() const = 0; + virtual const uint16_t* DataU() const = 0; + virtual const uint16_t* DataV() const = 0; + + protected: + ~PlanarYuv16BBuffer() override {} +}; + +// Represents Type::kI010, allocates 16 bits per pixel and fills 10 least +// significant bits with color information. +class I010BufferInterface : public PlanarYuv16BBuffer { + public: + Type type() const override; + + int ChromaWidth() const final; + int ChromaHeight() const final; + + protected: + ~I010BufferInterface() override {} +}; + } // namespace webrtc #endif // API_VIDEO_VIDEO_FRAME_BUFFER_H_ diff --git a/common_video/BUILD.gn b/common_video/BUILD.gn index 0f4d400366..7b08e42f48 100644 --- a/common_video/BUILD.gn +++ b/common_video/BUILD.gn @@ -95,8 +95,8 @@ if (rtc_include_tests) { "h264/sps_parser_unittest.cc", "h264/sps_vui_rewriter_unittest.cc", "i420_buffer_pool_unittest.cc", - "i420_video_frame_unittest.cc", "libyuv/libyuv_unittest.cc", + "video_frame_unittest.cc", ] if (!build_with_chromium && is_clang) { @@ -107,6 +107,7 @@ if (rtc_include_tests) { deps = [ ":common_video", "../api/video:video_frame", + "../api/video:video_frame_i010", "../api/video:video_frame_i420", "../modules/video_capture:video_capture", "../rtc_base:rtc_base", diff --git a/common_video/i420_video_frame_unittest.cc b/common_video/i420_video_frame_unittest.cc deleted file mode 100644 index 4fec8cf388..0000000000 --- a/common_video/i420_video_frame_unittest.cc +++ /dev/null @@ -1,291 +0,0 @@ -/* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include -#include - -#include "api/video/i420_buffer.h" -#include "api/video/video_frame.h" -#include "rtc_base/bind.h" -#include "rtc_base/timeutils.h" -#include "test/fake_texture_frame.h" -#include "test/frame_utils.h" -#include "test/gtest.h" - -namespace webrtc { - -namespace { - -rtc::scoped_refptr CreateGradient(int width, int height) { - rtc::scoped_refptr buffer(I420Buffer::Create(width, height)); - // Initialize with gradient, Y = 128(x/w + y/h), U = 256 x/w, V = 256 y/h - for (int x = 0; x < width; x++) { - for (int y = 0; y < height; y++) { - buffer->MutableDataY()[x + y * width] = - 128 * (x * height + y * width) / (width * height); - } - } - int chroma_width = buffer->ChromaWidth(); - int chroma_height = buffer->ChromaHeight(); - for (int x = 0; x < chroma_width; x++) { - for (int y = 0; y < chroma_height; y++) { - buffer->MutableDataU()[x + y * chroma_width] = - 255 * x / (chroma_width - 1); - buffer->MutableDataV()[x + y * chroma_width] = - 255 * y / (chroma_height - 1); - } - } - return buffer; -} - -// The offsets and sizes describe the rectangle extracted from the -// original (gradient) frame, in relative coordinates where the -// original frame correspond to the unit square, 0.0 <= x, y < 1.0. -void CheckCrop(const webrtc::I420BufferInterface& frame, - double offset_x, - double offset_y, - double rel_width, - double rel_height) { - int width = frame.width(); - int height = frame.height(); - // Check that pixel values in the corners match the gradient used - // for initialization. - for (int i = 0; i < 2; i++) { - for (int j = 0; j < 2; j++) { - // Pixel coordinates of the corner. - int x = i * (width - 1); - int y = j * (height - 1); - // Relative coordinates, range 0.0 - 1.0 correspond to the - // size of the uncropped input frame. - double orig_x = offset_x + i * rel_width; - double orig_y = offset_y + j * rel_height; - - EXPECT_NEAR(frame.DataY()[x + y * frame.StrideY()] / 256.0, - (orig_x + orig_y) / 2, 0.02); - EXPECT_NEAR(frame.DataU()[x / 2 + (y / 2) * frame.StrideU()] / 256.0, - orig_x, 0.02); - EXPECT_NEAR(frame.DataV()[x / 2 + (y / 2) * frame.StrideV()] / 256.0, - orig_y, 0.02); - } - } -} - -void CheckRotate(int width, - int height, - webrtc::VideoRotation rotation, - const webrtc::I420BufferInterface& rotated) { - int rotated_width = width; - int rotated_height = height; - - if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) { - std::swap(rotated_width, rotated_height); - } - EXPECT_EQ(rotated_width, rotated.width()); - EXPECT_EQ(rotated_height, rotated.height()); - - // Clock-wise order (with 0,0 at top-left) - const struct { - int x; - int y; - } corners[] = {{0, 0}, {1, 0}, {1, 1}, {0, 1}}; - // Corresponding corner colors of the frame produced by CreateGradient. 
- const struct { - int y; - int u; - int v; - } colors[] = {{0, 0, 0}, {127, 255, 0}, {255, 255, 255}, {127, 0, 255}}; - int corner_offset = static_cast(rotation) / 90; - - for (int i = 0; i < 4; i++) { - int j = (i + corner_offset) % 4; - int x = corners[j].x * (rotated_width - 1); - int y = corners[j].y * (rotated_height - 1); - EXPECT_EQ(colors[i].y, rotated.DataY()[x + y * rotated.StrideY()]); - EXPECT_EQ(colors[i].u, - rotated.DataU()[(x / 2) + (y / 2) * rotated.StrideU()]); - EXPECT_EQ(colors[i].v, - rotated.DataV()[(x / 2) + (y / 2) * rotated.StrideV()]); - } -} - -} // namespace - -TEST(TestVideoFrame, WidthHeightValues) { - VideoFrame frame(I420Buffer::Create(10, 10, 10, 14, 90), - webrtc::kVideoRotation_0, - 789 * rtc::kNumMicrosecsPerMillisec); - const int valid_value = 10; - EXPECT_EQ(valid_value, frame.width()); - EXPECT_EQ(valid_value, frame.height()); - frame.set_timestamp(123u); - EXPECT_EQ(123u, frame.timestamp()); - frame.set_ntp_time_ms(456); - EXPECT_EQ(456, frame.ntp_time_ms()); - EXPECT_EQ(789, frame.render_time_ms()); -} - -TEST(TestVideoFrame, ShallowCopy) { - uint32_t timestamp = 1; - int64_t ntp_time_ms = 2; - int64_t timestamp_us = 3; - int stride_y = 15; - int stride_u = 10; - int stride_v = 10; - int width = 15; - int height = 15; - - const int kSizeY = 400; - const int kSizeU = 100; - const int kSizeV = 100; - const VideoRotation kRotation = kVideoRotation_270; - uint8_t buffer_y[kSizeY]; - uint8_t buffer_u[kSizeU]; - uint8_t buffer_v[kSizeV]; - memset(buffer_y, 16, kSizeY); - memset(buffer_u, 8, kSizeU); - memset(buffer_v, 4, kSizeV); - - VideoFrame frame1(I420Buffer::Copy(width, height, buffer_y, stride_y, - buffer_u, stride_u, buffer_v, stride_v), - kRotation, 0); - frame1.set_timestamp(timestamp); - frame1.set_ntp_time_ms(ntp_time_ms); - frame1.set_timestamp_us(timestamp_us); - VideoFrame frame2(frame1); - - EXPECT_EQ(frame1.video_frame_buffer(), frame2.video_frame_buffer()); - rtc::scoped_refptr yuv1 = - frame1.video_frame_buffer()->GetI420(); - rtc::scoped_refptr yuv2 = - frame2.video_frame_buffer()->GetI420(); - EXPECT_EQ(yuv1->DataY(), yuv2->DataY()); - EXPECT_EQ(yuv1->DataU(), yuv2->DataU()); - EXPECT_EQ(yuv1->DataV(), yuv2->DataV()); - - EXPECT_EQ(frame2.timestamp(), frame1.timestamp()); - EXPECT_EQ(frame2.ntp_time_ms(), frame1.ntp_time_ms()); - EXPECT_EQ(frame2.timestamp_us(), frame1.timestamp_us()); - EXPECT_EQ(frame2.rotation(), frame1.rotation()); - - frame2.set_timestamp(timestamp + 1); - frame2.set_ntp_time_ms(ntp_time_ms + 1); - frame2.set_timestamp_us(timestamp_us + 1); - frame2.set_rotation(kVideoRotation_90); - - EXPECT_NE(frame2.timestamp(), frame1.timestamp()); - EXPECT_NE(frame2.ntp_time_ms(), frame1.ntp_time_ms()); - EXPECT_NE(frame2.timestamp_us(), frame1.timestamp_us()); - EXPECT_NE(frame2.rotation(), frame1.rotation()); -} - -TEST(TestVideoFrame, TextureInitialValues) { - VideoFrame frame = test::FakeNativeBuffer::CreateFrame( - 640, 480, 100, 10, webrtc::kVideoRotation_0); - EXPECT_EQ(640, frame.width()); - EXPECT_EQ(480, frame.height()); - EXPECT_EQ(100u, frame.timestamp()); - EXPECT_EQ(10, frame.render_time_ms()); - ASSERT_TRUE(frame.video_frame_buffer() != nullptr); - EXPECT_TRUE(frame.video_frame_buffer()->type() == - VideoFrameBuffer::Type::kNative); - - frame.set_timestamp(200); - EXPECT_EQ(200u, frame.timestamp()); - frame.set_timestamp_us(20); - EXPECT_EQ(20, frame.timestamp_us()); -} - -TEST(TestI420FrameBuffer, Copy) { - rtc::scoped_refptr buf1(I420Buffer::Create(20, 10)); - memset(buf1->MutableDataY(), 1, 200); - 
memset(buf1->MutableDataU(), 2, 50); - memset(buf1->MutableDataV(), 3, 50); - rtc::scoped_refptr buf2 = I420Buffer::Copy(*buf1); - EXPECT_TRUE(test::FrameBufsEqual(buf1, buf2)); -} - -TEST(TestI420FrameBuffer, Scale) { - rtc::scoped_refptr buf = CreateGradient(200, 100); - - // Pure scaling, no cropping. - rtc::scoped_refptr scaled_buffer(I420Buffer::Create(150, 75)); - - scaled_buffer->ScaleFrom(*buf); - CheckCrop(*scaled_buffer, 0.0, 0.0, 1.0, 1.0); -} - -TEST(TestI420FrameBuffer, CropXCenter) { - rtc::scoped_refptr buf = CreateGradient(200, 100); - - // Pure center cropping, no scaling. - rtc::scoped_refptr scaled_buffer(I420Buffer::Create(100, 100)); - - scaled_buffer->CropAndScaleFrom(*buf, 50, 0, 100, 100); - CheckCrop(*scaled_buffer, 0.25, 0.0, 0.5, 1.0); -} - -TEST(TestI420FrameBuffer, CropXNotCenter) { - rtc::scoped_refptr buf = CreateGradient(200, 100); - - // Non-center cropping, no scaling. - rtc::scoped_refptr scaled_buffer(I420Buffer::Create(100, 100)); - - scaled_buffer->CropAndScaleFrom(*buf, 25, 0, 100, 100); - CheckCrop(*scaled_buffer, 0.125, 0.0, 0.5, 1.0); -} - -TEST(TestI420FrameBuffer, CropYCenter) { - rtc::scoped_refptr buf = CreateGradient(100, 200); - - // Pure center cropping, no scaling. - rtc::scoped_refptr scaled_buffer(I420Buffer::Create(100, 100)); - - scaled_buffer->CropAndScaleFrom(*buf, 0, 50, 100, 100); - CheckCrop(*scaled_buffer, 0.0, 0.25, 1.0, 0.5); -} - -TEST(TestI420FrameBuffer, CropYNotCenter) { - rtc::scoped_refptr buf = CreateGradient(100, 200); - - // Non-center cropping, no scaling. - rtc::scoped_refptr scaled_buffer(I420Buffer::Create(100, 100)); - - scaled_buffer->CropAndScaleFrom(*buf, 0, 25, 100, 100); - CheckCrop(*scaled_buffer, 0.0, 0.125, 1.0, 0.5); -} - -TEST(TestI420FrameBuffer, CropAndScale16x9) { - rtc::scoped_refptr buf = CreateGradient(640, 480); - - // Center crop to 640 x 360 (16/9 aspect), then scale down by 2. 
-  rtc::scoped_refptr<I420Buffer> scaled_buffer(I420Buffer::Create(320, 180));
-
-  scaled_buffer->CropAndScaleFrom(*buf);
-  CheckCrop(*scaled_buffer, 0.0, 0.125, 1.0, 0.75);
-}
-
-class TestI420BufferRotate
-    : public ::testing::TestWithParam<webrtc::VideoRotation> {};
-
-TEST_P(TestI420BufferRotate, Rotates) {
-  rtc::scoped_refptr<I420BufferInterface> buffer = CreateGradient(640, 480);
-  rtc::scoped_refptr<I420BufferInterface> rotated_buffer =
-      I420Buffer::Rotate(*buffer, GetParam());
-  CheckRotate(640, 480, GetParam(), *rotated_buffer);
-}
-
-INSTANTIATE_TEST_CASE_P(Rotate,
-                        TestI420BufferRotate,
-                        ::testing::Values(kVideoRotation_0,
-                                          kVideoRotation_90,
-                                          kVideoRotation_180,
-                                          kVideoRotation_270));
-
-}  // namespace webrtc
diff --git a/common_video/include/video_frame_buffer.h b/common_video/include/video_frame_buffer.h
index 312979b05d..11bb812531 100644
--- a/common_video/include/video_frame_buffer.h
+++ b/common_video/include/video_frame_buffer.h
@@ -103,6 +103,17 @@ rtc::scoped_refptr WrapYuvBuffer(
     int v_stride,
     const rtc::Callback0<void>& no_longer_used);
 
+rtc::scoped_refptr<I010BufferInterface> WrapI010Buffer(
+    int width,
+    int height,
+    const uint16_t* y_plane,
+    int y_stride,
+    const uint16_t* u_plane,
+    int u_stride,
+    const uint16_t* v_plane,
+    int v_stride,
+    const rtc::Callback0<void>& no_longer_used);
+
 }  // namespace webrtc
 
 #endif  // COMMON_VIDEO_INCLUDE_VIDEO_FRAME_BUFFER_H_
diff --git a/common_video/video_frame_buffer.cc b/common_video/video_frame_buffer.cc
index eeb77a2bbd..2cd0b290f1 100644
--- a/common_video/video_frame_buffer.cc
+++ b/common_video/video_frame_buffer.cc
@@ -130,6 +130,77 @@ rtc::scoped_refptr<I420BufferInterface> I444BufferBase::ToI420() {
   return i420_buffer;
 }
 
+// Template to implement a wrapped buffer for a PlanarYuv16BBuffer.
+template <typename Base>
+class WrappedYuv16BBuffer : public Base {
+ public:
+  WrappedYuv16BBuffer(int width,
+                      int height,
+                      const uint16_t* y_plane,
+                      int y_stride,
+                      const uint16_t* u_plane,
+                      int u_stride,
+                      const uint16_t* v_plane,
+                      int v_stride,
+                      const rtc::Callback0<void>& no_longer_used)
+      : width_(width),
+        height_(height),
+        y_plane_(y_plane),
+        u_plane_(u_plane),
+        v_plane_(v_plane),
+        y_stride_(y_stride),
+        u_stride_(u_stride),
+        v_stride_(v_stride),
+        no_longer_used_cb_(no_longer_used) {}
+
+  ~WrappedYuv16BBuffer() override { no_longer_used_cb_(); }
+
+  int width() const override { return width_; }
+
+  int height() const override { return height_; }
+
+  const uint16_t* DataY() const override { return y_plane_; }
+
+  const uint16_t* DataU() const override { return u_plane_; }
+
+  const uint16_t* DataV() const override { return v_plane_; }
+
+  int StrideY() const override { return y_stride_; }
+
+  int StrideU() const override { return u_stride_; }
+
+  int StrideV() const override { return v_stride_; }
+
+ private:
+  friend class rtc::RefCountedObject<WrappedYuv16BBuffer>;
+
+  const int width_;
+  const int height_;
+  const uint16_t* const y_plane_;
+  const uint16_t* const u_plane_;
+  const uint16_t* const v_plane_;
+  const int y_stride_;
+  const int u_stride_;
+  const int v_stride_;
+  rtc::Callback0<void> no_longer_used_cb_;
+};
+
+class I010BufferBase : public I010BufferInterface {
+ public:
+  rtc::scoped_refptr<I420BufferInterface> ToI420() final;
+};
+
+rtc::scoped_refptr<I420BufferInterface> I010BufferBase::ToI420() {
+  rtc::scoped_refptr<I420Buffer> i420_buffer =
+      I420Buffer::Create(width(), height());
+  libyuv::I010ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(),
+                     i420_buffer->MutableDataY(), i420_buffer->StrideY(),
+                     i420_buffer->MutableDataU(), i420_buffer->StrideU(),
+                     i420_buffer->MutableDataV(), i420_buffer->StrideV(),
+                     width(), height());
+  return i420_buffer;
+}
+
 }  // namespace
WrappedI420Buffer::WrappedI420Buffer(int width, @@ -257,4 +328,20 @@ rtc::scoped_refptr WrapYuvBuffer( } } +rtc::scoped_refptr WrapI010Buffer( + int width, + int height, + const uint16_t* y_plane, + int y_stride, + const uint16_t* u_plane, + int u_stride, + const uint16_t* v_plane, + int v_stride, + const rtc::Callback0& no_longer_used) { + return rtc::scoped_refptr( + new rtc::RefCountedObject>( + width, height, y_plane, y_stride, u_plane, u_stride, v_plane, + v_stride, no_longer_used)); +} + } // namespace webrtc diff --git a/common_video/video_frame_unittest.cc b/common_video/video_frame_unittest.cc new file mode 100644 index 0000000000..a4b110b6b9 --- /dev/null +++ b/common_video/video_frame_unittest.cc @@ -0,0 +1,435 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include +#include + +#include "api/video/i010_buffer.h" +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "rtc_base/bind.h" +#include "rtc_base/timeutils.h" +#include "test/fake_texture_frame.h" +#include "test/frame_utils.h" +#include "test/gtest.h" + +namespace webrtc { + +namespace { + +// Helper class to delegate calls to appropriate container. +class PlanarYuvBufferFactory { + public: + static rtc::scoped_refptr Create(VideoFrameBuffer::Type type, + int width, + int height) { + switch (type) { + case VideoFrameBuffer::Type::kI420: + return I420Buffer::Create(width, height); + case VideoFrameBuffer::Type::kI010: + return I010Buffer::Create(width, height); + default: + RTC_NOTREACHED(); + } + return nullptr; + } + + static rtc::scoped_refptr Copy(const VideoFrameBuffer& src) { + switch (src.type()) { + case VideoFrameBuffer::Type::kI420: + return I420Buffer::Copy(src); + case VideoFrameBuffer::Type::kI010: + return I010Buffer::Copy(*src.GetI010()); + default: + RTC_NOTREACHED(); + } + return nullptr; + } + + static rtc::scoped_refptr Rotate(const VideoFrameBuffer& src, + VideoRotation rotation) { + switch (src.type()) { + case VideoFrameBuffer::Type::kI420: + return I420Buffer::Rotate(src, rotation); + case VideoFrameBuffer::Type::kI010: + return I010Buffer::Rotate(*src.GetI010(), rotation); + default: + RTC_NOTREACHED(); + } + return nullptr; + } + + static rtc::scoped_refptr CropAndScaleFrom( + const VideoFrameBuffer& src, + int offset_x, + int offset_y, + int crop_width, + int crop_height) { + switch (src.type()) { + case VideoFrameBuffer::Type::kI420: { + rtc::scoped_refptr buffer = + I420Buffer::Create(crop_width, crop_height); + buffer->CropAndScaleFrom(*src.GetI420(), offset_x, offset_y, crop_width, + crop_height); + return buffer; + } + case VideoFrameBuffer::Type::kI010: { + rtc::scoped_refptr buffer = + I010Buffer::Create(crop_width, crop_height); + buffer->CropAndScaleFrom(*src.GetI010(), offset_x, offset_y, crop_width, + crop_height); + return buffer; + } + default: + RTC_NOTREACHED(); + } + return nullptr; + } + + static rtc::scoped_refptr CropAndScaleFrom( + const VideoFrameBuffer& src, + int crop_width, + int crop_height) { + const int out_width = + std::min(src.width(), crop_width * src.height() / crop_height); + const int out_height = + std::min(src.height(), crop_height * src.width() / crop_width); + 
return CropAndScaleFrom(src, (src.width() - out_width) / 2, + (src.height() - out_height) / 2, out_width, + out_height); + } + + static rtc::scoped_refptr + ScaleFrom(const VideoFrameBuffer& src, int crop_width, int crop_height) { + switch (src.type()) { + case VideoFrameBuffer::Type::kI420: { + rtc::scoped_refptr buffer = + I420Buffer::Create(crop_width, crop_height); + buffer->ScaleFrom(*src.GetI420()); + return buffer; + } + case VideoFrameBuffer::Type::kI010: { + rtc::scoped_refptr buffer = + I010Buffer::Create(crop_width, crop_height); + buffer->ScaleFrom(*src.GetI010()); + return buffer; + } + default: + RTC_NOTREACHED(); + } + return nullptr; + } +}; + +rtc::scoped_refptr CreateGradient(VideoFrameBuffer::Type type, + int width, + int height) { + rtc::scoped_refptr buffer(I420Buffer::Create(width, height)); + // Initialize with gradient, Y = 128(x/w + y/h), U = 256 x/w, V = 256 y/h + for (int x = 0; x < width; x++) { + for (int y = 0; y < height; y++) { + buffer->MutableDataY()[x + y * width] = + 128 * (x * height + y * width) / (width * height); + } + } + int chroma_width = buffer->ChromaWidth(); + int chroma_height = buffer->ChromaHeight(); + for (int x = 0; x < chroma_width; x++) { + for (int y = 0; y < chroma_height; y++) { + buffer->MutableDataU()[x + y * chroma_width] = + 255 * x / (chroma_width - 1); + buffer->MutableDataV()[x + y * chroma_width] = + 255 * y / (chroma_height - 1); + } + } + if (type == VideoFrameBuffer::Type::kI420) + return buffer; + + RTC_DCHECK(type == VideoFrameBuffer::Type::kI010); + return I010Buffer::Copy(*buffer); +} + +// The offsets and sizes describe the rectangle extracted from the +// original (gradient) frame, in relative coordinates where the +// original frame correspond to the unit square, 0.0 <= x, y < 1.0. +void CheckCrop(const webrtc::I420BufferInterface& frame, + double offset_x, + double offset_y, + double rel_width, + double rel_height) { + int width = frame.width(); + int height = frame.height(); + // Check that pixel values in the corners match the gradient used + // for initialization. + for (int i = 0; i < 2; i++) { + for (int j = 0; j < 2; j++) { + // Pixel coordinates of the corner. + int x = i * (width - 1); + int y = j * (height - 1); + // Relative coordinates, range 0.0 - 1.0 correspond to the + // size of the uncropped input frame. + double orig_x = offset_x + i * rel_width; + double orig_y = offset_y + j * rel_height; + + EXPECT_NEAR(frame.DataY()[x + y * frame.StrideY()] / 256.0, + (orig_x + orig_y) / 2, 0.02); + EXPECT_NEAR(frame.DataU()[x / 2 + (y / 2) * frame.StrideU()] / 256.0, + orig_x, 0.02); + EXPECT_NEAR(frame.DataV()[x / 2 + (y / 2) * frame.StrideV()] / 256.0, + orig_y, 0.02); + } + } +} + +void CheckRotate(int width, + int height, + webrtc::VideoRotation rotation, + const webrtc::I420BufferInterface& rotated) { + int rotated_width = width; + int rotated_height = height; + + if (rotation == kVideoRotation_90 || rotation == kVideoRotation_270) { + std::swap(rotated_width, rotated_height); + } + EXPECT_EQ(rotated_width, rotated.width()); + EXPECT_EQ(rotated_height, rotated.height()); + + // Clock-wise order (with 0,0 at top-left) + const struct { + int x; + int y; + } corners[] = {{0, 0}, {1, 0}, {1, 1}, {0, 1}}; + // Corresponding corner colors of the frame produced by CreateGradient. 
+ const struct { + int y; + int u; + int v; + } colors[] = {{0, 0, 0}, {127, 255, 0}, {255, 255, 255}, {127, 0, 255}}; + int corner_offset = static_cast(rotation) / 90; + + for (int i = 0; i < 4; i++) { + int j = (i + corner_offset) % 4; + int x = corners[j].x * (rotated_width - 1); + int y = corners[j].y * (rotated_height - 1); + EXPECT_EQ(colors[i].y, rotated.DataY()[x + y * rotated.StrideY()]); + EXPECT_EQ(colors[i].u, + rotated.DataU()[(x / 2) + (y / 2) * rotated.StrideU()]); + EXPECT_EQ(colors[i].v, + rotated.DataV()[(x / 2) + (y / 2) * rotated.StrideV()]); + } +} + +} // namespace + +TEST(TestVideoFrame, WidthHeightValues) { + VideoFrame frame(I420Buffer::Create(10, 10, 10, 14, 90), + webrtc::kVideoRotation_0, + 789 * rtc::kNumMicrosecsPerMillisec); + const int valid_value = 10; + EXPECT_EQ(valid_value, frame.width()); + EXPECT_EQ(valid_value, frame.height()); + frame.set_timestamp(123u); + EXPECT_EQ(123u, frame.timestamp()); + frame.set_ntp_time_ms(456); + EXPECT_EQ(456, frame.ntp_time_ms()); + EXPECT_EQ(789, frame.render_time_ms()); +} + +TEST(TestVideoFrame, ShallowCopy) { + uint32_t timestamp = 1; + int64_t ntp_time_ms = 2; + int64_t timestamp_us = 3; + int stride_y = 15; + int stride_u = 10; + int stride_v = 10; + int width = 15; + int height = 15; + + const int kSizeY = 400; + const int kSizeU = 100; + const int kSizeV = 100; + const VideoRotation kRotation = kVideoRotation_270; + uint8_t buffer_y[kSizeY]; + uint8_t buffer_u[kSizeU]; + uint8_t buffer_v[kSizeV]; + memset(buffer_y, 16, kSizeY); + memset(buffer_u, 8, kSizeU); + memset(buffer_v, 4, kSizeV); + + VideoFrame frame1(I420Buffer::Copy(width, height, buffer_y, stride_y, + buffer_u, stride_u, buffer_v, stride_v), + kRotation, 0); + frame1.set_timestamp(timestamp); + frame1.set_ntp_time_ms(ntp_time_ms); + frame1.set_timestamp_us(timestamp_us); + VideoFrame frame2(frame1); + + EXPECT_EQ(frame1.video_frame_buffer(), frame2.video_frame_buffer()); + rtc::scoped_refptr yuv1 = + frame1.video_frame_buffer()->GetI420(); + rtc::scoped_refptr yuv2 = + frame2.video_frame_buffer()->GetI420(); + EXPECT_EQ(yuv1->DataY(), yuv2->DataY()); + EXPECT_EQ(yuv1->DataU(), yuv2->DataU()); + EXPECT_EQ(yuv1->DataV(), yuv2->DataV()); + + EXPECT_EQ(frame2.timestamp(), frame1.timestamp()); + EXPECT_EQ(frame2.ntp_time_ms(), frame1.ntp_time_ms()); + EXPECT_EQ(frame2.timestamp_us(), frame1.timestamp_us()); + EXPECT_EQ(frame2.rotation(), frame1.rotation()); + + frame2.set_timestamp(timestamp + 1); + frame2.set_ntp_time_ms(ntp_time_ms + 1); + frame2.set_timestamp_us(timestamp_us + 1); + frame2.set_rotation(kVideoRotation_90); + + EXPECT_NE(frame2.timestamp(), frame1.timestamp()); + EXPECT_NE(frame2.ntp_time_ms(), frame1.ntp_time_ms()); + EXPECT_NE(frame2.timestamp_us(), frame1.timestamp_us()); + EXPECT_NE(frame2.rotation(), frame1.rotation()); +} + +TEST(TestVideoFrame, TextureInitialValues) { + VideoFrame frame = test::FakeNativeBuffer::CreateFrame( + 640, 480, 100, 10, webrtc::kVideoRotation_0); + EXPECT_EQ(640, frame.width()); + EXPECT_EQ(480, frame.height()); + EXPECT_EQ(100u, frame.timestamp()); + EXPECT_EQ(10, frame.render_time_ms()); + ASSERT_TRUE(frame.video_frame_buffer() != nullptr); + EXPECT_TRUE(frame.video_frame_buffer()->type() == + VideoFrameBuffer::Type::kNative); + + frame.set_timestamp(200); + EXPECT_EQ(200u, frame.timestamp()); + frame.set_timestamp_us(20); + EXPECT_EQ(20, frame.timestamp_us()); +} + +class TestPlanarYuvBuffer + : public ::testing::TestWithParam {}; + +rtc::scoped_refptr CreateAndFillBuffer() { + auto buf = 
I420Buffer::Create(20, 10); + memset(buf->MutableDataY(), 1, 200); + memset(buf->MutableDataU(), 2, 50); + memset(buf->MutableDataV(), 3, 50); + return buf; +} + +TEST_P(TestPlanarYuvBuffer, Copy) { + rtc::scoped_refptr buf1; + switch (GetParam()) { + case VideoFrameBuffer::Type::kI420: { + buf1 = CreateAndFillBuffer(); + break; + } + case VideoFrameBuffer::Type::kI010: { + buf1 = I010Buffer::Copy(*CreateAndFillBuffer()); + break; + } + default: + RTC_NOTREACHED(); + } + + rtc::scoped_refptr buf2 = + PlanarYuvBufferFactory::Copy(*buf1); + EXPECT_TRUE(test::FrameBufsEqual(buf1->ToI420(), buf2->ToI420())); +} + +TEST_P(TestPlanarYuvBuffer, Scale) { + rtc::scoped_refptr buf = + CreateGradient(GetParam(), 200, 100); + + // Pure scaling, no cropping. + rtc::scoped_refptr scaled_buffer = + PlanarYuvBufferFactory::ScaleFrom(*buf, 150, 75); + CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.0, 1.0, 1.0); +} + +TEST_P(TestPlanarYuvBuffer, CropXCenter) { + rtc::scoped_refptr buf = + CreateGradient(GetParam(), 200, 100); + + // Pure center cropping, no scaling. + rtc::scoped_refptr scaled_buffer = + PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 50, 0, 100, 100); + CheckCrop(*scaled_buffer->ToI420(), 0.25, 0.0, 0.5, 1.0); +} + +TEST_P(TestPlanarYuvBuffer, CropXNotCenter) { + rtc::scoped_refptr buf = + CreateGradient(GetParam(), 200, 100); + + // Non-center cropping, no scaling. + rtc::scoped_refptr scaled_buffer = + PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 25, 0, 100, 100); + CheckCrop(*scaled_buffer->ToI420(), 0.125, 0.0, 0.5, 1.0); +} + +TEST_P(TestPlanarYuvBuffer, CropYCenter) { + rtc::scoped_refptr buf = + CreateGradient(GetParam(), 100, 200); + + // Pure center cropping, no scaling. + rtc::scoped_refptr scaled_buffer = + PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 0, 50, 100, 100); + CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.25, 1.0, 0.5); +} + +TEST_P(TestPlanarYuvBuffer, CropYNotCenter) { + rtc::scoped_refptr buf = + CreateGradient(GetParam(), 100, 200); + + // Pure center cropping, no scaling. + rtc::scoped_refptr scaled_buffer = + PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 0, 25, 100, 100); + CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.125, 1.0, 0.5); +} + +TEST_P(TestPlanarYuvBuffer, CropAndScale16x9) { + rtc::scoped_refptr buf = + CreateGradient(GetParam(), 640, 480); + + // Pure center cropping, no scaling. + rtc::scoped_refptr scaled_buffer = + PlanarYuvBufferFactory::CropAndScaleFrom(*buf, 320, 180); + CheckCrop(*scaled_buffer->ToI420(), 0.0, 0.125, 1.0, 0.75); +} + +INSTANTIATE_TEST_CASE_P(, + TestPlanarYuvBuffer, + ::testing::Values(VideoFrameBuffer::Type::kI420, + VideoFrameBuffer::Type::kI010)); + +class TestPlanarYuvBufferRotate + : public ::testing::TestWithParam< + std::tuple> {}; + +TEST_P(TestPlanarYuvBufferRotate, Rotates) { + const webrtc::VideoRotation rotation = std::get<0>(GetParam()); + const VideoFrameBuffer::Type type = std::get<1>(GetParam()); + rtc::scoped_refptr buffer = CreateGradient(type, 640, 480); + rtc::scoped_refptr rotated_buffer = + PlanarYuvBufferFactory::Rotate(*buffer, rotation); + CheckRotate(640, 480, rotation, *rotated_buffer->ToI420()); +} + +INSTANTIATE_TEST_CASE_P( + Rotate, + TestPlanarYuvBufferRotate, + ::testing::Combine(::testing::Values(kVideoRotation_0, + kVideoRotation_90, + kVideoRotation_180, + kVideoRotation_270), + ::testing::Values(VideoFrameBuffer::Type::kI420, + VideoFrameBuffer::Type::kI010))); + +} // namespace webrtc
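
Reviewer note (illustrative, not part of the patch): the sketch below shows how the I010 path introduced in this CL can be exercised end to end, using only APIs added or touched above (I010Buffer::Create/Copy/ScaleFrom/ToI420 and WrapI010Buffer). The function name ExerciseI010, the 640x480/320x240 dimensions, and the use of I420Buffer::SetBlack plus rtc::KeepRefUntilDone (rtc_base/keep_ref_until_done.h) as a convenient release callback are assumptions made for the example only.

#include "api/video/i010_buffer.h"
#include "api/video/i420_buffer.h"
#include "common_video/include/video_frame_buffer.h"
#include "rtc_base/keep_ref_until_done.h"

namespace webrtc {
namespace {

void ExerciseI010() {
  // 8-bit source frame; SetBlack just gives it defined contents.
  rtc::scoped_refptr<I420Buffer> i420 = I420Buffer::Create(640, 480);
  I420Buffer::SetBlack(i420.get());

  // Convert up to 10 bits (stored in the 10 least significant bits of
  // 16-bit samples), as I010Buffer::Copy(const I420BufferInterface&) does.
  rtc::scoped_refptr<I010Buffer> i010 = I010Buffer::Copy(*i420);

  // Scale within the 10-bit domain, then convert back to 8-bit I420.
  rtc::scoped_refptr<I010Buffer> scaled = I010Buffer::Create(320, 240);
  scaled->ScaleFrom(*i010);
  rtc::scoped_refptr<I420BufferInterface> back_to_i420 = scaled->ToI420();

  // Wrap existing 10-bit planes without copying; the callback keeps the
  // backing buffer alive until the wrapper is destroyed.
  rtc::scoped_refptr<I010BufferInterface> wrapped = WrapI010Buffer(
      i010->width(), i010->height(), i010->DataY(), i010->StrideY(),
      i010->DataU(), i010->StrideU(), i010->DataV(), i010->StrideV(),
      rtc::KeepRefUntilDone(i010));
}

}  // namespace
}  // namespace webrtc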