diff --git a/AUTHORS b/AUTHORS
index e4729a574b..eb650c59d7 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -138,7 +138,6 @@ Microsoft Corporation <*@microsoft.com>
 MIPS Technologies <*@mips.com>
 Mozilla Foundation <*@mozilla.com>
 Netgem S.A. <*@netgem.com>
-Nutanix Inc. <*@nutanix.com>
 NVIDIA Corporation <*@nvidia.com>
 Opera Software ASA <*@opera.com>
 Optical Tone Ltd <*@opticaltone.com>
diff --git a/api/video/BUILD.gn b/api/video/BUILD.gn
index 9fd28914b2..e6052fe763 100644
--- a/api/video/BUILD.gn
+++ b/api/video/BUILD.gn
@@ -43,8 +43,6 @@ rtc_library("video_frame") {
   sources = [
     "i420_buffer.cc",
     "i420_buffer.h",
-    "i444_buffer.cc",
-    "i444_buffer.h",
     "nv12_buffer.cc",
     "nv12_buffer.h",
     "video_codec_type.h",
diff --git a/api/video/DEPS b/api/video/DEPS
index 5a3e496bcf..cf6770dce0 100644
--- a/api/video/DEPS
+++ b/api/video/DEPS
@@ -18,10 +18,6 @@ specific_include_rules = {
     "+rtc_base/memory/aligned_malloc.h",
   ],
 
-  "i444_buffer\.h": [
-    "+rtc_base/memory/aligned_malloc.h",
-  ],
-
   "nv12_buffer\.h": [
     "+rtc_base/memory/aligned_malloc.h",
   ],
diff --git a/api/video/i444_buffer.cc b/api/video/i444_buffer.cc
deleted file mode 100644
index 8bf9f76625..0000000000
--- a/api/video/i444_buffer.cc
+++ /dev/null
@@ -1,211 +0,0 @@
-/*
- *  Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-#include "api/video/i444_buffer.h"
-
-#include <string.h>
-
-#include <algorithm>
-#include <utility>
-
-#include "api/video/i420_buffer.h"
-#include "rtc_base/checks.h"
-#include "rtc_base/ref_counted_object.h"
-#include "third_party/libyuv/include/libyuv/convert.h"
-#include "third_party/libyuv/include/libyuv/planar_functions.h"
-#include "third_party/libyuv/include/libyuv/scale.h"
-
-// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD.
-static const int kBufferAlignment = 64;
-
-namespace webrtc {
-
-namespace {
-
-int I444DataSize(int height, int stride_y, int stride_u, int stride_v) {
-  return stride_y * height + stride_u * height + stride_v * height;
-}
-
-}  // namespace
-
-I444Buffer::I444Buffer(int width, int height)
-    : I444Buffer(width, height, width, (width), (width)) {}
-
-I444Buffer::I444Buffer(int width,
-                       int height,
-                       int stride_y,
-                       int stride_u,
-                       int stride_v)
-    : width_(width),
-      height_(height),
-      stride_y_(stride_y),
-      stride_u_(stride_u),
-      stride_v_(stride_v),
-      data_(static_cast<uint8_t*>(
-          AlignedMalloc(I444DataSize(height, stride_y, stride_u, stride_v),
-                        kBufferAlignment))) {
-  RTC_DCHECK_GT(width, 0);
-  RTC_DCHECK_GT(height, 0);
-  RTC_DCHECK_GE(stride_y, width);
-  RTC_DCHECK_GE(stride_u, (width));
-  RTC_DCHECK_GE(stride_v, (width));
-}
-
-I444Buffer::~I444Buffer() {}
-
-// static
-rtc::scoped_refptr<I444Buffer> I444Buffer::Create(int width, int height) {
-  return rtc::make_ref_counted<I444Buffer>(width, height);
-}
-
-// static
-rtc::scoped_refptr<I444Buffer> I444Buffer::Create(int width,
-                                                  int height,
-                                                  int stride_y,
-                                                  int stride_u,
-                                                  int stride_v) {
-  return rtc::make_ref_counted<I444Buffer>(width, height, stride_y, stride_u,
-                                           stride_v);
-}
-
-// static
-rtc::scoped_refptr<I444Buffer> I444Buffer::Copy(
-    const I444BufferInterface& source) {
-  return Copy(source.width(), source.height(), source.DataY(), source.StrideY(),
-              source.DataU(), source.StrideU(), source.DataV(),
-              source.StrideV());
-}
-
-// static
-rtc::scoped_refptr<I444Buffer> I444Buffer::Copy(int width,
-                                                int height,
-                                                const uint8_t* data_y,
-                                                int stride_y,
-                                                const uint8_t* data_u,
-                                                int stride_u,
-                                                const uint8_t* data_v,
-                                                int stride_v) {
-  // Note: May use different strides than the input data.
-  rtc::scoped_refptr<I444Buffer> buffer = Create(width, height);
-  RTC_CHECK_EQ(0, libyuv::I444Copy(data_y, stride_y, data_u, stride_u, data_v,
-                                   stride_v, buffer->MutableDataY(),
-                                   buffer->StrideY(), buffer->MutableDataU(),
-                                   buffer->StrideU(), buffer->MutableDataV(),
-                                   buffer->StrideV(), width, height));
-  return buffer;
-}
-
-// static
-rtc::scoped_refptr<I444Buffer> I444Buffer::Rotate(
-    const I444BufferInterface& src,
-    VideoRotation rotation) {
-  RTC_CHECK(src.DataY());
-  RTC_CHECK(src.DataU());
-  RTC_CHECK(src.DataV());
-
-  int rotated_width = src.width();
-  int rotated_height = src.height();
-  if (rotation == webrtc::kVideoRotation_90 ||
-      rotation == webrtc::kVideoRotation_270) {
-    std::swap(rotated_width, rotated_height);
-  }
-
-  rtc::scoped_refptr<I444Buffer> buffer =
-      I444Buffer::Create(rotated_width, rotated_height);
-
-  RTC_CHECK_EQ(0,
-               libyuv::I444Rotate(
-                   src.DataY(), src.StrideY(), src.DataU(), src.StrideU(),
-                   src.DataV(), src.StrideV(), buffer->MutableDataY(),
-                   buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
-                   buffer->MutableDataV(), buffer->StrideV(), src.width(),
-                   src.height(), static_cast<libyuv::RotationMode>(rotation)));
-
-  return buffer;
-}
-
-rtc::scoped_refptr<I420BufferInterface> I444Buffer::ToI420() {
-  rtc::scoped_refptr<I420Buffer> i420_buffer =
-      I420Buffer::Create(width(), height());
-  libyuv::I444ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(),
-                     i420_buffer->MutableDataY(), i420_buffer->StrideY(),
-                     i420_buffer->MutableDataU(), i420_buffer->StrideU(),
-                     i420_buffer->MutableDataV(), i420_buffer->StrideV(),
-                     width(), height());
-  return i420_buffer;
-}
-
-void I444Buffer::InitializeData() {
-  memset(data_.get(), 0,
-         I444DataSize(height_, stride_y_, stride_u_, stride_v_));
-}
-
-int I444Buffer::width() const {
-  return width_;
-}
-
-int I444Buffer::height() const {
-  return height_;
-}
-
-const uint8_t* I444Buffer::DataY() const {
-  return data_.get();
-}
-const uint8_t* I444Buffer::DataU() const {
-  return data_.get() + stride_y_ * height_;
-}
-const uint8_t* I444Buffer::DataV() const {
-  return data_.get() + stride_y_ * height_ + stride_u_ * ((height_));
-}
-
-int I444Buffer::StrideY() const {
-  return stride_y_;
-}
-int I444Buffer::StrideU() const {
-  return stride_u_;
-}
-int I444Buffer::StrideV() const {
-  return stride_v_;
-}
-
-uint8_t* I444Buffer::MutableDataY() {
-  return const_cast<uint8_t*>(DataY());
-}
-uint8_t* I444Buffer::MutableDataU() {
-  return const_cast<uint8_t*>(DataU());
-}
-uint8_t* I444Buffer::MutableDataV() {
-  return const_cast<uint8_t*>(DataV());
-}
-
-void I444Buffer::CropAndScaleFrom(const I444BufferInterface& src,
-                                  int offset_x,
-                                  int offset_y,
-                                  int crop_width,
-                                  int crop_height) {
-  RTC_CHECK_LE(crop_width, src.width());
-  RTC_CHECK_LE(crop_height, src.height());
-  RTC_CHECK_LE(crop_width + offset_x, src.width());
-  RTC_CHECK_LE(crop_height + offset_y, src.height());
-  RTC_CHECK_GE(offset_x, 0);
-  RTC_CHECK_GE(offset_y, 0);
-
-  const uint8_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
-  const uint8_t* u_plane = src.DataU() + src.StrideU() * offset_y + offset_x;
-  const uint8_t* v_plane = src.DataV() + src.StrideV() * offset_y + offset_x;
-  int res =
-      libyuv::I444Scale(y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane,
-                        src.StrideV(), crop_width, crop_height, MutableDataY(),
-                        StrideY(), MutableDataU(), StrideU(), MutableDataV(),
-                        StrideV(), width(), height(), libyuv::kFilterBox);
-
-  RTC_DCHECK_EQ(res, 0);
-}
-
-}  // namespace webrtc
diff --git a/api/video/i444_buffer.h b/api/video/i444_buffer.h
deleted file mode 100644
index 557bf4f3e0..0000000000
--- a/api/video/i444_buffer.h
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- *  Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef API_VIDEO_I444_BUFFER_H_
-#define API_VIDEO_I444_BUFFER_H_
-
-#include <stdint.h>
-
-#include <memory>
-
-#include "api/scoped_refptr.h"
-#include "api/video/video_frame_buffer.h"
-#include "api/video/video_rotation.h"
-#include "rtc_base/memory/aligned_malloc.h"
-#include "rtc_base/system/rtc_export.h"
-
-namespace webrtc {
-
-// Plain I444 buffer in standard memory.
-// I444 represents an image with in YUV format withouth any chroma subsampling.
-// https://en.wikipedia.org/wiki/Chroma_subsampling#4:4:4
-class RTC_EXPORT I444Buffer : public I444BufferInterface {
- public:
-  static rtc::scoped_refptr<I444Buffer> Create(int width, int height);
-  static rtc::scoped_refptr<I444Buffer> Create(int width,
-                                               int height,
-                                               int stride_y,
-                                               int stride_u,
-                                               int stride_v);
-
-  // Create a new buffer and copy the pixel data.
-  static rtc::scoped_refptr<I444Buffer> Copy(const I444BufferInterface& buffer);
-
-  static rtc::scoped_refptr<I444Buffer> Copy(int width,
-                                             int height,
-                                             const uint8_t* data_y,
-                                             int stride_y,
-                                             const uint8_t* data_u,
-                                             int stride_u,
-                                             const uint8_t* data_v,
-                                             int stride_v);
-
-  // Returns a rotated copy of |src|.
-  static rtc::scoped_refptr<I444Buffer> Rotate(const I444BufferInterface& src,
-                                               VideoRotation rotation);
-
-  rtc::scoped_refptr<I420BufferInterface> ToI420() final;
-  const I420BufferInterface* GetI420() const final { return nullptr; }
-
-  // Sets all three planes to all zeros.
Used to work around for - // quirks in memory checkers - // (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and - // ffmpeg (http://crbug.com/390941). - // TODO(nisse): Deprecated. Should be deleted if/when those issues - // are resolved in a better way. Or in the mean time, use SetBlack. - void InitializeData(); - - int width() const override; - int height() const override; - const uint8_t* DataY() const override; - const uint8_t* DataU() const override; - const uint8_t* DataV() const override; - - int StrideY() const override; - int StrideU() const override; - int StrideV() const override; - - uint8_t* MutableDataY(); - uint8_t* MutableDataU(); - uint8_t* MutableDataV(); - - // Scale the cropped area of |src| to the size of |this| buffer, and - // write the result into |this|. - void CropAndScaleFrom(const I444BufferInterface& src, - int offset_x, - int offset_y, - int crop_width, - int crop_height); - - protected: - I444Buffer(int width, int height); - I444Buffer(int width, int height, int stride_y, int stride_u, int stride_v); - - ~I444Buffer() override; - - private: - const int width_; - const int height_; - const int stride_y_; - const int stride_u_; - const int stride_v_; - const std::unique_ptr data_; -}; - -} // namespace webrtc - -#endif // API_VIDEO_I444_BUFFER_H_ diff --git a/api/video/test/BUILD.gn b/api/video/test/BUILD.gn index 5b0d57b3c6..1573e7848f 100644 --- a/api/video/test/BUILD.gn +++ b/api/video/test/BUILD.gn @@ -12,7 +12,6 @@ rtc_library("rtc_api_video_unittests") { testonly = true sources = [ "color_space_unittest.cc", - "i444_buffer_unittest.cc", "nv12_buffer_unittest.cc", "video_adaptation_counters_unittest.cc", "video_bitrate_allocation_unittest.cc", diff --git a/api/video/test/i444_buffer_unittest.cc b/api/video/test/i444_buffer_unittest.cc deleted file mode 100644 index 9a1a9315aa..0000000000 --- a/api/video/test/i444_buffer_unittest.cc +++ /dev/null @@ -1,112 +0,0 @@ - -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "api/video/i444_buffer.h" - -#include "api/video/i420_buffer.h" -#include "test/frame_utils.h" -#include "test/gmock.h" -#include "test/gtest.h" - -namespace webrtc { - -namespace { -int GetY(rtc::scoped_refptr buf, int col, int row) { - return buf->DataY()[row * buf->StrideY() + col]; -} - -int GetU(rtc::scoped_refptr buf, int col, int row) { - return buf->DataU()[row * buf->StrideU() + col]; -} - -int GetV(rtc::scoped_refptr buf, int col, int row) { - return buf->DataV()[row * buf->StrideV() + col]; -} - -void FillI444Buffer(rtc::scoped_refptr buf) { - const uint8_t Y = 1; - const uint8_t U = 2; - const uint8_t V = 3; - for (int row = 0; row < buf->height(); ++row) { - for (int col = 0; col < buf->width(); ++col) { - buf->MutableDataY()[row * buf->StrideY() + col] = Y; - buf->MutableDataU()[row * buf->StrideU() + col] = U; - buf->MutableDataV()[row * buf->StrideV() + col] = V; - } - } -} - -} // namespace - -TEST(I444BufferTest, InitialData) { - constexpr int stride = 3; - constexpr int width = 3; - constexpr int height = 3; - - rtc::scoped_refptr i444_buffer(I444Buffer::Create(width, height)); - EXPECT_EQ(width, i444_buffer->width()); - EXPECT_EQ(height, i444_buffer->height()); - EXPECT_EQ(stride, i444_buffer->StrideY()); - EXPECT_EQ(stride, i444_buffer->StrideU()); - EXPECT_EQ(stride, i444_buffer->StrideV()); - EXPECT_EQ(3, i444_buffer->ChromaWidth()); - EXPECT_EQ(3, i444_buffer->ChromaHeight()); -} - -TEST(I444BufferTest, ReadPixels) { - constexpr int width = 3; - constexpr int height = 3; - - rtc::scoped_refptr i444_buffer(I444Buffer::Create(width, height)); - // Y = 1, U = 2, V = 3. - FillI444Buffer(i444_buffer); - for (int row = 0; row < height; row++) { - for (int col = 0; col < width; col++) { - EXPECT_EQ(1, GetY(i444_buffer, col, row)); - EXPECT_EQ(2, GetU(i444_buffer, col, row)); - EXPECT_EQ(3, GetV(i444_buffer, col, row)); - } - } -} - -TEST(I444BufferTest, ToI420) { - constexpr int width = 3; - constexpr int height = 3; - constexpr int size_y = width * height; - constexpr int size_u = (width + 1) / 2 * (height + 1) / 2; - constexpr int size_v = (width + 1) / 2 * (height + 1) / 2; - rtc::scoped_refptr reference(I420Buffer::Create(width, height)); - memset(reference->MutableDataY(), 8, size_y); - memset(reference->MutableDataU(), 4, size_u); - memset(reference->MutableDataV(), 2, size_v); - - rtc::scoped_refptr i444_buffer(I444Buffer::Create(width, height)); - // Convert the reference buffer to I444. - memset(i444_buffer->MutableDataY(), 8, size_y); - memset(i444_buffer->MutableDataU(), 4, size_y); - memset(i444_buffer->MutableDataV(), 2, size_y); - - // Confirm YUV values are as expected. 
- for (int row = 0; row < height; row++) { - for (int col = 0; col < width; col++) { - EXPECT_EQ(8, GetY(i444_buffer, col, row)); - EXPECT_EQ(4, GetU(i444_buffer, col, row)); - EXPECT_EQ(2, GetV(i444_buffer, col, row)); - } - } - - rtc::scoped_refptr i420_buffer(i444_buffer->ToI420()); - EXPECT_EQ(height, i420_buffer->height()); - EXPECT_EQ(width, i420_buffer->width()); - EXPECT_TRUE(test::FrameBufsEqual(reference, i420_buffer)); -} - -} // namespace webrtc diff --git a/api/video/video_frame_buffer.cc b/api/video/video_frame_buffer.cc index 6c46f782a0..f6904b5a08 100644 --- a/api/video/video_frame_buffer.cc +++ b/api/video/video_frame_buffer.cc @@ -11,7 +11,6 @@ #include "api/video/video_frame_buffer.h" #include "api/video/i420_buffer.h" -#include "api/video/i444_buffer.h" #include "api/video/nv12_buffer.h" #include "rtc_base/checks.h" @@ -118,19 +117,6 @@ int I444BufferInterface::ChromaHeight() const { return height(); } -rtc::scoped_refptr I444BufferInterface::CropAndScale( - int offset_x, - int offset_y, - int crop_width, - int crop_height, - int scaled_width, - int scaled_height) { - rtc::scoped_refptr result = - I444Buffer::Create(scaled_width, scaled_height); - result->CropAndScaleFrom(*this, offset_x, offset_y, crop_width, crop_height); - return result; -} - VideoFrameBuffer::Type I010BufferInterface::type() const { return Type::kI010; } diff --git a/api/video/video_frame_buffer.h b/api/video/video_frame_buffer.h index 6098a48117..7b0782f9c4 100644 --- a/api/video/video_frame_buffer.h +++ b/api/video/video_frame_buffer.h @@ -184,13 +184,6 @@ class I444BufferInterface : public PlanarYuv8Buffer { int ChromaWidth() const final; int ChromaHeight() const final; - rtc::scoped_refptr CropAndScale(int offset_x, - int offset_y, - int crop_width, - int crop_height, - int scaled_width, - int scaled_height) override; - protected: ~I444BufferInterface() override {} }; diff --git a/api/video_codecs/h264_profile_level_id.cc b/api/video_codecs/h264_profile_level_id.cc index 02b43ba4f2..fa47758189 100644 --- a/api/video_codecs/h264_profile_level_id.cc +++ b/api/video_codecs/h264_profile_level_id.cc @@ -68,8 +68,7 @@ constexpr ProfilePattern kProfilePatterns[] = { {0x58, BitPattern("10xx0000"), H264Profile::kProfileBaseline}, {0x4D, BitPattern("0x0x0000"), H264Profile::kProfileMain}, {0x64, BitPattern("00000000"), H264Profile::kProfileHigh}, - {0x64, BitPattern("00001100"), H264Profile::kProfileConstrainedHigh}, - {0xF4, BitPattern("00000000"), H264Profile::kProfilePredictiveHigh444}}; + {0x64, BitPattern("00001100"), H264Profile::kProfileConstrainedHigh}}; struct LevelConstraint { const int max_macroblocks_per_second; @@ -229,9 +228,6 @@ absl::optional H264ProfileLevelIdToString( case H264Profile::kProfileHigh: profile_idc_iop_string = "6400"; break; - case H264Profile::kProfilePredictiveHigh444: - profile_idc_iop_string = "f400"; - break; // Unrecognized profile. default: return absl::nullopt; diff --git a/api/video_codecs/h264_profile_level_id.h b/api/video_codecs/h264_profile_level_id.h index 4b46ad329d..2eab474360 100644 --- a/api/video_codecs/h264_profile_level_id.h +++ b/api/video_codecs/h264_profile_level_id.h @@ -25,6 +25,7 @@ enum class H264Profile { kProfileMain, kProfileConstrainedHigh, kProfileHigh, + // TODO(https://crbug.com/1251096): Implement support for this profile. 
kProfilePredictiveHigh444, }; diff --git a/common_video/include/video_frame_buffer_pool.h b/common_video/include/video_frame_buffer_pool.h index f26a9f7be7..539a6cc0f3 100644 --- a/common_video/include/video_frame_buffer_pool.h +++ b/common_video/include/video_frame_buffer_pool.h @@ -17,7 +17,6 @@ #include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" -#include "api/video/i444_buffer.h" #include "api/video/nv12_buffer.h" #include "rtc_base/race_checker.h" #include "rtc_base/ref_counted_object.h" @@ -44,7 +43,6 @@ class VideoFrameBufferPool { // and there are less than `max_number_of_buffers` pending, a buffer is // created. Returns null otherwise. rtc::scoped_refptr CreateI420Buffer(int width, int height); - rtc::scoped_refptr CreateI444Buffer(int width, int height); rtc::scoped_refptr CreateNV12Buffer(int width, int height); // Changes the max amount of buffers in the pool to the new value. diff --git a/common_video/video_frame_buffer_pool.cc b/common_video/video_frame_buffer_pool.cc index 267cab1a71..9c88f0b0df 100644 --- a/common_video/video_frame_buffer_pool.cc +++ b/common_video/video_frame_buffer_pool.cc @@ -20,17 +20,12 @@ namespace { bool HasOneRef(const rtc::scoped_refptr& buffer) { // Cast to rtc::RefCountedObject is safe because this function is only called // on locally created VideoFrameBuffers, which are either - // `rtc::RefCountedObject`, `rtc::RefCountedObject` or - // `rtc::RefCountedObject`. + // `rtc::RefCountedObject` or `rtc::RefCountedObject`. switch (buffer->type()) { case VideoFrameBuffer::Type::kI420: { return static_cast*>(buffer.get()) ->HasOneRef(); } - case VideoFrameBuffer::Type::kI444: { - return static_cast*>(buffer.get()) - ->HasOneRef(); - } case VideoFrameBuffer::Type::kNV12: { return static_cast*>(buffer.get()) ->HasOneRef(); @@ -121,37 +116,6 @@ rtc::scoped_refptr VideoFrameBufferPool::CreateI420Buffer( return buffer; } -rtc::scoped_refptr VideoFrameBufferPool::CreateI444Buffer( - int width, - int height) { - RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); - - rtc::scoped_refptr existing_buffer = - GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI444); - if (existing_buffer) { - // Cast is safe because the only way kI444 buffer is created is - // in the same function below, where |RefCountedObject| - // is created. - rtc::RefCountedObject* raw_buffer = - static_cast*>(existing_buffer.get()); - // Creates a new scoped_refptr, which is also pointing to the same - // RefCountedObject as buffer, increasing ref count. - return rtc::scoped_refptr(raw_buffer); - } - - if (buffers_.size() >= max_number_of_buffers_) - return nullptr; - // Allocate new buffer. 
- rtc::scoped_refptr buffer = - rtc::make_ref_counted(width, height); - - if (zero_initialize_) - buffer->InitializeData(); - - buffers_.push_back(buffer); - return buffer; -} - rtc::scoped_refptr VideoFrameBufferPool::CreateNV12Buffer( int width, int height) { diff --git a/media/engine/internal_decoder_factory.cc b/media/engine/internal_decoder_factory.cc index c24c488546..75465bc2d3 100644 --- a/media/engine/internal_decoder_factory.cc +++ b/media/engine/internal_decoder_factory.cc @@ -47,7 +47,7 @@ std::vector InternalDecoderFactory::GetSupportedFormats() formats.push_back(SdpVideoFormat(cricket::kVp8CodecName)); for (const SdpVideoFormat& format : SupportedVP9DecoderCodecs()) formats.push_back(format); - for (const SdpVideoFormat& h264_format : SupportedH264DecoderCodecs()) + for (const SdpVideoFormat& h264_format : SupportedH264Codecs()) formats.push_back(h264_format); if (kIsLibaomAv1DecoderSupported || diff --git a/modules/video_coding/codecs/h264/h264.cc b/modules/video_coding/codecs/h264/h264.cc index 2ac19ba0b6..8324b7c74e 100644 --- a/modules/video_coding/codecs/h264/h264.cc +++ b/modules/video_coding/codecs/h264/h264.cc @@ -80,34 +80,18 @@ std::vector SupportedH264Codecs() { // // We support both packetization modes 0 (mandatory) and 1 (optional, // preferred). - return { - CreateH264Format(H264Profile::kProfileBaseline, H264Level::kLevel3_1, - "1"), - CreateH264Format(H264Profile::kProfileBaseline, H264Level::kLevel3_1, - "0"), - CreateH264Format(H264Profile::kProfileConstrainedBaseline, - H264Level::kLevel3_1, "1"), - CreateH264Format(H264Profile::kProfileConstrainedBaseline, - H264Level::kLevel3_1, "0"), - CreateH264Format(H264Profile::kProfileMain, H264Level::kLevel3_1, "1"), - CreateH264Format(H264Profile::kProfileMain, H264Level::kLevel3_1, "0")}; -} - -std::vector SupportedH264DecoderCodecs() { - TRACE_EVENT0("webrtc", __func__); - if (!IsH264CodecSupported()) - return std::vector(); - - std::vector supportedCodecs = SupportedH264Codecs(); - - // OpenH264 doesn't yet support High Predictive 4:4:4 encoding but it does - // support decoding. 
- supportedCodecs.push_back(CreateH264Format( - H264Profile::kProfilePredictiveHigh444, H264Level::kLevel3_1, "1")); - supportedCodecs.push_back(CreateH264Format( - H264Profile::kProfilePredictiveHigh444, H264Level::kLevel3_1, "0")); - - return supportedCodecs; + return {CreateH264Format(H264Profile::kProfileBaseline, H264Level::kLevel3_1, + "1"), + CreateH264Format(H264Profile::kProfileBaseline, H264Level::kLevel3_1, + "0"), + CreateH264Format(H264Profile::kProfileConstrainedBaseline, + H264Level::kLevel3_1, "1"), + CreateH264Format(H264Profile::kProfileConstrainedBaseline, + H264Level::kLevel3_1, "0"), + CreateH264Format(H264Profile::kProfileMain, + H264Level::kLevel3_1, "1"), + CreateH264Format(H264Profile::kProfileMain, + H264Level::kLevel3_1, "0")}; } std::unique_ptr H264Encoder::Create( diff --git a/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/modules/video_coding/codecs/h264/h264_decoder_impl.cc index 31279b7379..b42aac5330 100644 --- a/modules/video_coding/codecs/h264/h264_decoder_impl.cc +++ b/modules/video_coding/codecs/h264/h264_decoder_impl.cc @@ -41,10 +41,8 @@ namespace webrtc { namespace { -constexpr std::array kPixelFormatsDefault = { - AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV444P}; -constexpr std::array kPixelFormatsFullRange = { - AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ444P}; +const AVPixelFormat kPixelFormatDefault = AV_PIX_FMT_YUV420P; +const AVPixelFormat kPixelFormatFullRange = AV_PIX_FMT_YUVJ420P; const size_t kYPlaneIndex = 0; const size_t kUPlaneIndex = 1; const size_t kVPlaneIndex = 2; @@ -78,17 +76,9 @@ int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context, // Necessary capability to be allowed to provide our own buffers. RTC_DCHECK(context->codec->capabilities | AV_CODEC_CAP_DR1); - // Limited or full range YUV420 or YUV444 is expected. - auto pixelFormatDefault = std::find_if( - kPixelFormatsDefault.begin(), kPixelFormatsDefault.end(), - [context](AVPixelFormat format) { return context->pix_fmt == format; }); - auto pixelFormatFullRange = std::find_if( - kPixelFormatsFullRange.begin(), kPixelFormatsFullRange.end(), - [context](AVPixelFormat format) { return context->pix_fmt == format; }); - // Limited or full range YUV420 is expected. - RTC_CHECK(pixelFormatDefault != kPixelFormatsDefault.end() || - pixelFormatFullRange != kPixelFormatsFullRange.end()); + RTC_CHECK(context->pix_fmt == kPixelFormatDefault || + context->pix_fmt == kPixelFormatFullRange); // `av_frame->width` and `av_frame->height` are set by FFmpeg. These are the // actual image's dimensions and may be different from `context->width` and @@ -122,43 +112,8 @@ int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context, // http://crbug.com/390941. Our pool is set up to zero-initialize new buffers. // TODO(nisse): Delete that feature from the video pool, instead add // an explicit call to InitializeData here. - rtc::scoped_refptr frame_buffer; - rtc::scoped_refptr i444_buffer; - rtc::scoped_refptr i420_buffer; - switch (context->pix_fmt) { - case AV_PIX_FMT_YUV420P: - case AV_PIX_FMT_YUVJ420P: - i420_buffer = - decoder->ffmpeg_buffer_pool_.CreateI420Buffer(width, height); - // Set `av_frame` members as required by FFmpeg. 
- av_frame->data[kYPlaneIndex] = i420_buffer->MutableDataY(); - av_frame->linesize[kYPlaneIndex] = i420_buffer->StrideY(); - av_frame->data[kUPlaneIndex] = i420_buffer->MutableDataU(); - av_frame->linesize[kUPlaneIndex] = i420_buffer->StrideU(); - av_frame->data[kVPlaneIndex] = i420_buffer->MutableDataV(); - av_frame->linesize[kVPlaneIndex] = i420_buffer->StrideV(); - RTC_DCHECK_EQ(av_frame->extended_data, av_frame->data); - frame_buffer = i420_buffer; - break; - case AV_PIX_FMT_YUV444P: - case AV_PIX_FMT_YUVJ444P: - i444_buffer = - decoder->ffmpeg_buffer_pool_.CreateI444Buffer(width, height); - // Set `av_frame` members as required by FFmpeg. - av_frame->data[kYPlaneIndex] = i444_buffer->MutableDataY(); - av_frame->linesize[kYPlaneIndex] = i444_buffer->StrideY(); - av_frame->data[kUPlaneIndex] = i444_buffer->MutableDataU(); - av_frame->linesize[kUPlaneIndex] = i444_buffer->StrideU(); - av_frame->data[kVPlaneIndex] = i444_buffer->MutableDataV(); - av_frame->linesize[kVPlaneIndex] = i444_buffer->StrideV(); - frame_buffer = i444_buffer; - break; - default: - RTC_LOG(LS_ERROR) << "Unsupported buffer type " << context->pix_fmt - << ". Check supported supported pixel formats!"; - decoder->ReportError(); - return -1; - } + rtc::scoped_refptr frame_buffer = + decoder->ffmpeg_buffer_pool_.CreateI420Buffer(width, height); int y_size = width * height; int uv_size = frame_buffer->ChromaWidth() * frame_buffer->ChromaHeight(); @@ -170,6 +125,15 @@ int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context, av_frame->format = context->pix_fmt; av_frame->reordered_opaque = context->reordered_opaque; + // Set `av_frame` members as required by FFmpeg. + av_frame->data[kYPlaneIndex] = frame_buffer->MutableDataY(); + av_frame->linesize[kYPlaneIndex] = frame_buffer->StrideY(); + av_frame->data[kUPlaneIndex] = frame_buffer->MutableDataU(); + av_frame->linesize[kUPlaneIndex] = frame_buffer->StrideU(); + av_frame->data[kVPlaneIndex] = frame_buffer->MutableDataV(); + av_frame->linesize[kVPlaneIndex] = frame_buffer->StrideV(); + RTC_DCHECK_EQ(av_frame->extended_data, av_frame->data); + // Create a VideoFrame object, to keep a reference to the buffer. // TODO(nisse): The VideoFrame's timestamp and rotation info is not used. // Refactor to do not use a VideoFrame object at all. 
@@ -233,6 +197,7 @@ bool H264DecoderImpl::Configure(const Settings& settings) { av_context_->coded_width = resolution.Width(); av_context_->coded_height = resolution.Height(); } + av_context_->pix_fmt = kPixelFormatDefault; av_context_->extradata = nullptr; av_context_->extradata_size = 0; @@ -352,103 +317,47 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image, RTC_DCHECK(input_frame); rtc::scoped_refptr frame_buffer = input_frame->video_frame_buffer(); - - // Instantiate Planar YUV8 buffer according to video frame buffer type - const webrtc::PlanarYuv8Buffer* planar_yuv8_buffer = nullptr; - VideoFrameBuffer::Type video_frame_buffer_type = frame_buffer->type(); - switch (video_frame_buffer_type) { - case VideoFrameBuffer::Type::kI420: - planar_yuv8_buffer = frame_buffer->GetI420(); - break; - case VideoFrameBuffer::Type::kI444: - planar_yuv8_buffer = frame_buffer->GetI444(); - break; - default: - // If this code is changed to allow other video frame buffer type, - // make sure that the code below which wraps I420/I444 buffer and - // code which converts to NV12 is changed - // to work with new video frame buffer type - - RTC_LOG(LS_ERROR) << "frame_buffer type: " - << static_cast(video_frame_buffer_type) - << " is not supported!"; - ReportError(); - return WEBRTC_VIDEO_CODEC_ERROR; - } + const webrtc::I420BufferInterface* i420_buffer = frame_buffer->GetI420(); // When needed, FFmpeg applies cropping by moving plane pointers and adjusting // frame width/height. Ensure that cropped buffers lie within the allocated // memory. - RTC_DCHECK_LE(av_frame_->width, planar_yuv8_buffer->width()); - RTC_DCHECK_LE(av_frame_->height, planar_yuv8_buffer->height()); - RTC_DCHECK_GE(av_frame_->data[kYPlaneIndex], planar_yuv8_buffer->DataY()); - RTC_DCHECK_LE(av_frame_->data[kYPlaneIndex] + - av_frame_->linesize[kYPlaneIndex] * av_frame_->height, - planar_yuv8_buffer->DataY() + planar_yuv8_buffer->StrideY() * - planar_yuv8_buffer->height()); - RTC_DCHECK_GE(av_frame_->data[kUPlaneIndex], planar_yuv8_buffer->DataU()); + RTC_DCHECK_LE(av_frame_->width, i420_buffer->width()); + RTC_DCHECK_LE(av_frame_->height, i420_buffer->height()); + RTC_DCHECK_GE(av_frame_->data[kYPlaneIndex], i420_buffer->DataY()); + RTC_DCHECK_LE( + av_frame_->data[kYPlaneIndex] + + av_frame_->linesize[kYPlaneIndex] * av_frame_->height, + i420_buffer->DataY() + i420_buffer->StrideY() * i420_buffer->height()); + RTC_DCHECK_GE(av_frame_->data[kUPlaneIndex], i420_buffer->DataU()); RTC_DCHECK_LE(av_frame_->data[kUPlaneIndex] + av_frame_->linesize[kUPlaneIndex] * av_frame_->height / 2, - planar_yuv8_buffer->DataU() + planar_yuv8_buffer->StrideU() * - planar_yuv8_buffer->height() / - 2); - RTC_DCHECK_GE(av_frame_->data[kVPlaneIndex], planar_yuv8_buffer->DataV()); + i420_buffer->DataU() + + i420_buffer->StrideU() * i420_buffer->height() / 2); + RTC_DCHECK_GE(av_frame_->data[kVPlaneIndex], i420_buffer->DataV()); RTC_DCHECK_LE(av_frame_->data[kVPlaneIndex] + av_frame_->linesize[kVPlaneIndex] * av_frame_->height / 2, - planar_yuv8_buffer->DataV() + planar_yuv8_buffer->StrideV() * - planar_yuv8_buffer->height() / - 2); + i420_buffer->DataV() + + i420_buffer->StrideV() * i420_buffer->height() / 2); - rtc::scoped_refptr cropped_buffer; - if (video_frame_buffer_type == VideoFrameBuffer::Type::kI420) { - cropped_buffer = WrapI420Buffer( - av_frame_->width, av_frame_->height, av_frame_->data[kYPlaneIndex], - av_frame_->linesize[kYPlaneIndex], av_frame_->data[kUPlaneIndex], - av_frame_->linesize[kUPlaneIndex], av_frame_->data[kVPlaneIndex], 
- av_frame_->linesize[kVPlaneIndex], - // To keep reference alive. - [frame_buffer] {}); - } else { - cropped_buffer = WrapI444Buffer( - av_frame_->width, av_frame_->height, av_frame_->data[kYPlaneIndex], - av_frame_->linesize[kYPlaneIndex], av_frame_->data[kUPlaneIndex], - av_frame_->linesize[kUPlaneIndex], av_frame_->data[kVPlaneIndex], - av_frame_->linesize[kVPlaneIndex], - // To keep reference alive. - [frame_buffer] {}); - } + rtc::scoped_refptr cropped_buffer = WrapI420Buffer( + av_frame_->width, av_frame_->height, av_frame_->data[kYPlaneIndex], + av_frame_->linesize[kYPlaneIndex], av_frame_->data[kUPlaneIndex], + av_frame_->linesize[kUPlaneIndex], av_frame_->data[kVPlaneIndex], + av_frame_->linesize[kVPlaneIndex], + // To keep reference alive. + [frame_buffer] {}); if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) { + const I420BufferInterface* cropped_i420 = cropped_buffer->GetI420(); auto nv12_buffer = output_buffer_pool_.CreateNV12Buffer( - cropped_buffer->width(), cropped_buffer->height()); - - const PlanarYuv8Buffer* cropped_planar_yuv8_buffer = nullptr; - if (video_frame_buffer_type == VideoFrameBuffer::Type::kI420) { - cropped_planar_yuv8_buffer = cropped_buffer->GetI420(); - libyuv::I420ToNV12(cropped_planar_yuv8_buffer->DataY(), - cropped_planar_yuv8_buffer->StrideY(), - cropped_planar_yuv8_buffer->DataU(), - cropped_planar_yuv8_buffer->StrideU(), - cropped_planar_yuv8_buffer->DataV(), - cropped_planar_yuv8_buffer->StrideV(), - nv12_buffer->MutableDataY(), nv12_buffer->StrideY(), - nv12_buffer->MutableDataUV(), nv12_buffer->StrideUV(), - planar_yuv8_buffer->width(), - planar_yuv8_buffer->height()); - } else { - cropped_planar_yuv8_buffer = cropped_buffer->GetI444(); - libyuv::I444ToNV12(cropped_planar_yuv8_buffer->DataY(), - cropped_planar_yuv8_buffer->StrideY(), - cropped_planar_yuv8_buffer->DataU(), - cropped_planar_yuv8_buffer->StrideU(), - cropped_planar_yuv8_buffer->DataV(), - cropped_planar_yuv8_buffer->StrideV(), - nv12_buffer->MutableDataY(), nv12_buffer->StrideY(), - nv12_buffer->MutableDataUV(), nv12_buffer->StrideUV(), - planar_yuv8_buffer->width(), - planar_yuv8_buffer->height()); - } - + cropped_i420->width(), cropped_i420->height()); + libyuv::I420ToNV12(cropped_i420->DataY(), cropped_i420->StrideY(), + cropped_i420->DataU(), cropped_i420->StrideU(), + cropped_i420->DataV(), cropped_i420->StrideV(), + nv12_buffer->MutableDataY(), nv12_buffer->StrideY(), + nv12_buffer->MutableDataUV(), nv12_buffer->StrideUV(), + i420_buffer->width(), i420_buffer->height()); cropped_buffer = nv12_buffer; } diff --git a/modules/video_coding/codecs/h264/include/h264.h b/modules/video_coding/codecs/h264/include/h264.h index 8c201d2b6a..8d1eebc79b 100644 --- a/modules/video_coding/codecs/h264/include/h264.h +++ b/modules/video_coding/codecs/h264/include/h264.h @@ -38,15 +38,10 @@ CreateH264Format(H264Profile profile, // and is not thread-safe. RTC_EXPORT void DisableRtcUseH264(); -// Returns a vector with all supported internal H264 encode profiles that we can +// Returns a vector with all supported internal H264 profiles that we can // negotiate in SDP, in order of preference. std::vector SupportedH264Codecs(); -// Returns a vector with all supported internal H264 decode profiles that we can -// negotiate in SDP, in order of preference. This will be available for receive -// only connections. 
-std::vector SupportedH264DecoderCodecs(); - class RTC_EXPORT H264Encoder : public VideoEncoder { public: static std::unique_ptr Create(const cricket::VideoCodec& codec); diff --git a/pc/media_session.cc b/pc/media_session.cc index 637dbec73b..ac57459078 100644 --- a/pc/media_session.cc +++ b/pc/media_session.cc @@ -2902,12 +2902,8 @@ void MediaSessionDescriptionFactory::ComputeVideoCodecsIntersectionAndUnion() { video_sendrecv_codecs_.clear(); all_video_codecs_.clear(); // Compute the video codecs union. - // Keep track of payload types to avoid collisions. - UsedPayloadTypes used_payload_types; for (const VideoCodec& send : video_send_codecs_) { - VideoCodec send_mutable = send; - used_payload_types.FindAndSetIdUsed(&send_mutable); - all_video_codecs_.push_back(send_mutable); + all_video_codecs_.push_back(send); if (!FindMatchingCodec(video_send_codecs_, video_recv_codecs_, send, nullptr)) { // TODO(kron): This check is violated by the unit test: @@ -2919,11 +2915,12 @@ void MediaSessionDescriptionFactory::ComputeVideoCodecsIntersectionAndUnion() { // RTC_DCHECK(!IsRtxCodec(send)); } } - // Use MergeCodecs to merge the second half of our list as it already checks - // and fixes problems with duplicate payload types. - MergeCodecs(video_recv_codecs_, &all_video_codecs_, - &used_payload_types); - + for (const VideoCodec& recv : video_recv_codecs_) { + if (!FindMatchingCodec(video_recv_codecs_, video_send_codecs_, + recv, nullptr)) { + all_video_codecs_.push_back(recv); + } + } // Use NegotiateCodecs to merge our codec lists, since the operation is // essentially the same. Put send_codecs as the offered_codecs, which is the // order we'd like to follow. The reasoning is that encoding is usually more