Remove WebRTC-NV12Decode field trial
As mentioned in https://crbug.com/webrtc/11956, the results did not show
any performance improvements.

Bug: webrtc:11956
Change-Id: Ie050aa5a6083fcf0c776fb8d03e7d18644b37f97
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/272280
Reviewed-by: Philip Eliasson <philipel@webrtc.org>
Commit-Queue: Evan Shrubsole <eshr@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#37833}
Commit 9f1f48bdd8, parent df4dc3ca6b, committed by WebRTC LUCI CQ.
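For context, the trial being removed gated each decoder's preferred output format at construction time. Below is a minimal sketch of that pattern, mirroring the H264/VP8 constructors in the diff that follows; `PreferredDecodeFormat()` is an illustrative helper, not a function in the tree.

// Sketch only: how the "WebRTC-NV12Decode" field trial selected the
// decoders' preferred output format. With the trial removed, the decoders
// always produce I420 again.
#include "api/video/video_frame_buffer.h"
#include "system_wrappers/include/field_trial.h"

namespace webrtc {

VideoFrameBuffer::Type PreferredDecodeFormat() {
  return field_trial::IsEnabled("WebRTC-NV12Decode")
             ? VideoFrameBuffer::Type::kNV12
             : VideoFrameBuffer::Type::kI420;
}

}  // namespace webrtc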
--- a/modules/video_coding/codecs/h264/h264_decoder_impl.cc
+++ b/modules/video_coding/codecs/h264/h264_decoder_impl.cc
@@ -33,9 +33,7 @@ extern "C" {
 #include "modules/video_coding/codecs/h264/h264_color_space.h"
 #include "rtc_base/checks.h"
 #include "rtc_base/logging.h"
-#include "system_wrappers/include/field_trial.h"
 #include "system_wrappers/include/metrics.h"
-#include "third_party/libyuv/include/libyuv/convert.h"
 
 namespace webrtc {
 
@@ -245,10 +243,7 @@ H264DecoderImpl::H264DecoderImpl()
     : ffmpeg_buffer_pool_(true),
       decoded_image_callback_(nullptr),
       has_reported_init_(false),
-      has_reported_error_(false),
-      preferred_output_format_(field_trial::IsEnabled("WebRTC-NV12Decode")
-                                   ? VideoFrameBuffer::Type::kNV12
-                                   : VideoFrameBuffer::Type::kI420) {}
+      has_reported_error_(false) {}
 
 H264DecoderImpl::~H264DecoderImpl() {
   Release();
@@ -313,8 +308,7 @@ bool H264DecoderImpl::Configure(const Settings& settings) {
   av_frame_.reset(av_frame_alloc());
 
   if (absl::optional<int> buffer_pool_size = settings.buffer_pool_size()) {
-    if (!ffmpeg_buffer_pool_.Resize(*buffer_pool_size) ||
-        !output_buffer_pool_.Resize(*buffer_pool_size)) {
+    if (!ffmpeg_buffer_pool_.Resize(*buffer_pool_size)) {
       return false;
     }
   }
@@ -576,26 +570,6 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
 
-  // Preference for NV12 output format is ignored if actual format isn't
-  // trivially convertible to it.
-  if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12 &&
-      video_frame_buffer_type == VideoFrameBuffer::Type::kI420) {
-    auto nv12_buffer = output_buffer_pool_.CreateNV12Buffer(
-        cropped_buffer->width(), cropped_buffer->height());
-    const PlanarYuv8Buffer* cropped_planar_yuv_buffer =
-        cropped_buffer->GetI420();
-    libyuv::I420ToNV12(cropped_planar_yuv_buffer->DataY(),
-                       cropped_planar_yuv_buffer->StrideY(),
-                       cropped_planar_yuv_buffer->DataU(),
-                       cropped_planar_yuv_buffer->StrideU(),
-                       cropped_planar_yuv_buffer->DataV(),
-                       cropped_planar_yuv_buffer->StrideV(),
-                       nv12_buffer->MutableDataY(), nv12_buffer->StrideY(),
-                       nv12_buffer->MutableDataUV(), nv12_buffer->StrideUV(),
-                       planar_yuv_buffer->width(), planar_yuv_buffer->height());
-    cropped_buffer = nv12_buffer;
-  }
-
   // Pass on color space from input frame if explicitly specified.
   const ColorSpace& color_space =
       input_image.ColorSpace() ? *input_image.ColorSpace()
--- a/modules/video_coding/codecs/h264/h264_decoder_impl.h
+++ b/modules/video_coding/codecs/h264/h264_decoder_impl.h
@@ -91,8 +91,6 @@ class H264DecoderImpl : public H264Decoder {
 
   // Used by ffmpeg via `AVGetBuffer2()` to allocate I420 images.
   VideoFrameBufferPool ffmpeg_buffer_pool_;
-  // Used to allocate NV12 images if NV12 output is preferred.
-  VideoFrameBufferPool output_buffer_pool_;
   std::unique_ptr<AVCodecContext, AVCodecContextDeleter> av_context_;
   std::unique_ptr<AVFrame, AVFrameDeleter> av_frame_;
 
@@ -102,9 +100,6 @@ class H264DecoderImpl : public H264Decoder {
   bool has_reported_error_;
 
   webrtc::H264BitstreamParser h264_bitstream_parser_;
-
-  // Decoder should produce this format if possible.
-  const VideoFrameBuffer::Type preferred_output_format_;
 };
 
 }  // namespace webrtc
--- a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc
+++ b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc
@@ -128,10 +128,7 @@ LibvpxVp8Decoder::LibvpxVp8Decoder()
       key_frame_required_(true),
       deblock_params_(use_postproc_ ? GetPostProcParamsFromFieldTrialGroup()
                                     : absl::nullopt),
-      qp_smoother_(use_postproc_ ? new QpSmoother() : nullptr),
-      preferred_output_format_(field_trial::IsEnabled("WebRTC-NV12Decode")
-                                   ? VideoFrameBuffer::Type::kNV12
-                                   : VideoFrameBuffer::Type::kI420) {}
+      qp_smoother_(use_postproc_ ? new QpSmoother() : nullptr) {}
 
 LibvpxVp8Decoder::~LibvpxVp8Decoder() {
   inited_ = true;  // in order to do the actual release
@@ -188,7 +185,7 @@ int LibvpxVp8Decoder::Decode(const EncodedImage& input_image,
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
 
-  // Post process configurations.
+  // Post process configurations.
   if (use_postproc_) {
     vp8_postproc_cfg_t ppcfg;
     // MFQE enabled to reduce key frame popping.
@@ -321,35 +318,17 @@ int LibvpxVp8Decoder::ReturnFrame(
   // Allocate memory for decoded image.
   rtc::scoped_refptr<VideoFrameBuffer> buffer;
 
-  if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) {
-    // Convert instead of making a copy.
-    // Note: libvpx doesn't support creating NV12 image directly.
-    // Due to the bitstream structure such a change would just hide the
-    // conversion operation inside the decode call.
-    rtc::scoped_refptr<NV12Buffer> nv12_buffer =
-        buffer_pool_.CreateNV12Buffer(img->d_w, img->d_h);
-    buffer = nv12_buffer;
-    if (nv12_buffer.get()) {
-      libyuv::I420ToNV12(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
-                         img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
-                         img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
-                         nv12_buffer->MutableDataY(), nv12_buffer->StrideY(),
-                         nv12_buffer->MutableDataUV(), nv12_buffer->StrideUV(),
-                         img->d_w, img->d_h);
-    }
-  } else {
-    rtc::scoped_refptr<I420Buffer> i420_buffer =
-        buffer_pool_.CreateI420Buffer(img->d_w, img->d_h);
-    buffer = i420_buffer;
-    if (i420_buffer.get()) {
-      libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
-                       img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
-                       img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
-                       i420_buffer->MutableDataY(), i420_buffer->StrideY(),
-                       i420_buffer->MutableDataU(), i420_buffer->StrideU(),
-                       i420_buffer->MutableDataV(), i420_buffer->StrideV(),
-                       img->d_w, img->d_h);
-    }
-  }
+  rtc::scoped_refptr<I420Buffer> i420_buffer =
+      buffer_pool_.CreateI420Buffer(img->d_w, img->d_h);
+  buffer = i420_buffer;
+  if (i420_buffer.get()) {
+    libyuv::I420Copy(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
+                     img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
+                     img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
+                     i420_buffer->MutableDataY(), i420_buffer->StrideY(),
+                     i420_buffer->MutableDataU(), i420_buffer->StrideU(),
+                     i420_buffer->MutableDataV(), i420_buffer->StrideV(),
+                     img->d_w, img->d_h);
+  }
 
   if (!buffer.get()) {
--- a/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h
+++ b/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h
@@ -67,9 +67,6 @@ class LibvpxVp8Decoder : public VideoDecoder {
   bool key_frame_required_;
   const absl::optional<DeblockParams> deblock_params_;
   const std::unique_ptr<QpSmoother> qp_smoother_;
-
-  // Decoder should produce this format if possible.
-  const VideoFrameBuffer::Type preferred_output_format_;
 };
 
 }  // namespace webrtc
--- a/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc
+++ b/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc
@@ -95,16 +95,10 @@ ColorSpace ExtractVP9ColorSpace(vpx_color_space_t space_t,
 }  // namespace
 
 LibvpxVp9Decoder::LibvpxVp9Decoder()
-    : LibvpxVp9Decoder(FieldTrialBasedConfig()) {}
-LibvpxVp9Decoder::LibvpxVp9Decoder(const FieldTrialsView& trials)
     : decode_complete_callback_(nullptr),
       inited_(false),
       decoder_(nullptr),
-      key_frame_required_(true),
-      preferred_output_format_(
-          absl::StartsWith(trials.Lookup("WebRTC-NV12Decode"), "Enabled")
-              ? VideoFrameBuffer::Type::kNV12
-              : VideoFrameBuffer::Type::kI420) {}
+      key_frame_required_(true) {}
 
 LibvpxVp9Decoder::~LibvpxVp9Decoder() {
   inited_ = true;  // in order to do the actual release
@@ -177,8 +171,7 @@ bool LibvpxVp9Decoder::Configure(const Settings& settings) {
   // Always start with a complete key frame.
   key_frame_required_ = true;
   if (absl::optional<int> buffer_pool_size = settings.buffer_pool_size()) {
-    if (!libvpx_buffer_pool_.Resize(*buffer_pool_size) ||
-        !output_buffer_pool_.Resize(*buffer_pool_size)) {
+    if (!libvpx_buffer_pool_.Resize(*buffer_pool_size)) {
       return false;
     }
   }
@@ -283,33 +276,15 @@ int LibvpxVp9Decoder::ReturnFrame(
   rtc::scoped_refptr<VideoFrameBuffer> img_wrapped_buffer;
   switch (img->fmt) {
     case VPX_IMG_FMT_I420:
-      if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) {
-        rtc::scoped_refptr<NV12Buffer> nv12_buffer =
-            output_buffer_pool_.CreateNV12Buffer(img->d_w, img->d_h);
-        if (!nv12_buffer.get()) {
-          // Buffer pool is full.
-          return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
-        }
-        img_wrapped_buffer = nv12_buffer;
-        libyuv::I420ToNV12(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
-                           img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
-                           img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
-                           nv12_buffer->MutableDataY(), nv12_buffer->StrideY(),
-                           nv12_buffer->MutableDataUV(),
-                           nv12_buffer->StrideUV(), img->d_w, img->d_h);
-        // No holding onto img_buffer as it's no longer needed and can be
-        // reused.
-      } else {
-        img_wrapped_buffer = WrapI420Buffer(
-            img->d_w, img->d_h, img->planes[VPX_PLANE_Y],
-            img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U],
-            img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V],
-            img->stride[VPX_PLANE_V],
-            // WrappedI420Buffer's mechanism for allowing the release of its
-            // frame buffer is through a callback function. This is where we
-            // should release `img_buffer`.
-            [img_buffer] {});
-      }
+      img_wrapped_buffer = WrapI420Buffer(
+          img->d_w, img->d_h, img->planes[VPX_PLANE_Y],
+          img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U],
+          img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V],
+          img->stride[VPX_PLANE_V],
+          // WrappedI420Buffer's mechanism for allowing the release of its
+          // frame buffer is through a callback function. This is where we
+          // should release `img_buffer`.
+          [img_buffer] {});
       break;
     case VPX_IMG_FMT_I422:
       img_wrapped_buffer = WrapI422Buffer(
@@ -398,7 +373,6 @@ int LibvpxVp9Decoder::Release() {
   // still referenced externally are deleted once fully released, not returning
   // to the pool.
   libvpx_buffer_pool_.ClearPool();
-  output_buffer_pool_.Release();
   inited_ = false;
   return ret_val;
 }
--- a/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h
+++ b/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h
@@ -14,9 +14,7 @@
 
 #ifdef RTC_ENABLE_VP9
 
-#include "api/field_trials_view.h"
 #include "api/video_codecs/video_decoder.h"
-#include "common_video/include/video_frame_buffer_pool.h"
 #include "modules/video_coding/codecs/vp9/include/vp9.h"
 #include "modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h"
 #include "vpx/vp8cx.h"
@@ -26,8 +24,6 @@ namespace webrtc {
 class LibvpxVp9Decoder : public VP9Decoder {
  public:
   LibvpxVp9Decoder();
-  explicit LibvpxVp9Decoder(const FieldTrialsView& trials);
-
   virtual ~LibvpxVp9Decoder();
 
   bool Configure(const Settings& settings) override;
@@ -51,16 +47,11 @@ class LibvpxVp9Decoder : public VP9Decoder {
 
   // Memory pool used to share buffers between libvpx and webrtc.
   Vp9FrameBufferPool libvpx_buffer_pool_;
-  // Buffer pool used to allocate additionally needed NV12 buffers.
-  VideoFrameBufferPool output_buffer_pool_;
   DecodedImageCallback* decode_complete_callback_;
   bool inited_;
   vpx_codec_ctx_t* decoder_;
   bool key_frame_required_;
   Settings current_settings_;
-
-  // Decoder should produce this format if possible.
-  const VideoFrameBuffer::Type preferred_output_format_;
 };
 }  // namespace webrtc
 