Add fix for 8-bit H264 HDR content
8-bit H264 HDR content is not rendered correctly in Chrome on Windows. This is
a temporary fix that converts the 8-bit buffer to a 10-bit buffer if the color
space indicates that the buffer should be rendered as HDR.

Bug: webrtc:10575
Change-Id: I106612ec489c6371fa774424a4cf07d9bad40fc3
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/134040
Commit-Queue: Johannes Kron <kron@webrtc.org>
Reviewed-by: Erik Språng <sprang@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#27766}
parent 4b1afbe60a
commit 042559fb92
committed by Commit Bot
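
For context, a minimal standalone sketch of the conversion this change performs, assuming WebRTC's I420BufferInterface, I010Buffer, and ColorSpace APIs as they appear in the diff below. The helper name MaybePromoteTo10Bit, its free-function form, and the exact include paths are illustrative assumptions, not part of the reviewed change.

// Illustrative sketch only; not part of the reviewed change.
#include "api/scoped_refptr.h"
#include "api/video/color_space.h"
#include "api/video/i010_buffer.h"
#include "api/video/video_frame_buffer.h"

namespace {

// Returns a 10-bit copy of |buffer| when |color_space| indicates HDR content
// (SMPTE ST 2084 / PQ transfer function); otherwise returns |buffer| as-is.
rtc::scoped_refptr<webrtc::VideoFrameBuffer> MaybePromoteTo10Bit(
    rtc::scoped_refptr<webrtc::I420BufferInterface> buffer,
    const webrtc::ColorSpace* color_space) {
  const bool is_hdr =
      color_space && color_space->transfer() ==
                         webrtc::ColorSpace::TransferID::kSMPTEST2084;
  if (!is_hdr)
    return buffer;
  // I010Buffer::Copy() widens each 8-bit sample to 10 bits, so the renderer
  // receives an explicitly 10-bit buffer for HDR content.
  return webrtc::I010Buffer::Copy(*buffer);
}

}  // namespace
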
@@ -22,6 +22,7 @@ extern "C" {
 #include "absl/memory/memory.h"
 #include "api/video/color_space.h"
+#include "api/video/i010_buffer.h"
 #include "api/video/i420_buffer.h"
 #include "common_video/include/video_frame_buffer.h"
 #include "modules/video_coding/codecs/h264/h264_color_space.h"
@@ -29,6 +30,7 @@ extern "C" {
 #include "rtc_base/critical_section.h"
 #include "rtc_base/keep_ref_until_done.h"
 #include "rtc_base/logging.h"
+#include "system_wrappers/include/field_trial.h"
 #include "system_wrappers/include/metrics.h"

 namespace webrtc {
@@ -142,11 +144,13 @@ void H264DecoderImpl::AVFreeBuffer2(void* opaque, uint8_t* data) {
   delete video_frame;
 }

-H264DecoderImpl::H264DecoderImpl() : pool_(true),
-                                     decoded_image_callback_(nullptr),
-                                     has_reported_init_(false),
-                                     has_reported_error_(false) {
-}
+H264DecoderImpl::H264DecoderImpl()
+    : kEnable8bitHdrFix_(
+          !field_trial::IsEnabled("WebRTC-8bitH264HdrKillSwitch")),
+      pool_(true),
+      decoded_image_callback_(nullptr),
+      has_reported_init_(false),
+      has_reported_error_(false) {}

 H264DecoderImpl::~H264DecoderImpl() {
   Release();
@@ -285,19 +289,6 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
   RTC_CHECK_EQ(av_frame_->data[kUPlaneIndex], i420_buffer->DataU());
   RTC_CHECK_EQ(av_frame_->data[kVPlaneIndex], i420_buffer->DataV());

-  // Pass on color space from input frame if explicitly specified.
-  const ColorSpace& color_space =
-      input_image.ColorSpace() ? *input_image.ColorSpace()
-                               : ExtractH264ColorSpace(av_context_.get());
-  VideoFrame decoded_frame =
-      VideoFrame::Builder()
-          .set_video_frame_buffer(input_frame->video_frame_buffer())
-          .set_timestamp_us(input_frame->timestamp_us())
-          .set_timestamp_rtp(input_image.Timestamp())
-          .set_rotation(input_frame->rotation())
-          .set_color_space(color_space)
-          .build();
-
   absl::optional<uint8_t> qp;
   // TODO(sakal): Maybe it is possible to get QP directly from FFmpeg.
   h264_bitstream_parser_.ParseBitstream(input_image.data(), input_image.size());
@@ -306,31 +297,57 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
     qp.emplace(qp_int);
   }

-  // The decoded image may be larger than what is supposed to be visible, see
-  // |AVGetBuffer2|'s use of |avcodec_align_dimensions|. This crops the image
-  // without copying the underlying buffer.
-  if (av_frame_->width != i420_buffer->width() ||
-      av_frame_->height != i420_buffer->height()) {
-    rtc::scoped_refptr<VideoFrameBuffer> cropped_buf = WrapI420Buffer(
+  rtc::scoped_refptr<VideoFrameBuffer> decoded_buffer;
+
+  // 8-bit HDR is currently not being rendered correctly in Chrome on Windows.
+  // If the ColorSpace transfer function is set to ST2084, convert the 8-bit
+  // buffer to a 10-bit buffer. This way 8-bit HDR content is rendered correctly
+  // in Chrome. This is a temporary fix until the root cause has been fixed in
+  // Chrome/WebRTC.
+  // TODO(chromium:956468): Remove this code and fix the underlying problem.
+  bool hdr_color_space =
+      input_image.ColorSpace() && input_image.ColorSpace()->transfer() ==
+                                      ColorSpace::TransferID::kSMPTEST2084;
+  if (kEnable8bitHdrFix_ && hdr_color_space) {
+    auto i010_buffer = I010Buffer::Copy(*i420_buffer);
+    // Crop image, see comment below.
+    decoded_buffer = WrapI010Buffer(
+        av_frame_->width, av_frame_->height, i010_buffer->DataY(),
+        i010_buffer->StrideY(), i010_buffer->DataU(), i010_buffer->StrideU(),
+        i010_buffer->DataV(), i010_buffer->StrideV(),
+        rtc::KeepRefUntilDone(i010_buffer));
+  } else if (av_frame_->width != i420_buffer->width() ||
+             av_frame_->height != i420_buffer->height()) {
+    // The decoded image may be larger than what is supposed to be visible, see
+    // |AVGetBuffer2|'s use of |avcodec_align_dimensions|. This crops the image
+    // without copying the underlying buffer.
+    decoded_buffer = WrapI420Buffer(
         av_frame_->width, av_frame_->height, i420_buffer->DataY(),
         i420_buffer->StrideY(), i420_buffer->DataU(), i420_buffer->StrideU(),
         i420_buffer->DataV(), i420_buffer->StrideV(),
         rtc::KeepRefUntilDone(i420_buffer));
-    VideoFrame cropped_frame =
-        VideoFrame::Builder()
-            .set_video_frame_buffer(cropped_buf)
-            .set_timestamp_ms(decoded_frame.render_time_ms())
-            .set_timestamp_rtp(decoded_frame.timestamp())
-            .set_rotation(decoded_frame.rotation())
-            .set_color_space(color_space)
-            .build();
-    // TODO(nisse): Timestamp and rotation are all zero here. Change decoder
-    // interface to pass a VideoFrameBuffer instead of a VideoFrame?
-    decoded_image_callback_->Decoded(cropped_frame, absl::nullopt, qp);
   } else {
-    // Return decoded frame.
-    decoded_image_callback_->Decoded(decoded_frame, absl::nullopt, qp);
+    decoded_buffer = input_frame->video_frame_buffer();
   }
+
+  // Pass on color space from input frame if explicitly specified.
+  const ColorSpace& color_space =
+      input_image.ColorSpace() ? *input_image.ColorSpace()
+                               : ExtractH264ColorSpace(av_context_.get());
+
+  VideoFrame decoded_frame = VideoFrame::Builder()
+                                 .set_video_frame_buffer(decoded_buffer)
+                                 .set_timestamp_us(input_frame->timestamp_us())
+                                 .set_timestamp_rtp(input_image.Timestamp())
+                                 .set_rotation(input_frame->rotation())
+                                 .set_color_space(color_space)
+                                 .build();
+
+  // Return decoded frame.
+  // TODO(nisse): Timestamp and rotation are all zero here. Change decoder
+  // interface to pass a VideoFrameBuffer instead of a VideoFrame?
+  decoded_image_callback_->Decoded(decoded_frame, absl::nullopt, qp);
+
   // Stop referencing it, possibly freeing |input_frame|.
   av_frame_unref(av_frame_.get());
   input_frame = nullptr;
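
Note on the kill switch: the new constructor applies the workaround only while field_trial::IsEnabled("WebRTC-8bitH264HdrKillSwitch") is false. As a usage sketch, here is how an embedder might activate the kill switch and so disable the fix, assuming the usual "<trial>/Enabled/" field-trial string format and the InitFieldTrialsFromString() helper from system_wrappers; both are assumptions about the surrounding WebRTC build, not part of this change.

// Illustrative sketch only; not part of the reviewed change.
#include "system_wrappers/include/field_trial.h"

// Activating the kill switch makes IsEnabled("WebRTC-8bitH264HdrKillSwitch")
// return true, so the decoder constructor sets kEnable8bitHdrFix_ to false and
// skips the 8-bit to 10-bit conversion. The trial string must stay alive for
// the lifetime of the process.
static const char kFieldTrials[] = "WebRTC-8bitH264HdrKillSwitch/Enabled/";

void DisableH264HdrWorkaround() {
  webrtc::field_trial::InitFieldTrialsFromString(kFieldTrials);
}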