Revert "Added support for H264 YUV444 (I444) decoding."
This reverts commit 7d8ed3437295bcad945dab69f042cf52a05c0985.

Reason for revert: Breaks internal builds

Original change's description:
> Added support for H264 YUV444 (I444) decoding.
>
> Added Nutanix Inc. to the AUTHORS file.
>
> Bug: chromium:1251096
> Change-Id: Ib47c2b1f94797afb6c5090f3c46eae6f13110992
> Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/234540
> Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
> Reviewed-by: Stefan Holmer <stefan@webrtc.org>
> Reviewed-by: Niels Moller <nisse@webrtc.org>
> Commit-Queue: Ilya Nikolaevskiy <ilnik@webrtc.org>
> Cr-Commit-Position: refs/heads/main@{#35200}

TBR=ilnik@webrtc.org,nisse@webrtc.org,stefan@webrtc.org,peterhanspers@webrtc.org,webrtc-scoped@luci-project-accounts.iam.gserviceaccount.com,stefan.mitic@nutanix.com

Change-Id: I3048c353a2b6b4f3d4e5e53a88f48b456f1ce593
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: chromium:1251096
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/235203
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Reviewed-by: Artem Titov <titovartem@webrtc.org>
Reviewed-by: Niels Moller <nisse@webrtc.org>
Reviewed-by: Harald Alvestrand <hta@webrtc.org>
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#35207}
committed by WebRTC LUCI CQ
parent 389010438d
commit 54f377308f
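For context, the reverted change let the FFmpeg-based H.264 decoder output I444 as well as I420. I444 (4:4:4) keeps the chroma planes at full resolution, while I420 (4:2:0) subsamples them by two in each dimension, which is why the change below needed a second buffer type, a second allocation path, and a second NV12 conversion. A rough sketch of the raw plane arithmetic, illustrative only (these helper functions are not part of the CL):

#include <cstddef>

// Bytes for one 8-bit frame. I420: a full-size Y plane plus two
// quarter-size chroma planes. I444: three full-size planes, i.e. twice
// the total size of I420 for the same dimensions.
size_t I420FrameSize(size_t width, size_t height) {
  const size_t chroma = ((width + 1) / 2) * ((height + 1) / 2);
  return width * height + 2 * chroma;
}

size_t I444FrameSize(size_t width, size_t height) {
  return 3 * width * height;
}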
@@ -87,10 +87,6 @@ std::vector<SdpVideoFormat> SupportedH264Codecs() {
       CreateH264Format(H264Profile::kProfileConstrainedBaseline,
                        H264Level::kLevel3_1, "1"),
       CreateH264Format(H264Profile::kProfileConstrainedBaseline,
-                       H264Level::kLevel3_1, "0"),
-      CreateH264Format(H264Profile::kProfilePredictiveHigh444,
-                       H264Level::kLevel3_1, "1"),
-      CreateH264Format(H264Profile::kProfilePredictiveHigh444,
-                       H264Level::kLevel3_1, "0")};
+                       H264Level::kLevel3_1, "0")};
 }
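The removed entries are what stopped WebRTC from advertising H.264 High 4:4:4 Predictive over SDP. For readers unfamiliar with the helper, here is a hedged re-creation of what each CreateH264Format call produces, assuming the public H264ProfileLevelId API; the real helper sits next to SupportedH264Codecs and may differ in details:

#include <string>

#include "absl/types/optional.h"
#include "api/video_codecs/h264_profile_level_id.h"
#include "api/video_codecs/sdp_video_format.h"

// Hypothetical re-creation: builds an SDP "H264" format whose
// profile-level-id encodes the given profile and level.
webrtc::SdpVideoFormat CreateH264FormatSketch(
    webrtc::H264Profile profile,
    webrtc::H264Level level,
    const std::string& packetization_mode) {
  const absl::optional<std::string> profile_string =
      webrtc::H264ProfileLevelIdToString(
          webrtc::H264ProfileLevelId(profile, level));
  // Sketch only: assumes the profile/level pair is always encodable,
  // so the optional is dereferenced without a check.
  return webrtc::SdpVideoFormat(
      "H264",
      {{"profile-level-id", *profile_string},
       {"level-asymmetry-allowed", "1"},
       {"packetization-mode", packetization_mode}});
}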
@@ -41,10 +41,8 @@ namespace webrtc {
 
 namespace {
 
-constexpr std::array<AVPixelFormat, 2> kPixelFormatsDefault = {
-    AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV444P};
-constexpr std::array<AVPixelFormat, 2> kPixelFormatsFullRange = {
-    AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ444P};
+const AVPixelFormat kPixelFormatDefault = AV_PIX_FMT_YUV420P;
+const AVPixelFormat kPixelFormatFullRange = AV_PIX_FMT_YUVJ420P;
 const size_t kYPlaneIndex = 0;
 const size_t kUPlaneIndex = 1;
 const size_t kVPlaneIndex = 2;
@@ -78,17 +76,9 @@ int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context,
   // Necessary capability to be allowed to provide our own buffers.
   RTC_DCHECK(context->codec->capabilities | AV_CODEC_CAP_DR1);
 
-  // Limited or full range YUV420 or YUV444 is expected.
-  auto pixelFormatDefault = std::find_if(
-      kPixelFormatsDefault.begin(), kPixelFormatsDefault.end(),
-      [context](AVPixelFormat format) { return context->pix_fmt == format; });
-  auto pixelFormatFullRange = std::find_if(
-      kPixelFormatsFullRange.begin(), kPixelFormatsFullRange.end(),
-      [context](AVPixelFormat format) { return context->pix_fmt == format; });
-
-  RTC_CHECK(pixelFormatDefault != kPixelFormatsDefault.end() ||
-            pixelFormatFullRange != kPixelFormatsFullRange.end());
+  // Limited or full range YUV420 is expected.
+  RTC_CHECK(context->pix_fmt == kPixelFormatDefault ||
+            context->pix_fmt == kPixelFormatFullRange);
 
   // `av_frame->width` and `av_frame->height` are set by FFmpeg. These are the
   // actual image's dimensions and may be different from `context->width` and
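The removed find_if calls are just a linear membership test over the two-element format arrays, and since the lambda only tests for equality, std::find expresses the same thing more directly. A minimal standalone sketch of that pattern, reusing the array name from the diff (the FFmpeg include style matches the one WebRTC's h264_decoder_impl.cc uses):

#include <algorithm>
#include <array>

extern "C" {
#include "third_party/ffmpeg/libavcodec/avcodec.h"  // AVPixelFormat.
}

constexpr std::array<AVPixelFormat, 2> kPixelFormatsDefault = {
    AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV444P};

// Equivalent to the reverted find_if check: is pix_fmt one of the
// supported limited-range formats?
bool IsSupportedLimitedRange(AVPixelFormat pix_fmt) {
  return std::find(kPixelFormatsDefault.begin(), kPixelFormatsDefault.end(),
                   pix_fmt) != kPixelFormatsDefault.end();
}

After the revert the test collapses back to two direct comparisons against kPixelFormatDefault and kPixelFormatFullRange, which is all the I420-only decoder needs.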
@@ -122,43 +112,8 @@ int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context,
   // http://crbug.com/390941. Our pool is set up to zero-initialize new buffers.
   // TODO(nisse): Delete that feature from the video pool, instead add
   // an explicit call to InitializeData here.
-  rtc::scoped_refptr<PlanarYuv8Buffer> frame_buffer;
-  rtc::scoped_refptr<I444Buffer> i444_buffer;
-  rtc::scoped_refptr<I420Buffer> i420_buffer;
-  switch (context->pix_fmt) {
-    case AV_PIX_FMT_YUV420P:
-    case AV_PIX_FMT_YUVJ420P:
-      i420_buffer =
-          decoder->ffmpeg_buffer_pool_.CreateI420Buffer(width, height);
-      // Set `av_frame` members as required by FFmpeg.
-      av_frame->data[kYPlaneIndex] = i420_buffer->MutableDataY();
-      av_frame->linesize[kYPlaneIndex] = i420_buffer->StrideY();
-      av_frame->data[kUPlaneIndex] = i420_buffer->MutableDataU();
-      av_frame->linesize[kUPlaneIndex] = i420_buffer->StrideU();
-      av_frame->data[kVPlaneIndex] = i420_buffer->MutableDataV();
-      av_frame->linesize[kVPlaneIndex] = i420_buffer->StrideV();
-      RTC_DCHECK_EQ(av_frame->extended_data, av_frame->data);
-      frame_buffer = i420_buffer;
-      break;
-    case AV_PIX_FMT_YUV444P:
-    case AV_PIX_FMT_YUVJ444P:
-      i444_buffer =
-          decoder->ffmpeg_buffer_pool_.CreateI444Buffer(width, height);
-      // Set `av_frame` members as required by FFmpeg.
-      av_frame->data[kYPlaneIndex] = i444_buffer->MutableDataY();
-      av_frame->linesize[kYPlaneIndex] = i444_buffer->StrideY();
-      av_frame->data[kUPlaneIndex] = i444_buffer->MutableDataU();
-      av_frame->linesize[kUPlaneIndex] = i444_buffer->StrideU();
-      av_frame->data[kVPlaneIndex] = i444_buffer->MutableDataV();
-      av_frame->linesize[kVPlaneIndex] = i444_buffer->StrideV();
-      frame_buffer = i444_buffer;
-      break;
-    default:
-      RTC_LOG(LS_ERROR) << "Unsupported buffer type " << context->pix_fmt
-                        << ". Check supported supported pixel formats!";
-      decoder->ReportError();
-      return -1;
-  }
+  rtc::scoped_refptr<I420Buffer> frame_buffer =
+      decoder->ffmpeg_buffer_pool_.CreateI420Buffer(width, height);
 
   int y_size = width * height;
   int uv_size = frame_buffer->ChromaWidth() * frame_buffer->ChromaHeight();
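Both the removed switch and the single-format code it restores implement FFmpeg's direct-rendering contract (AV_CODEC_CAP_DR1 with a custom get_buffer2 callback): the callback hands FFmpeg plane pointers into memory WebRTC owns, so decoded pixels land directly in a reusable frame buffer. A minimal sketch of that wiring for the I420 case, using the plane-index constants from the diff; allocation, ref-counting, and error handling omitted:

// Sketch: point an AVFrame at the planes of a caller-owned I420 buffer so
// FFmpeg decodes into it directly (the core of the AVGetBuffer2 paths above).
static void AttachI420Planes(AVFrame* av_frame, webrtc::I420Buffer* buffer) {
  av_frame->data[kYPlaneIndex] = buffer->MutableDataY();
  av_frame->linesize[kYPlaneIndex] = buffer->StrideY();
  av_frame->data[kUPlaneIndex] = buffer->MutableDataU();
  av_frame->linesize[kUPlaneIndex] = buffer->StrideU();
  av_frame->data[kVPlaneIndex] = buffer->MutableDataV();
  av_frame->linesize[kVPlaneIndex] = buffer->StrideV();
}

The removed I444 arm was the same wiring against an I444Buffer whose chroma planes are full resolution; the revert keeps only the I420 path and lets the RTC_CHECK earlier in the function reject 4:4:4 streams.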
@@ -170,6 +125,15 @@ int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context,
   av_frame->format = context->pix_fmt;
   av_frame->reordered_opaque = context->reordered_opaque;
 
+  // Set `av_frame` members as required by FFmpeg.
+  av_frame->data[kYPlaneIndex] = frame_buffer->MutableDataY();
+  av_frame->linesize[kYPlaneIndex] = frame_buffer->StrideY();
+  av_frame->data[kUPlaneIndex] = frame_buffer->MutableDataU();
+  av_frame->linesize[kUPlaneIndex] = frame_buffer->StrideU();
+  av_frame->data[kVPlaneIndex] = frame_buffer->MutableDataV();
+  av_frame->linesize[kVPlaneIndex] = frame_buffer->StrideV();
+  RTC_DCHECK_EQ(av_frame->extended_data, av_frame->data);
+
   // Create a VideoFrame object, to keep a reference to the buffer.
   // TODO(nisse): The VideoFrame's timestamp and rotation info is not used.
   // Refactor to do not use a VideoFrame object at all.
@@ -233,6 +197,7 @@ bool H264DecoderImpl::Configure(const Settings& settings) {
     av_context_->coded_width = resolution.Width();
     av_context_->coded_height = resolution.Height();
   }
+  av_context_->pix_fmt = kPixelFormatDefault;
   av_context_->extradata = nullptr;
   av_context_->extradata_size = 0;
 
@@ -352,103 +317,47 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
   RTC_DCHECK(input_frame);
   rtc::scoped_refptr<VideoFrameBuffer> frame_buffer =
       input_frame->video_frame_buffer();
 
-  // Instantiate Planar YUV8 buffer according to video frame buffer type
-  const webrtc::PlanarYuv8Buffer* planar_yuv8_buffer = nullptr;
-  VideoFrameBuffer::Type video_frame_buffer_type = frame_buffer->type();
-  switch (video_frame_buffer_type) {
-    case VideoFrameBuffer::Type::kI420:
-      planar_yuv8_buffer = frame_buffer->GetI420();
-      break;
-    case VideoFrameBuffer::Type::kI444:
-      planar_yuv8_buffer = frame_buffer->GetI444();
-      break;
-    default:
-      // If this code is changed to allow other video frame buffer type,
-      // make sure that the code below which wraps I420/I444 buffer and
-      // code which converts to NV12 is changed
-      // to work with new video frame buffer type
-
-      RTC_LOG(LS_ERROR) << "frame_buffer type: "
-                        << static_cast<int32_t>(video_frame_buffer_type)
-                        << " is not supported!";
-      ReportError();
-      return WEBRTC_VIDEO_CODEC_ERROR;
-  }
+  const webrtc::I420BufferInterface* i420_buffer = frame_buffer->GetI420();
 
   // When needed, FFmpeg applies cropping by moving plane pointers and adjusting
   // frame width/height. Ensure that cropped buffers lie within the allocated
   // memory.
-  RTC_DCHECK_LE(av_frame_->width, planar_yuv8_buffer->width());
-  RTC_DCHECK_LE(av_frame_->height, planar_yuv8_buffer->height());
-  RTC_DCHECK_GE(av_frame_->data[kYPlaneIndex], planar_yuv8_buffer->DataY());
-  RTC_DCHECK_LE(av_frame_->data[kYPlaneIndex] +
-                    av_frame_->linesize[kYPlaneIndex] * av_frame_->height,
-                planar_yuv8_buffer->DataY() + planar_yuv8_buffer->StrideY() *
-                                                  planar_yuv8_buffer->height());
-  RTC_DCHECK_GE(av_frame_->data[kUPlaneIndex], planar_yuv8_buffer->DataU());
+  RTC_DCHECK_LE(av_frame_->width, i420_buffer->width());
+  RTC_DCHECK_LE(av_frame_->height, i420_buffer->height());
+  RTC_DCHECK_GE(av_frame_->data[kYPlaneIndex], i420_buffer->DataY());
+  RTC_DCHECK_LE(
+      av_frame_->data[kYPlaneIndex] +
+          av_frame_->linesize[kYPlaneIndex] * av_frame_->height,
+      i420_buffer->DataY() + i420_buffer->StrideY() * i420_buffer->height());
+  RTC_DCHECK_GE(av_frame_->data[kUPlaneIndex], i420_buffer->DataU());
   RTC_DCHECK_LE(av_frame_->data[kUPlaneIndex] +
                     av_frame_->linesize[kUPlaneIndex] * av_frame_->height / 2,
-                planar_yuv8_buffer->DataU() + planar_yuv8_buffer->StrideU() *
-                                                  planar_yuv8_buffer->height() /
-                                                  2);
-  RTC_DCHECK_GE(av_frame_->data[kVPlaneIndex], planar_yuv8_buffer->DataV());
+                i420_buffer->DataU() +
+                    i420_buffer->StrideU() * i420_buffer->height() / 2);
+  RTC_DCHECK_GE(av_frame_->data[kVPlaneIndex], i420_buffer->DataV());
   RTC_DCHECK_LE(av_frame_->data[kVPlaneIndex] +
                     av_frame_->linesize[kVPlaneIndex] * av_frame_->height / 2,
-                planar_yuv8_buffer->DataV() + planar_yuv8_buffer->StrideV() *
-                                                  planar_yuv8_buffer->height() /
-                                                  2);
+                i420_buffer->DataV() +
+                    i420_buffer->StrideV() * i420_buffer->height() / 2);
 
-  rtc::scoped_refptr<webrtc::VideoFrameBuffer> cropped_buffer;
-  if (video_frame_buffer_type == VideoFrameBuffer::Type::kI420) {
-    cropped_buffer = WrapI420Buffer(
-        av_frame_->width, av_frame_->height, av_frame_->data[kYPlaneIndex],
-        av_frame_->linesize[kYPlaneIndex], av_frame_->data[kUPlaneIndex],
-        av_frame_->linesize[kUPlaneIndex], av_frame_->data[kVPlaneIndex],
-        av_frame_->linesize[kVPlaneIndex],
-        // To keep reference alive.
-        [frame_buffer] {});
-  } else {
-    cropped_buffer = WrapI444Buffer(
-        av_frame_->width, av_frame_->height, av_frame_->data[kYPlaneIndex],
-        av_frame_->linesize[kYPlaneIndex], av_frame_->data[kUPlaneIndex],
-        av_frame_->linesize[kUPlaneIndex], av_frame_->data[kVPlaneIndex],
-        av_frame_->linesize[kVPlaneIndex],
-        // To keep reference alive.
-        [frame_buffer] {});
-  }
+  rtc::scoped_refptr<webrtc::VideoFrameBuffer> cropped_buffer = WrapI420Buffer(
+      av_frame_->width, av_frame_->height, av_frame_->data[kYPlaneIndex],
+      av_frame_->linesize[kYPlaneIndex], av_frame_->data[kUPlaneIndex],
+      av_frame_->linesize[kUPlaneIndex], av_frame_->data[kVPlaneIndex],
+      av_frame_->linesize[kVPlaneIndex],
+      // To keep reference alive.
+      [frame_buffer] {});
 
   if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) {
+    const I420BufferInterface* cropped_i420 = cropped_buffer->GetI420();
     auto nv12_buffer = output_buffer_pool_.CreateNV12Buffer(
-        cropped_buffer->width(), cropped_buffer->height());
-
-    const PlanarYuv8Buffer* cropped_planar_yuv8_buffer = nullptr;
-    if (video_frame_buffer_type == VideoFrameBuffer::Type::kI420) {
-      cropped_planar_yuv8_buffer = cropped_buffer->GetI420();
-      libyuv::I420ToNV12(cropped_planar_yuv8_buffer->DataY(),
-                         cropped_planar_yuv8_buffer->StrideY(),
-                         cropped_planar_yuv8_buffer->DataU(),
-                         cropped_planar_yuv8_buffer->StrideU(),
-                         cropped_planar_yuv8_buffer->DataV(),
-                         cropped_planar_yuv8_buffer->StrideV(),
-                         nv12_buffer->MutableDataY(), nv12_buffer->StrideY(),
-                         nv12_buffer->MutableDataUV(), nv12_buffer->StrideUV(),
-                         planar_yuv8_buffer->width(),
-                         planar_yuv8_buffer->height());
-    } else {
-      cropped_planar_yuv8_buffer = cropped_buffer->GetI444();
-      libyuv::I444ToNV12(cropped_planar_yuv8_buffer->DataY(),
-                         cropped_planar_yuv8_buffer->StrideY(),
-                         cropped_planar_yuv8_buffer->DataU(),
-                         cropped_planar_yuv8_buffer->StrideU(),
-                         cropped_planar_yuv8_buffer->DataV(),
-                         cropped_planar_yuv8_buffer->StrideV(),
-                         nv12_buffer->MutableDataY(), nv12_buffer->StrideY(),
-                         nv12_buffer->MutableDataUV(), nv12_buffer->StrideUV(),
-                         planar_yuv8_buffer->width(),
-                         planar_yuv8_buffer->height());
-    }
-
+        cropped_i420->width(), cropped_i420->height());
+    libyuv::I420ToNV12(cropped_i420->DataY(), cropped_i420->StrideY(),
+                       cropped_i420->DataU(), cropped_i420->StrideU(),
+                       cropped_i420->DataV(), cropped_i420->StrideV(),
+                       nv12_buffer->MutableDataY(), nv12_buffer->StrideY(),
+                       nv12_buffer->MutableDataUV(), nv12_buffer->StrideUV(),
+                       i420_buffer->width(), i420_buffer->height());
     cropped_buffer = nv12_buffer;
   }
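After the revert, the NV12 fast path is a single libyuv call on the wrapped I420 buffer. A self-contained sketch of that conversion; NV12Buffer::Create and the I420ToNV12 signature are used as I understand them from api/video/nv12_buffer.h and libyuv's convert_from.h, so treat the includes and names as assumptions:

#include "api/video/nv12_buffer.h"
#include "api/video/video_frame_buffer.h"
#include "third_party/libyuv/include/libyuv/convert_from.h"

// Sketch: repack a decoded I420 frame as NV12 (interleaved UV plane), the
// layout requested when preferred_output_format_ is kNV12.
rtc::scoped_refptr<webrtc::NV12Buffer> I420ToNV12Sketch(
    const webrtc::I420BufferInterface& src) {
  rtc::scoped_refptr<webrtc::NV12Buffer> dst =
      webrtc::NV12Buffer::Create(src.width(), src.height());
  libyuv::I420ToNV12(src.DataY(), src.StrideY(), src.DataU(), src.StrideU(),
                     src.DataV(), src.StrideV(), dst->MutableDataY(),
                     dst->StrideY(), dst->MutableDataUV(), dst->StrideUV(),
                     src.width(), src.height());
  return dst;
}

The removed else branch did the analogous repack with libyuv::I444ToNV12, which is what made the NV12 path work for 4:4:4 input before this revert.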