Add 4:2:2 8-bit and 10-bit decoding support

Bug: webrtc:14195
Change-Id: I2048d567850ae669d76d9e593752683f3c76499f
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/266180
Reviewed-by: Niels Moller <nisse@webrtc.org>
Commit-Queue: Ilya Nikolaevskiy <ilnik@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#37306}
Author: Sergio Garcia Murillo
Date: 2022-06-22 14:42:48 +02:00
Committed by: WebRTC LUCI CQ
Parent: 0ed3a2b6cb
Commit: 179f40e81a
8 changed files with 92 additions and 50 deletions


@@ -281,55 +281,59 @@ int LibvpxVp9Decoder::ReturnFrame(
   // The buffer can be used directly by the VideoFrame (without copy) by
   // using a Wrapped*Buffer.
   rtc::scoped_refptr<VideoFrameBuffer> img_wrapped_buffer;
-  switch (img->bit_depth) {
-    case 8:
-      if (img->fmt == VPX_IMG_FMT_I420) {
-        if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) {
-          rtc::scoped_refptr<NV12Buffer> nv12_buffer =
-              output_buffer_pool_.CreateNV12Buffer(img->d_w, img->d_h);
-          if (!nv12_buffer.get()) {
-            // Buffer pool is full.
-            return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
-          }
-          img_wrapped_buffer = nv12_buffer;
-          libyuv::I420ToNV12(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
-                             img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
-                             img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
-                             nv12_buffer->MutableDataY(),
-                             nv12_buffer->StrideY(),
-                             nv12_buffer->MutableDataUV(),
-                             nv12_buffer->StrideUV(), img->d_w, img->d_h);
-          // No holding onto img_buffer as it's no longer needed and can be
-          // reused.
-        } else {
-          img_wrapped_buffer = WrapI420Buffer(
-              img->d_w, img->d_h, img->planes[VPX_PLANE_Y],
-              img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U],
-              img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V],
-              img->stride[VPX_PLANE_V],
-              // WrappedI420Buffer's mechanism for allowing the release of its
-              // frame buffer is through a callback function. This is where we
-              // should release `img_buffer`.
-              [img_buffer] {});
-        }
-      } else if (img->fmt == VPX_IMG_FMT_I444) {
-        img_wrapped_buffer = WrapI444Buffer(
-            img->d_w, img->d_h, img->planes[VPX_PLANE_Y],
-            img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U],
-            img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V],
-            img->stride[VPX_PLANE_V],
-            // WrappedI444Buffer's mechanism for allowing the release of its
-            // frame buffer is through a callback function. This is where we
-            // should release `img_buffer`.
-            [img_buffer] {});
-      } else {
-        RTC_LOG(LS_ERROR)
-            << "Unsupported pixel format produced by the decoder: "
-            << static_cast<int>(img->fmt);
-        return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
-      }
-      break;
-    case 10:
+  switch (img->fmt) {
+    case VPX_IMG_FMT_I420:
+      if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) {
+        rtc::scoped_refptr<NV12Buffer> nv12_buffer =
+            output_buffer_pool_.CreateNV12Buffer(img->d_w, img->d_h);
+        if (!nv12_buffer.get()) {
+          // Buffer pool is full.
+          return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
+        }
+        img_wrapped_buffer = nv12_buffer;
+        libyuv::I420ToNV12(img->planes[VPX_PLANE_Y], img->stride[VPX_PLANE_Y],
+                           img->planes[VPX_PLANE_U], img->stride[VPX_PLANE_U],
+                           img->planes[VPX_PLANE_V], img->stride[VPX_PLANE_V],
+                           nv12_buffer->MutableDataY(), nv12_buffer->StrideY(),
+                           nv12_buffer->MutableDataUV(),
+                           nv12_buffer->StrideUV(), img->d_w, img->d_h);
+        // No holding onto img_buffer as it's no longer needed and can be
+        // reused.
+      } else {
+        img_wrapped_buffer = WrapI420Buffer(
+            img->d_w, img->d_h, img->planes[VPX_PLANE_Y],
+            img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U],
+            img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V],
+            img->stride[VPX_PLANE_V],
+            // WrappedI420Buffer's mechanism for allowing the release of its
+            // frame buffer is through a callback function. This is where we
+            // should release `img_buffer`.
+            [img_buffer] {});
+      }
+      break;
+    case VPX_IMG_FMT_I422:
+      img_wrapped_buffer = WrapI422Buffer(
+          img->d_w, img->d_h, img->planes[VPX_PLANE_Y],
+          img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U],
+          img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V],
+          img->stride[VPX_PLANE_V],
+          // WrappedI444Buffer's mechanism for allowing the release of its
+          // frame buffer is through a callback function. This is where we
+          // should release `img_buffer`.
+          [img_buffer] {});
+      break;
+    case VPX_IMG_FMT_I444:
+      img_wrapped_buffer = WrapI444Buffer(
+          img->d_w, img->d_h, img->planes[VPX_PLANE_Y],
+          img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U],
+          img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V],
+          img->stride[VPX_PLANE_V],
+          // WrappedI444Buffer's mechanism for allowing the release of its
+          // frame buffer is through a callback function. This is where we
+          // should release `img_buffer`.
+          [img_buffer] {});
+      break;
+    case VPX_IMG_FMT_I42016:
       img_wrapped_buffer = WrapI010Buffer(
           img->d_w, img->d_h,
           reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_Y]),
@@ -339,9 +343,19 @@ int LibvpxVp9Decoder::ReturnFrame(
           reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_V]),
           img->stride[VPX_PLANE_V] / 2, [img_buffer] {});
       break;
+    case VPX_IMG_FMT_I42216:
+      img_wrapped_buffer = WrapI210Buffer(
+          img->d_w, img->d_h,
+          reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_Y]),
+          img->stride[VPX_PLANE_Y] / 2,
+          reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_U]),
+          img->stride[VPX_PLANE_U] / 2,
+          reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_V]),
+          img->stride[VPX_PLANE_V] / 2, [img_buffer] {});
+      break;
     default:
-      RTC_LOG(LS_ERROR) << "Unsupported bit depth produced by the decoder: "
-                        << img->bit_depth;
+      RTC_LOG(LS_ERROR) << "Unsupported pixel format produced by the decoder: "
+                        << static_cast<int>(img->fmt);
       return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
   }
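
For orientation only: the rewritten switch above dispatches on the libvpx output pixel format rather than on bit depth. Below is a minimal, standalone sketch (not part of the CL) of the resulting format-to-buffer mapping; it assumes only libvpx's vpx/vpx_image.h, and the returned strings name the WebRTC wrappers used in the hunk above.

#include <vpx/vpx_image.h>

// Illustrative only -- the real dispatch lives in LibvpxVp9Decoder::ReturnFrame
// (see the hunk above). Names the WebRTC buffer wrapper used for a given
// libvpx output format.
const char* WebrtcBufferForVpxFormat(vpx_img_fmt_t fmt) {
  switch (fmt) {
    case VPX_IMG_FMT_I420:    // 4:2:0, 8 bit
      return "NV12Buffer copy or WrapI420Buffer";
    case VPX_IMG_FMT_I422:    // 4:2:2, 8 bit (new in this change)
      return "WrapI422Buffer";
    case VPX_IMG_FMT_I444:    // 4:4:4, 8 bit
      return "WrapI444Buffer";
    case VPX_IMG_FMT_I42016:  // 4:2:0, 10 bit
      return "WrapI010Buffer";
    case VPX_IMG_FMT_I42216:  // 4:2:2, 10 bit (new in this change)
      return "WrapI210Buffer";
    default:                  // Anything else is logged and dropped.
      return "unsupported";
  }
}

VPX_IMG_FMT_I422 and VPX_IMG_FMT_I42216 are the two outputs this change starts handling; the other listed formats were already handled before this change.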


@@ -628,6 +628,10 @@ int LibvpxVp9Encoder::InitEncode(const VideoCodec* inst,
       config_->g_profile = 2;
       config_->g_input_bit_depth = 10;
       break;
+    case VP9Profile::kProfile3:
+      // Encoding of profile 3 is not implemented.
+      RTC_DCHECK_NOTREACHED();
+      break;
   }
   // Creating a wrapper to the image - setting image data to nullptr. Actual
@@ -1194,6 +1198,10 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image,
       raw_->stride[VPX_PLANE_V] = i010_buffer->StrideV() * 2;
       break;
     }
+    case VP9Profile::kProfile3: {
+      RTC_DCHECK_NOTREACHED();
+      break;
+    }
   }
   vpx_enc_frame_flags_t flags = 0;


@@ -54,12 +54,15 @@ std::vector<SdpVideoFormat> SupportedVP9Codecs() {
 std::vector<SdpVideoFormat> SupportedVP9DecoderCodecs() {
 #ifdef RTC_ENABLE_VP9
   std::vector<SdpVideoFormat> supported_formats = SupportedVP9Codecs();
-  // The WebRTC internal decoder supports VP9 profile 1. However, there's
-  // currently no way of sending VP9 profile 1 using the internal encoder.
+  // The WebRTC internal decoder supports VP9 profile 1 and 3. However, there's
+  // currently no way of sending VP9 profile 1 or 3 using the internal encoder.
   // It would require extended support for I444, I422, and I440 buffers.
   supported_formats.push_back(SdpVideoFormat(
       cricket::kVp9CodecName,
       {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile1)}}));
+  supported_formats.push_back(SdpVideoFormat(
+      cricket::kVp9CodecName,
+      {{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile3)}}));
   return supported_formats;
 #else
   return std::vector<SdpVideoFormat>();
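
With the extra push_back above, the internal decoder path also advertises VP9 profile 3 over SDP. Below is a minimal usage sketch (not part of the CL) of how a caller could verify this; it assumes only the declarations already referenced in the hunk (SupportedVP9DecoderCodecs, kVP9FmtpProfileId, VP9ProfileToString) and the usual WebRTC include paths.

#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/vp9_profile.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"

// Returns true if the built-in VP9 decoder advertises profile 3 support.
bool InternalVp9DecoderAdvertisesProfile3() {
  for (const webrtc::SdpVideoFormat& format :
       webrtc::SupportedVP9DecoderCodecs()) {
    const auto it = format.parameters.find(webrtc::kVP9FmtpProfileId);
    if (it != format.parameters.end() &&
        it->second ==
            webrtc::VP9ProfileToString(webrtc::VP9Profile::kProfile3)) {
      return true;  // Profile 3 is now in the advertised format list.
    }
  }
  return false;
}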