Partial revert of ColorSpace information copying around decoders
This partially reverts these two CLs:

1) Reland "Copy video frames metadata between encoded and plain frames in one place"
   https://webrtc.googlesource.com/src/+/2ebf5239782bf6b46d4aa812f34fa9f9e5a02be9
2) Don't copy video frame metadata in each encoder/decoder
   https://webrtc.googlesource.com/src/+/ab62b2ee51e622be6d0aade15e87e927fa60e6f2

The problem with them was that ColorSpace was always copied from the
EncodedImage in the GenericDecoder, which overwrote the ColorSpace
information produced by the decoder. If the decoder applied a color space
conversion, or the bitstream color space information differed from the one
signaled by WebRTC, incorrect color space data was passed to the renderer.

This CL removes the introduced change regarding color space data: the
GenericDecoder no longer copies or stores it, and the software decoders are
restored to copy it themselves. The relevant tests are also removed.

Bug: chromium:982486
Change-Id: I989e01476ff7f7df376c05578ab8f540b95a1dd2
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/145323
Reviewed-by: Philip Eliasson <philipel@webrtc.org>
Commit-Queue: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#28556}
committed by Commit Bot
parent 8b3e4e2d11
commit a0e2609a08
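For orientation, here is a minimal sketch (not part of the commit itself) of the decode-side policy this revert restores: the software decoder, rather than the GenericDecoder, decides which ColorSpace ends up on the decoded VideoFrame, preferring the one signaled alongside the EncodedImage and falling back to what the decoder derives from the bitstream. It is modeled on the VP8/VP9 hunks below; VideoFrame::Builder, ColorSpace and set_color_space() are the WebRTC types and calls that appear in the diff, while the helper name SelectDecodedColorSpace is made up for illustration.

// Illustrative sketch only, not code from this CL.
#include "api/video/color_space.h"
#include "api/video/video_frame.h"

namespace {

// Hypothetical helper: prefer the ColorSpace signaled with the encoded frame
// (e.g. input_image.ColorSpace()); otherwise fall back to the color space the
// decoder extracted from the bitstream.
void SelectDecodedColorSpace(webrtc::VideoFrame::Builder& builder,
                             const webrtc::ColorSpace* explicit_color_space,
                             const webrtc::ColorSpace& bitstream_color_space) {
  if (explicit_color_space) {
    builder.set_color_space(*explicit_color_space);
  } else {
    builder.set_color_space(bitstream_color_space);
  }
}

}  // namespace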
@@ -267,7 +267,7 @@ int LibvpxVp8Decoder::Decode(const EncodedImage& input_image,
   vpx_codec_err_t vpx_ret =
       vpx_codec_control(decoder_, VPXD_GET_LAST_QUANTIZER, &qp);
   RTC_DCHECK_EQ(vpx_ret, VPX_CODEC_OK);
-  ret = ReturnFrame(img, input_image.Timestamp(), qp);
+  ret = ReturnFrame(img, input_image.Timestamp(), qp, input_image.ColorSpace());
   if (ret != 0) {
     // Reset to avoid requesting key frames too often.
     if (ret < 0 && propagation_cnt_ > 0)
@@ -283,9 +283,11 @@ int LibvpxVp8Decoder::Decode(const EncodedImage& input_image,
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-int LibvpxVp8Decoder::ReturnFrame(const vpx_image_t* img,
+int LibvpxVp8Decoder::ReturnFrame(
+    const vpx_image_t* img,
     uint32_t timestamp,
-    int qp) {
+    int qp,
+    const webrtc::ColorSpace* explicit_color_space) {
   if (img == NULL) {
     // Decoder OK and NULL image => No show frame
     return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
@@ -320,6 +322,7 @@ int LibvpxVp8Decoder::ReturnFrame(const vpx_image_t* img,
   VideoFrame decoded_image = VideoFrame::Builder()
                                  .set_video_frame_buffer(buffer)
                                  .set_timestamp_rtp(timestamp)
+                                 .set_color_space(explicit_color_space)
                                  .build();
   decode_complete_callback_->Decoded(decoded_image, absl::nullopt, qp);
 
@@ -48,7 +48,10 @@ class LibvpxVp8Decoder : public VideoDecoder {
 
  private:
   class QpSmoother;
-  int ReturnFrame(const vpx_image_t* img, uint32_t timeStamp, int qp);
+  int ReturnFrame(const vpx_image_t* img,
+                  uint32_t timeStamp,
+                  int qp,
+                  const webrtc::ColorSpace* explicit_color_space);
   const bool use_postproc_arm_;
 
   I420BufferPool buffer_pool_;
@@ -1664,16 +1664,19 @@ int VP9DecoderImpl::Decode(const EncodedImage& input_image,
   vpx_codec_err_t vpx_ret =
       vpx_codec_control(decoder_, VPXD_GET_LAST_QUANTIZER, &qp);
   RTC_DCHECK_EQ(vpx_ret, VPX_CODEC_OK);
-  int ret = ReturnFrame(img, input_image.Timestamp(), qp);
+  int ret =
+      ReturnFrame(img, input_image.Timestamp(), qp, input_image.ColorSpace());
   if (ret != 0) {
     return ret;
   }
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
-int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img,
+int VP9DecoderImpl::ReturnFrame(
+    const vpx_image_t* img,
     uint32_t timestamp,
-    int qp) {
+    int qp,
+    const webrtc::ColorSpace* explicit_color_space) {
   if (img == nullptr) {
     // Decoder OK and nullptr image => No show frame.
     return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
@@ -1717,10 +1720,13 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img,
 
   auto builder = VideoFrame::Builder()
                      .set_video_frame_buffer(img_wrapped_buffer)
-                     .set_timestamp_rtp(timestamp)
-                     .set_color_space(ExtractVP9ColorSpace(img->cs, img->range,
-                                                           img->bit_depth));
+                     .set_timestamp_rtp(timestamp);
+  if (explicit_color_space) {
+    builder.set_color_space(*explicit_color_space);
+  } else {
+    builder.set_color_space(
+        ExtractVP9ColorSpace(img->cs, img->range, img->bit_depth));
+  }
   VideoFrame decoded_image = builder.build();
 
   decode_complete_callback_->Decoded(decoded_image, absl::nullopt, qp);
@@ -196,7 +196,10 @@ class VP9DecoderImpl : public VP9Decoder {
   const char* ImplementationName() const override;
 
  private:
-  int ReturnFrame(const vpx_image_t* img, uint32_t timestamp, int qp);
+  int ReturnFrame(const vpx_image_t* img,
+                  uint32_t timestamp,
+                  int qp,
+                  const webrtc::ColorSpace* explicit_color_space);
 
   // Memory pool used to share buffers between libvpx and webrtc.
   Vp9FrameBufferPool frame_buffer_pool_;
@@ -82,9 +82,6 @@ void VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage,
   }
 
   decodedImage.set_ntp_time_ms(frameInfo->ntp_time_ms);
-  if (frameInfo->color_space) {
-    decodedImage.set_color_space(frameInfo->color_space);
-  }
   decodedImage.set_packet_infos(frameInfo->packet_infos);
   decodedImage.set_rotation(frameInfo->rotation);
 
@@ -207,11 +204,6 @@ int32_t VCMGenericDecoder::Decode(const VCMEncodedFrame& frame, int64_t nowMs) {
   _frameInfos[_nextFrameInfoIdx].timing = frame.video_timing();
   _frameInfos[_nextFrameInfoIdx].ntp_time_ms =
       frame.EncodedImage().ntp_time_ms_;
-  if (frame.ColorSpace()) {
-    _frameInfos[_nextFrameInfoIdx].color_space = *frame.ColorSpace();
-  } else {
-    _frameInfos[_nextFrameInfoIdx].color_space = absl::nullopt;
-  }
   _frameInfos[_nextFrameInfoIdx].packet_infos = frame.PacketInfos();
 
   // Set correctly only for key frames. Thus, use latest key frame
@@ -35,8 +35,8 @@ struct VCMFrameInformation {
   VideoContentType content_type;
   EncodedImage::Timing timing;
   int64_t ntp_time_ms;
-  absl::optional<ColorSpace> color_space;
   RtpPacketInfos packet_infos;
+  // ColorSpace is not storred here, as it might be modified by decoders.
 };
 
 class VCMDecodedFrameCallback : public DecodedImageCallback {
@@ -89,40 +89,6 @@ class GenericDecoderTest : public ::testing::Test {
   ReceiveCallback user_callback_;
 };
 
-TEST_F(GenericDecoderTest, PassesColorSpace) {
-  webrtc::ColorSpace color_space =
-      CreateTestColorSpace(/*with_hdr_metadata=*/true);
-  VCMEncodedFrame encoded_frame;
-  encoded_frame.SetColorSpace(color_space);
-  generic_decoder_.Decode(encoded_frame, clock_.TimeInMilliseconds());
-  absl::optional<VideoFrame> decoded_frame = user_callback_.WaitForFrame(10);
-  ASSERT_TRUE(decoded_frame.has_value());
-  absl::optional<webrtc::ColorSpace> decoded_color_space =
-      decoded_frame->color_space();
-  ASSERT_TRUE(decoded_color_space.has_value());
-  EXPECT_EQ(*decoded_color_space, color_space);
-}
-
-TEST_F(GenericDecoderTest, PassesColorSpaceForDelayedDecoders) {
-  webrtc::ColorSpace color_space =
-      CreateTestColorSpace(/*with_hdr_metadata=*/true);
-  decoder_.SetDelayedDecoding(100);
-
-  {
-    // Ensure the original frame is destroyed before the decoding is completed.
-    VCMEncodedFrame encoded_frame;
-    encoded_frame.SetColorSpace(color_space);
-    generic_decoder_.Decode(encoded_frame, clock_.TimeInMilliseconds());
-  }
-
-  absl::optional<VideoFrame> decoded_frame = user_callback_.WaitForFrame(200);
-  ASSERT_TRUE(decoded_frame.has_value());
-  absl::optional<webrtc::ColorSpace> decoded_color_space =
-      decoded_frame->color_space();
-  ASSERT_TRUE(decoded_color_space.has_value());
-  EXPECT_EQ(*decoded_color_space, color_space);
-}
-
 TEST_F(GenericDecoderTest, PassesPacketInfos) {
   RtpPacketInfos packet_infos = CreatePacketInfos(3);
   VCMEncodedFrame encoded_frame;
@@ -299,22 +299,6 @@ TEST_F(VideoReceiveStreamTestWithFakeDecoder, PassesRotation) {
   EXPECT_EQ(kRotation, fake_renderer_.rotation());
 }
 
-TEST_F(VideoReceiveStreamTestWithFakeDecoder, PassesColorSpace) {
-  auto test_frame = absl::make_unique<FrameObjectFake>();
-  test_frame->SetPayloadType(99);
-  test_frame->id.picture_id = 0;
-  webrtc::ColorSpace color_space =
-      CreateTestColorSpace(/*with_hdr_metadata=*/true);
-  test_frame->SetColorSpace(color_space);
-
-  video_receive_stream_->Start();
-  video_receive_stream_->OnCompleteFrame(std::move(test_frame));
-  EXPECT_TRUE(fake_renderer_.WaitForRenderedFrame(kDefaultTimeOutMs));
-
-  ASSERT_TRUE(fake_renderer_.color_space().has_value());
-  EXPECT_EQ(color_space, *fake_renderer_.color_space());
-}
-
 TEST_F(VideoReceiveStreamTestWithFakeDecoder, PassesPacketInfos) {
   auto test_frame = absl::make_unique<FrameObjectFake>();
   test_frame->SetPayloadType(99);