Extract color space from H264 decoder
Makes use of ColorSpace class to extract info from H264 stream.

Bug: webrtc:9522
Change-Id: I651d16707260bb2867b1eda95dd4956d62c47279
Reviewed-on: https://webrtc-review.googlesource.com/90180
Reviewed-by: Erik Språng <sprang@webrtc.org>
Reviewed-by: Niklas Enbom <niklas.enbom@webrtc.org>
Commit-Queue: Emircan Uysaler <emircan@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#24085}
Committed by: Commit Bot
Parent: e09e6ef947
Commit: d4c16b131f
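For context on the ColorSpace class referenced in the message: it carries four pieces of signaling for a decoded frame, namely color primaries, transfer function, matrix coefficients, and range. A minimal sketch, spelled out with BT.709 limited-range values and assuming the enum spellings in api/video/color_space.h; this is illustrative only and not part of the change:

#include "api/video/color_space.h"

// The four coordinates a ColorSpace carries, shown with BT.709 limited-range
// values. Enum spellings are assumed from api/video/color_space.h.
webrtc::ColorSpace Bt709LimitedRange() {
  return webrtc::ColorSpace(
      webrtc::ColorSpace::PrimaryID::kBT709,   // Chromaticities of the RGB primaries.
      webrtc::ColorSpace::TransferID::kBT709,  // Opto-electronic transfer function.
      webrtc::ColorSpace::MatrixID::kBT709,    // YUV <-> RGB matrix coefficients.
      webrtc::ColorSpace::RangeID::kLimited);  // Studio swing (16..235) vs. full range.
}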
@@ -20,8 +20,10 @@ extern "C" {
 #include "third_party/ffmpeg/libavutil/imgutils.h"
 }  // extern "C"
 
+#include "api/video/color_space.h"
 #include "api/video/i420_buffer.h"
 #include "common_video/include/video_frame_buffer.h"
+#include "modules/video_coding/codecs/h264/h264_color_space.h"
 #include "rtc_base/checks.h"
 #include "rtc_base/criticalsection.h"
 #include "rtc_base/keep_ref_until_done.h"
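The new modules/video_coding/codecs/h264/h264_color_space.h include declares ExtractH264ColorSpace(), which is called in the hunk below. A minimal sketch of that kind of mapping, assuming FFmpeg's standard color metadata fields on AVCodecContext (color_primaries, color_trc, colorspace, color_range) and the current enum spellings in api/video/color_space.h; the real helper covers the full enum ranges rather than just BT.709:

extern "C" {
#include "third_party/ffmpeg/libavcodec/avcodec.h"
}  // extern "C"

#include "api/video/color_space.h"

// Hypothetical, reduced mapping: only BT.709 is distinguished here.
webrtc::ColorSpace ExtractH264ColorSpaceSketch(const AVCodecContext* codec) {
  const auto primaries = codec->color_primaries == AVCOL_PRI_BT709
                             ? webrtc::ColorSpace::PrimaryID::kBT709
                             : webrtc::ColorSpace::PrimaryID::kUnspecified;
  const auto transfer = codec->color_trc == AVCOL_TRC_BT709
                            ? webrtc::ColorSpace::TransferID::kBT709
                            : webrtc::ColorSpace::TransferID::kUnspecified;
  const auto matrix = codec->colorspace == AVCOL_SPC_BT709
                          ? webrtc::ColorSpace::MatrixID::kBT709
                          : webrtc::ColorSpace::MatrixID::kUnspecified;
  // AVCOL_RANGE_JPEG signals full (0..255) range; everything else is treated
  // here as the limited (studio) range default.
  const auto range = codec->color_range == AVCOL_RANGE_JPEG
                         ? webrtc::ColorSpace::RangeID::kFull
                         : webrtc::ColorSpace::RangeID::kLimited;
  return webrtc::ColorSpace(primaries, transfer, matrix, range);
}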
@@ -288,15 +290,24 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
   RTC_DCHECK_EQ(av_frame_->reordered_opaque, frame_timestamp_us);
 
   // Obtain the |video_frame| containing the decoded image.
-  VideoFrame* video_frame = static_cast<VideoFrame*>(
-      av_buffer_get_opaque(av_frame_->buf[0]));
-  RTC_DCHECK(video_frame);
+  VideoFrame* input_frame =
+      static_cast<VideoFrame*>(av_buffer_get_opaque(av_frame_->buf[0]));
+  RTC_DCHECK(input_frame);
   rtc::scoped_refptr<webrtc::I420BufferInterface> i420_buffer =
-      video_frame->video_frame_buffer()->GetI420();
+      input_frame->video_frame_buffer()->GetI420();
   RTC_CHECK_EQ(av_frame_->data[kYPlaneIndex], i420_buffer->DataY());
   RTC_CHECK_EQ(av_frame_->data[kUPlaneIndex], i420_buffer->DataU());
   RTC_CHECK_EQ(av_frame_->data[kVPlaneIndex], i420_buffer->DataV());
-  video_frame->set_timestamp(input_image._timeStamp);
+
+  const ColorSpace& color_space = ExtractH264ColorSpace(av_context_.get());
+  VideoFrame decoded_frame =
+      VideoFrame::Builder()
+          .set_video_frame_buffer(input_frame->video_frame_buffer())
+          .set_timestamp_us(input_frame->timestamp_us())
+          .set_timestamp_rtp(input_image._timeStamp)
+          .set_rotation(input_frame->rotation())
+          .set_color_space(color_space)
+          .build();
 
   absl::optional<uint8_t> qp;
   // TODO(sakal): Maybe it is possible to get QP directly from FFmpeg.
@@ -319,19 +330,24 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
         i420_buffer->DataU(), i420_buffer->StrideU(),
         i420_buffer->DataV(), i420_buffer->StrideV(),
         rtc::KeepRefUntilDone(i420_buffer)));
-    VideoFrame cropped_frame(
-        cropped_buf, video_frame->timestamp(), video_frame->render_time_ms(),
-        video_frame->rotation());
+    VideoFrame cropped_frame =
+        VideoFrame::Builder()
+            .set_video_frame_buffer(cropped_buf)
+            .set_timestamp_ms(decoded_frame.render_time_ms())
+            .set_timestamp_rtp(decoded_frame.timestamp())
+            .set_rotation(decoded_frame.rotation())
+            .set_color_space(color_space)
+            .build();
     // TODO(nisse): Timestamp and rotation are all zero here. Change decoder
     // interface to pass a VideoFrameBuffer instead of a VideoFrame?
     decoded_image_callback_->Decoded(cropped_frame, absl::nullopt, qp);
   } else {
     // Return decoded frame.
-    decoded_image_callback_->Decoded(*video_frame, absl::nullopt, qp);
+    decoded_image_callback_->Decoded(decoded_frame, absl::nullopt, qp);
   }
-  // Stop referencing it, possibly freeing |video_frame|.
+  // Stop referencing it, possibly freeing |input_frame|.
   av_frame_unref(av_frame_.get());
-  video_frame = nullptr;
+  input_frame = nullptr;
 
   return WEBRTC_VIDEO_CODEC_OK;
 }
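On the receive side, the color space attached above travels with the decoded frame. A small usage sketch, assuming VideoFrame exposes it through a color_space() accessor returning absl::optional<ColorSpace> (accessor names taken from the api/video headers, not from this change):

#include "absl/types/optional.h"
#include "api/video/color_space.h"
#include "api/video/video_frame.h"

// Returns true if the decoder signaled full-range (0..255) YUV. Without a
// signaled color space, callers keep the previous limited-range assumption.
bool IsFullRange(const webrtc::VideoFrame& frame) {
  const absl::optional<webrtc::ColorSpace> cs = frame.color_space();
  return cs && cs->range() == webrtc::ColorSpace::RangeID::kFull;
}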