Introduce VideoFrame::id to keep track of frames inside the application.

Also switch webrtc code from deprecated constructors to the builder API.

Change-Id: Ie325bf1e9b4ff1e413fef3431ced8ed9ff725107
Bug: webrtc:10138
Reviewed-on: https://webrtc-review.googlesource.com/c/114422
Reviewed-by: Stefan Holmer <stefan@webrtc.org>
Reviewed-by: Niels Moller <nisse@webrtc.org>
Commit-Queue: Artem Titov <titovartem@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#26132}
This commit is contained in:
Artem Titov
2019-01-03 23:49:37 +01:00
committed by Commit Bot
parent 12cea05119
commit 1ebfb6aac7
47 changed files with 538 additions and 179 deletions

View File

@ -20,6 +20,7 @@ extern "C" {
#include "third_party/ffmpeg/libavutil/imgutils.h"
} // extern "C"
#include "absl/memory/memory.h"
#include "api/video/color_space.h"
#include "api/video/i420_buffer.h"
#include "common_video/include/video_frame_buffer.h"
@ -120,12 +121,14 @@ int H264DecoderImpl::AVGetBuffer2(
// TODO(nisse): The VideoFrame's timestamp and rotation info is not used.
// Refactor to do not use a VideoFrame object at all.
av_frame->buf[0] = av_buffer_create(
av_frame->data[kYPlaneIndex],
total_size,
AVFreeBuffer2,
static_cast<void*>(new VideoFrame(frame_buffer,
kVideoRotation_0,
0 /* timestamp_us */)),
av_frame->data[kYPlaneIndex], total_size, AVFreeBuffer2,
static_cast<void*>(absl::make_unique<VideoFrame>(
VideoFrame::Builder()
.set_video_frame_buffer(frame_buffer)
.set_rotation(kVideoRotation_0)
.set_timestamp_us(0)
.build())
.release()),
0);
RTC_CHECK(av_frame->buf[0]);
return 0;

View File

@ -58,10 +58,14 @@ class MultiplexDecoderAdapter::AdapterDecodedImageCallback
struct MultiplexDecoderAdapter::DecodedImageData {
explicit DecodedImageData(AlphaCodecStream stream_idx)
: stream_idx_(stream_idx),
decoded_image_(I420Buffer::Create(1 /* width */, 1 /* height */),
0,
0,
kVideoRotation_0) {
decoded_image_(
VideoFrame::Builder()
.set_video_frame_buffer(
I420Buffer::Create(1 /* width */, 1 /* height */))
.set_timestamp_rtp(0)
.set_timestamp_us(0)
.set_rotation(kVideoRotation_0)
.build()) {
RTC_DCHECK_EQ(kAXXStream, stream_idx);
}
DecodedImageData(AlphaCodecStream stream_idx,
@ -253,8 +257,13 @@ void MultiplexDecoderAdapter::MergeAlphaImages(
merged_buffer, std::move(augmenting_data), augmenting_data_length));
}
VideoFrame merged_image(merged_buffer, decoded_image->timestamp(),
0 /* render_time_ms */, decoded_image->rotation());
VideoFrame merged_image = VideoFrame::Builder()
.set_video_frame_buffer(merged_buffer)
.set_timestamp_rtp(decoded_image->timestamp())
.set_timestamp_us(0)
.set_rotation(decoded_image->rotation())
.set_id(decoded_image->id())
.build();
decoded_complete_callback_->Decoded(merged_image, decode_time_ms, qp);
}

View File

@ -197,8 +197,13 @@ int MultiplexEncoderAdapter::Encode(
multiplex_dummy_planes_.data(), yuva_buffer->StrideU(),
multiplex_dummy_planes_.data(), yuva_buffer->StrideV(),
rtc::KeepRefUntilDone(input_image.video_frame_buffer()));
VideoFrame alpha_image(alpha_buffer, input_image.timestamp(),
input_image.render_time_ms(), input_image.rotation());
VideoFrame alpha_image = VideoFrame::Builder()
.set_video_frame_buffer(alpha_buffer)
.set_timestamp_rtp(input_image.timestamp())
.set_timestamp_ms(input_image.render_time_ms())
.set_rotation(input_image.rotation())
.set_id(input_image.id())
.build();
rv = encoders_[kAXXStream]->Encode(alpha_image, codec_specific_info,
&adjusted_frame_types);
return rv;

View File

@ -95,9 +95,14 @@ class TestMultiplexAdapter
rtc::scoped_refptr<AugmentedVideoFrameBuffer> augmented_video_frame_buffer =
new rtc::RefCountedObject<AugmentedVideoFrameBuffer>(
video_buffer, std::move(data), 16);
return absl::WrapUnique<VideoFrame>(
new VideoFrame(augmented_video_frame_buffer, video_frame->timestamp(),
video_frame->render_time_ms(), video_frame->rotation()));
return absl::make_unique<VideoFrame>(
VideoFrame::Builder()
.set_video_frame_buffer(augmented_video_frame_buffer)
.set_timestamp_rtp(video_frame->timestamp())
.set_timestamp_ms(video_frame->render_time_ms())
.set_rotation(video_frame->rotation())
.set_id(video_frame->id())
.build());
}
std::unique_ptr<VideoFrame> CreateI420AInputFrame() {
@ -109,9 +114,13 @@ class TestMultiplexAdapter
yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
yuv_buffer->DataV(), yuv_buffer->StrideV(), yuv_buffer->DataY(),
yuv_buffer->StrideY(), rtc::KeepRefUntilDone(yuv_buffer));
return absl::WrapUnique<VideoFrame>(
new VideoFrame(yuva_buffer, 123 /* RTP timestamp */,
345 /* render_time_ms */, kVideoRotation_0));
return absl::make_unique<VideoFrame>(
VideoFrame::Builder()
.set_video_frame_buffer(yuva_buffer)
.set_timestamp_rtp(123)
.set_timestamp_ms(345)
.set_rotation(kVideoRotation_0)
.build());
}
std::unique_ptr<VideoFrame> CreateInputFrame(bool contains_alpha) {
@ -120,9 +129,14 @@ class TestMultiplexAdapter
video_frame = CreateI420AInputFrame();
} else {
VideoFrame* next_frame = NextInputFrame();
video_frame = absl::WrapUnique<VideoFrame>(new VideoFrame(
next_frame->video_frame_buffer(), next_frame->timestamp(),
next_frame->render_time_ms(), next_frame->rotation()));
video_frame = absl::make_unique<VideoFrame>(
VideoFrame::Builder()
.set_video_frame_buffer(next_frame->video_frame_buffer())
.set_timestamp_rtp(next_frame->timestamp())
.set_timestamp_ms(next_frame->render_time_ms())
.set_rotation(next_frame->rotation())
.set_id(next_frame->id())
.build());
}
if (supports_augmenting_data_) {
video_frame = CreateDataAugmentedInputFrame(video_frame.get());
@ -158,9 +172,12 @@ class TestMultiplexAdapter
yuva_buffer->StrideA(), yuva_buffer->DataU(), yuva_buffer->StrideU(),
yuva_buffer->DataV(), yuva_buffer->StrideV(),
rtc::KeepRefUntilDone(video_frame_buffer));
return absl::WrapUnique<VideoFrame>(
new VideoFrame(axx_buffer, 123 /* RTP timestamp */,
345 /* render_time_ms */, kVideoRotation_0));
return absl::make_unique<VideoFrame>(VideoFrame::Builder()
.set_video_frame_buffer(axx_buffer)
.set_timestamp_rtp(123)
.set_timestamp_ms(345)
.set_rotation(kVideoRotation_0)
.build());
}
private:

View File

@ -267,9 +267,13 @@ void VideoProcessor::ProcessFrame() {
RTC_CHECK(buffer) << "Tried to read too many frames from the file.";
const size_t timestamp =
last_inputed_timestamp_ + kVideoPayloadTypeFrequency / framerate_fps_;
VideoFrame input_frame(buffer, static_cast<uint32_t>(timestamp),
static_cast<int64_t>(timestamp / kMsToRtpTimestamp),
webrtc::kVideoRotation_0);
VideoFrame input_frame =
VideoFrame::Builder()
.set_video_frame_buffer(buffer)
.set_timestamp_rtp(static_cast<uint32_t>(timestamp))
.set_timestamp_ms(static_cast<int64_t>(timestamp / kMsToRtpTimestamp))
.set_rotation(webrtc::kVideoRotation_0)
.build();
// Store input frame as a reference for quality calculations.
if (config_.decode && !config_.measure_cpu) {
if (input_frames_.size() == kMaxBufferedInputFrames) {
@ -323,8 +327,13 @@ int32_t VideoProcessor::VideoProcessorDecodeCompleteCallback::Decoded(
if (!task_queue_->IsCurrent()) {
// There might be a limited amount of output buffers, make a copy to make
// sure we don't block the decoder.
VideoFrame copy(I420Buffer::Copy(*image.video_frame_buffer()->ToI420()),
image.rotation(), image.timestamp_us());
VideoFrame copy = VideoFrame::Builder()
.set_video_frame_buffer(I420Buffer::Copy(
*image.video_frame_buffer()->ToI420()))
.set_rotation(image.rotation())
.set_timestamp_us(image.timestamp_us())
.set_id(image.id())
.build();
copy.set_timestamp(image.timestamp());
task_queue_->PostTask([this, copy]() {