Introduce VideoFrame::id to keep track of frames inside the application.

Also switch webrtc code from the deprecated VideoFrame constructors to the
builder API; a short usage sketch follows the change stats below.

Change-Id: Ie325bf1e9b4ff1e413fef3431ced8ed9ff725107
Bug: webrtc:10138
Reviewed-on: https://webrtc-review.googlesource.com/c/114422
Reviewed-by: Stefan Holmer <stefan@webrtc.org>
Reviewed-by: Niels Moller <nisse@webrtc.org>
Commit-Queue: Artem Titov <titovartem@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#26132}
Author:    Artem Titov
Date:      2019-01-03 23:49:37 +01:00
Committed: Commit Bot
Parent:    12cea05119
Commit:    1ebfb6aac7

47 changed files with 538 additions and 179 deletions
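
For context, a minimal sketch of how application code can use the new builder
API together with VideoFrame::id, based only on the calls visible in the hunks
below. The include paths, frame dimensions, timestamp values, and the uint16_t
type of the id are illustrative assumptions, not part of this change:

// Sketch only: build a frame, tag it with an application-chosen id, and read
// the id back later (e.g. in a decoder callback) to match it to the original.
#include <stdint.h>

#include "api/video/i420_buffer.h"   // assumed include path
#include "api/video/video_frame.h"   // assumed include path

webrtc::VideoFrame MakeTaggedFrame(uint16_t frame_id) {
  // Pixel data is left uninitialized; only the frame metadata matters here.
  rtc::scoped_refptr<webrtc::I420Buffer> buffer =
      webrtc::I420Buffer::Create(640 /* width */, 360 /* height */);
  return webrtc::VideoFrame::Builder()
      .set_video_frame_buffer(buffer)
      .set_timestamp_rtp(90000)                // RTP timestamp (90 kHz clock).
      .set_timestamp_ms(0)                     // Render time.
      .set_rotation(webrtc::kVideoRotation_0)
      .set_id(frame_id)                        // New field introduced here.
      .build();
}

The adapter changes below simply copy the id from the incoming frame to the
frame they construct (set_id(input_image.id()) and the like), so the tag
survives the encode/decode round trip.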


@@ -58,10 +58,14 @@ class MultiplexDecoderAdapter::AdapterDecodedImageCallback
 struct MultiplexDecoderAdapter::DecodedImageData {
   explicit DecodedImageData(AlphaCodecStream stream_idx)
       : stream_idx_(stream_idx),
-        decoded_image_(I420Buffer::Create(1 /* width */, 1 /* height */),
-                       0,
-                       0,
-                       kVideoRotation_0) {
+        decoded_image_(
+            VideoFrame::Builder()
+                .set_video_frame_buffer(
+                    I420Buffer::Create(1 /* width */, 1 /* height */))
+                .set_timestamp_rtp(0)
+                .set_timestamp_us(0)
+                .set_rotation(kVideoRotation_0)
+                .build()) {
     RTC_DCHECK_EQ(kAXXStream, stream_idx);
   }
   DecodedImageData(AlphaCodecStream stream_idx,
@@ -253,8 +257,13 @@ void MultiplexDecoderAdapter::MergeAlphaImages(
         merged_buffer, std::move(augmenting_data), augmenting_data_length));
   }
-  VideoFrame merged_image(merged_buffer, decoded_image->timestamp(),
-                          0 /* render_time_ms */, decoded_image->rotation());
+  VideoFrame merged_image = VideoFrame::Builder()
+                                .set_video_frame_buffer(merged_buffer)
+                                .set_timestamp_rtp(decoded_image->timestamp())
+                                .set_timestamp_us(0)
+                                .set_rotation(decoded_image->rotation())
+                                .set_id(decoded_image->id())
+                                .build();
   decoded_complete_callback_->Decoded(merged_image, decode_time_ms, qp);
 }


@@ -197,8 +197,13 @@ int MultiplexEncoderAdapter::Encode(
                      multiplex_dummy_planes_.data(), yuva_buffer->StrideU(),
                      multiplex_dummy_planes_.data(), yuva_buffer->StrideV(),
                      rtc::KeepRefUntilDone(input_image.video_frame_buffer()));
-  VideoFrame alpha_image(alpha_buffer, input_image.timestamp(),
-                         input_image.render_time_ms(), input_image.rotation());
+  VideoFrame alpha_image = VideoFrame::Builder()
+                               .set_video_frame_buffer(alpha_buffer)
+                               .set_timestamp_rtp(input_image.timestamp())
+                               .set_timestamp_ms(input_image.render_time_ms())
+                               .set_rotation(input_image.rotation())
+                               .set_id(input_image.id())
+                               .build();
   rv = encoders_[kAXXStream]->Encode(alpha_image, codec_specific_info,
                                      &adjusted_frame_types);
   return rv;


@@ -95,9 +95,14 @@ class TestMultiplexAdapter
     rtc::scoped_refptr<AugmentedVideoFrameBuffer> augmented_video_frame_buffer =
         new rtc::RefCountedObject<AugmentedVideoFrameBuffer>(
            video_buffer, std::move(data), 16);
-    return absl::WrapUnique<VideoFrame>(
-        new VideoFrame(augmented_video_frame_buffer, video_frame->timestamp(),
-                       video_frame->render_time_ms(), video_frame->rotation()));
+    return absl::make_unique<VideoFrame>(
+        VideoFrame::Builder()
+            .set_video_frame_buffer(augmented_video_frame_buffer)
+            .set_timestamp_rtp(video_frame->timestamp())
+            .set_timestamp_ms(video_frame->render_time_ms())
+            .set_rotation(video_frame->rotation())
+            .set_id(video_frame->id())
+            .build());
   }
 
   std::unique_ptr<VideoFrame> CreateI420AInputFrame() {
@@ -109,9 +114,13 @@ class TestMultiplexAdapter
         yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
         yuv_buffer->DataV(), yuv_buffer->StrideV(), yuv_buffer->DataY(),
         yuv_buffer->StrideY(), rtc::KeepRefUntilDone(yuv_buffer));
-    return absl::WrapUnique<VideoFrame>(
-        new VideoFrame(yuva_buffer, 123 /* RTP timestamp */,
-                       345 /* render_time_ms */, kVideoRotation_0));
+    return absl::make_unique<VideoFrame>(
+        VideoFrame::Builder()
+            .set_video_frame_buffer(yuva_buffer)
+            .set_timestamp_rtp(123)
+            .set_timestamp_ms(345)
+            .set_rotation(kVideoRotation_0)
+            .build());
   }
 
   std::unique_ptr<VideoFrame> CreateInputFrame(bool contains_alpha) {
@@ -120,9 +129,14 @@ class TestMultiplexAdapter
       video_frame = CreateI420AInputFrame();
     } else {
       VideoFrame* next_frame = NextInputFrame();
-      video_frame = absl::WrapUnique<VideoFrame>(new VideoFrame(
-          next_frame->video_frame_buffer(), next_frame->timestamp(),
-          next_frame->render_time_ms(), next_frame->rotation()));
+      video_frame = absl::make_unique<VideoFrame>(
+          VideoFrame::Builder()
+              .set_video_frame_buffer(next_frame->video_frame_buffer())
+              .set_timestamp_rtp(next_frame->timestamp())
+              .set_timestamp_ms(next_frame->render_time_ms())
+              .set_rotation(next_frame->rotation())
+              .set_id(next_frame->id())
+              .build());
     }
     if (supports_augmenting_data_) {
       video_frame = CreateDataAugmentedInputFrame(video_frame.get());
@@ -158,9 +172,12 @@ class TestMultiplexAdapter
         yuva_buffer->StrideA(), yuva_buffer->DataU(), yuva_buffer->StrideU(),
         yuva_buffer->DataV(), yuva_buffer->StrideV(),
         rtc::KeepRefUntilDone(video_frame_buffer));
-    return absl::WrapUnique<VideoFrame>(
-        new VideoFrame(axx_buffer, 123 /* RTP timestamp */,
-                       345 /* render_time_ms */, kVideoRotation_0));
+    return absl::make_unique<VideoFrame>(VideoFrame::Builder()
+                                             .set_video_frame_buffer(axx_buffer)
+                                             .set_timestamp_rtp(123)
+                                             .set_timestamp_ms(345)
+                                             .set_rotation(kVideoRotation_0)
+                                             .build());
   }
 
  private: