Delete rtc::Callback0 and friends.

Replaced with std::function.

Bug: webrtc:6424
Change-Id: Iacc43822cb854ddde3cb1e5ddd863676cb07510a
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/205005
Reviewed-by: Philip Eliasson <philipel@webrtc.org>
Reviewed-by: Tommi <tommi@webrtc.org>
Commit-Queue: Niels Möller <nisse@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#33281}
Author:    Niels Möller
Date:      2021-02-02 11:37:39 +01:00
Committer: Commit Bot
Parent:    d6c81dbc20
Commit:    f4e3e2b83f

20 changed files with 42 additions and 564 deletions
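
For reference, the idiom that replaces rtc::KeepRefUntilDone() throughout the
hunks below is a no-op lambda that copy-captures an rtc::scoped_refptr and is
handed to code that now takes a std::function<void()>. A minimal sketch,
assuming a hypothetical KeepAlive() helper that is not part of this CL:

  #include <functional>

  #include "api/scoped_refptr.h"
  #include "api/video/video_frame_buffer.h"

  // Returns a callback that does nothing when invoked; the captured
  // scoped_refptr holds one reference to the buffer, released only when the
  // std::function itself is destroyed.
  std::function<void()> KeepAlive(
      rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer) {
    return [buffer = std::move(buffer)] {};
  }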

@@ -32,7 +32,6 @@ extern "C" {
 #include "common_video/include/video_frame_buffer.h"
 #include "modules/video_coding/codecs/h264/h264_color_space.h"
 #include "rtc_base/checks.h"
-#include "rtc_base/keep_ref_until_done.h"
 #include "rtc_base/logging.h"
 #include "system_wrappers/include/field_trial.h"
 #include "system_wrappers/include/metrics.h"
@@ -302,8 +301,9 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
   VideoFrame* input_frame =
       static_cast<VideoFrame*>(av_buffer_get_opaque(av_frame_->buf[0]));
   RTC_DCHECK(input_frame);
-  const webrtc::I420BufferInterface* i420_buffer =
-      input_frame->video_frame_buffer()->GetI420();
+  rtc::scoped_refptr<VideoFrameBuffer> frame_buffer =
+      input_frame->video_frame_buffer();
+  const webrtc::I420BufferInterface* i420_buffer = frame_buffer->GetI420();
 
   // When needed, FFmpeg applies cropping by moving plane pointers and adjusting
   // frame width/height. Ensure that cropped buffers lie within the allocated
@@ -330,7 +330,9 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image,
       av_frame_->width, av_frame_->height, av_frame_->data[kYPlaneIndex],
       av_frame_->linesize[kYPlaneIndex], av_frame_->data[kUPlaneIndex],
       av_frame_->linesize[kUPlaneIndex], av_frame_->data[kVPlaneIndex],
-      av_frame_->linesize[kVPlaneIndex], rtc::KeepRefUntilDone(i420_buffer));
+      av_frame_->linesize[kVPlaneIndex],
+      // To keep reference alive.
+      [frame_buffer] {});
 
   if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) {
     const I420BufferInterface* cropped_i420 = cropped_buffer->GetI420();
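
A note on the hunk above: it introduces a named frame_buffer because the
keep-alive lambda has to capture something ref-counted; capturing the raw
I420BufferInterface pointer returned by GetI420() would not extend the
underlying buffer's lifetime. A hedged illustration, with names invented for
the example:

  #include "api/scoped_refptr.h"
  #include "api/video/video_frame.h"
  #include "api/video/video_frame_buffer.h"

  void KeepAliveSketch(const webrtc::VideoFrame& frame) {
    // Capturing the raw pointer adds no reference; the pixel data could be
    // freed while a wrapper built on top of it is still in use.
    const webrtc::I420BufferInterface* raw =
        frame.video_frame_buffer()->GetI420();
    auto unsafe_keep_alive = [raw] {};

    // Capturing a scoped_refptr by value adds one reference, released only
    // when the lambda (and any std::function holding it) is destroyed.
    rtc::scoped_refptr<webrtc::VideoFrameBuffer> ref =
        frame.video_frame_buffer();
    auto safe_keep_alive = [ref] {};
  }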

@@ -17,7 +17,6 @@
 #include "common_video/libyuv/include/webrtc_libyuv.h"
 #include "modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h"
 #include "modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h"
-#include "rtc_base/keep_ref_until_done.h"
 #include "rtc_base/logging.h"
 
 namespace webrtc {

@@ -18,7 +18,6 @@
 #include "common_video/libyuv/include/webrtc_libyuv.h"
 #include "media/base/video_common.h"
 #include "modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h"
-#include "rtc_base/keep_ref_until_done.h"
 #include "rtc_base/logging.h"
 
 namespace webrtc {
@@ -204,16 +203,18 @@ int MultiplexEncoderAdapter::Encode(
     return rv;
 
   // Encode AXX
-  const I420ABufferInterface* yuva_buffer =
+  rtc::scoped_refptr<VideoFrameBuffer> frame_buffer =
       supports_augmented_data_
-          ? augmented_video_frame_buffer->GetVideoFrameBuffer()->GetI420A()
-          : input_image.video_frame_buffer()->GetI420A();
+          ? augmented_video_frame_buffer->GetVideoFrameBuffer()
+          : input_image.video_frame_buffer();
+  const I420ABufferInterface* yuva_buffer = frame_buffer->GetI420A();
   rtc::scoped_refptr<I420BufferInterface> alpha_buffer =
       WrapI420Buffer(input_image.width(), input_image.height(),
                      yuva_buffer->DataA(), yuva_buffer->StrideA(),
                      multiplex_dummy_planes_.data(), yuva_buffer->StrideU(),
                      multiplex_dummy_planes_.data(), yuva_buffer->StrideV(),
-                     rtc::KeepRefUntilDone(input_image.video_frame_buffer()));
+                     // To keep reference alive.
+                     [frame_buffer] {});
   VideoFrame alpha_image = VideoFrame::Builder()
                                .set_video_frame_buffer(alpha_buffer)
                                .set_timestamp_rtp(input_image.timestamp())

@@ -38,7 +38,6 @@
 #include "modules/video_coding/codecs/vp9/include/vp9.h"
 #include "modules/video_coding/include/video_codec_interface.h"
 #include "modules/video_coding/include/video_error_codes.h"
-#include "rtc_base/keep_ref_until_done.h"
 #include "rtc_base/ref_counted_object.h"
 #include "test/gmock.h"
 #include "test/gtest.h"
@@ -112,7 +111,9 @@ class TestMultiplexAdapter : public VideoCodecUnitTest,
         yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(),
         yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
         yuv_buffer->DataV(), yuv_buffer->StrideV(), yuv_buffer->DataY(),
-        yuv_buffer->StrideY(), rtc::KeepRefUntilDone(yuv_buffer));
+        yuv_buffer->StrideY(),
+        // To keep reference alive.
+        [yuv_buffer] {});
     return std::make_unique<VideoFrame>(VideoFrame::Builder()
                                             .set_video_frame_buffer(yuva_buffer)
                                             .set_timestamp_rtp(123)
@@ -168,8 +169,7 @@ class TestMultiplexAdapter : public VideoCodecUnitTest,
     rtc::scoped_refptr<I420BufferInterface> axx_buffer = WrapI420Buffer(
         yuva_buffer->width(), yuva_buffer->height(), yuva_buffer->DataA(),
         yuva_buffer->StrideA(), yuva_buffer->DataU(), yuva_buffer->StrideU(),
-        yuva_buffer->DataV(), yuva_buffer->StrideV(),
-        rtc::KeepRefUntilDone(video_frame_buffer));
+        yuva_buffer->DataV(), yuva_buffer->StrideV(), [video_frame_buffer] {});
     return std::make_unique<VideoFrame>(VideoFrame::Builder()
                                             .set_video_frame_buffer(axx_buffer)
                                             .set_timestamp_rtp(123)

@@ -22,7 +22,6 @@
 #include "common_video/include/video_frame_buffer.h"
 #include "modules/video_coding/utility/vp9_uncompressed_header_parser.h"
 #include "rtc_base/checks.h"
-#include "rtc_base/keep_ref_until_done.h"
 #include "rtc_base/logging.h"
 #include "third_party/libyuv/include/libyuv/convert.h"
 #include "vpx/vp8dx.h"
@@ -277,7 +276,7 @@ int LibvpxVp9Decoder::ReturnFrame(
   // This buffer contains all of |img|'s image data, a reference counted
   // Vp9FrameBuffer. (libvpx is done with the buffers after a few
   // vpx_codec_decode calls or vpx_codec_destroy).
-  Vp9FrameBufferPool::Vp9FrameBuffer* img_buffer =
+  rtc::scoped_refptr<Vp9FrameBufferPool::Vp9FrameBuffer> img_buffer =
       static_cast<Vp9FrameBufferPool::Vp9FrameBuffer*>(img->fb_priv);
 
   // The buffer can be used directly by the VideoFrame (without copy) by
@@ -312,7 +311,7 @@ int LibvpxVp9Decoder::ReturnFrame(
           // WrappedI420Buffer's mechanism for allowing the release of its
           // frame buffer is through a callback function. This is where we
           // should release |img_buffer|.
-          rtc::KeepRefUntilDone(img_buffer));
+          [img_buffer] {});
     }
   } else if (img->fmt == VPX_IMG_FMT_I444) {
     img_wrapped_buffer = WrapI444Buffer(
@@ -323,7 +322,7 @@ int LibvpxVp9Decoder::ReturnFrame(
         // WrappedI444Buffer's mechanism for allowing the release of its
         // frame buffer is through a callback function. This is where we
         // should release |img_buffer|.
-        rtc::KeepRefUntilDone(img_buffer));
+        [img_buffer] {});
   } else {
     RTC_LOG(LS_ERROR)
         << "Unsupported pixel format produced by the decoder: "
@@ -339,7 +338,7 @@ int LibvpxVp9Decoder::ReturnFrame(
           reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_U]),
           img->stride[VPX_PLANE_U] / 2,
           reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_V]),
-          img->stride[VPX_PLANE_V] / 2, rtc::KeepRefUntilDone(img_buffer));
+          img->stride[VPX_PLANE_V] / 2, [img_buffer] {});
       break;
     default:
       RTC_LOG(LS_ERROR) << "Unsupported bit depth produced by the decoder: "