Revert "Reland "Add Profile 2 configuration to VP9 Encoder and Decoder""

This reverts commit cb853c8f90d3410a7f0ce07915aa20db0329259d.

Reason for revert: Broke the Linux Tester on the FYI bots:
https://ci.chromium.org/buildbot/chromium.webrtc.fyi/Linux%20Tester/46636

Original change's description:
> Reland "Add Profile 2 configuration to VP9 Encoder and Decoder"
> 
> This is a reland of fc9c4e88b5569f0d2cd1c64cbc27fd969ce2db17
> 
> Original change's description:
> > Add Profile 2 configuration to VP9 Encoder and Decoder
> >
> > Bug: webrtc:9376
> > Change-Id: I4f627fb2b6c146a90cfcaa815da459b09dc00003
> > Reviewed-on: https://webrtc-review.googlesource.com/81980
> > Commit-Queue: Emircan Uysaler <emircan@webrtc.org>
> > Reviewed-by: Niklas Enbom <niklas.enbom@webrtc.org>
> > Reviewed-by: Erik Språng <sprang@webrtc.org>
> > Reviewed-by: Jerome Jiang <jianj@google.com>
> > Cr-Commit-Position: refs/heads/master@{#23917}
> 
> Bug: webrtc:9376
> Change-Id: I21fc44865af4e381f99dbc5ae2baf4a53ce834ca
> TBR: niklas.enbom@webrtc.org
> Reviewed-on: https://webrtc-review.googlesource.com/88341
> Commit-Queue: Emircan Uysaler <emircan@webrtc.org>
> Reviewed-by: Emircan Uysaler <emircan@webrtc.org>
> Reviewed-by: Erik Språng <sprang@webrtc.org>
> Cr-Commit-Position: refs/heads/master@{#23974}

TBR=niklase@google.com,jianj@google.com,sprang@webrtc.org,marpan@google.com,niklas.enbom@webrtc.org,emircan@webrtc.org

Change-Id: I23062a0a2e5feafa29fd36e6b1c4a6e2734c4d68
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: webrtc:9376
Reviewed-on: https://webrtc-review.googlesource.com/88600
Reviewed-by: Emircan Uysaler <emircan@webrtc.org>
Commit-Queue: Emircan Uysaler <emircan@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23976}
Author: Emircan Uysaler
Date: 2018-07-13 21:13:20 +00:00
Committed by: Commit Bot
Parent: a723ecc399
Commit: c528c0a07f
13 changed files with 38 additions and 235 deletions

View File

@@ -57,7 +57,6 @@ class VideoQualityTestFixtureInterface {
bool automatic_scaling;
std::string clip_name; // "Generator" to generate frames instead.
size_t capture_device_index;
SdpVideoFormat::Parameters sdp_params;
} video[2];
struct Audio {
bool enabled;

View File

@@ -44,7 +44,7 @@ std::unique_ptr<VideoEncoder> InternalEncoderFactory::CreateVideoEncoder(
if (cricket::CodecNamesEq(format.name, cricket::kVp8CodecName))
return VP8Encoder::Create();
if (cricket::CodecNamesEq(format.name, cricket::kVp9CodecName))
return VP9Encoder::Create(cricket::VideoCodec(format));
return VP9Encoder::Create();
if (cricket::CodecNamesEq(format.name, cricket::kH264CodecName))
return H264Encoder::Create(cricket::VideoCodec(format));
RTC_LOG(LS_ERROR) << "Trying to create encoder of unsupported format "

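This is the factory hook the reverted change used: the negotiated SdpVideoFormat was forwarded into VP9Encoder::Create() so the encoder could pick its profile from the SDP profile-id parameter. A minimal sketch of that pre-revert call pattern follows; the wrapper function is hypothetical, and the include paths are assumed from the headers referenced elsewhere in this diff.

// Sketch only, against the pre-revert API shown in this diff. The helper name
// CreateVp9Profile2Encoder() is illustrative; VP9Encoder::Create(VideoCodec),
// kVP9FmtpProfileId and VP9ProfileToString() are identifiers used elsewhere
// in the removed code.
#include <memory>

#include "api/video_codecs/sdp_video_format.h"
#include "media/base/codec.h"  // cricket::VideoCodec (path assumed)
#include "media/base/vp9_profile.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"

std::unique_ptr<webrtc::VideoEncoder> CreateVp9Profile2Encoder() {
  // "VP9" is cricket::kVp9CodecName in the real code; Profile 2 (10-bit) is
  // requested through the SDP profile-id parameter.
  webrtc::SdpVideoFormat format(
      "VP9", {{webrtc::kVP9FmtpProfileId,
               webrtc::VP9ProfileToString(webrtc::VP9Profile::kProfile2)}});
  // Pre-revert overload: the encoder derives image format and bit depth from
  // the codec parameters.
  return webrtc::VP9Encoder::Create(cricket::VideoCodec(format));
}
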
View File

@@ -479,7 +479,6 @@ rtc_static_library("webrtc_vp9") {
":webrtc_vp9_helpers",
"..:module_api",
"../..:webrtc_common",
"../../api/video:video_frame_i010",
"../../api/video_codecs:video_codecs_api",
"../../common_video",
"../../media:rtc_media_base",
@@ -725,7 +724,6 @@ if (rtc_include_tests) {
"../../media:rtc_h264_profile_id",
"../../media:rtc_internal_video_codecs",
"../../media:rtc_media_base",
"../../media:rtc_vp9_profile",
"../../rtc_base:rtc_base",
"../../test:fileutils",
"../../test:test_common",
@@ -734,7 +732,6 @@ if (rtc_include_tests) {
"../rtp_rtcp:rtp_rtcp_format",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv",
]
data = video_coding_modules_tests_resources

View File

@@ -69,7 +69,8 @@ void VideoCodecUnitTest::SetUp() {
input_frame_generator_ = test::FrameGenerator::CreateSquareGenerator(
codec_settings_.width, codec_settings_.height,
test::FrameGenerator::OutputType::I420, absl::optional<int>());
absl::optional<test::FrameGenerator::OutputType>(),
absl::optional<int>());
encoder_ = CreateEncoder();
decoder_ = CreateDecoder();

View File

@@ -105,7 +105,6 @@ class VideoCodecUnitTest : public ::testing::Test {
std::unique_ptr<VideoEncoder> encoder_;
std::unique_ptr<VideoDecoder> decoder_;
std::unique_ptr<test::FrameGenerator> input_frame_generator_;
private:
FakeEncodeCompleteCallback encode_complete_callback_;
@@ -125,6 +124,7 @@ class VideoCodecUnitTest : public ::testing::Test {
RTC_GUARDED_BY(decoded_frame_section_);
absl::optional<uint8_t> decoded_qp_ RTC_GUARDED_BY(decoded_frame_section_);
std::unique_ptr<test::FrameGenerator> input_frame_generator_;
uint32_t last_input_frame_timestamp_;
};

View File

@@ -10,7 +10,6 @@
#include "api/video/i420_buffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "media/base/vp9_profile.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/video_coding/codecs/test/video_codec_unittest.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
@@ -466,62 +465,4 @@ TEST_F(TestVp9ImplFrameDropping, PreEncodeFrameDropping) {
max_abs_framerate_error_fps);
}
class TestVp9ImplProfile2 : public TestVp9Impl {
protected:
void SetUp() override {
// Profile 2 might not be available on some platforms until
// https://bugs.chromium.org/p/webm/issues/detail?id=1544 is solved.
bool profile_2_is_supported = false;
for (const auto& codec : SupportedVP9Codecs()) {
if (ParseSdpForVP9Profile(codec.parameters)
.value_or(VP9Profile::kProfile0) == VP9Profile::kProfile2) {
profile_2_is_supported = true;
}
}
if (!profile_2_is_supported)
return;
TestVp9Impl::SetUp();
input_frame_generator_ = test::FrameGenerator::CreateSquareGenerator(
codec_settings_.width, codec_settings_.height,
test::FrameGenerator::OutputType::I010, absl::optional<int>());
}
std::unique_ptr<VideoEncoder> CreateEncoder() override {
cricket::VideoCodec profile2_codec;
profile2_codec.SetParam(kVP9FmtpProfileId,
VP9ProfileToString(VP9Profile::kProfile2));
return VP9Encoder::Create(profile2_codec);
}
std::unique_ptr<VideoDecoder> CreateDecoder() override {
return VP9Decoder::Create();
}
};
TEST_F(TestVp9ImplProfile2, EncodeDecode) {
if (!encoder_)
return;
VideoFrame* input_frame = NextInputFrame();
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*input_frame, nullptr, nullptr));
EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
ASSERT_TRUE(WaitForEncodedFrame(&encoded_frame, &codec_specific_info));
// First frame should be a key frame.
encoded_frame._frameType = kVideoFrameKey;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
decoder_->Decode(encoded_frame, false, nullptr, 0));
std::unique_ptr<VideoFrame> decoded_frame;
absl::optional<uint8_t> decoded_qp;
ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
ASSERT_TRUE(decoded_frame);
// TODO(emircan): Add PSNR for different color depths.
EXPECT_GT(I420PSNR(*input_frame->video_frame_buffer()->ToI420(),
*decoded_frame->video_frame_buffer()->ToI420()),
31);
}
} // namespace webrtc

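The removed TestVp9ImplProfile2 fixture above skips itself when libvpx was built without high-bit-depth support. The same probe, pulled out as a standalone helper for reference (the helper name is hypothetical; the loop is the one from the removed SetUp()):

// Sketch only: the runtime Profile 2 probe used by the removed test, wrapped
// in a hypothetical helper. SupportedVP9Codecs() and ParseSdpForVP9Profile()
// are the functions called by the removed code above.
#include "media/base/vp9_profile.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"

bool Vp9Profile2IsSupported() {
  // Profile 2 is only advertised when libvpx was built with high bit depth
  // (see the VPX_CODEC_CAP_HIGHBITDEPTH check removed in the next file).
  for (const auto& codec : webrtc::SupportedVP9Codecs()) {
    if (webrtc::ParseSdpForVP9Profile(codec.parameters)
            .value_or(webrtc::VP9Profile::kProfile0) ==
        webrtc::VP9Profile::kProfile2) {
      return true;
    }
  }
  return false;
}
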
View File

@@ -21,7 +21,6 @@
#include "vpx/vpx_encoder.h"
#include "absl/memory/memory.h"
#include "api/video/i010_buffer.h"
#include "common_video/include/video_frame_buffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
@@ -54,23 +53,12 @@ int GetCpuSpeed(int width, int height) {
}
std::vector<SdpVideoFormat> SupportedVP9Codecs() {
// Profile 2 might not be available on some platforms until
// https://bugs.chromium.org/p/webm/issues/detail?id=1544 is solved.
static bool vpx_supports_high_bit_depth =
(vpx_codec_get_caps(vpx_codec_vp9_cx()) & VPX_CODEC_CAP_HIGHBITDEPTH) !=
0 &&
(vpx_codec_get_caps(vpx_codec_vp9_dx()) & VPX_CODEC_CAP_HIGHBITDEPTH) !=
0;
std::vector<SdpVideoFormat> supported_formats{SdpVideoFormat(
cricket::kVp9CodecName,
{{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})};
if (vpx_supports_high_bit_depth) {
supported_formats.push_back(SdpVideoFormat(
cricket::kVp9CodecName,
{{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile2)}}));
}
return supported_formats;
return {SdpVideoFormat(
cricket::kVp9CodecName,
{{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}}),
SdpVideoFormat(cricket::kVp9CodecName,
{{kVP9FmtpProfileId,
VP9ProfileToString(VP9Profile::kProfile2)}})};
}
std::unique_ptr<VP9Encoder> VP9Encoder::Create() {
@@ -112,6 +100,7 @@ VP9EncoderImpl::VP9EncoderImpl(const cricket::VideoCodec& codec)
is_flexible_mode_(false) {
memset(&codec_, 0, sizeof(codec_));
memset(&svc_params_, 0, sizeof(vpx_svc_extra_cfg_t));
RTC_DCHECK(VP9Profile::kProfile0 == profile_);
}
VP9EncoderImpl::~VP9EncoderImpl() {
@@ -327,37 +316,15 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
CalcBufferSize(VideoType::kI420, codec_.width, codec_.height);
encoded_image_._buffer = new uint8_t[encoded_image_._size];
encoded_image_._completeFrame = true;
// Creating a wrapper to the image - setting image data to nullptr. Actual
// pointer will be set in encode. Setting align to 1, as it is meaningless
// (actual memory is not allocated).
raw_ = vpx_img_wrap(nullptr, VPX_IMG_FMT_I420, codec_.width, codec_.height, 1,
nullptr);
// Populate encoder configuration with default values.
if (vpx_codec_enc_config_default(vpx_codec_vp9_cx(), config_, 0)) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
vpx_img_fmt img_fmt = VPX_IMG_FMT_NONE;
unsigned int bits_for_storage = 8;
switch (profile_) {
case VP9Profile::kProfile0:
img_fmt = VPX_IMG_FMT_I420;
bits_for_storage = 8;
config_->g_bit_depth = VPX_BITS_8;
config_->g_profile = 0;
config_->g_input_bit_depth = 8;
break;
case VP9Profile::kProfile2:
img_fmt = VPX_IMG_FMT_I42016;
bits_for_storage = 16;
config_->g_bit_depth = VPX_BITS_10;
config_->g_profile = 2;
config_->g_input_bit_depth = 10;
break;
}
// Creating a wrapper to the image - setting image data to nullptr. Actual
// pointer will be set in encode. Setting align to 1, as it is meaningless
// (actual memory is not allocated).
raw_ =
vpx_img_wrap(nullptr, img_fmt, codec_.width, codec_.height, 1, nullptr);
raw_->bit_depth = bits_for_storage;
config_->g_w = codec_.width;
config_->g_h = codec_.height;
config_->rc_target_bitrate = inst->startBitrate; // in kbit/s
@@ -514,11 +481,7 @@ int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
const vpx_codec_err_t rv = vpx_codec_enc_init(
encoder_, vpx_codec_vp9_cx(), config_,
config_->g_bit_depth == VPX_BITS_8 ? 0 : VPX_CODEC_USE_HIGHBITDEPTH);
if (rv != VPX_CODEC_OK) {
RTC_LOG(LS_ERROR) << "Init error: " << vpx_codec_err_to_string(rv);
if (vpx_codec_enc_init(encoder_, vpx_codec_vp9_cx(), config_, 0)) {
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
vpx_codec_control(encoder_, VP8E_SET_CPUUSED, cpu_speed_);
@@ -643,47 +606,16 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image,
// doing this.
input_image_ = &input_image;
// Keep reference to buffer until encode completes.
rtc::scoped_refptr<I420BufferInterface> i420_buffer;
rtc::scoped_refptr<I010BufferInterface> i010_buffer;
switch (profile_) {
case VP9Profile::kProfile0: {
i420_buffer = input_image.video_frame_buffer()->ToI420();
// Image in vpx_image_t format.
// Input image is const. VPX's raw image is not defined as const.
raw_->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(i420_buffer->DataY());
raw_->planes[VPX_PLANE_U] = const_cast<uint8_t*>(i420_buffer->DataU());
raw_->planes[VPX_PLANE_V] = const_cast<uint8_t*>(i420_buffer->DataV());
raw_->stride[VPX_PLANE_Y] = i420_buffer->StrideY();
raw_->stride[VPX_PLANE_U] = i420_buffer->StrideU();
raw_->stride[VPX_PLANE_V] = i420_buffer->StrideV();
break;
}
case VP9Profile::kProfile2: {
// We can inject kI010 frames directly for encode. All other formats
// should be converted to it.
switch (input_image.video_frame_buffer()->type()) {
case VideoFrameBuffer::Type::kI010: {
i010_buffer = input_image.video_frame_buffer()->GetI010();
break;
}
default: {
i010_buffer =
I010Buffer::Copy(*input_image.video_frame_buffer()->ToI420());
}
}
raw_->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(
reinterpret_cast<const uint8_t*>(i010_buffer->DataY()));
raw_->planes[VPX_PLANE_U] = const_cast<uint8_t*>(
reinterpret_cast<const uint8_t*>(i010_buffer->DataU()));
raw_->planes[VPX_PLANE_V] = const_cast<uint8_t*>(
reinterpret_cast<const uint8_t*>(i010_buffer->DataV()));
raw_->stride[VPX_PLANE_Y] = i010_buffer->StrideY() * 2;
raw_->stride[VPX_PLANE_U] = i010_buffer->StrideU() * 2;
raw_->stride[VPX_PLANE_V] = i010_buffer->StrideV() * 2;
break;
}
}
rtc::scoped_refptr<I420BufferInterface> i420_buffer =
input_image.video_frame_buffer()->ToI420();
// Image in vpx_image_t format.
// Input image is const. VPX's raw image is not defined as const.
raw_->planes[VPX_PLANE_Y] = const_cast<uint8_t*>(i420_buffer->DataY());
raw_->planes[VPX_PLANE_U] = const_cast<uint8_t*>(i420_buffer->DataU());
raw_->planes[VPX_PLANE_V] = const_cast<uint8_t*>(i420_buffer->DataV());
raw_->stride[VPX_PLANE_Y] = i420_buffer->StrideY();
raw_->stride[VPX_PLANE_U] = i420_buffer->StrideU();
raw_->stride[VPX_PLANE_V] = i420_buffer->StrideV();
vpx_enc_frame_flags_t flags = 0;
if (force_key_frame_) {
@@ -693,13 +625,8 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image,
RTC_CHECK_GT(codec_.maxFramerate, 0);
uint32_t duration =
90000 / target_framerate_fps_.value_or(codec_.maxFramerate);
const vpx_codec_err_t rv = vpx_codec_encode(encoder_, raw_, timestamp_,
duration, flags, VPX_DL_REALTIME);
if (rv != VPX_CODEC_OK) {
RTC_LOG(LS_ERROR) << "Encoding error: " << vpx_codec_err_to_string(rv)
<< "\n"
<< "Details: " << vpx_codec_error(encoder_) << "\n"
<< vpx_codec_error_detail(encoder_);
if (vpx_codec_encode(encoder_, raw_, timestamp_, duration, flags,
VPX_DL_REALTIME)) {
return WEBRTC_VIDEO_CODEC_ERROR;
}
timestamp_ += duration;
@@ -1160,13 +1087,10 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img,
// vpx_codec_decode calls or vpx_codec_destroy).
Vp9FrameBufferPool::Vp9FrameBuffer* img_buffer =
static_cast<Vp9FrameBufferPool::Vp9FrameBuffer*>(img->fb_priv);
// The buffer can be used directly by the VideoFrame (without copy) by
// using a Wrapped*Buffer.
rtc::scoped_refptr<VideoFrameBuffer> img_wrapped_buffer;
switch (img->bit_depth) {
case 8:
img_wrapped_buffer = WrapI420Buffer(
// using a WrappedI420Buffer.
rtc::scoped_refptr<WrappedI420Buffer> img_wrapped_buffer(
new rtc::RefCountedObject<webrtc::WrappedI420Buffer>(
img->d_w, img->d_h, img->planes[VPX_PLANE_Y],
img->stride[VPX_PLANE_Y], img->planes[VPX_PLANE_U],
img->stride[VPX_PLANE_U], img->planes[VPX_PLANE_V],
@@ -1174,22 +1098,8 @@ int VP9DecoderImpl::ReturnFrame(const vpx_image_t* img,
// WrappedI420Buffer's mechanism for allowing the release of its frame
// buffer is through a callback function. This is where we should
// release |img_buffer|.
rtc::KeepRefUntilDone(img_buffer));
break;
case 10:
img_wrapped_buffer = WrapI010Buffer(
img->d_w, img->d_h,
reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_Y]),
img->stride[VPX_PLANE_Y] / 2,
reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_U]),
img->stride[VPX_PLANE_U] / 2,
reinterpret_cast<const uint16_t*>(img->planes[VPX_PLANE_V]),
img->stride[VPX_PLANE_V] / 2, rtc::KeepRefUntilDone(img_buffer));
break;
default:
RTC_NOTREACHED();
return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
}
rtc::KeepRefUntilDone(img_buffer)));
VideoFrame decoded_image(img_wrapped_buffer, timestamp,
0 /* render_time_ms */, webrtc::kVideoRotation_0);
decoded_image.set_ntp_time_ms(ntp_time_ms);

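Most of the encoder-side removal above reduces to a profile-to-libvpx mapping plus a high-bit-depth init flag. A condensed sketch of that mapping for reference; the Vp9ProfileConfig struct and ConfigForProfile() helper are illustrative only, while the constants and config fields are the ones from the removed InitEncode() code.

// Sketch only: condenses the removed InitEncode() switch into one table.
#include "media/base/vp9_profile.h"
#include "vpx/vpx_encoder.h"
#include "vpx/vpx_image.h"

struct Vp9ProfileConfig {
  vpx_img_fmt_t img_fmt;          // layout of raw frames handed to libvpx
  unsigned int bits_for_storage;  // raw_->bit_depth
  vpx_bit_depth_t bit_depth;      // config_->g_bit_depth
  unsigned int profile;           // config_->g_profile
  unsigned int input_bit_depth;   // config_->g_input_bit_depth
};

Vp9ProfileConfig ConfigForProfile(webrtc::VP9Profile profile) {
  switch (profile) {
    case webrtc::VP9Profile::kProfile2:
      // 10-bit content in 16-bit planes; the encoder is also initialized with
      // VPX_CODEC_USE_HIGHBITDEPTH (see the removed vpx_codec_enc_init call).
      return {VPX_IMG_FMT_I42016, 16, VPX_BITS_10, 2, 10};
    case webrtc::VP9Profile::kProfile0:
    default:
      // 8-bit 4:2:0, the only path left after this revert.
      return {VPX_IMG_FMT_I420, 8, VPX_BITS_8, 0, 8};
  }
}

The removed decoder path mirrored this: ReturnFrame() chose between WrapI420Buffer and WrapI010Buffer based on img->bit_depth, halving the reported strides in the 10-bit case because libvpx counts bytes while the I010 buffer counts uint16_t samples.
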
View File

@@ -64,7 +64,6 @@ rtc_source_set("video_test_common") {
"../:typedefs",
"../api:libjingle_peerconnection_api",
"../api/video:video_frame",
"../api/video:video_frame_i010",
"../api/video:video_frame_i420",
"../api/video_codecs:video_codecs_api",
"../call:video_stream_api",

View File

@@ -15,7 +15,6 @@
#include <memory>
#include "api/video/i010_buffer.h"
#include "api/video/i420_buffer.h"
#include "common_video/include/video_frame_buffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
@@ -69,8 +68,7 @@ class SquareGenerator : public FrameGenerator {
rtc::scoped_refptr<VideoFrameBuffer> buffer = nullptr;
switch (type_) {
case OutputType::I420:
case OutputType::I010: {
case OutputType::I420: {
buffer = CreateI420Buffer(width_, height_);
break;
}
@@ -92,10 +90,6 @@ class SquareGenerator : public FrameGenerator {
for (const auto& square : squares_)
square->Draw(buffer);
if (type_ == OutputType::I010) {
buffer = I010Buffer::Copy(*buffer->ToI420());
}
frame_.reset(
new VideoFrame(buffer, webrtc::kVideoRotation_0, 0 /* timestamp_us */));
return frame_.get();

View File

@@ -58,7 +58,7 @@ class FrameGenerator {
RTC_NOTREACHED();
}
enum class OutputType { I420, I420A, I010 };
enum class OutputType { I420, I420A };
// Creates a frame generator that produces frames with small squares that
// move randomly towards the lower right corner.

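The I010 output type removed from the frame generator above was produced by drawing the squares into an ordinary 8-bit I420 buffer and then widening the result. A minimal sketch of that final conversion step (the free-function wrapper is illustrative; I010Buffer::Copy() and ToI420() are the calls used by the removed code):

// Sketch only: the conversion used by the removed SquareGenerator I010 path.
// I010Buffer::Copy() upconverts the 8-bit I420 planes into 10-bit samples
// stored as uint16_t.
#include "api/video/i010_buffer.h"
#include "api/video/video_frame_buffer.h"

rtc::scoped_refptr<webrtc::VideoFrameBuffer> ToI010(
    const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer) {
  return webrtc::I010Buffer::Copy(*buffer->ToI420());
}
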
View File

@@ -202,9 +202,7 @@ if (rtc_include_tests) {
]
deps = [
":video_quality_test",
"../media:rtc_vp9_profile",
"../modules/pacing:pacing",
"../modules/video_coding:webrtc_vp9",
"../rtc_base:rtc_base_approved",
"../rtc_base/experiments:alr_experiment",
"../test:field_trial",

View File

@@ -9,8 +9,6 @@
*/
#include <stdio.h>
#include "media/base/vp9_profile.h"
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "rtc_base/experiments/alr_experiment.h"
#include "rtc_base/flags.h"
#include "test/field_trial.h"
@@ -107,32 +105,6 @@ TEST(FullStackTest, ForemanCifPlr5Vp9) {
fixture->RunWithAnalyzer(foreman_cif);
}
TEST(FullStackTest, GeneratorWithoutPacketLossVp9Profile2) {
// Profile 2 might not be available on some platforms until
// https://bugs.chromium.org/p/webm/issues/detail?id=1544 is solved.
bool profile_2_is_supported = false;
for (const auto& codec : SupportedVP9Codecs()) {
if (ParseSdpForVP9Profile(codec.parameters)
.value_or(VP9Profile::kProfile0) == VP9Profile::kProfile2) {
profile_2_is_supported = true;
}
}
if (!profile_2_is_supported)
return;
auto fixture = CreateVideoQualityTestFixture();
SdpVideoFormat::Parameters vp92 = {
{kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile2)}};
ParamsWithLogging generator;
generator.call.send_side_bwe = true;
generator.video[0] = {
true, 352, 288, 30, 700000, 700000, 700000, false, "VP9",
1, 0, 0, false, false, false, "GeneratorI010", 0, vp92};
generator.analyzer = {"generator_net_delay_0_0_plr_0_VP9Profile2", 0.0, 0.0,
kFullStackTestDurationSecs};
fixture->RunWithAnalyzer(generator);
}
TEST(FullStackTest, ForemanCifWithoutPacketLossMultiplexI420Frame) {
auto fixture = CreateVideoQualityTestFixture();
ParamsWithLogging foreman_cif;

View File

@@ -75,8 +75,9 @@ std::unique_ptr<VideoEncoder> VideoQualityTest::CreateVideoEncoder(
} else if (format.name == "multiplex") {
return absl::make_unique<MultiplexEncoderAdapter>(
&internal_encoder_factory_, SdpVideoFormat(cricket::kVp9CodecName));
} else {
return internal_encoder_factory_.CreateVideoEncoder(format);
}
return internal_encoder_factory_.CreateVideoEncoder(format);
}
VideoQualityTest::VideoQualityTest(
@@ -420,9 +421,6 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
video_encoder_configs_[video_idx].video_format.name =
params_.video[video_idx].codec;
video_encoder_configs_[video_idx].video_format.parameters =
params_.video[video_idx].sdp_params;
video_encoder_configs_[video_idx].codec_type =
PayloadStringToCodecType(params_.video[video_idx].codec);
@@ -720,12 +718,6 @@ void VideoQualityTest::CreateCapturers() {
static_cast<int>(params_.video[video_idx].height),
test::FrameGenerator::OutputType::I420A, absl::nullopt,
params_.video[video_idx].fps, clock_));
} else if (params_.video[video_idx].clip_name == "GeneratorI010") {
video_capturers_[video_idx].reset(test::FrameGeneratorCapturer::Create(
static_cast<int>(params_.video[video_idx].width),
static_cast<int>(params_.video[video_idx].height),
test::FrameGenerator::OutputType::I010, absl::nullopt,
params_.video[video_idx].fps, clock_));
} else if (params_.video[video_idx].clip_name.empty()) {
video_capturers_[video_idx].reset(test::VcmCapturer::Create(
params_.video[video_idx].width, params_.video[video_idx].height,