Revert "Inform VideoEncoder of negotiated capabilities"

This reverts commit 11dfff0878c949f2e19d95a0ddc209cdad94b3b4.

Reason for revert: Downstream import failure.

Original change's description:
> Inform VideoEncoder of negotiated capabilities
> 
> After this CL lands, an announcement will be made to
> discuss-webrtc about the deprecation of one version
> of InitEncode().
> 
> Bug: webrtc:10720
> Change-Id: Ib992af0272bbb16ae16ef7e69491f365702d179e
> Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/140884
> Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
> Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
> Reviewed-by: Erik Språng <sprang@webrtc.org>
> Commit-Queue: Elad Alon <eladalon@webrtc.org>
> Cr-Commit-Position: refs/heads/master@{#28224}

TBR=sakal@webrtc.org,kwiberg@webrtc.org,eladalon@webrtc.org,kthelgason@webrtc.org,sprang@webrtc.org

Change-Id: I7f833055c67f1f879b01dd8c156ba7b8840e8747
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: webrtc:10720
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/141411
Reviewed-by: Philip Eliasson <philipel@webrtc.org>
Commit-Queue: Philip Eliasson <philipel@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#28225}
Philip Eliasson, 2019-06-11 11:55:47 +00:00 (committed by Commit Bot)
parent 11dfff0878
commit 49d661a7d3

59 changed files with 226 additions and 601 deletions
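
For context, the two InitEncode() variants that this revert switches between both appear in the diff below. The sketch that follows is illustrative only, assuming the WebRTC API of this era: the wrapper function names and the literal values are invented, while the InitEncode() signatures, VideoEncoder::Settings, and VideoEncoder::Capabilities are the ones visible in the changed files.

// Illustrative sketch, not part of this change. Helper names and literal
// values are invented; the signatures match the ones shown in the diff.
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"

int32_t InitWithLegacyArguments(webrtc::VideoEncoder* encoder,
                                webrtc::VideoCodec* codec_settings) {
  // Signature restored by this revert: core count and max payload size are
  // passed as separate arguments.
  return encoder->InitEncode(codec_settings, /*number_of_cores=*/1,
                             /*max_payload_size=*/1200);
}

int32_t InitWithSettings(webrtc::VideoEncoder* encoder,
                         webrtc::VideoCodec* codec_settings) {
  // Signature removed by this revert: the same values travel inside
  // VideoEncoder::Settings together with the negotiated capabilities.
  webrtc::VideoEncoder::Capabilities capabilities(false);
  return encoder->InitEncode(
      codec_settings,
      webrtc::VideoEncoder::Settings(capabilities, /*number_of_cores=*/1,
                                     /*max_payload_size=*/1200));
}

The test changes below perform the same migration in the opposite direction, replacing the kSettings/kCapabilities constants with kNumCores and kMaxPayloadSize.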


@ -193,12 +193,6 @@ H264EncoderImpl::~H264EncoderImpl() {
int32_t H264EncoderImpl::InitEncode(const VideoCodec* inst,
int32_t number_of_cores,
size_t max_payload_size) {
RTC_NOTREACHED();
return WEBRTC_VIDEO_CODEC_ERROR;
}
int32_t H264EncoderImpl::InitEncode(const VideoCodec* inst,
const VideoEncoder::Settings& settings) {
ReportInit();
if (!inst || inst->codecType != kVideoCodecH264) {
ReportError();
@ -232,8 +226,8 @@ int32_t H264EncoderImpl::InitEncode(const VideoCodec* inst,
pictures_.resize(number_of_streams);
configurations_.resize(number_of_streams);
number_of_cores_ = settings.number_of_cores;
max_payload_size_ = settings.max_payload_size;
number_of_cores_ = number_of_cores;
max_payload_size_ = max_payload_size;
codec_ = *inst;
// Code expects simulcastStream resolutions to be correct, make sure they are


@ -25,7 +25,6 @@
#include <vector>
#include "api/video/i420_buffer.h"
#include "api/video_codecs/video_encoder.h"
#include "common_video/h264/h264_bitstream_parser.h"
#include "modules/video_coding/codecs/h264/include/h264.h"
#include "modules/video_coding/utility/quality_scaler.h"
@ -67,8 +66,6 @@ class H264EncoderImpl : public H264Encoder {
int32_t InitEncode(const VideoCodec* codec_settings,
int32_t number_of_cores,
size_t max_payload_size) override;
int32_t InitEncode(const VideoCodec* codec_settings,
const VideoEncoder::Settings& settings) override;
int32_t Release() override;
int32_t RegisterEncodeCompleteCallback(


@ -11,7 +11,6 @@
#include "modules/video_coding/codecs/h264/h264_encoder_impl.h"
#include "api/video_codecs/video_encoder.h"
#include "test/gtest.h"
namespace webrtc {
@ -21,11 +20,6 @@ namespace {
const int kMaxPayloadSize = 1024;
const int kNumCores = 1;
const VideoEncoder::Capabilities kCapabilities(false);
const VideoEncoder::Settings kSettings(kCapabilities,
kNumCores,
kMaxPayloadSize);
void SetDefaultSettings(VideoCodec* codec_settings) {
codec_settings->codecType = kVideoCodecH264;
codec_settings->maxFramerate = 60;
@ -43,7 +37,7 @@ TEST(H264EncoderImplTest, CanInitializeWithDefaultParameters) {
VideoCodec codec_settings;
SetDefaultSettings(&codec_settings);
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder.InitEncode(&codec_settings, kSettings));
encoder.InitEncode(&codec_settings, kNumCores, kMaxPayloadSize));
EXPECT_EQ(H264PacketizationMode::NonInterleaved,
encoder.PacketizationModeForTesting());
}
@ -55,7 +49,7 @@ TEST(H264EncoderImplTest, CanInitializeWithNonInterleavedModeExplicitly) {
VideoCodec codec_settings;
SetDefaultSettings(&codec_settings);
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder.InitEncode(&codec_settings, kSettings));
encoder.InitEncode(&codec_settings, kNumCores, kMaxPayloadSize));
EXPECT_EQ(H264PacketizationMode::NonInterleaved,
encoder.PacketizationModeForTesting());
}
@ -67,7 +61,7 @@ TEST(H264EncoderImplTest, CanInitializeWithSingleNalUnitModeExplicitly) {
VideoCodec codec_settings;
SetDefaultSettings(&codec_settings);
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder.InitEncode(&codec_settings, kSettings));
encoder.InitEncode(&codec_settings, kNumCores, kMaxPayloadSize));
EXPECT_EQ(H264PacketizationMode::SingleNalUnit,
encoder.PacketizationModeForTesting());
}
@ -79,7 +73,7 @@ TEST(H264EncoderImplTest, CanInitializeWithRemovedParameter) {
VideoCodec codec_settings;
SetDefaultSettings(&codec_settings);
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder.InitEncode(&codec_settings, kSettings));
encoder.InitEncode(&codec_settings, kNumCores, kMaxPayloadSize));
EXPECT_EQ(H264PacketizationMode::SingleNalUnit,
encoder.PacketizationModeForTesting());
}


@ -42,8 +42,6 @@ class MultiplexEncoderAdapter : public VideoEncoder {
int InitEncode(const VideoCodec* inst,
int number_of_cores,
size_t max_payload_size) override;
int InitEncode(const VideoCodec* inst,
const VideoEncoder::Settings& settings) override;
int Encode(const VideoFrame& input_image,
const std::vector<VideoFrameType>* frame_types) override;
int RegisterEncodeCompleteCallback(EncodedImageCallback* callback) override;


@ -13,7 +13,6 @@
#include <cstring>
#include "api/video/encoded_image.h"
#include "api/video_codecs/video_encoder.h"
#include "common_video/include/video_frame_buffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/include/module_common_types.h"
@ -64,13 +63,6 @@ MultiplexEncoderAdapter::~MultiplexEncoderAdapter() {
int MultiplexEncoderAdapter::InitEncode(const VideoCodec* inst,
int number_of_cores,
size_t max_payload_size) {
RTC_NOTREACHED();
return WEBRTC_VIDEO_CODEC_ERROR;
}
int MultiplexEncoderAdapter::InitEncode(
const VideoCodec* inst,
const VideoEncoder::Settings& settings) {
const size_t buffer_size =
CalcBufferSize(VideoType::kI420, inst->width, inst->height);
multiplex_dummy_planes_.resize(buffer_size);
@ -79,23 +71,23 @@ int MultiplexEncoderAdapter::InitEncode(
0x80);
RTC_DCHECK_EQ(kVideoCodecMultiplex, inst->codecType);
VideoCodec video_codec = *inst;
video_codec.codecType = PayloadStringToCodecType(associated_format_.name);
VideoCodec settings = *inst;
settings.codecType = PayloadStringToCodecType(associated_format_.name);
// Take over the key frame interval at adapter level, because we have to
// sync the key frames for both sub-encoders.
switch (video_codec.codecType) {
switch (settings.codecType) {
case kVideoCodecVP8:
key_frame_interval_ = video_codec.VP8()->keyFrameInterval;
video_codec.VP8()->keyFrameInterval = 0;
key_frame_interval_ = settings.VP8()->keyFrameInterval;
settings.VP8()->keyFrameInterval = 0;
break;
case kVideoCodecVP9:
key_frame_interval_ = video_codec.VP9()->keyFrameInterval;
video_codec.VP9()->keyFrameInterval = 0;
key_frame_interval_ = settings.VP9()->keyFrameInterval;
settings.VP9()->keyFrameInterval = 0;
break;
case kVideoCodecH264:
key_frame_interval_ = video_codec.H264()->keyFrameInterval;
video_codec.H264()->keyFrameInterval = 0;
key_frame_interval_ = settings.H264()->keyFrameInterval;
settings.H264()->keyFrameInterval = 0;
break;
default:
break;
@ -109,7 +101,8 @@ int MultiplexEncoderAdapter::InitEncode(
for (size_t i = 0; i < kAlphaCodecStreams; ++i) {
std::unique_ptr<VideoEncoder> encoder =
factory_->CreateVideoEncoder(associated_format_);
const int rv = encoder->InitEncode(&video_codec, settings);
const int rv =
encoder->InitEncode(&settings, number_of_cores, max_payload_size);
if (rv) {
RTC_LOG(LS_ERROR) << "Failed to create multiplex codec index " << i;
return rv;


@ -10,7 +10,6 @@
#include <utility>
#include "api/video_codecs/video_encoder.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/video_coding/codecs/test/video_codec_unittest.h"
#include "modules/video_coding/include/video_error_codes.h"
@ -26,9 +25,6 @@ static const int kHeight = 144; // Height of the input image.
static const int kMaxFramerate = 30; // Arbitrary value.
namespace webrtc {
namespace {
const VideoEncoder::Capabilities kCapabilities(false);
}
EncodedImageCallback::Result
VideoCodecUnitTest::FakeEncodeCompleteCallback::OnEncodedImage(
@ -82,10 +78,8 @@ void VideoCodecUnitTest::SetUp() {
decoder_->RegisterDecodeCompleteCallback(&decode_complete_callback_);
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(
&codec_settings_,
VideoEncoder::Settings(kCapabilities, 1 /* number of cores */,
0 /* max payload size (unused) */)));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
decoder_->InitDecode(&codec_settings_, 1 /* number of cores */));
}


@ -28,7 +28,6 @@ namespace webrtc {
namespace test {
namespace {
const VideoEncoder::Capabilities kCapabilities(false);
int32_t InitEncoder(VideoCodecType codec_type, VideoEncoder* encoder) {
VideoCodec codec;
@ -37,9 +36,8 @@ int32_t InitEncoder(VideoCodecType codec_type, VideoEncoder* encoder) {
codec.height = 480;
codec.maxFramerate = 30;
RTC_CHECK(encoder);
return encoder->InitEncode(
&codec, VideoEncoder::Settings(kCapabilities, 1 /* number_of_cores */,
1200 /* max_payload_size */));
return encoder->InitEncode(&codec, 1 /* number_of_cores */,
1200 /* max_payload_size */);
}
int32_t InitDecoder(VideoCodecType codec_type, VideoDecoder* decoder) {


@ -24,7 +24,6 @@
#include "api/video/video_frame_buffer.h"
#include "api/video/video_rotation.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
#include "common_video/h264/h264_common.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
@ -46,8 +45,6 @@ namespace {
const int kMsToRtpTimestamp = kVideoPayloadTypeFrequency / 1000;
const int kMaxBufferedInputFrames = 20;
const VideoEncoder::Capabilities kCapabilities(false);
size_t GetMaxNaluSizeBytes(const EncodedImage& encoded_frame,
const VideoCodecTestFixture::Config& config) {
if (config.codec_settings.codecType != kVideoCodecH264)
@ -210,11 +207,9 @@ VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder,
WEBRTC_VIDEO_CODEC_OK);
// Initialize codecs so that they are ready to receive frames.
RTC_CHECK_EQ(encoder_->InitEncode(
&config_.codec_settings,
VideoEncoder::Settings(
kCapabilities, static_cast<int>(config_.NumberOfCores()),
config_.max_payload_size_bytes)),
RTC_CHECK_EQ(encoder_->InitEncode(&config_.codec_settings,
static_cast<int>(config_.NumberOfCores()),
config_.max_payload_size_bytes),
WEBRTC_VIDEO_CODEC_OK);
for (size_t i = 0; i < num_simulcast_or_spatial_layers_; ++i) {


@ -66,7 +66,7 @@ class VideoProcessorTest : public ::testing::Test {
}
void ExpectInit() {
EXPECT_CALL(encoder_mock_, InitEncode(_, _)).Times(1);
EXPECT_CALL(encoder_mock_, InitEncode(_, _, _)).Times(1);
EXPECT_CALL(encoder_mock_, RegisterEncodeCompleteCallback(_)).Times(1);
EXPECT_CALL(*decoder_mock_, InitDecode(_, _)).Times(1);
EXPECT_CALL(*decoder_mock_, RegisterDecodeCompleteCallback(_)).Times(1);


@ -450,17 +450,9 @@ void LibvpxVp8Encoder::SetStreamState(bool send_stream, int stream_idx) {
send_stream_[stream_idx] = send_stream;
}
int LibvpxVp8Encoder::InitEncode(const VideoCodec* codec_settings,
int number_of_cores,
size_t max_payload_size) {
RTC_NOTREACHED();
return WEBRTC_VIDEO_CODEC_ERROR;
}
// TODO(eladalon): s/inst/codec_settings/g.
// TODO(bugs.webrtc.org/10720): Pass |capabilities| to frame buffer controller.
int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst,
const VideoEncoder::Settings& settings) {
int number_of_cores,
size_t /*maxPayloadSize */) {
if (inst == NULL) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
@ -474,7 +466,7 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst,
if (inst->width < 1 || inst->height < 1) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
if (settings.number_of_cores < 1) {
if (number_of_cores < 1) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
if (inst->VP8().automaticResizeOn && inst->numberOfSimulcastStreams > 1) {
@ -500,7 +492,7 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst,
}
RTC_DCHECK(frame_buffer_controller_);
number_of_cores_ = settings.number_of_cores;
number_of_cores_ = number_of_cores;
timestamp_ = 0;
codec_ = *inst;
@ -619,7 +611,7 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst,
// Determine number of threads based on the image size and #cores.
// TODO(fbarchard): Consider number of Simulcast layers.
vpx_configs_[0].g_threads = NumberOfThreads(
vpx_configs_[0].g_w, vpx_configs_[0].g_h, settings.number_of_cores);
vpx_configs_[0].g_w, vpx_configs_[0].g_h, number_of_cores);
// Creating a wrapper to the image - setting image data to NULL.
// Actual pointer will be set in encode. Setting align to 1, as it


@ -48,8 +48,6 @@ class LibvpxVp8Encoder : public VideoEncoder {
int InitEncode(const VideoCodec* codec_settings,
int number_of_cores,
size_t max_payload_size) override;
int InitEncode(const VideoCodec* codec_settings,
const VideoEncoder::Settings& settings) override;
int Encode(const VideoFrame& input_image,
const std::vector<VideoFrameType>* frame_types) override;


@ -14,7 +14,6 @@
#include "api/test/mock_video_decoder.h"
#include "api/test/mock_video_encoder.h"
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/vp8_temporal_layers.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "common_video/test/utilities.h"
@ -52,11 +51,6 @@ constexpr int kDefaultMinPixelsPerFrame = 320 * 180;
constexpr int kWidth = 172;
constexpr int kHeight = 144;
constexpr float kFramerateFps = 30;
const VideoEncoder::Capabilities kCapabilities(false);
const VideoEncoder::Settings kSettings(kCapabilities,
kNumCores,
kMaxPayloadSize);
} // namespace
class TestVp8Impl : public VideoCodecUnitTest {
@ -123,8 +117,7 @@ TEST_F(TestVp8Impl, SetRates) {
auto* const vpx = new NiceMock<MockLibvpxVp8Interface>();
LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)));
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder.InitEncode(&codec_settings_,
VideoEncoder::Settings(kCapabilities, 1, 1000)));
encoder.InitEncode(&codec_settings_, 1, 1000));
const uint32_t kBitrateBps = 300000;
VideoBitrateAllocation bitrate_allocation;
@ -150,8 +143,7 @@ TEST_F(TestVp8Impl, DynamicSetRates) {
auto* const vpx = new NiceMock<MockLibvpxVp8Interface>();
LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)));
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder.InitEncode(&codec_settings_,
VideoEncoder::Settings(kCapabilities, 1, 1000)));
encoder.InitEncode(&codec_settings_, 1, 1000));
const uint32_t kBitrateBps = 300000;
VideoEncoder::RateControlParameters rate_settings;
@ -207,7 +199,7 @@ TEST_F(TestVp8Impl, DynamicSetRates) {
TEST_F(TestVp8Impl, EncodeFrameAndRelease) {
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
EncodedImage encoded_frame;
CodecSpecificInfo codec_specific_info;
@ -265,7 +257,7 @@ TEST_F(TestVp8Impl, ChecksSimulcastSettings) {
codec_settings_.simulcastStream[1] = {kWidth / 2, kHeight / 2, 30, 3,
4000, 3000, 2000, 80};
EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
codec_settings_.numberOfSimulcastStreams = 3;
// Resolutions are not in ascending order.
codec_settings_.simulcastStream[0] = {
@ -275,7 +267,7 @@ TEST_F(TestVp8Impl, ChecksSimulcastSettings) {
codec_settings_.simulcastStream[2] = {kWidth, kHeight, 30, 1,
4000, 3000, 2000, 80};
EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
// Resolutions are not in ascending order.
codec_settings_.simulcastStream[0] = {kWidth, kHeight, kFramerateFps, 1,
4000, 3000, 2000, 80};
@ -284,7 +276,7 @@ TEST_F(TestVp8Impl, ChecksSimulcastSettings) {
codec_settings_.simulcastStream[2] = {
kWidth - 1, kHeight - 1, kFramerateFps, 1, 4000, 3000, 2000, 80};
EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
// Temporal layers do not match.
codec_settings_.simulcastStream[0] = {
kWidth / 4, kHeight / 4, kFramerateFps, 1, 4000, 3000, 2000, 80};
@ -293,7 +285,7 @@ TEST_F(TestVp8Impl, ChecksSimulcastSettings) {
codec_settings_.simulcastStream[2] = {kWidth, kHeight, kFramerateFps, 3,
4000, 3000, 2000, 80};
EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
// Resolutions do not match codec config.
codec_settings_.simulcastStream[0] = {
kWidth / 4 + 1, kHeight / 4 + 1, kFramerateFps, 1, 4000, 3000, 2000, 80};
@ -302,7 +294,7 @@ TEST_F(TestVp8Impl, ChecksSimulcastSettings) {
codec_settings_.simulcastStream[2] = {
kWidth + 4, kHeight + 4, kFramerateFps, 1, 4000, 3000, 2000, 80};
EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
// Everything fine: scaling by 2, top resolution matches video, temporal
// settings are the same for all layers.
codec_settings_.simulcastStream[0] = {
@ -312,7 +304,7 @@ TEST_F(TestVp8Impl, ChecksSimulcastSettings) {
codec_settings_.simulcastStream[2] = {kWidth, kHeight, kFramerateFps, 1,
4000, 3000, 2000, 80};
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
// Everything fine: custom scaling, top resolution matches video, temporal
// settings are the same for all layers.
codec_settings_.simulcastStream[0] = {
@ -322,7 +314,7 @@ TEST_F(TestVp8Impl, ChecksSimulcastSettings) {
codec_settings_.simulcastStream[2] = {kWidth, kHeight, kFramerateFps, 1,
4000, 3000, 2000, 80};
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
}
#if defined(WEBRTC_ANDROID)
@ -386,7 +378,7 @@ TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) {
TEST_F(TestVp8Impl, EncoderWith2TemporalLayers) {
codec_settings_.VP8()->numberOfTemporalLayers = 2;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
// Temporal layer 0.
EncodedImage encoded_frame;
@ -407,7 +399,7 @@ TEST_F(TestVp8Impl, ScalingDisabledIfAutomaticResizeOff) {
codec_settings_.VP8()->frameDroppingOn = true;
codec_settings_.VP8()->automaticResizeOn = false;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
VideoEncoder::ScalingSettings settings =
encoder_->GetEncoderInfo().scaling_settings;
@ -418,7 +410,7 @@ TEST_F(TestVp8Impl, ScalingEnabledIfAutomaticResizeOn) {
codec_settings_.VP8()->frameDroppingOn = true;
codec_settings_.VP8()->automaticResizeOn = true;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
VideoEncoder::ScalingSettings settings =
encoder_->GetEncoderInfo().scaling_settings;
@ -450,7 +442,7 @@ TEST_F(TestVp8Impl, DontDropKeyframes) {
/* num_squares = */ absl::optional<int>(300));
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
VideoBitrateAllocation bitrate_allocation;
// Bitrate only enough for TL0.
@ -488,8 +480,7 @@ TEST_F(TestVp8Impl, KeepsTimestampOnReencode) {
return img;
}));
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder.InitEncode(&codec_settings_,
VideoEncoder::Settings(kCapabilities, 1, 1000)));
encoder.InitEncode(&codec_settings_, 1, 1000));
MockEncodedImageCallback callback;
encoder.RegisterEncodeCompleteCallback(&callback);
@ -521,7 +512,7 @@ TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationTwoTemporalLayers) {
codec_settings_.simulcastStream[0].maxBitrate = 100;
codec_settings_.simulcastStream[0].numberOfTemporalLayers = 2;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
FramerateFractions expected_fps_allocation[kMaxSpatialLayers];
expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction / 2);
@ -539,7 +530,7 @@ TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationThreeTemporalLayers) {
codec_settings_.simulcastStream[0].maxBitrate = 100;
codec_settings_.simulcastStream[0].numberOfTemporalLayers = 3;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
FramerateFractions expected_fps_allocation[kMaxSpatialLayers];
expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction / 4);
@ -562,7 +553,7 @@ TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationScreenshareLayers) {
kLegacyScreenshareTl1BitrateKbps;
codec_settings_.simulcastStream[0].numberOfTemporalLayers = 2;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
// Expect empty vector, since this mode doesn't have a fixed framerate.
FramerateFractions expected_fps_allocation[kMaxSpatialLayers];
@ -590,7 +581,7 @@ TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationSimulcastVideo) {
}
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, kNumCores, kMaxPayloadSize));
FramerateFractions expected_fps_allocation[kMaxSpatialLayers];
expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction / 4);


@ -10,7 +10,6 @@
#include "api/video/color_space.h"
#include "api/video/i420_buffer.h"
#include "api/video_codecs/video_encoder.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "media/base/vp9_profile.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
@ -32,11 +31,6 @@ using FramerateFractions =
namespace {
const size_t kWidth = 1280;
const size_t kHeight = 720;
const VideoEncoder::Capabilities kCapabilities(false);
const VideoEncoder::Settings kSettings(kCapabilities,
/*number_of_cores=*/1,
/*max_payload_size=*/0);
} // namespace
class TestVp9Impl : public VideoCodecUnitTest {
@ -205,7 +199,8 @@ TEST_F(TestVp9Impl, EncoderWith2TemporalLayers) {
// Tl0PidIdx is only used in non-flexible mode.
codec_settings_.VP9()->flexibleMode = false;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
// Temporal layer 0.
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
@ -234,7 +229,8 @@ TEST_F(TestVp9Impl, EncoderWith2TemporalLayers) {
TEST_F(TestVp9Impl, EncoderWith2SpatialLayers) {
codec_settings_.VP9()->numberOfSpatialLayers = 2;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
SetWaitForEncodedFramesThreshold(2);
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
@ -277,7 +273,8 @@ TEST_F(TestVp9Impl, EncoderExplicitLayering) {
codec_settings_.spatialLayers[1].maxFramerate = codec_settings_.maxFramerate;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
// Ensure it fails if scaling factors in horz/vert dimentions are different.
codec_settings_.spatialLayers[0].width = codec_settings_.width;
@ -285,7 +282,8 @@ TEST_F(TestVp9Impl, EncoderExplicitLayering) {
codec_settings_.spatialLayers[1].width = codec_settings_.width;
codec_settings_.spatialLayers[1].height = codec_settings_.height;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_PARAMETER,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
// Ensure it fails if scaling factor is not power of two.
codec_settings_.spatialLayers[0].width = codec_settings_.width / 3;
@ -293,7 +291,8 @@ TEST_F(TestVp9Impl, EncoderExplicitLayering) {
codec_settings_.spatialLayers[1].width = codec_settings_.width;
codec_settings_.spatialLayers[1].height = codec_settings_.height;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_PARAMETER,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
}
TEST_F(TestVp9Impl, EnableDisableSpatialLayers) {
@ -310,7 +309,8 @@ TEST_F(TestVp9Impl, EnableDisableSpatialLayers) {
codec_settings_.VP9()->frameDroppingOn = true;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
VideoBitrateAllocation bitrate_allocation;
for (size_t sl_idx = 0; sl_idx < num_spatial_layers; ++sl_idx) {
@ -357,7 +357,8 @@ TEST_F(TestVp9Impl, EndOfPicture) {
ConfigureSvc(num_spatial_layers);
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
// Encode both base and upper layers. Check that end-of-superframe flag is
// set on upper layer frame but not on base layer frame.
@ -384,7 +385,8 @@ TEST_F(TestVp9Impl, EndOfPicture) {
encoder_->SetRates(VideoEncoder::RateControlParameters(
bitrate_allocation, codec_settings_.maxFramerate));
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
SetWaitForEncodedFramesThreshold(1);
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
@ -413,7 +415,8 @@ TEST_F(TestVp9Impl, InterLayerPred) {
for (const InterLayerPredMode inter_layer_pred : inter_layer_pred_modes) {
codec_settings_.VP9()->interLayerPred = inter_layer_pred;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
encoder_->SetRates(VideoEncoder::RateControlParameters(
bitrate_allocation, codec_settings_.maxFramerate));
@ -482,7 +485,8 @@ TEST_F(TestVp9Impl,
for (const InterLayerPredMode inter_layer_pred : inter_layer_pred_modes) {
codec_settings_.VP9()->interLayerPred = inter_layer_pred;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
VideoBitrateAllocation bitrate_allocation;
for (size_t sl_idx = 0; sl_idx < num_spatial_layers; ++sl_idx) {
@ -540,7 +544,8 @@ TEST_F(TestVp9Impl,
for (const InterLayerPredMode inter_layer_pred : inter_layer_pred_modes) {
codec_settings_.VP9()->interLayerPred = inter_layer_pred;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
VideoBitrateAllocation bitrate_allocation;
for (size_t sl_idx = 0; sl_idx < num_spatial_layers; ++sl_idx) {
@ -590,7 +595,8 @@ TEST_F(TestVp9Impl, EnablingDisablingUpperLayerInTheSameGof) {
codec_settings_.VP9()->interLayerPred = InterLayerPredMode::kOn;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
VideoBitrateAllocation bitrate_allocation;
@ -664,7 +670,8 @@ TEST_F(TestVp9Impl, EnablingDisablingUpperLayerAccrossGof) {
codec_settings_.VP9()->interLayerPred = InterLayerPredMode::kOn;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
VideoBitrateAllocation bitrate_allocation;
@ -754,7 +761,8 @@ TEST_F(TestVp9Impl, EnablingNewLayerIsDelayedInScreenshareAndAddsSsInfo) {
codec_settings_.VP9()->interLayerPred = InterLayerPredMode::kOn;
codec_settings_.VP9()->flexibleMode = true;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
// Enable all but the last layer.
VideoBitrateAllocation bitrate_allocation;
@ -822,7 +830,8 @@ TEST_F(TestVp9Impl, ScreenshareFrameDropping) {
codec_settings_.VP9()->interLayerPred = InterLayerPredMode::kOn;
codec_settings_.VP9()->flexibleMode = true;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
// Enable all but the last layer.
VideoBitrateAllocation bitrate_allocation;
@ -915,7 +924,8 @@ TEST_F(TestVp9Impl, RemovingLayerIsNotDelayedInScreenshareAndAddsSsInfo) {
codec_settings_.VP9()->interLayerPred = InterLayerPredMode::kOn;
codec_settings_.VP9()->flexibleMode = true;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
// All layers are enabled from the start.
VideoBitrateAllocation bitrate_allocation;
@ -997,7 +1007,8 @@ TEST_F(TestVp9Impl, DisableNewLayerInVideoDelaysSsInfoTillTL0) {
codec_settings_.VP9()->interLayerPred = InterLayerPredMode::kOnKeyPic;
codec_settings_.VP9()->flexibleMode = false;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
// Enable all the layers.
VideoBitrateAllocation bitrate_allocation;
@ -1058,7 +1069,8 @@ TEST_F(TestVp9Impl,
codec_settings_.VP9()->interLayerPred = InterLayerPredMode::kOn;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
VideoBitrateAllocation bitrate_allocation;
bitrate_allocation.SetBitrate(
@ -1077,7 +1089,8 @@ TEST_F(TestVp9Impl,
TEST_F(TestVp9Impl, ScalabilityStructureIsAvailableInFlexibleMode) {
codec_settings_.VP9()->flexibleMode = true;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->Encode(*NextInputFrame(), nullptr));
@ -1110,7 +1123,8 @@ TEST_F(TestVp9Impl, EncoderInfoFpsAllocation) {
}
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
FramerateFractions expected_fps_allocation[kMaxSpatialLayers];
expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction / 4);
@ -1147,7 +1161,8 @@ TEST_F(TestVp9Impl, EncoderInfoFpsAllocationFlexibleMode) {
}
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
// No temporal layers allowed when spatial layers have different fps targets.
FramerateFractions expected_fps_allocation[kMaxSpatialLayers];
@ -1180,7 +1195,8 @@ TEST_P(TestVp9ImplWithLayering, FlexibleMode) {
codec_settings_.VP9()->numberOfSpatialLayers = num_spatial_layers_;
codec_settings_.VP9()->numberOfTemporalLayers = num_temporal_layers_;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
GofInfoVP9 gof;
if (num_temporal_layers_ == 1) {
@ -1218,7 +1234,8 @@ TEST_P(TestVp9ImplWithLayering, ExternalRefControl) {
codec_settings_.VP9()->numberOfSpatialLayers = num_spatial_layers_;
codec_settings_.VP9()->numberOfTemporalLayers = num_temporal_layers_;
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
GofInfoVP9 gof;
if (num_temporal_layers_ == 1) {
@ -1274,7 +1291,8 @@ TEST_F(TestVp9ImplFrameDropping, PreEncodeFrameDropping) {
codec_settings_.maxFramerate = static_cast<uint32_t>(expected_framerate_fps);
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
VideoFrame* input_frame = NextInputFrame();
for (size_t frame_num = 0; frame_num < num_frames_to_encode; ++frame_num) {
@ -1323,7 +1341,8 @@ TEST_F(TestVp9ImplFrameDropping, DifferentFrameratePerSpatialLayer) {
}
EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
encoder_->InitEncode(&codec_settings_, kSettings));
encoder_->InitEncode(&codec_settings_, 1 /* number of cores */,
0 /* max payload size (unused) */));
encoder_->SetRates(VideoEncoder::RateControlParameters(
bitrate_allocation, codec_settings_.maxFramerate));


@ -401,16 +401,9 @@ void VP9EncoderImpl::SetRates(const RateControlParameters& parameters) {
return;
}
int VP9EncoderImpl::InitEncode(const VideoCodec* codec_settings,
int number_of_cores,
size_t max_payload_size) {
RTC_NOTREACHED();
return WEBRTC_VIDEO_CODEC_ERROR;
}
// TODO(eladalon): s/inst/codec_settings/g.
int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
const Settings& settings) {
int number_of_cores,
size_t /*max_payload_size*/) {
if (inst == nullptr) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
@ -424,7 +417,7 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
if (inst->width < 1 || inst->height < 1) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
if (settings.number_of_cores < 1) {
if (number_of_cores < 1) {
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
if (inst->VP9().numberOfTemporalLayers > 3) {
@ -533,7 +526,7 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
config_->rc_resize_allowed = inst->VP9().automaticResizeOn ? 1 : 0;
// Determine number of threads based on the image size and #cores.
config_->g_threads =
NumberOfThreads(config_->g_w, config_->g_h, settings.number_of_cores);
NumberOfThreads(config_->g_w, config_->g_h, number_of_cores);
cpu_speed_ = GetCpuSpeed(config_->g_w, config_->g_h);


@ -21,7 +21,6 @@
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "api/video_codecs/video_encoder.h"
#include "media/base/vp9_profile.h"
#include "modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h"
#include "modules/video_coding/utility/framerate_controller.h"
@ -43,8 +42,6 @@ class VP9EncoderImpl : public VP9Encoder {
int InitEncode(const VideoCodec* codec_settings,
int number_of_cores,
size_t max_payload_size) override;
int InitEncode(const VideoCodec* codec_settings,
const Settings& settings) override;
int Encode(const VideoFrame& input_image,
const std::vector<VideoFrameType>* frame_types) override;