LibvpxVp9Encoder: add option to configure resolution_bitrate_limits.

Bug: none
Change-Id: Icdd7333296d652b1e0c159226df702084303475c
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/204701
Reviewed-by: Sergey Silkin <ssilkin@webrtc.org>
Commit-Queue: Åsa Persson <asapersson@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#33121}
Author:    Åsa Persson
Date:      2021-02-01 09:20:05 +01:00
Committer: Commit Bot
Parent:    989e6e7d22
Commit:    c91c4233e3

11 changed files with 214 additions and 18 deletions
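
Note: the override is read from the "WebRTC-LibvpxVp9Encoder-GetEncoderInfoOverride" field trial (see the new TestVp9Impl unit test below). A minimal sketch of how it could be enabled in a test, with illustrative resolutions and bitrate values only:

  #include "test/field_trial.h"

  // Sketch only: one pipe-separated column per resolution; values below are
  // illustrative, not recommendations.
  test::ScopedFieldTrials field_trials(
      "WebRTC-LibvpxVp9Encoder-GetEncoderInfoOverride/"
      "frame_size_pixels:230400|921600,"  // 640*360 and 1280*720.
      "min_start_bitrate_bps:300000|500000,"
      "min_bitrate_bps:100000|200000,"
      "max_bitrate_bps:1200000|2500000/");
  // Encoders created while the trial is active report these limits from
  // GetEncoderInfo().resolution_bitrate_limits, e.g.
  // {230400, 300000, 100000, 1200000} and {921600, 500000, 200000, 2500000}.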

@@ -586,6 +586,7 @@ rtc_library("webrtc_vp9") {
     "../../media:rtc_vp9_profile",
     "../../rtc_base",
     "../../rtc_base:checks",
+    "../../rtc_base/experiments:encoder_info_settings",
     "../../rtc_base/experiments:field_trial_parser",
     "../../rtc_base/experiments:rate_control_settings",
     "../../rtc_base/synchronization:mutex",

@@ -1718,6 +1718,10 @@ VideoEncoder::EncoderInfo LibvpxVp9Encoder::GetEncoderInfo() const {
                                      VideoFrameBuffer::Type::kNV12};
     }
   }
+  if (!encoder_info_override_.resolution_bitrate_limits().empty()) {
+    info.resolution_bitrate_limits =
+        encoder_info_override_.resolution_bitrate_limits();
+  }
   return info;
 }

@@ -28,6 +28,7 @@
 #include "modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h"
 #include "modules/video_coding/svc/scalable_video_controller.h"
 #include "modules/video_coding/utility/framerate_controller.h"
+#include "rtc_base/experiments/encoder_info_settings.h"
 #include "vpx/vp8cx.h"

 namespace webrtc {
@@ -230,6 +231,8 @@ class LibvpxVp9Encoder : public VP9Encoder {
   int num_steady_state_frames_;
   // Only set config when this flag is set.
   bool config_changed_;
+
+  const LibvpxVp9EncoderInfoSettings encoder_info_override_;
 };

 }  // namespace webrtc

@@ -1636,6 +1636,27 @@ TEST_F(TestVp9Impl, Profile0PreferredPixelFormats) {
                                      VideoFrameBuffer::Type::kI420));
 }

+TEST_F(TestVp9Impl, EncoderInfoWithoutResolutionBitrateLimits) {
+  EXPECT_TRUE(encoder_->GetEncoderInfo().resolution_bitrate_limits.empty());
+}
+
+TEST_F(TestVp9Impl, EncoderInfoWithBitrateLimitsFromFieldTrial) {
+  test::ScopedFieldTrials field_trials(
+      "WebRTC-LibvpxVp9Encoder-GetEncoderInfoOverride/"
+      "frame_size_pixels:123|456|789,"
+      "min_start_bitrate_bps:11000|22000|33000,"
+      "min_bitrate_bps:44000|55000|66000,"
+      "max_bitrate_bps:77000|88000|99000/");
+  SetUp();
+
+  EXPECT_THAT(
+      encoder_->GetEncoderInfo().resolution_bitrate_limits,
+      ::testing::ElementsAre(
+          VideoEncoder::ResolutionBitrateLimits{123, 11000, 44000, 77000},
+          VideoEncoder::ResolutionBitrateLimits{456, 22000, 55000, 88000},
+          VideoEncoder::ResolutionBitrateLimits{789, 33000, 66000, 99000}));
+}
+
 TEST_F(TestVp9Impl, EncoderInfoFpsAllocation) {
   const uint8_t kNumSpatialLayers = 3;
   const uint8_t kNumTemporalLayers = 3;

@@ -75,4 +75,7 @@ SimulcastEncoderAdapterEncoderInfoSettings::
     : EncoderInfoSettings(
           "WebRTC-SimulcastEncoderAdapter-GetEncoderInfoOverride") {}

+LibvpxVp9EncoderInfoSettings::LibvpxVp9EncoderInfoSettings()
+    : EncoderInfoSettings("WebRTC-LibvpxVp9Encoder-GetEncoderInfoOverride") {}
+
 }  // namespace webrtc

@@ -57,6 +57,13 @@ class SimulcastEncoderAdapterEncoderInfoSettings : public EncoderInfoSettings {
   ~SimulcastEncoderAdapterEncoderInfoSettings() override {}
 };

+// EncoderInfo settings for LibvpxVp9Encoder.
+class LibvpxVp9EncoderInfoSettings : public EncoderInfoSettings {
+ public:
+  LibvpxVp9EncoderInfoSettings();
+  ~LibvpxVp9EncoderInfoSettings() override {}
+};
+
 }  // namespace webrtc

 #endif  // RTC_BASE_EXPERIMENTS_ENCODER_INFO_SETTINGS_H_

@@ -19,23 +19,6 @@
 namespace webrtc {

-namespace {
-
-bool IsSimulcast(const VideoEncoderConfig& encoder_config) {
-  const std::vector<VideoStream>& simulcast_layers =
-      encoder_config.simulcast_layers;
-  bool is_simulcast = simulcast_layers.size() > 1;
-  bool is_lowest_layer_active = simulcast_layers[0].active;
-  int num_active_layers =
-      std::count_if(simulcast_layers.begin(), simulcast_layers.end(),
-                    [](const VideoStream& layer) { return layer.active; });
-  // We can't distinguish between simulcast and singlecast when only the
-  // lowest spatial layer is active. Treat this case as simulcast.
-  return is_simulcast && (num_active_layers > 1 || is_lowest_layer_active);
-}
-
-}  // namespace
-
 BitrateConstraint::BitrateConstraint()
     : encoder_settings_(absl::nullopt),
       encoder_target_bitrate_bps_(absl::nullopt) {
@@ -70,7 +53,8 @@ bool BitrateConstraint::IsAdaptationUpAllowed(
     return true;
   }

-  if (IsSimulcast(encoder_settings_->encoder_config())) {
+  if (VideoStreamEncoderResourceManager::IsSimulcast(
+          encoder_settings_->encoder_config())) {
     // Resolution bitrate limits usage is restricted to singlecast.
     return true;
   }

@@ -706,4 +706,20 @@ VideoStreamEncoderResourceManager::GetSingleActiveLayerPixels(
   return pixels;
 }

+bool VideoStreamEncoderResourceManager::IsSimulcast(
+    const VideoEncoderConfig& encoder_config) {
+  const std::vector<VideoStream>& simulcast_layers =
+      encoder_config.simulcast_layers;
+  bool is_simulcast = simulcast_layers.size() > 1;
+  bool is_lowest_layer_active = simulcast_layers[0].active;
+  int num_active_layers =
+      std::count_if(simulcast_layers.begin(), simulcast_layers.end(),
+                    [](const VideoStream& layer) { return layer.active; });
+  // We can't distinguish between simulcast and singlecast when only the
+  // lowest spatial layer is active. Treat this case as simulcast.
+  return is_simulcast && (num_active_layers > 1 || is_lowest_layer_active);
+}
+
 }  // namespace webrtc

@@ -148,6 +148,7 @@ class VideoStreamEncoderResourceManager
   static absl::optional<uint32_t> GetSingleActiveLayerPixels(
       const VideoCodec& codec);
+  static bool IsSimulcast(const VideoEncoderConfig& encoder_config);

  private:
   class InitialFrameDropper;

@@ -348,6 +348,41 @@ int NumActiveStreams(const std::vector<VideoStream>& streams) {
   return num_active;
 }

+void ApplyVp9BitrateLimits(const VideoEncoder::EncoderInfo& encoder_info,
+                           const VideoEncoderConfig& encoder_config,
+                           VideoCodec* codec) {
+  if (codec->codecType != VideoCodecType::kVideoCodecVP9 ||
+      VideoStreamEncoderResourceManager::IsSimulcast(encoder_config)) {
+    // Resolution bitrate limits usage is restricted to singlecast.
+    return;
+  }
+  // Get bitrate limits for active stream.
+  absl::optional<uint32_t> pixels =
+      VideoStreamEncoderResourceManager::GetSingleActiveLayerPixels(*codec);
+  if (!pixels.has_value()) {
+    return;
+  }
+  absl::optional<VideoEncoder::ResolutionBitrateLimits> bitrate_limits =
+      encoder_info.GetEncoderBitrateLimitsForResolution(*pixels);
+  if (!bitrate_limits.has_value()) {
+    return;
+  }
+  for (int i = 0; i < codec->VP9()->numberOfSpatialLayers; ++i) {
+    if (codec->spatialLayers[i].active) {
+      codec->spatialLayers[i].minBitrate =
+          bitrate_limits->min_bitrate_bps / 1000;
+      codec->spatialLayers[i].maxBitrate =
+          bitrate_limits->max_bitrate_bps / 1000;
+      codec->spatialLayers[i].targetBitrate =
+          std::min(codec->spatialLayers[i].targetBitrate,
+                   codec->spatialLayers[i].maxBitrate);
+      break;
+    }
+  }
+}
+
 void ApplyEncoderBitrateLimitsIfSingleActiveStream(
     const VideoEncoder::EncoderInfo& encoder_info,
     const std::vector<VideoStream>& encoder_config_layers,
@@ -901,6 +936,10 @@ void VideoStreamEncoder::ReconfigureEncoder() {
     // thus some cropping might be needed.
     crop_width_ = last_frame_info_->width - codec.width;
     crop_height_ = last_frame_info_->height - codec.height;
+    if (encoder_bitrate_limits_) {
+      ApplyVp9BitrateLimits(encoder_->GetEncoderInfo(), encoder_config_,
+                            &codec);
+    }
   }

   char log_stream_buf[4 * 1024];

@@ -5356,6 +5356,123 @@ TEST_F(VideoStreamEncoderTest, InitialFrameDropActivatesWhenSVCLayersChange) {
   video_stream_encoder_->Stop();
 }

+TEST_F(VideoStreamEncoderTest,
+       EncoderMaxAndMinBitratesUsedIfMiddleStreamActive) {
+  const VideoEncoder::ResolutionBitrateLimits kEncoderLimits270p(
+      480 * 270, 34 * 1000, 12 * 1000, 1234 * 1000);
+  const VideoEncoder::ResolutionBitrateLimits kEncoderLimits360p(
+      640 * 360, 43 * 1000, 21 * 1000, 2345 * 1000);
+  const VideoEncoder::ResolutionBitrateLimits kEncoderLimits720p(
+      1280 * 720, 54 * 1000, 31 * 1000, 2500 * 1000);
+  fake_encoder_.SetResolutionBitrateLimits(
+      {kEncoderLimits270p, kEncoderLimits360p, kEncoderLimits720p});
+
+  VideoEncoderConfig video_encoder_config;
+  test::FillEncoderConfiguration(PayloadStringToCodecType("VP9"), 1,
+                                 &video_encoder_config);
+  VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
+  vp9_settings.numberOfSpatialLayers = 3;
+  // Since only one layer is active - automatic resize should be enabled.
+  vp9_settings.automaticResizeOn = true;
+  video_encoder_config.encoder_specific_settings =
+      new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+          vp9_settings);
+  video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrateBps;
+  video_encoder_config.content_type =
+      VideoEncoderConfig::ContentType::kRealtimeVideo;
+  // Simulcast layers are used to indicate which spatial layers are active.
+  video_encoder_config.simulcast_layers.resize(3);
+  video_encoder_config.simulcast_layers[0].active = false;
+  video_encoder_config.simulcast_layers[1].active = true;
+  video_encoder_config.simulcast_layers[2].active = false;
+
+  video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(),
+                                          kMaxPayloadLength);
+  video_stream_encoder_->WaitUntilTaskQueueIsIdle();
+
+  // The encoder bitrate limits for 360p should be used.
+  video_source_.IncomingCapturedFrame(CreateFrame(1, 1280, 720));
+  EXPECT_FALSE(WaitForFrame(1000));
+  EXPECT_EQ(fake_encoder_.video_codec().numberOfSimulcastStreams, 1);
+  EXPECT_EQ(fake_encoder_.video_codec().codecType,
+            VideoCodecType::kVideoCodecVP9);
+  EXPECT_EQ(fake_encoder_.video_codec().VP9()->numberOfSpatialLayers, 2);
+  EXPECT_TRUE(fake_encoder_.video_codec().spatialLayers[0].active);
+  EXPECT_EQ(640, fake_encoder_.video_codec().spatialLayers[0].width);
+  EXPECT_EQ(360, fake_encoder_.video_codec().spatialLayers[0].height);
+  EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits360p.min_bitrate_bps),
+            fake_encoder_.video_codec().spatialLayers[0].minBitrate * 1000);
+  EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits360p.max_bitrate_bps),
+            fake_encoder_.video_codec().spatialLayers[0].maxBitrate * 1000);
+
+  // The encoder bitrate limits for 270p should be used.
+  video_source_.IncomingCapturedFrame(CreateFrame(2, 960, 540));
+  EXPECT_FALSE(WaitForFrame(1000));
+  EXPECT_EQ(fake_encoder_.video_codec().numberOfSimulcastStreams, 1);
+  EXPECT_EQ(fake_encoder_.video_codec().codecType,
+            VideoCodecType::kVideoCodecVP9);
+  EXPECT_EQ(fake_encoder_.video_codec().VP9()->numberOfSpatialLayers, 2);
+  EXPECT_TRUE(fake_encoder_.video_codec().spatialLayers[0].active);
+  EXPECT_EQ(480, fake_encoder_.video_codec().spatialLayers[0].width);
+  EXPECT_EQ(270, fake_encoder_.video_codec().spatialLayers[0].height);
+  EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits270p.min_bitrate_bps),
+            fake_encoder_.video_codec().spatialLayers[0].minBitrate * 1000);
+  EXPECT_EQ(static_cast<uint32_t>(kEncoderLimits270p.max_bitrate_bps),
+            fake_encoder_.video_codec().spatialLayers[0].maxBitrate * 1000);
+
+  video_stream_encoder_->Stop();
+}
+
+TEST_F(VideoStreamEncoderTest,
+       EncoderMaxAndMinBitratesNotUsedIfLowestStreamActive) {
+  const VideoEncoder::ResolutionBitrateLimits kEncoderLimits180p(
+      320 * 180, 34 * 1000, 12 * 1000, 1234 * 1000);
+  const VideoEncoder::ResolutionBitrateLimits kEncoderLimits720p(
+      1280 * 720, 54 * 1000, 31 * 1000, 2500 * 1000);
+  fake_encoder_.SetResolutionBitrateLimits(
+      {kEncoderLimits180p, kEncoderLimits720p});
+
+  VideoEncoderConfig video_encoder_config;
+  test::FillEncoderConfiguration(PayloadStringToCodecType("VP9"), 1,
+                                 &video_encoder_config);
+  VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
+  vp9_settings.numberOfSpatialLayers = 3;
+  // Since only one layer is active - automatic resize should be enabled.
+  vp9_settings.automaticResizeOn = true;
+  video_encoder_config.encoder_specific_settings =
+      new rtc::RefCountedObject<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+          vp9_settings);
+  video_encoder_config.max_bitrate_bps = kSimulcastTargetBitrateBps;
+  video_encoder_config.content_type =
+      VideoEncoderConfig::ContentType::kRealtimeVideo;
+  // Simulcast layers are used to indicate which spatial layers are active.
+  video_encoder_config.simulcast_layers.resize(3);
+  video_encoder_config.simulcast_layers[0].active = true;
+  video_encoder_config.simulcast_layers[1].active = false;
+  video_encoder_config.simulcast_layers[2].active = false;
+
+  video_stream_encoder_->ConfigureEncoder(video_encoder_config.Copy(),
+                                          kMaxPayloadLength);
+  video_stream_encoder_->WaitUntilTaskQueueIsIdle();
+
+  // Limits not applied on lowest stream, limits for 180p should not be used.
+  video_source_.IncomingCapturedFrame(CreateFrame(1, 1280, 720));
+  EXPECT_FALSE(WaitForFrame(1000));
+  EXPECT_EQ(fake_encoder_.video_codec().numberOfSimulcastStreams, 1);
+  EXPECT_EQ(fake_encoder_.video_codec().codecType,
+            VideoCodecType::kVideoCodecVP9);
+  EXPECT_EQ(fake_encoder_.video_codec().VP9()->numberOfSpatialLayers, 3);
+  EXPECT_TRUE(fake_encoder_.video_codec().spatialLayers[0].active);
+  EXPECT_EQ(320, fake_encoder_.video_codec().spatialLayers[0].width);
+  EXPECT_EQ(180, fake_encoder_.video_codec().spatialLayers[0].height);
+  EXPECT_NE(static_cast<uint32_t>(kEncoderLimits180p.min_bitrate_bps),
+            fake_encoder_.video_codec().spatialLayers[0].minBitrate * 1000);
+  EXPECT_NE(static_cast<uint32_t>(kEncoderLimits180p.max_bitrate_bps),
+            fake_encoder_.video_codec().spatialLayers[0].maxBitrate * 1000);
+
+  video_stream_encoder_->Stop();
+}
+
 TEST_F(VideoStreamEncoderTest,
        InitialFrameDropActivatesWhenResolutionIncreases) {
   const int kWidth = 640;