Fix perf regression in screenshare temporal layer bitrate allocation
A recent CL (https://codereview.webrtc.org/2510583002) introduced an issue where temporal layers may return incorrect bitrates, since they are stateful and GetPreferredBitrateBps() is called on them. The fix is to use a temporary simulcast rate allocator instance, without temporal layers, and query the preferred bitrate from that.

Additionally, part of the regression in allocated bitrate stems from reconfiguring the encoder too often, which yields suboptimal rate control. The fix here is to limit encoder updates to the cases where the values have actually changed.

As a bonus, DCHECKs added by this CL found a bug in the (unused) RealTimeTemporalLayers implementation. Fixed that as well.

BUG=webrtc:6301, chromium:666654

Review-Url: https://codereview.webrtc.org/2529073003
Cr-Commit-Position: refs/heads/master@{#15250}
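The second half of the fix (only reconfiguring the encoder when the rate inputs actually changed) is essentially a dirty-flag check, as the screenshare_layers.cc hunks below show. A minimal standalone sketch of that pattern, using hypothetical names rather than the real WebRTC classes:

#include <cstdint>

// Hypothetical illustration of the "only reconfigure on change" guard used in
// ScreenshareLayers::OnRatesUpdated()/UpdateConfiguration() below.
class RateChangeDetector {
 public:
  // Returns true if the encoder should be reconfigured, i.e. if any of the
  // inputs differs from the last values that were applied.
  bool Update(int bitrate_kbps, int max_bitrate_kbps, int framerate) {
    const bool changed = bitrate_kbps != bitrate_kbps_ ||
                         max_bitrate_kbps != max_bitrate_kbps_ ||
                         framerate != framerate_;
    bitrate_kbps_ = bitrate_kbps;
    max_bitrate_kbps_ = max_bitrate_kbps;
    framerate_ = framerate;
    return changed;
  }

 private:
  int bitrate_kbps_ = -1;
  int max_bitrate_kbps_ = -1;
  int framerate_ = -1;
};

In the actual change, ScreenshareLayers::OnRatesUpdated() does this comparison against the stored layer targets before overwriting them, and UpdateConfiguration() only touches the vpx config when the flag (or the derived target bitrate) has changed.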
@@ -179,11 +179,11 @@ class RealTimeTemporalLayers : public TemporalLayers {
       uint32_t layer_bitrate = bitrates[i];
       RTC_DCHECK_LE(sum, bitrates[i]);
       bitrates[i] -= sum;
-      sum += layer_bitrate;
+      sum = layer_bitrate;
 
-      if (sum == static_cast<uint32_t>(bitrate_kbps)) {
+      if (sum >= static_cast<uint32_t>(bitrate_kbps)) {
         // Sum adds up; any subsequent layers will be 0.
-        bitrates.resize(i);
+        bitrates.resize(i + 1);
         break;
       }
     }
 
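The RTC_DCHECK_LE in this loop guards the invariant the old `sum +=` broke: the VP8 layer allocation table holds aggregate (cumulative) rates, so converting them to per-layer rates means subtracting the running aggregate and then replacing it, not accumulating it. A small self-contained sketch of the corrected loop (the 60/80/100 kbps split is only an illustrative aggregate allocation, not quoted from the table):

#include <cstddef>
#include <cstdint>
#include <vector>

// Convert aggregate per-layer targets into individual per-layer rates,
// mirroring the fixed loop above. E.g. with a 100 kbps target and an
// aggregate split of {60, 80, 100} kbps, the result is {60, 20, 20} kbps.
std::vector<uint32_t> AggregateToIndividual(std::vector<uint32_t> bitrates,
                                            uint32_t target_kbps) {
  uint32_t sum = 0;
  for (size_t i = 0; i < bitrates.size(); ++i) {
    uint32_t layer_aggregate = bitrates[i];
    bitrates[i] -= sum;       // Individual rate for layer i.
    sum = layer_aggregate;    // With the old `sum +=`, sum would exceed the
                              // next aggregate and underflow the subtraction.
    if (sum >= target_kbps) {
      bitrates.resize(i + 1);  // Any remaining layers would get 0.
      break;
    }
  }
  return bitrates;
}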
@@ -162,10 +162,13 @@ int ScreenshareLayers::EncodeFlags(uint32_t timestamp) {
 std::vector<uint32_t> ScreenshareLayers::OnRatesUpdated(int bitrate_kbps,
                                                         int max_bitrate_kbps,
                                                         int framerate) {
+  bitrate_updated_ =
+      bitrate_kbps != static_cast<int>(layers_[0].target_rate_kbps_) ||
+      max_bitrate_kbps != static_cast<int>(layers_[1].target_rate_kbps_) ||
+      framerate != framerate_;
   layers_[0].target_rate_kbps_ = bitrate_kbps;
   layers_[1].target_rate_kbps_ = max_bitrate_kbps;
   framerate_ = framerate;
-  bitrate_updated_ = true;
 
   std::vector<uint32_t> allocation;
   allocation.push_back(bitrate_kbps);
@@ -262,23 +265,27 @@ bool ScreenshareLayers::TimeToSync(int64_t timestamp) const {
   return false;
 }
 
+uint32_t ScreenshareLayers::GetCodecTargetBitrateKbps() const {
+  uint32_t target_bitrate_kbps = layers_[0].target_rate_kbps_;
+
+  if (number_of_temporal_layers_ > 1) {
+    // Calculate a codec target bitrate. This may be higher than TL0, gaining
+    // quality at the expense of frame rate at TL0. Constraints:
+    // - TL0 frame rate no less than framerate / kMaxTL0FpsReduction.
+    // - Target rate * kAcceptableTargetOvershoot should not exceed TL1 rate.
+    target_bitrate_kbps =
+        std::min(layers_[0].target_rate_kbps_ * kMaxTL0FpsReduction,
+                 layers_[1].target_rate_kbps_ / kAcceptableTargetOvershoot);
+  }
+
+  return std::max(layers_[0].target_rate_kbps_, target_bitrate_kbps);
+}
+
 bool ScreenshareLayers::UpdateConfiguration(vpx_codec_enc_cfg_t* cfg) {
   bool cfg_updated = false;
-  if (bitrate_updated_) {
-    uint32_t target_bitrate_kbps = layers_[0].target_rate_kbps_;
-
-    if (number_of_temporal_layers_ > 1) {
-      // Calculate a codec target bitrate. This may be higher than TL0, gaining
-      // quality at the expense of frame rate at TL0. Constraints:
-      // - TL0 frame rate no less than framerate / kMaxTL0FpsReduction.
-      // - Target rate * kAcceptableTargetOvershoot should not exceed TL1 rate.
-      target_bitrate_kbps =
-          std::min(layers_[0].target_rate_kbps_ * kMaxTL0FpsReduction,
-                   layers_[1].target_rate_kbps_ / kAcceptableTargetOvershoot);
-
-      cfg->rc_target_bitrate =
-          std::max(layers_[0].target_rate_kbps_, target_bitrate_kbps);
-    }
+  uint32_t target_bitrate_kbps = GetCodecTargetBitrateKbps();
+  if (bitrate_updated_ || cfg->rc_target_bitrate != target_bitrate_kbps) {
+    cfg->rc_target_bitrate = target_bitrate_kbps;
 
     // Don't reconfigure qp limits during quality boost frames.
     if (active_layer_ == -1 ||
@@ -329,6 +336,7 @@ bool ScreenshareLayers::UpdateConfiguration(vpx_codec_enc_cfg_t* cfg) {
 
   cfg->rc_max_quantizer = adjusted_max_qp;
+  cfg_updated = true;
 
   return cfg_updated;
 }
 
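For a concrete feel of GetCodecTargetBitrateKbps(), here is a standalone sketch with illustrative constants (the real kMaxTL0FpsReduction and kAcceptableTargetOvershoot are defined in screenshare_layers.cc and may differ):

#include <algorithm>
#include <cstdint>

// Sketch of the codec target calculation above, with illustrative constants.
constexpr double kMaxTL0FpsReduction = 2.5;
constexpr double kAcceptableTargetOvershoot = 2.0;

uint32_t CodecTargetKbps(uint32_t tl0_kbps, uint32_t tl1_kbps) {
  // E.g. tl0 = 200, tl1 = 1000  ->  min(200 * 2.5, 1000 / 2.0) = 500 kbps.
  uint32_t target = static_cast<uint32_t>(
      std::min(tl0_kbps * kMaxTL0FpsReduction,
               tl1_kbps / kAcceptableTargetOvershoot));
  // Never go below the plain TL0 target.
  return std::max(tl0_kbps, target);
}

UpdateConfiguration() then compares this value against cfg->rc_target_bitrate, so an unchanged target no longer triggers a reconfiguration.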
@@ -59,6 +59,7 @@ class ScreenshareLayers : public TemporalLayers {
 
  private:
   bool TimeToSync(int64_t timestamp) const;
+  uint32_t GetCodecTargetBitrateKbps() const;
 
   Clock* const clock_;
 
@@ -418,6 +418,8 @@ TEST_F(ScreenshareLayerTest, EncoderDrop) {
   ConfigureBitrates();
   CodecSpecificInfoVP8 vp8_info;
   vpx_codec_enc_cfg_t cfg = GetConfig();
+  // Updates cfg with current target bitrate.
+  EXPECT_TRUE(layers_->UpdateConfiguration(&cfg));
 
   uint32_t timestamp = RunGracePeriod();
   timestamp = SkipUntilTl(0, timestamp);