Fix perf regression in screenshare temporal layer bitrate allocation

A recent CL (https://codereview.webrtc.org/2510583002) introduced an
issue where temporal layers may return incorrect bitrates: they are
stateful, so calling GetPreferredBitrateBps() on the live allocator has
side effects on them. The fix is to use a temporary simulcast rate
allocator instance, without temporal layers, and get the preferred
bitrate from that.

Additionally, part of the regression in allocated bitrate stems from
reconfiguring the encoder too often, which yields suboptimal rate
control. The fix here is to limit encoder updates to cases where the
values have actually changed.

As a bonus, DCHECKs added by this CL uncovered a bug in the (unused)
RealTimeTemporalLayers implementation. That is fixed as well.

BUG=webrtc:6301, chromium:666654

Review-Url: https://codereview.webrtc.org/2529073003
Cr-Commit-Position: refs/heads/master@{#15250}
Author: sprang
Date: 2016-11-25 08:09:43 -08:00
Committed by: Commit bot
Parent: fd34d30579
Commit: c7805dbd0e

6 changed files with 65 additions and 25 deletions

View File

@@ -179,11 +179,11 @@ class RealTimeTemporalLayers : public TemporalLayers {
       uint32_t layer_bitrate = bitrates[i];
       RTC_DCHECK_LE(sum, bitrates[i]);
       bitrates[i] -= sum;
-      sum += layer_bitrate;
+      sum = layer_bitrate;
 
-      if (sum == static_cast<uint32_t>(bitrate_kbps)) {
+      if (sum >= static_cast<uint32_t>(bitrate_kbps)) {
         // Sum adds up; any subsequent layers will be 0.
-        bitrates.resize(i);
+        bitrates.resize(i + 1);
         break;
       }
     }
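
For context, a minimal standalone walk-through of the fixed loop above. It is not part of the CL; the aggregated example rates (50/80/100 kbps) are made up, and it shows both why the RTC_DCHECK_LE in the loop would fire with the old arithmetic and what the corrected loop produces.

// Standalone walk-through of the aggregate-to-individual rate conversion.
// Example values only; not taken from the CL.
#include <cassert>
#include <cstdint>
#include <vector>

int main() {
  // Aggregated allocation for 3 temporal layers at 100 kbps total:
  // TL0 = 50, TL0+TL1 = 80, TL0+TL1+TL2 = 100.
  std::vector<uint32_t> bitrates = {50, 80, 100};
  const int bitrate_kbps = 100;

  uint32_t sum = 0;
  for (size_t i = 0; i < bitrates.size(); ++i) {
    uint32_t layer_bitrate = bitrates[i];
    assert(sum <= bitrates[i]);  // Mirrors the RTC_DCHECK_LE above.
    bitrates[i] -= sum;          // Individual rate for this layer.
    sum = layer_bitrate;         // Fixed: sum is the aggregate so far.
    if (sum >= static_cast<uint32_t>(bitrate_kbps)) {
      bitrates.resize(i + 1);    // Fixed: keep the layer just computed.
      break;
    }
  }

  // Yields {50, 30, 20}. With the old "sum += layer_bitrate", sum would
  // reach 50 + 80 = 130 at i == 2 and trip the DCHECK, and the old
  // "resize(i)" would have discarded the last allocated layer.
  assert(bitrates.size() == 3);
  assert(bitrates[0] == 50 && bitrates[1] == 30 && bitrates[2] == 20);
  return 0;
}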

View File

@@ -162,10 +162,13 @@ int ScreenshareLayers::EncodeFlags(uint32_t timestamp) {
 std::vector<uint32_t> ScreenshareLayers::OnRatesUpdated(int bitrate_kbps,
                                                         int max_bitrate_kbps,
                                                         int framerate) {
+  bitrate_updated_ =
+      bitrate_kbps != static_cast<int>(layers_[0].target_rate_kbps_) ||
+      max_bitrate_kbps != static_cast<int>(layers_[1].target_rate_kbps_) ||
+      framerate != framerate_;
   layers_[0].target_rate_kbps_ = bitrate_kbps;
   layers_[1].target_rate_kbps_ = max_bitrate_kbps;
   framerate_ = framerate;
-  bitrate_updated_ = true;
 
   std::vector<uint32_t> allocation;
   allocation.push_back(bitrate_kbps);
@@ -262,23 +265,27 @@ bool ScreenshareLayers::TimeToSync(int64_t timestamp) const {
   return false;
 }
 
+uint32_t ScreenshareLayers::GetCodecTargetBitrateKbps() const {
+  uint32_t target_bitrate_kbps = layers_[0].target_rate_kbps_;
+
+  if (number_of_temporal_layers_ > 1) {
+    // Calculate a codec target bitrate. This may be higher than TL0, gaining
+    // quality at the expense of frame rate at TL0. Constraints:
+    // - TL0 frame rate no less than framerate / kMaxTL0FpsReduction.
+    // - Target rate * kAcceptableTargetOvershoot should not exceed TL1 rate.
+    target_bitrate_kbps =
+        std::min(layers_[0].target_rate_kbps_ * kMaxTL0FpsReduction,
+                 layers_[1].target_rate_kbps_ / kAcceptableTargetOvershoot);
+  }
+
+  return std::max(layers_[0].target_rate_kbps_, target_bitrate_kbps);
+}
+
 bool ScreenshareLayers::UpdateConfiguration(vpx_codec_enc_cfg_t* cfg) {
   bool cfg_updated = false;
-  if (bitrate_updated_) {
-    uint32_t target_bitrate_kbps = layers_[0].target_rate_kbps_;
-    if (number_of_temporal_layers_ > 1) {
-      // Calculate a codec target bitrate. This may be higher than TL0, gaining
-      // quality at the expense of frame rate at TL0. Constraints:
-      // - TL0 frame rate no less than framerate / kMaxTL0FpsReduction.
-      // - Target rate * kAcceptableTargetOvershoot should not exceed TL1 rate.
-      target_bitrate_kbps =
-          std::min(layers_[0].target_rate_kbps_ * kMaxTL0FpsReduction,
-                   layers_[1].target_rate_kbps_ / kAcceptableTargetOvershoot);
-    }
-    cfg->rc_target_bitrate =
-        std::max(layers_[0].target_rate_kbps_, target_bitrate_kbps);
+  uint32_t target_bitrate_kbps = GetCodecTargetBitrateKbps();
+  if (bitrate_updated_ || cfg->rc_target_bitrate != target_bitrate_kbps) {
+    cfg->rc_target_bitrate = target_bitrate_kbps;
 
     // Don't reconfigure qp limits during quality boost frames.
     if (active_layer_ == -1 ||
@@ -329,6 +336,7 @@ bool ScreenshareLayers::UpdateConfiguration(vpx_codec_enc_cfg_t* cfg) {
     cfg->rc_max_quantizer = adjusted_max_qp;
     cfg_updated = true;
   }
 
   return cfg_updated;
 }
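
For context, a sketch (not from the CL) of how a caller can consume the UpdateConfiguration() return value so that libvpx is only reconfigured when something actually changed. ApplyRates() and its parameters are illustrative placeholders, and the include paths are assumptions rather than lines from this diff.

// Sketch only: gate libvpx reconfiguration on the UpdateConfiguration()
// return value. ApplyRates() is an illustrative placeholder, not real code
// from the VP8 encoder wrapper; include paths are assumed.
#include "vpx/vpx_encoder.h"
#include "webrtc/modules/video_coding/codecs/vp8/temporal_layers.h"

namespace webrtc {

void ApplyRates(TemporalLayers* layers,
                vpx_codec_ctx_t* encoder,
                vpx_codec_enc_cfg_t* config) {
  // After this CL, UpdateConfiguration() returns true only when it actually
  // modified |config| (target bitrate or qp limits).
  if (layers->UpdateConfiguration(config)) {
    // Pushing a new config resets parts of libvpx's rate control state, so
    // skipping this call for no-op updates avoids the rate control regression.
    vpx_codec_enc_config_set(encoder, config);
  }
}

}  // namespace webrtc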

View File

@@ -59,6 +59,7 @@ class ScreenshareLayers : public TemporalLayers {
  private:
   bool TimeToSync(int64_t timestamp) const;
+  uint32_t GetCodecTargetBitrateKbps() const;
 
   Clock* const clock_;

View File

@@ -418,6 +418,8 @@ TEST_F(ScreenshareLayerTest, EncoderDrop) {
   ConfigureBitrates();
   CodecSpecificInfoVP8 vp8_info;
   vpx_codec_enc_cfg_t cfg = GetConfig();
+  // Updates cfg with current target bitrate.
+  EXPECT_TRUE(layers_->UpdateConfiguration(&cfg));
 
   uint32_t timestamp = RunGracePeriod();
   timestamp = SkipUntilTl(0, timestamp);

View File

@@ -99,16 +99,18 @@ BitrateAllocation SimulcastRateAllocator::GetAllocation(
     uint32_t target_bitrate_kbps =
         allocated_bitrates_bps.GetBitrate(simulcast_id, 0) / 1000;
+    const uint32_t expected_allocated_bitrate_kbps = target_bitrate_kbps;
     RTC_DCHECK_EQ(
         target_bitrate_kbps,
         allocated_bitrates_bps.GetSpatialLayerSum(simulcast_id) / 1000);
+    const int num_temporal_streams = std::max<uint8_t>(
+        1, codec_.numberOfSimulcastStreams == 0
+               ? codec_.VP8().numberOfTemporalLayers
+               : codec_.simulcastStream[0].numberOfTemporalLayers);
+
     uint32_t max_bitrate_kbps;
     if (num_spatial_streams == 1) {
       max_bitrate_kbps = codec_.maxBitrate;
-      const int num_temporal_streams =
-          codec_.numberOfSimulcastStreams == 0
-              ? codec_.VP8().numberOfTemporalLayers
-              : codec_.simulcastStream[0].numberOfTemporalLayers;
 
       // TODO(holmer): This is a temporary hack for screensharing, where we
       // interpret the startBitrate as the encoder target bitrate. This is
@@ -127,19 +129,28 @@ BitrateAllocation SimulcastRateAllocator::GetAllocation(
     std::vector<uint32_t> tl_allocation = tl_it->second->OnRatesUpdated(
         target_bitrate_kbps, max_bitrate_kbps, framerate);
+    RTC_DCHECK_GT(tl_allocation.size(), 0);
+    RTC_DCHECK_LE(tl_allocation.size(), num_temporal_streams);
 
+    uint64_t tl_allocation_sum_kbps = 0;
     for (size_t tl_index = 0; tl_index < tl_allocation.size(); ++tl_index) {
+      uint32_t layer_rate_kbps = tl_allocation[tl_index];
       allocated_bitrates_bps.SetBitrate(simulcast_id, tl_index,
-                                        tl_allocation[tl_index] * 1000);
+                                        layer_rate_kbps * 1000);
+      tl_allocation_sum_kbps += layer_rate_kbps;
     }
+    RTC_DCHECK_LE(tl_allocation_sum_kbps, expected_allocated_bitrate_kbps);
   }
 
   return allocated_bitrates_bps;
 }
 
 uint32_t SimulcastRateAllocator::GetPreferredBitrateBps(uint32_t framerate) {
+  // Create a temporary instance without temporal layers, as they may be
+  // stateful, and updating the bitrate to max here can cause side effects.
+  SimulcastRateAllocator temp_allocator(codec_, nullptr);
   BitrateAllocation allocation =
-      GetAllocation(codec_.maxBitrate * 1000, framerate);
+      temp_allocator.GetAllocation(codec_.maxBitrate * 1000, framerate);
   return allocation.get_sum_bps();
 }
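
For context, a sketch (not from the CL) restating why GetPreferredBitrateBps() now queries a throwaway allocator. The helper name and header path below are assumptions; the constructor, GetAllocation() and get_sum_bps() usage mirror the code above.

// Sketch only: why the preferred-bitrate query must not touch the live
// temporal layers. QueryPreferredBitrateBps() is an illustrative helper and
// the header path is an assumption.
#include "webrtc/modules/video_coding/utility/simulcast_rate_allocator.h"

namespace webrtc {

uint32_t QueryPreferredBitrateBps(const VideoCodec& codec,
                                  uint32_t framerate) {
  // A live allocator holds pointers to stateful TemporalLayers objects
  // (e.g. ScreenshareLayers). Running GetAllocation() on it with the codec's
  // max bitrate would invoke OnRatesUpdated() on those layers and overwrite
  // the targets set by the most recent real rate update. Using a throwaway
  // allocator with no temporal layers registered keeps the computation free
  // of side effects.
  SimulcastRateAllocator temp_allocator(codec, nullptr);
  return temp_allocator.GetAllocation(codec.maxBitrate * 1000, framerate)
      .get_sum_bps();
}

}  // namespace webrtc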

View File

@@ -15,14 +15,28 @@
 #include <utility>
 #include <vector>
 
+#include "webrtc/test/gmock.h"
 #include "webrtc/test/gtest.h"
 
 namespace webrtc {
 namespace {
 
+using ::testing::_;
+
 constexpr uint32_t kMinBitrateKbps = 50;
 constexpr uint32_t kTargetBitrateKbps = 100;
 constexpr uint32_t kMaxBitrateKbps = 1000;
 constexpr uint32_t kFramerateFps = 5;
 
+class MockTemporalLayers : public TemporalLayers {
+ public:
+  MOCK_METHOD1(EncodeFlags, int(uint32_t));
+  MOCK_METHOD3(OnRatesUpdated, std::vector<uint32_t>(int, int, int));
+  MOCK_METHOD1(UpdateConfiguration, bool(vpx_codec_enc_cfg_t*));
+  MOCK_METHOD3(PopulateCodecSpecific,
+               void(bool, CodecSpecificInfoVP8*, uint32_t));
+  MOCK_METHOD3(FrameEncoded, void(unsigned int, uint32_t, int));
+  MOCK_CONST_METHOD0(CurrentLayerId, int());
+};
 }  // namespace
 
 class SimulcastRateAllocatorTest : public ::testing::TestWithParam<bool> {
@@ -251,6 +265,10 @@ TEST_F(SimulcastRateAllocatorTest, OneToThreeStreams) {
 }
 
 TEST_F(SimulcastRateAllocatorTest, GetPreferredBitrateBps) {
+  MockTemporalLayers mock_layers;
+  allocator_.reset(new SimulcastRateAllocator(codec_, nullptr));
+  allocator_->OnTemporalLayersCreated(0, &mock_layers);
+  EXPECT_CALL(mock_layers, OnRatesUpdated(_, _, _)).Times(0);
   EXPECT_EQ(codec_.maxBitrate * 1000,
             allocator_->GetPreferredBitrateBps(codec_.maxFramerate));
 }