Experimental improvements for simulcast screenshare

* Make shorter 4-frame pattern default if 2 temporal layers are used.
* Make DefaultTemporalLayers usable by upper simulcast stream with 2tl.
* If experimental settings are enabled, bump the max bitrate for the top
  stream. Since we're now using probing everywhere the rampup should be
  less of an issue.
* Additionally, fixes an issue in full stack tests, where
  ScopedFieldTrials in an experiment would override the
  --force_fieldtrials specified at command line. Some trials added by
  the test bots caused timeouts without this.

Bug: webrtc:9477
Change-Id: I42410605d416b51c4fbfe5b6b850997484af583c
Reviewed-on: https://webrtc-review.googlesource.com/92883
Reviewed-by: Sergey Silkin <ssilkin@webrtc.org>
Commit-Queue: Erik Språng <sprang@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#24252}
This commit is contained in:
Erik Språng
2018-08-09 16:12:54 +02:00
committed by Commit Bot
parent d2b9740f48
commit b6b1cacd09
7 changed files with 62 additions and 27 deletions

View File

@ -24,9 +24,15 @@ namespace cricket {
namespace {
// Limits for legacy conference screensharing mode. Currently used for the
// lower of the two simulcast streams.
constexpr int kScreenshareDefaultTl0BitrateKbps = 200;
constexpr int kScreenshareDefaultTl1BitrateKbps = 1000;
// Max bitrate for the higher one of the two simulcast streams used for
// screen content.
constexpr int kScreenshareHighStreamMaxBitrateBps = 1600000;
} // namespace
struct SimulcastFormat {
@ -311,21 +317,38 @@ std::vector<webrtc::VideoStream> GetScreenshareLayers(
// restrictions. The base simulcast layer will still use legacy setup.
if (num_simulcast_layers == kMaxScreenshareSimulcastLayers) {
// Add optional upper simulcast layer.
const int num_temporal_layers = DefaultNumberOfTemporalLayers(1);
int max_bitrate_bps;
if (!temporal_layers_supported) {
// Set the max bitrate to where the base layer would have been if temporal
// layers were enabled.
max_bitrate_bps = static_cast<int>(
kScreenshareHighStreamMaxBitrateBps *
webrtc::SimulcastRateAllocator::GetTemporalRateAllocation(
num_temporal_layers, 0));
} else if (DefaultNumberOfTemporalLayers(1) != 3 ||
webrtc::field_trial::IsEnabled("WebRTC-UseShortVP8TL3Pattern")) {
// Experimental temporal layer mode used, use increased max bitrate.
max_bitrate_bps = kScreenshareHighStreamMaxBitrateBps;
} else {
// Keep current bitrates with default 3tl/8 frame settings.
// Lowest temporal layers of a 3 layer setup will have 40% of the total
// bitrate allocation for that simulcast layer. Make sure the gap between
// the target of the lower simulcast layer and first temporal layer of the
// higher one is at most 2x the bitrate, so that upswitching is not hampered
// by stalled bitrate estimates.
int max_bitrate_bps = 2 * ((layers[0].target_bitrate_bps * 10) / 4);
// higher one is at most 2x the bitrate, so that upswitching is not
// hampered by stalled bitrate estimates.
max_bitrate_bps = 2 * ((layers[0].target_bitrate_bps * 10) / 4);
}
// Cap max bitrate so it isn't overly high for the given resolution.
max_bitrate_bps = std::min<int>(max_bitrate_bps,
FindSimulcastMaxBitrateBps(width, height));
layers[1].width = width;
layers[1].height = height;
layers[1].max_qp = max_qp;
layers[1].max_framerate = max_framerate;
layers[1].num_temporal_layers = 3;
layers[1].num_temporal_layers =
temporal_layers_supported ? DefaultNumberOfTemporalLayers(1) : 0;
layers[1].min_bitrate_bps = layers[0].target_bitrate_bps * 2;
layers[1].target_bitrate_bps = max_bitrate_bps;
layers[1].max_bitrate_bps = max_bitrate_bps;

View File

@ -93,7 +93,7 @@ std::vector<bool> GetTemporalLayerSync(size_t num_layers) {
case 1:
return {false};
case 2:
if (field_trial::IsEnabled("WebRTC-UseShortVP8TL2Pattern")) {
if (!field_trial::IsDisabled("WebRTC-UseShortVP8TL2Pattern")) {
return {false, true, false, false};
} else {
return {false, true, false, false, false, false, false, false};
@ -139,7 +139,7 @@ std::vector<TemporalLayers::FrameConfig> GetTemporalPattern(size_t num_layers) {
// that the 'alt' buffer reference is effectively the last keyframe.
// TL0 also references and updates the 'last' buffer.
// TL1 also references 'last' and references and updates 'golden'.
if (field_trial::IsEnabled("WebRTC-UseShortVP8TL2Pattern")) {
if (!field_trial::IsDisabled("WebRTC-UseShortVP8TL2Pattern")) {
// Shortened 4-frame pattern:
// 1---1 1---1 ...
// / / / /

View File

@ -81,26 +81,26 @@ TEST(TemporalLayersTest, 2Layers) {
kTemporalUpdateLastRefAltRef,
kTemporalUpdateGoldenWithoutDependencyRefAltRef,
kTemporalUpdateLastRefAltRef,
kTemporalUpdateGoldenRefAltRef,
kTemporalUpdateNone,
kTemporalUpdateLastRefAltRef,
kTemporalUpdateGoldenRefAltRef,
kTemporalUpdateGoldenWithoutDependencyRefAltRef,
kTemporalUpdateLastRefAltRef,
kTemporalUpdateNone,
kTemporalUpdateLastRefAltRef,
kTemporalUpdateGoldenWithoutDependencyRefAltRef,
kTemporalUpdateLastRefAltRef,
kTemporalUpdateGoldenRefAltRef,
kTemporalUpdateNone,
kTemporalUpdateLastRefAltRef,
kTemporalUpdateGoldenRefAltRef,
kTemporalUpdateGoldenWithoutDependencyRefAltRef,
kTemporalUpdateLastRefAltRef,
kTemporalUpdateNone,
};
int expected_temporal_idx[16] = {0, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 1, 0, 1};
bool expected_layer_sync[16] = {false, true, false, false, false, false,
bool expected_layer_sync[16] = {false, true, false, false, false, true,
false, false, false, true, false, false,
false, false, false, false};
false, true, false, false};
uint32_t timestamp = 0;
for (int i = 0; i < 16; ++i) {

View File

@ -39,10 +39,15 @@ uint8_t NumTemporalLayers(const VideoCodec& codec, int spatial_id) {
}
bool IsConferenceModeScreenshare(const VideoCodec& codec) {
if (codec.mode != VideoCodecMode::kScreensharing) {
if (codec.mode != VideoCodecMode::kScreensharing ||
NumTemporalLayers(codec, 0) != 2) {
return false;
}
return NumTemporalLayers(codec, 0) == 2;
// Fixed default bitrates for legacy screenshare layers mode.
return (codec.numberOfSimulcastStreams == 0 && codec.maxBitrate == 1000) ||
(codec.numberOfSimulcastStreams >= 1 &&
codec.simulcastStream[0].maxBitrate == 1000 &&
codec.simulcastStream[0].targetBitrate == 200);
}
} // namespace

View File

@ -778,7 +778,7 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers321PatternEncoder() {
input_frame_->set_timestamp(input_frame_->timestamp() + 3000);
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, NULL));
SetExpectedValues3<int>(2, 1, 255, expected_temporal_idx);
SetExpectedValues3<bool>(false, false, false, expected_layer_sync);
SetExpectedValues3<bool>(false, true, false, expected_layer_sync);
VerifyTemporalIdxAndSyncForAllSpatialLayers(
&encoder_callback, expected_temporal_idx, expected_layer_sync, 3);
}

View File

@ -249,6 +249,7 @@ if (rtc_include_tests) {
"../modules/video_coding:webrtc_vp9",
"../rtc_base:rtc_base_approved",
"../rtc_base/experiments:alr_experiment",
"../system_wrappers:field_trial_default",
"../test:field_trial",
"../test:test_common",
"../test:test_support",

View File

@ -13,6 +13,7 @@
#include "modules/video_coding/codecs/vp9/include/vp9.h"
#include "rtc_base/experiments/alr_experiment.h"
#include "rtc_base/flags.h"
#include "system_wrappers/include/field_trial_default.h"
#include "test/field_trial.h"
#include "test/gtest.h"
#include "video/video_quality_test.h"
@ -66,6 +67,10 @@ CreateVideoQualityTestFixture() {
return absl::make_unique<VideoQualityTest>(nullptr);
}
// Takes the current active field trials set, and appends some new trials.
std::string AppendFieldTrials(std::string new_trial_string) {
return std::string(field_trial::GetFieldTrialString()) + new_trial_string;
}
} // namespace
// VideoQualityTest::Params params = {
@ -333,7 +338,7 @@ TEST(FullStackTest, ForemanCifPlr5H264) {
TEST(FullStackTest, ForemanCifPlr5H264SpsPpsIdrIsKeyframe) {
auto fixture = CreateVideoQualityTestFixture();
test::ScopedFieldTrials override_field_trials(
"WebRTC-SpsPpsIdrIsH264Keyframe/Enabled/");
AppendFieldTrials("WebRTC-SpsPpsIdrIsH264Keyframe/Enabled/"));
ParamsWithLogging foreman_cif;
foreman_cif.call.send_side_bwe = true;
@ -556,7 +561,8 @@ TEST(FullStackTest, ConferenceMotionHd4TLModerateLimits) {
TEST(FullStackTest, ConferenceMotionHd3TLModerateLimitsAltTLPattern) {
auto fixture = CreateVideoQualityTestFixture();
test::ScopedFieldTrials field_trial("WebRTC-UseShortVP8TL3Pattern/Enabled/");
test::ScopedFieldTrials field_trial(
AppendFieldTrials("WebRTC-UseShortVP8TL3Pattern/Enabled/"));
ParamsWithLogging conf_motion_hd;
conf_motion_hd.call.send_side_bwe = true;
conf_motion_hd.video[0] = {
@ -873,8 +879,8 @@ TEST(FullStackTest, MAYBE_SimulcastFullHdOveruse) {
simulcast.ss[0] = {
streams, 2, 1, 0, InterLayerPredMode::kOn, std::vector<SpatialLayer>(),
true};
webrtc::test::ScopedFieldTrials override_trials(
"WebRTC-ForceSimulatedOveruseIntervalMs/1000-50000-300/");
webrtc::test::ScopedFieldTrials override_trials(AppendFieldTrials(
"WebRTC-ForceSimulatedOveruseIntervalMs/1000-50000-300/"));
fixture->RunWithAnalyzer(simulcast);
}
@ -1054,8 +1060,8 @@ class DualStreamsTest : public ::testing::TestWithParam<int> {};
TEST_P(DualStreamsTest,
ModeratelyRestricted_SlidesVp8_3TL_Simulcast_Video_Simulcast_High) {
test::ScopedFieldTrials field_trial(
std::string(kRoundRobinPacingQueueExperiment) +
std::string(kPacerPushBackExperiment));
AppendFieldTrials(std::string(kRoundRobinPacingQueueExperiment) +
std::string(kPacerPushBackExperiment)));
const int first_stream = GetParam();
ParamsWithLogging dual_streams;
@ -1118,8 +1124,8 @@ TEST_P(DualStreamsTest,
TEST_P(DualStreamsTest, Conference_Restricted) {
test::ScopedFieldTrials field_trial(
std::string(kRoundRobinPacingQueueExperiment) +
std::string(kPacerPushBackExperiment));
AppendFieldTrials(std::string(kRoundRobinPacingQueueExperiment) +
std::string(kPacerPushBackExperiment)));
const int first_stream = GetParam();
ParamsWithLogging dual_streams;