Throttle frame-rate in VP8 encoder in steady state for screenshare

If the minimum QP is reached and the encoder consistently undershoots
its target bitrate, we consider the quality good enough and throttle
the encode frame rate.
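
For orientation, a condensed standalone sketch of the decision this change adds (illustrative only: SteadyStateThrottle and its members are simplified stand-ins, not names from the patch; in the patch the state lives in LibvpxVp8Encoder and the actual dropping is delegated to FramerateController):

#include <cstddef>

struct SteadyStateThrottle {
  int min_qp = 15;          // "min_qp": at or below this QP counts as steady state.
  int undershoot_pct = 30;  // "undershoot": required margin below the per-frame budget.
  float min_fps = 5.0;      // "min_fps": floor applied by the frame-rate controller
                            // (not modeled here).
  int num_steady_state_frames = 0;

  void OnEncodedFrame(int qp, size_t frame_bytes, size_t per_frame_budget_bytes) {
    size_t steady_state_size =
        per_frame_budget_bytes * (100 - undershoot_pct) / 100;
    if (qp > min_qp || frame_bytes > steady_state_size) {
      num_steady_state_frames = 0;  // Quality or size not good enough yet.
    } else {
      ++num_steady_state_frames;
    }
  }

  bool MayDropUnchangedFrame(bool update_rect_empty, bool key_frame_requested) const {
    // Only repeated (unchanged) non-key frames are dropped, and only after
    // three consecutive steady-state frames; the frame-rate controller then
    // keeps the delivered rate at min_fps.
    return update_rect_empty && !key_frame_requested &&
           num_steady_state_frames >= 3;
  }
};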

Bug: webrtc:10310
Change-Id: Ifd07280040dd67ef6e544efdd4619d47bff951e8
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/125461
Reviewed-by: Erik Språng <sprang@webrtc.org>
Commit-Queue: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#27003}
Author: Ilya Nikolaevskiy
Committed by: Commit Bot
Date: 2019-03-06 16:40:42 +01:00
Parent: 2ecc8c8be2
Commit: 7b41225156

4 changed files with 156 additions and 23 deletions

modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc

@@ -32,6 +32,7 @@
 #include "modules/video_coding/utility/simulcast_utility.h"
 #include "rtc_base/checks.h"
 #include "rtc_base/experiments/field_trial_parser.h"
+#include "rtc_base/experiments/field_trial_units.h"
 #include "rtc_base/trace_event.h"
 #include "system_wrappers/include/field_trial.h"
 #include "third_party/libyuv/include/libyuv/scale.h"
@@ -53,6 +54,9 @@ constexpr int kHighVp8QpThreshold = 95;
 constexpr int kTokenPartitions = VP8_ONE_TOKENPARTITION;
 constexpr uint32_t kVp832ByteAlign = 32u;
 
+constexpr int kRtpTicksPerSecond = 90000;
+constexpr int kRtpTicksPerMs = kRtpTicksPerSecond / 1000;
+
 // VP8 denoiser states.
 enum denoiserState : uint32_t {
   kDenoiserOff,
@@ -173,7 +177,11 @@ LibvpxVp8Encoder::LibvpxVp8Encoder(std::unique_ptr<LibvpxInterface> interface)
       cpu_speed_default_(-6),
       number_of_cores_(0),
       rc_max_intra_target_(0),
-      key_frame_request_(kMaxSimulcastStreams, false) {
+      key_frame_request_(kMaxSimulcastStreams, false),
+      variable_framerate_experiment_(ParseVariableFramerateConfig(
+          "WebRTC-VP8VariableFramerateScreenshare")),
+      framerate_controller_(variable_framerate_experiment_.framerate_limit),
+      num_steady_state_frames_(0) {
   temporal_layers_.reserve(kMaxSimulcastStreams);
   raw_images_.reserve(kMaxSimulcastStreams);
   encoded_images_.reserve(kMaxSimulcastStreams);
@@ -392,7 +400,7 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst,
   }
   // setting the time base of the codec
   configurations_[0].g_timebase.num = 1;
-  configurations_[0].g_timebase.den = 90000;
+  configurations_[0].g_timebase.den = kRtpTicksPerSecond;
   configurations_[0].g_lag_in_frames = 0;  // 0- no frame lagging
 
   // Set the error resilience mode for temporal layers (but not simulcast).
@@ -690,6 +698,39 @@ uint32_t LibvpxVp8Encoder::FrameDropThreshold(size_t spatial_idx) const {
   return enable_frame_dropping ? 30 : 0;
 }
 
+size_t LibvpxVp8Encoder::SteadyStateSize(int sid, int tid) {
+  const int encoder_id = encoders_.size() - 1 - sid;
+  size_t bitrate_bps;
+  float fps;
+  if (SimulcastUtility::IsConferenceModeScreenshare(codec_) ||
+      configurations_[encoder_id].ts_number_layers <= 1) {
+    // In conference screenshare there's no defined per temporal layer bitrate
+    // and framerate.
+    bitrate_bps = configurations_[encoder_id].rc_target_bitrate * 1000;
+    fps = codec_.maxFramerate;
+  } else {
+    bitrate_bps = configurations_[encoder_id].ts_target_bitrate[tid] * 1000;
+    fps = codec_.maxFramerate /
+          fmax(configurations_[encoder_id].ts_rate_decimator[tid], 1.0);
+    if (tid > 0) {
+      // Layer bitrate and fps are counted as a partial sums.
+      bitrate_bps -=
+          configurations_[encoder_id].ts_target_bitrate[tid - 1] * 1000;
+      fps = codec_.maxFramerate /
+            fmax(configurations_[encoder_id].ts_rate_decimator[tid - 1], 1.0);
+    }
+  }
+
+  if (fps < 1e-9)
+    return 0;
+  return static_cast<size_t>(
+      bitrate_bps / (8 * fps) *
+          (100 -
+           variable_framerate_experiment_.steady_state_undershoot_percentage) /
+          100 +
+      0.5);
+}
+
 int LibvpxVp8Encoder::Encode(const VideoFrame& frame,
                              const CodecSpecificInfo* codec_specific_info,
                              const std::vector<FrameType>* frame_types) {
@@ -701,6 +742,33 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame,
   if (encoded_complete_callback_ == NULL)
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
 
+  bool send_key_frame = false;
+  for (size_t i = 0; i < key_frame_request_.size() && i < send_stream_.size();
+       ++i) {
+    if (key_frame_request_[i] && send_stream_[i]) {
+      send_key_frame = true;
+      break;
+    }
+  }
+  if (!send_key_frame && frame_types) {
+    for (size_t i = 0; i < frame_types->size() && i < send_stream_.size();
+         ++i) {
+      if ((*frame_types)[i] == kVideoFrameKey && send_stream_[i]) {
+        send_key_frame = true;
+        break;
+      }
+    }
+  }
+
+  if (frame.update_rect().IsEmpty() && num_steady_state_frames_ >= 3 &&
+      !send_key_frame) {
+    if (variable_framerate_experiment_.enabled &&
+        framerate_controller_.DropFrame(frame.timestamp() / kRtpTicksPerMs)) {
+      return WEBRTC_VIDEO_CODEC_OK;
+    }
+    framerate_controller_.AddFrame(frame.timestamp() / kRtpTicksPerMs);
+  }
+
   rtc::scoped_refptr<I420BufferInterface> input_image =
       frame.video_frame_buffer()->ToI420();
   // Since we are extracting raw pointers from |input_image| to
@@ -736,23 +804,7 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame,
                       raw_images_[i].stride[VPX_PLANE_V], raw_images_[i].d_w,
                       raw_images_[i].d_h, libyuv::kFilterBilinear);
   }
-  bool send_key_frame = false;
-  for (size_t i = 0; i < key_frame_request_.size() && i < send_stream_.size();
-       ++i) {
-    if (key_frame_request_[i] && send_stream_[i]) {
-      send_key_frame = true;
-      break;
-    }
-  }
-  if (!send_key_frame && frame_types) {
-    for (size_t i = 0; i < frame_types->size() && i < send_stream_.size();
-         ++i) {
-      if ((*frame_types)[i] == kVideoFrameKey && send_stream_[i]) {
-        send_key_frame = true;
-        break;
-      }
-    }
-  }
+
   vpx_enc_frame_flags_t flags[kMaxSimulcastStreams];
   Vp8FrameConfig tl_configs[kMaxSimulcastStreams];
   for (size_t i = 0; i < encoders_.size(); ++i) {
@@ -812,7 +864,7 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame,
   // rate control seems to be off with that setup. Using the average input
   // frame rate to calculate an average duration for now.
   assert(codec_.maxFramerate > 0);
-  uint32_t duration = 90000 / codec_.maxFramerate;
+  uint32_t duration = kRtpTicksPerSecond / codec_.maxFramerate;
 
   int error = WEBRTC_VIDEO_CODEC_OK;
   int num_tries = 0;
@@ -924,6 +976,14 @@ int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image) {
         encoded_images_[encoder_idx].qp_ = qp_128;
         encoded_complete_callback_->OnEncodedImage(encoded_images_[encoder_idx],
                                                    &codec_specific, nullptr);
+        const size_t steady_state_size = SteadyStateSize(
+            stream_idx, codec_specific.codecSpecific.VP8.temporalIdx);
+        if (qp_128 > variable_framerate_experiment_.steady_state_qp ||
+            encoded_images_[encoder_idx].size() > steady_state_size) {
+          num_steady_state_frames_ = 0;
+        } else {
+          ++num_steady_state_frames_;
+        }
       } else if (!temporal_layers_[stream_idx]
                       ->SupportsEncoderFrameDropping()) {
         result = WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT;
@@ -989,4 +1049,22 @@ int LibvpxVp8Encoder::RegisterEncodeCompleteCallback(
   return WEBRTC_VIDEO_CODEC_OK;
 }
 
+// static
+LibvpxVp8Encoder::VariableFramerateExperiment
+LibvpxVp8Encoder::ParseVariableFramerateConfig(std::string group_name) {
+  FieldTrialFlag enabled = FieldTrialFlag("Enabled");
+  FieldTrialParameter<double> framerate_limit("min_fps", 5.0);
+  FieldTrialParameter<int> qp("min_qp", 15);
+  FieldTrialParameter<int> undershoot_percentage("undershoot", 30);
+  ParseFieldTrial({&enabled, &framerate_limit, &qp, &undershoot_percentage},
+                  field_trial::FindFullName(group_name));
+  VariableFramerateExperiment config;
+  config.enabled = enabled.Get();
+  config.framerate_limit = framerate_limit.Get();
+  config.steady_state_qp = qp.Get();
+  config.steady_state_undershoot_percentage = undershoot_percentage.Get();
+  return config;
+}
+
 }  // namespace webrtc
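
To make the SteadyStateSize() budget above concrete, a small standalone worked example with assumed numbers (a 200 kbps layer at 5 fps and the default 30% undershoot; these inputs are illustrative, not taken from the patch):

#include <cstdio>

int main() {
  // Mirrors the arithmetic in SteadyStateSize(), under assumed inputs.
  double bitrate_bps = 200000.0;  // 200 kbps layer target.
  double fps = 5.0;               // Effective layer frame rate.
  int undershoot_pct = 30;        // steady_state_undershoot_percentage default.

  double per_frame_bytes = bitrate_bps / (8.0 * fps);  // 5000 bytes per frame on target.
  double steady_state_size =
      per_frame_bytes * (100 - undershoot_pct) / 100.0;  // 3500 bytes.

  std::printf("steady-state budget: %.0f bytes per frame\n", steady_state_size);
  return 0;
}

A frame then counts towards num_steady_state_frames_ only if its QP is at or below steady_state_qp (default 15) and its encoded size does not exceed this budget.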

modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h

@@ -12,6 +12,7 @@
 #define MODULES_VIDEO_CODING_CODECS_VP8_LIBVPX_VP8_ENCODER_H_
 
 #include <memory>
+#include <string>
 #include <vector>
 
 #include "api/video/encoded_image.h"
@@ -23,6 +24,7 @@
 #include "modules/video_coding/codecs/vp8/include/vp8.h"
 #include "modules/video_coding/codecs/vp8/libvpx_interface.h"
 #include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/utility/framerate_controller.h"
 #include "rtc_base/experiments/cpu_speed_experiment.h"
 #include "rtc_base/experiments/rate_control_settings.h"
@@ -83,6 +85,8 @@ class LibvpxVp8Encoder : public VideoEncoder {
   uint32_t FrameDropThreshold(size_t spatial_idx) const;
 
+  size_t SteadyStateSize(int sid, int tid);
+
   const std::unique_ptr<LibvpxInterface> libvpx_;
 
   const absl::optional<std::vector<CpuSpeedExperiment::Config>>
@@ -106,6 +110,22 @@ class LibvpxVp8Encoder : public VideoEncoder {
   std::vector<vpx_codec_ctx_t> encoders_;
   std::vector<vpx_codec_enc_cfg_t> configurations_;
   std::vector<vpx_rational_t> downsampling_factors_;
+
+  // Variable frame-rate screencast related fields and methods.
+  const struct VariableFramerateExperiment {
+    bool enabled = false;
+    // Framerate is limited to this value in steady state.
+    float framerate_limit = 5.0;
+    // This qp or below is considered a steady state.
+    int steady_state_qp = 15;
+    // Frames of at least this percentage below ideal for configured bitrate are
+    // considered in a steady state.
+    int steady_state_undershoot_percentage = 30;
+  } variable_framerate_experiment_;
+  static VariableFramerateExperiment ParseVariableFramerateConfig(
+      std::string group_name);
+  FramerateController framerate_controller_;
+  int num_steady_state_frames_;
 };
 
 }  // namespace webrtc

modules/video_coding/video_codec_initializer.cc

@@ -86,6 +86,7 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec(
       kDefaultOutlierFrameSizePercent};
   RTC_DCHECK_LE(streams.size(), kMaxSimulcastStreams);
+  int max_framerate = 0;
 
   for (size_t i = 0; i < streams.size(); ++i) {
     SimulcastStream* sim_stream = &video_codec.simulcastStream[i];
     RTC_DCHECK_GT(streams[i].width, 0);
@@ -105,6 +106,7 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec(
     sim_stream->width = static_cast<uint16_t>(streams[i].width);
     sim_stream->height = static_cast<uint16_t>(streams[i].height);
     sim_stream->maxFramerate = streams[i].max_framerate;
+    max_framerate = std::max(max_framerate, streams[i].max_framerate);
     sim_stream->minBitrate = streams[i].min_bitrate_bps / 1000;
     sim_stream->targetBitrate = streams[i].target_bitrate_bps / 1000;
     sim_stream->maxBitrate = streams[i].max_bitrate_bps / 1000;
@@ -134,8 +136,8 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec(
   if (video_codec.maxBitrate < kEncoderMinBitrateKbps)
     video_codec.maxBitrate = kEncoderMinBitrateKbps;
 
-  RTC_DCHECK_GT(streams[0].max_framerate, 0);
-  video_codec.maxFramerate = streams[0].max_framerate;
+  RTC_DCHECK_GT(max_framerate, 0);
+  video_codec.maxFramerate = max_framerate;
 
   // Set codec specific options
   if (config.encoder_specific_settings)

video/full_stack_tests.cc

@@ -741,9 +741,12 @@ TEST(FullStackTest, ScreenshareSlidesVP8_2TL) {
 // All the tests using this constant are disabled on Mac.
 const char kScreenshareSimulcastExperiment[] =
     "WebRTC-SimulcastScreenshare/Enabled/";
 
 // TODO(bugs.webrtc.org/9840): Investigate why is this test flaky on Win/Mac.
 #if !defined(WEBRTC_WIN)
+const char kScreenshareSimulcastVariableFramerateExperiment[] =
+    "WebRTC-SimulcastScreenshare/Enabled/"
+    "WebRTC-VP8VariableFramerateScreenshare/"
+    "Enabled,min_fps:5.0,min_qp:15,undershoot:30/";
 TEST(FullStackTest, ScreenshareSlidesVP8_2TL_Simulcast) {
   test::ScopedFieldTrials field_trial(
       AppendFieldTrials(kScreenshareSimulcastExperiment));
@@ -773,6 +776,36 @@ TEST(FullStackTest, ScreenshareSlidesVP8_2TL_Simulcast) {
       false};
   fixture->RunWithAnalyzer(screenshare);
 }
+
+TEST(FullStackTest, ScreenshareSlidesVP8_2TL_Simulcast_Variable_Framerate) {
+  test::ScopedFieldTrials field_trial(
+      AppendFieldTrials(kScreenshareSimulcastVariableFramerateExperiment));
+  auto fixture = CreateVideoQualityTestFixture();
+  ParamsWithLogging screenshare;
+  screenshare.call.send_side_bwe = true;
+  screenshare.screenshare[0] = {true, false, 10};
+  screenshare.video[0] = {true, 1850, 1110, 30, 800000, 2500000,
+                          2500000, false, "VP8", 2, 1, 400000,
+                          false, false, false, ""};
+  screenshare.analyzer = {"screenshare_slides_simulcast_variable_framerate",
+                          0.0, 0.0, kFullStackTestDurationSecs};
+  ParamsWithLogging screenshare_params_high;
+  screenshare_params_high.video[0] = {
+      true, 1850, 1110, 60, 600000, 1250000, 1250000, false,
+      "VP8", 2, 0, 400000, false, false, false, ""};
+  VideoQualityTest::Params screenshare_params_low;
+  screenshare_params_low.video[0] = {true, 1850, 1110, 5, 30000, 200000,
+                                     1000000, false, "VP8", 2, 0, 400000,
+                                     false, false, false, ""};
+
+  std::vector<VideoStream> streams = {
+      VideoQualityTest::DefaultVideoStream(screenshare_params_low, 0),
+      VideoQualityTest::DefaultVideoStream(screenshare_params_high, 0)};
+  screenshare.ss[0] = {
+      streams, 1, 1, 0, InterLayerPredMode::kOn, std::vector<SpatialLayer>(),
+      false};
+  fixture->RunWithAnalyzer(screenshare);
+}
 
 #endif  // !defined(WEBRTC_WIN)
 #endif  // !defined(WEBRTC_MAC)