Add cropping to ViEEncoder to match simulcast stream resolutions

Detect when the simulcast stream factory adjusts the resolution and remember the
cropping parameters in ViEEncoder.
Expose EncoderStreamFactory in webrtcvideoengine2.

BUG=webrtc:7375, webrtc:6958

Review-Url: https://codereview.webrtc.org/2936393002
Cr-Commit-Position: refs/heads/master@{#18632}
ilnik authored on 2017-06-16 06:53:48 -07:00; committed by Commit Bot
parent f79dbadc09
commit 6b826ef66d
8 changed files with 280 additions and 93 deletions
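
For orientation, a minimal sketch of the core mechanism, assuming the names used in the vie_encoder.cc diff below. The free-standing helper is hypothetical; the real code keeps crop_width_ / crop_height_ as ViEEncoder members:

// Hypothetical helper illustrating the crop computation in
// ViEEncoder::ReconfigureEncoder(): the highest simulcast stream may be
// slightly smaller than the captured frame (e.g. rounded down to a
// multiple of 4), and the difference is remembered as crop parameters.
struct CropParams {
  int crop_width;   // Total columns to trim (split between left and right).
  int crop_height;  // Total rows to trim (split between top and bottom).
};

CropParams ComputeCrop(int frame_width, int frame_height,
                       int highest_stream_width, int highest_stream_height) {
  // The captured frame is never smaller than the configured stream.
  return {frame_width - highest_stream_width,
          frame_height - highest_stream_height};
}

// Example from the unit test below: a 1440x810 adapted frame against a
// 1440x808 top stream yields {0, 2}, so EncodeVideoFrame() trims one row
// from the top and one from the bottom.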


@@ -286,69 +286,6 @@ int GetDefaultVp9TemporalLayers() {
}
return 1;
}
class EncoderStreamFactory
: public webrtc::VideoEncoderConfig::VideoStreamFactoryInterface {
public:
EncoderStreamFactory(std::string codec_name,
int max_qp,
int max_framerate,
bool is_screencast,
bool conference_mode)
: codec_name_(codec_name),
max_qp_(max_qp),
max_framerate_(max_framerate),
is_screencast_(is_screencast),
conference_mode_(conference_mode) {}
private:
std::vector<webrtc::VideoStream> CreateEncoderStreams(
int width,
int height,
const webrtc::VideoEncoderConfig& encoder_config) override {
if (is_screencast_ &&
(!conference_mode_ || !cricket::UseSimulcastScreenshare())) {
RTC_DCHECK_EQ(1, encoder_config.number_of_streams);
}
if (encoder_config.number_of_streams > 1 ||
(CodecNamesEq(codec_name_, kVp8CodecName) && is_screencast_ &&
conference_mode_)) {
return GetSimulcastConfig(encoder_config.number_of_streams, width, height,
encoder_config.max_bitrate_bps, max_qp_,
max_framerate_, is_screencast_);
}
// For unset max bitrates set default bitrate for non-simulcast.
int max_bitrate_bps =
(encoder_config.max_bitrate_bps > 0)
? encoder_config.max_bitrate_bps
: GetMaxDefaultVideoBitrateKbps(width, height) * 1000;
webrtc::VideoStream stream;
stream.width = width;
stream.height = height;
stream.max_framerate = max_framerate_;
stream.min_bitrate_bps = kMinVideoBitrateKbps * 1000;
stream.target_bitrate_bps = stream.max_bitrate_bps = max_bitrate_bps;
stream.max_qp = max_qp_;
if (CodecNamesEq(codec_name_, kVp9CodecName) && !is_screencast_) {
stream.temporal_layer_thresholds_bps.resize(
GetDefaultVp9TemporalLayers() - 1);
}
std::vector<webrtc::VideoStream> streams;
streams.push_back(stream);
return streams;
}
const std::string codec_name_;
const int max_qp_;
const int max_framerate_;
const bool is_screencast_;
const bool conference_mode_;
};
} // namespace
// Constants defined in webrtc/media/engine/constants.h
@@ -2676,4 +2613,55 @@ WebRtcVideoChannel::MapCodecs(const std::vector<VideoCodec>& codecs) {
return video_codecs;
}
EncoderStreamFactory::EncoderStreamFactory(std::string codec_name,
int max_qp,
int max_framerate,
bool is_screencast,
bool conference_mode)
: codec_name_(codec_name),
max_qp_(max_qp),
max_framerate_(max_framerate),
is_screencast_(is_screencast),
conference_mode_(conference_mode) {}
std::vector<webrtc::VideoStream> EncoderStreamFactory::CreateEncoderStreams(
int width,
int height,
const webrtc::VideoEncoderConfig& encoder_config) {
if (is_screencast_ &&
(!conference_mode_ || !cricket::UseSimulcastScreenshare())) {
RTC_DCHECK_EQ(1, encoder_config.number_of_streams);
}
if (encoder_config.number_of_streams > 1 ||
(CodecNamesEq(codec_name_, kVp8CodecName) && is_screencast_ &&
conference_mode_)) {
return GetSimulcastConfig(encoder_config.number_of_streams, width, height,
encoder_config.max_bitrate_bps, max_qp_,
max_framerate_, is_screencast_);
}
// For unset max bitrates set default bitrate for non-simulcast.
int max_bitrate_bps =
(encoder_config.max_bitrate_bps > 0)
? encoder_config.max_bitrate_bps
: GetMaxDefaultVideoBitrateKbps(width, height) * 1000;
webrtc::VideoStream stream;
stream.width = width;
stream.height = height;
stream.max_framerate = max_framerate_;
stream.min_bitrate_bps = kMinVideoBitrateKbps * 1000;
stream.target_bitrate_bps = stream.max_bitrate_bps = max_bitrate_bps;
stream.max_qp = max_qp_;
if (CodecNamesEq(codec_name_, kVp9CodecName) && !is_screencast_) {
stream.temporal_layer_thresholds_bps.resize(GetDefaultVp9TemporalLayers() -
1);
}
std::vector<webrtc::VideoStream> streams;
streams.push_back(stream);
return streams;
}
} // namespace cricket
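
One convention worth noting in CreateEncoderStreams() above: a stream with N temporal layers is described by N-1 threshold entries. A minimal sketch (the layer count is illustrative):

// N temporal layers are expressed as N-1 bitrate thresholds on the stream;
// e.g. 3 temporal layers -> 2 thresholds (the values are filled in later).
webrtc::VideoStream stream;
const size_t kNumTemporalLayers = 3;  // Illustrative.
stream.temporal_layer_thresholds_bps.resize(kNumTemporalLayers - 1);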


@@ -525,6 +525,28 @@ class WebRtcVideoChannel : public VideoMediaChannel, public webrtc::Transport {
int64_t last_stats_log_ms_;
};
class EncoderStreamFactory
: public webrtc::VideoEncoderConfig::VideoStreamFactoryInterface {
public:
EncoderStreamFactory(std::string codec_name,
int max_qp,
int max_framerate,
bool is_screencast,
bool conference_mode);
private:
std::vector<webrtc::VideoStream> CreateEncoderStreams(
int width,
int height,
const webrtc::VideoEncoderConfig& encoder_config) override;
const std::string codec_name_;
const int max_qp_;
const int max_framerate_;
const bool is_screencast_;
const bool conference_mode_;
};
} // namespace cricket
#endif // WEBRTC_MEDIA_ENGINE_WEBRTCVIDEOENGINE_H_
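
Moving the declaration into this header lets code outside webrtcvideoengine.cc install the factory on a VideoEncoderConfig. A minimal usage sketch, mirroring the VideoQualityTest change further below; the literal values are illustrative:

// Hypothetical caller of the newly exposed factory.
webrtc::VideoEncoderConfig encoder_config;
encoder_config.number_of_streams = 3;      // Request three simulcast streams.
encoder_config.max_bitrate_bps = 2500000;  // Illustrative cap.
encoder_config.video_stream_factory =
    new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
        "VP8",  // codec_name
        56,     // max_qp (illustrative)
        30,     // max_framerate
        false,  // is_screencast
        true);  // conference_mode
// CreateEncoderStreams() stays private; ViEEncoder invokes it through the
// VideoStreamFactoryInterface when the encoder is (re)configured.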


@@ -331,7 +331,7 @@ TEST_F(FullStackTest, ScreenshareSlidesVP8_3TL_Simulcast) {
std::vector<VideoStream> streams = {
DefaultVideoStream(screenshare_params_low),
DefaultVideoStream(screenshare_params_high)};
screenshare.ss = {streams, 1, 1, 0};
screenshare.ss = {streams, 1, 1, 0, std::vector<SpatialLayer>(), false};
RunTest(screenshare);
}
@@ -423,7 +423,8 @@ TEST_F(FullStackTest, ScreenshareSlidesVP9_2SL) {
screenshare.analyzer = {"screenshare_slides_vp9_2sl", 0.0, 0.0,
kFullStackTestDurationSecs};
screenshare.logs = false;
screenshare.ss = {std::vector<VideoStream>(), 0, 2, 1};
screenshare.ss = {std::vector<VideoStream>(), 0, 2, 1,
std::vector<SpatialLayer>(), false};
RunTest(screenshare);
}
@@ -434,7 +435,8 @@ TEST_F(FullStackTest, VP9SVC_3SL_High) {
simulcast.analyzer = {"vp9svc_3sl_high", 0.0, 0.0,
kFullStackTestDurationSecs};
simulcast.logs = false;
simulcast.ss = {std::vector<VideoStream>(), 0, 3, 2};
simulcast.ss = {std::vector<VideoStream>(), 0, 3, 2,
std::vector<SpatialLayer>(), false};
RunTest(simulcast);
}
@@ -445,7 +447,8 @@ TEST_F(FullStackTest, VP9SVC_3SL_Medium) {
simulcast.analyzer = {"vp9svc_3sl_medium", 0.0, 0.0,
kFullStackTestDurationSecs};
simulcast.logs = false;
simulcast.ss = {std::vector<VideoStream>(), 0, 3, 1};
simulcast.ss = {std::vector<VideoStream>(), 0, 3, 1,
std::vector<SpatialLayer>(), false};
RunTest(simulcast);
}
@@ -455,11 +458,38 @@ TEST_F(FullStackTest, VP9SVC_3SL_Low) {
simulcast.video = kSvcVp9Video;
simulcast.analyzer = {"vp9svc_3sl_low", 0.0, 0.0, kFullStackTestDurationSecs};
simulcast.logs = false;
simulcast.ss = {std::vector<VideoStream>(), 0, 3, 0};
simulcast.ss = {std::vector<VideoStream>(), 0, 3, 0,
std::vector<SpatialLayer>(), false};
RunTest(simulcast);
}
#endif // !defined(RTC_DISABLE_VP9)
// Android bots can't handle FullHD, so disable the test.
#if defined(WEBRTC_ANDROID)
#define MAYBE_SimulcastFullHdOveruse DISABLED_SimulcastFullHdOveruse
#else
#define MAYBE_SimulcastFullHdOveruse SimulcastFullHdOveruse
#endif
TEST_F(FullStackTest, MAYBE_SimulcastFullHdOveruse) {
VideoQualityTest::Params simulcast;
simulcast.call.send_side_bwe = true;
simulcast.video = {true, 1920, 1080, 30, 800000, 2500000,
2500000, false, "VP8", 3, 2, 400000,
false, false, "", "Generator"};
simulcast.analyzer = {"simulcast_HD_high", 0.0, 0.0,
kFullStackTestDurationSecs};
simulcast.pipe.loss_percent = 0;
simulcast.pipe.queue_delay_ms = 100;
std::vector<VideoStream> streams = {DefaultVideoStream(simulcast),
DefaultVideoStream(simulcast),
DefaultVideoStream(simulcast)};
simulcast.ss = {streams, 2, 1, 0, std::vector<SpatialLayer>(), true};
webrtc::test::ScopedFieldTrials override_trials(
"WebRTC-ForceSimulatedOveruseIntervalMs/1000-50000-300/");
RunTest(simulcast);
}
TEST_F(FullStackTest, SimulcastVP8_3SL_High) {
VideoQualityTest::Params simulcast;
simulcast.call.send_side_bwe = true;
@@ -478,7 +508,7 @@ TEST_F(FullStackTest, SimulcastVP8_3SL_High) {
std::vector<VideoStream> streams = {DefaultVideoStream(video_params_low),
DefaultVideoStream(video_params_medium),
DefaultVideoStream(video_params_high)};
simulcast.ss = {streams, 2, 1, 0};
simulcast.ss = {streams, 2, 1, 0, std::vector<SpatialLayer>(), false};
RunTest(simulcast);
}
@@ -500,7 +530,7 @@ TEST_F(FullStackTest, SimulcastVP8_3SL_Medium) {
std::vector<VideoStream> streams = {DefaultVideoStream(video_params_low),
DefaultVideoStream(video_params_medium),
DefaultVideoStream(video_params_high)};
simulcast.ss = {streams, 1, 1, 0};
simulcast.ss = {streams, 1, 1, 0, std::vector<SpatialLayer>(), false};
RunTest(simulcast);
}
@@ -522,7 +552,7 @@ TEST_F(FullStackTest, SimulcastVP8_3SL_Low) {
std::vector<VideoStream> streams = {DefaultVideoStream(video_params_low),
DefaultVideoStream(video_params_medium),
DefaultVideoStream(video_params_high)};
simulcast.ss = {streams, 0, 1, 0};
simulcast.ss = {streams, 0, 1, 0, std::vector<SpatialLayer>(), false};
RunTest(simulcast);
}
@@ -545,7 +575,7 @@ TEST_F(FullStackTest, LargeRoomVP8_5thumb) {
DefaultVideoStream(video_params_medium),
DefaultVideoStream(video_params_high)};
large_room.num_thumbnails = 5;
large_room.ss = {streams, 2, 1, 0};
large_room.ss = {streams, 2, 1, 0, std::vector<SpatialLayer>(), false};
RunTest(large_room);
}
@@ -578,7 +608,7 @@ TEST_F(FullStackTest, MAYBE_LargeRoomVP8_15thumb) {
DefaultVideoStream(video_params_medium),
DefaultVideoStream(video_params_high)};
large_room.num_thumbnails = 15;
large_room.ss = {streams, 2, 1, 0};
large_room.ss = {streams, 2, 1, 0, std::vector<SpatialLayer>(), false};
RunTest(large_room);
}
@@ -601,7 +631,7 @@ TEST_F(FullStackTest, MAYBE_LargeRoomVP8_50thumb) {
DefaultVideoStream(video_params_medium),
DefaultVideoStream(video_params_high)};
large_room.num_thumbnails = 50;
large_room.ss = {streams, 2, 1, 0};
large_room.ss = {streams, 2, 1, 0, std::vector<SpatialLayer>(), false};
RunTest(large_room);
}


@@ -148,12 +148,13 @@ class VideoAnalyzer : public PacketReceiver,
size_t selected_stream,
int selected_sl,
int selected_tl,
bool is_quick_test_enabled)
bool is_quick_test_enabled,
Clock* clock)
: transport_(transport),
receiver_(nullptr),
send_stream_(nullptr),
receive_stream_(nullptr),
captured_frame_forwarder_(this),
captured_frame_forwarder_(this, clock),
test_label_(test_label),
graph_data_output_file_(graph_data_output_file),
graph_title_(graph_title),
@@ -219,6 +220,13 @@ class VideoAnalyzer : public PacketReceiver,
virtual void SetReceiver(PacketReceiver* receiver) { receiver_ = receiver; }
void SetSource(test::VideoCapturer* video_capturer, bool respect_sink_wants) {
if (respect_sink_wants)
captured_frame_forwarder_.SetSource(video_capturer);
rtc::VideoSinkWants wants;
video_capturer->AddOrUpdateSink(InputInterface(), wants);
}
void SetSendStream(VideoSendStream* stream) {
rtc::CritScope lock(&crit_);
RTC_DCHECK(!send_stream_);
@@ -782,7 +790,7 @@ class VideoAnalyzer : public PacketReceiver,
// Perform expensive psnr and ssim calculations while not holding lock.
double psnr = -1.0;
double ssim = -1.0;
if (comparison.reference) {
if (comparison.reference && !comparison.dropped) {
psnr = I420PSNR(&*comparison.reference, &*comparison.render);
ssim = I420SSIM(&*comparison.reference, &*comparison.render);
}
@@ -901,8 +909,15 @@ class VideoAnalyzer : public PacketReceiver,
class CapturedFrameForwarder : public rtc::VideoSinkInterface<VideoFrame>,
public rtc::VideoSourceInterface<VideoFrame> {
public:
explicit CapturedFrameForwarder(VideoAnalyzer* analyzer)
: analyzer_(analyzer), send_stream_input_(nullptr) {}
explicit CapturedFrameForwarder(VideoAnalyzer* analyzer, Clock* clock)
: analyzer_(analyzer),
send_stream_input_(nullptr),
video_capturer_(nullptr),
clock_(clock) {}
void SetSource(test::VideoCapturer* video_capturer) {
video_capturer_ = video_capturer;
}
private:
void OnFrame(const VideoFrame& video_frame) override {
@@ -910,8 +925,8 @@ class VideoAnalyzer : public PacketReceiver,
// Frames from the capturer do not have an RTP timestamp.
// Create one so it can be used for comparison.
RTC_DCHECK_EQ(0, video_frame.timestamp());
if (copy.ntp_time_ms() == 0)
copy.set_ntp_time_ms(rtc::TimeMillis());
if (video_frame.ntp_time_ms() == 0)
copy.set_ntp_time_ms(clock_->CurrentNtpInMilliseconds());
copy.set_timestamp(copy.ntp_time_ms() * 90);
analyzer_->AddCapturedFrameForComparison(copy);
rtc::CritScope lock(&crit_);
@@ -925,6 +940,9 @@ class VideoAnalyzer : public PacketReceiver,
rtc::CritScope lock(&crit_);
RTC_DCHECK(!send_stream_input_ || send_stream_input_ == sink);
send_stream_input_ = sink;
if (video_capturer_) {
video_capturer_->AddOrUpdateSink(this, wants);
}
}
// Called by |send_stream_| when |send_stream_.SetSource()| is called.
@ -937,6 +955,8 @@ class VideoAnalyzer : public PacketReceiver,
VideoAnalyzer* const analyzer_;
rtc::CriticalSection crit_;
rtc::VideoSinkInterface<VideoFrame>* send_stream_input_ GUARDED_BY(crit_);
test::VideoCapturer* video_capturer_;
Clock* clock_;
};
void AddCapturedFrameForComparison(const VideoFrame& video_frame) {
@@ -1238,6 +1258,7 @@ void VideoQualityTest::FillScalabilitySettings(
params->ss.streams.push_back(stream);
}
params->ss.selected_stream = selected_stream;
params->ss.infer_streams = false;
params->ss.num_spatial_layers = num_spatial_layers ? num_spatial_layers : 1;
params->ss.selected_sl = selected_sl;
@@ -1319,8 +1340,15 @@ void VideoQualityTest::SetupVideo(Transport* send_transport,
video_encoder_config_.max_bitrate_bps +=
params_.ss.streams[i].max_bitrate_bps;
}
video_encoder_config_.video_stream_factory =
new rtc::RefCountedObject<VideoStreamFactory>(params_.ss.streams);
if (params_.ss.infer_streams) {
video_encoder_config_.video_stream_factory =
new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
params_.video.codec, params_.ss.streams[0].max_qp,
params_.video.fps, params_.screenshare.enabled, true);
} else {
video_encoder_config_.video_stream_factory =
new rtc::RefCountedObject<VideoStreamFactory>(params_.ss.streams);
}
video_encoder_config_.spatial_layers = params_.ss.spatial_layers;
@@ -1423,9 +1451,15 @@ void VideoQualityTest::SetupThumbnails(Transport* send_transport,
params_.video.suspend_below_min_bitrate;
thumbnail_encoder_config.number_of_streams = 1;
thumbnail_encoder_config.max_bitrate_bps = 50000;
thumbnail_encoder_config.video_stream_factory =
new rtc::RefCountedObject<VideoStreamFactory>(
std::vector<webrtc::VideoStream>{DefaultThumbnailStream()});
if (params_.ss.infer_streams) {
thumbnail_encoder_config.video_stream_factory =
new rtc::RefCountedObject<VideoStreamFactory>(params_.ss.streams);
} else {
thumbnail_encoder_config.video_stream_factory =
new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
params_.video.codec, params_.ss.streams[0].max_qp,
params_.video.fps, params_.screenshare.enabled, true);
}
thumbnail_encoder_config.spatial_layers = params_.ss.spatial_layers;
VideoReceiveStream::Config thumbnail_receive_config(send_transport);
@@ -1565,7 +1599,11 @@ void VideoQualityTest::CreateCapturer() {
EXPECT_TRUE(frame_generator_capturer->Init());
video_capturer_.reset(frame_generator_capturer);
} else {
if (params_.video.clip_name.empty()) {
if (params_.video.clip_name == "Generator") {
video_capturer_.reset(test::FrameGeneratorCapturer::Create(
static_cast<int>(params_.video.width),
static_cast<int>(params_.video.height), params_.video.fps, clock_));
} else if (params_.video.clip_name.empty()) {
video_capturer_.reset(test::VcmCapturer::Create(
params_.video.width, params_.video.height, params_.video.fps,
params_.video.capture_device_index));
@@ -1631,7 +1669,7 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) {
kVideoSendSsrcs[params_.ss.selected_stream],
kSendRtxSsrcs[params_.ss.selected_stream],
static_cast<size_t>(params_.ss.selected_stream), params.ss.selected_sl,
params_.video.selected_tl, is_quick_test_enabled);
params_.video.selected_tl, is_quick_test_enabled, clock_);
analyzer.SetReceiver(receiver_call_->Receiver());
send_transport.SetReceiver(&analyzer);
recv_transport.SetReceiver(sender_call_->Receiver());
@@ -1662,8 +1700,7 @@ void VideoQualityTest::RunWithAnalyzer(const Params& params) {
CreateCapturer();
rtc::VideoSinkWants wants;
video_capturer_->AddOrUpdateSink(analyzer.InputInterface(), wants);
analyzer.SetSource(video_capturer_.get(), params_.ss.infer_streams);
StartEncodedFrameLogs(video_send_stream_);
StartEncodedFrameLogs(video_receive_streams_[0]);
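
A condensed sketch of the timestamp handling in CapturedFrameForwarder::OnFrame() after this change, assuming the standard 90 kHz RTP video clock:

// Captured frames carry no RTP timestamp, so one is derived from a
// simulated NTP time taken from the injected clock.
webrtc::VideoFrame copy(video_frame);
if (video_frame.ntp_time_ms() == 0)
  copy.set_ntp_time_ms(clock_->CurrentNtpInMilliseconds());
// RTP video clocks tick at 90 kHz, i.e. 90 ticks per millisecond.
copy.set_timestamp(copy.ntp_time_ms() * 90);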


@@ -51,7 +51,7 @@ class VideoQualityTest : public test::CallTest {
bool ulpfec;
bool flexfec;
std::string encoded_frame_base_path;
std::string clip_name;
std::string clip_name; // "Generator" to generate frames instead.
size_t capture_device_index;
} video;
struct Audio {
@@ -82,6 +82,8 @@ class VideoQualityTest : public test::CallTest {
int selected_sl;
// If empty, bitrates are generated in VP9Impl automatically.
std::vector<SpatialLayer> spatial_layers;
// If set, default parameters will be used instead of |streams|.
bool infer_streams;
} ss;
int num_thumbnails;
};
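
Because the full-stack tests brace-initialize ss positionally, the two new members extend every initializer list, as seen above. An annotated sketch of the mapping; the order of the leading fields is inferred from this header and the call sites:

// Annotated form of e.g. simulcast.ss = {streams, 2, 1, 0, ...} above.
simulcast.ss = {streams,                      // streams
                2,                            // selected_stream
                1,                            // num_spatial_layers
                0,                            // selected_sl
                std::vector<SpatialLayer>(),  // spatial_layers (empty: auto)
                false};                       // infer_streams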


@@ -15,6 +15,7 @@
#include <numeric>
#include <utility>
#include "webrtc/api/video/i420_buffer.h"
#include "webrtc/base/arraysize.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/location.h"
@@ -575,6 +576,20 @@ void ViEEncoder::ReconfigureEncoder() {
encoder_config_.video_stream_factory->CreateEncoderStreams(
last_frame_info_->width, last_frame_info_->height, encoder_config_);
// TODO(ilnik): If configured resolution is significantly less than provided,
// e.g. because there are not enough SSRCs for all simulcast streams,
// signal new resolutions via SinkWants to video source.
// Stream dimensions may differ from the given ones because of simulcast
// restrictions.
int highest_stream_width = static_cast<int>(streams.back().width);
int highest_stream_height = static_cast<int>(streams.back().height);
// Dimensions may be reduced, e.g. to be divisible by 4.
RTC_CHECK_GE(last_frame_info_->width, highest_stream_width);
RTC_CHECK_GE(last_frame_info_->height, highest_stream_height);
crop_width_ = last_frame_info_->width - highest_stream_width;
crop_height_ = last_frame_info_->height - highest_stream_height;
VideoCodec codec;
if (!VideoCodecInitializer::SetupCodec(encoder_config_, settings_, streams,
nack_enabled_, &codec,
@@ -770,12 +785,35 @@ void ViEEncoder::EncodeVideoFrame(const VideoFrame& video_frame,
}
TraceFrameDropEnd();
VideoFrame out_frame(video_frame);
// Crop frame if needed.
if (crop_width_ > 0 || crop_height_ > 0) {
int cropped_width = video_frame.width() - crop_width_;
int cropped_height = video_frame.height() - crop_height_;
rtc::scoped_refptr<I420Buffer> cropped_buffer =
I420Buffer::Create(cropped_width, cropped_height);
// TODO(ilnik): Remove scaling if cropping is too big, as it should never
// happen after SinkWants signaled correctly from ReconfigureEncoder.
if (crop_width_ < 4 && crop_height_ < 4) {
cropped_buffer->CropAndScaleFrom(
*video_frame.video_frame_buffer()->ToI420(), crop_width_ / 2,
crop_height_ / 2, cropped_width, cropped_height);
} else {
cropped_buffer->ScaleFrom(
*video_frame.video_frame_buffer()->ToI420().get());
}
out_frame =
VideoFrame(cropped_buffer, video_frame.timestamp(),
video_frame.render_time_ms(), video_frame.rotation());
out_frame.set_ntp_time_ms(video_frame.ntp_time_ms());
}
TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame.render_time_ms(),
"Encode");
overuse_detector_->FrameCaptured(video_frame, time_when_posted_us);
overuse_detector_->FrameCaptured(out_frame, time_when_posted_us);
video_sender_.AddVideoFrame(video_frame, nullptr);
video_sender_.AddVideoFrame(out_frame, nullptr);
}
void ViEEncoder::SendKeyFrame() {
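
A condensed view of the per-frame cropping branch added to EncodeVideoFrame() above, with the centering arithmetic spelled out:

// Cropping is centered: half of the excess is trimmed from each edge via
// the offset arguments of CropAndScaleFrom().
rtc::scoped_refptr<webrtc::I420Buffer> cropped_buffer =
    webrtc::I420Buffer::Create(cropped_width, cropped_height);
if (crop_width_ < 4 && crop_height_ < 4) {
  // Small alignment mismatch: crop without scaling.
  cropped_buffer->CropAndScaleFrom(
      *video_frame.video_frame_buffer()->ToI420(),
      crop_width_ / 2,   // offset_x: trim crop_width_ / 2 columns on the left.
      crop_height_ / 2,  // offset_y: trim crop_height_ / 2 rows on the top.
      cropped_width, cropped_height);
} else {
  // Large mismatch: scale the whole frame instead (see TODO above).
  cropped_buffer->ScaleFrom(*video_frame.video_frame_buffer()->ToI420());
}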


@@ -257,6 +257,8 @@ class ViEEncoder : public rtc::VideoSinkInterface<VideoFrame>,
// encoder on the next frame.
bool pending_encoder_reconfiguration_ ACCESS_ON(&encoder_queue_);
rtc::Optional<VideoFrameInfo> last_frame_info_ ACCESS_ON(&encoder_queue_);
int crop_width_ ACCESS_ON(&encoder_queue_);
int crop_height_ ACCESS_ON(&encoder_queue_);
uint32_t encoder_start_bitrate_bps_ ACCESS_ON(&encoder_queue_);
size_t max_data_payload_length_ ACCESS_ON(&encoder_queue_);
bool nack_enabled_ ACCESS_ON(&encoder_queue_);


@@ -166,6 +166,7 @@ class VideoStreamFactory
const int framerate_;
};
class AdaptingFrameForwarder : public test::FrameForwarder {
public:
AdaptingFrameForwarder() : adaptation_enabled_(false) {}
@@ -2944,4 +2945,71 @@ TEST_F(ViEEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Resolution) {
vie_encoder_->Stop();
}
TEST_F(ViEEncoderTest, AcceptsFullHdAdaptedDownSimulcastFrames) {
// Simulates simulcast behavior and makes the highest stream resolution
// divisible by 4.
class CroppingVideoStreamFactory
: public VideoEncoderConfig::VideoStreamFactoryInterface {
public:
explicit CroppingVideoStreamFactory(size_t num_temporal_layers,
int framerate)
: num_temporal_layers_(num_temporal_layers), framerate_(framerate) {
EXPECT_GT(num_temporal_layers, 0u);
EXPECT_GT(framerate, 0);
}
private:
std::vector<VideoStream> CreateEncoderStreams(
int width,
int height,
const VideoEncoderConfig& encoder_config) override {
std::vector<VideoStream> streams =
test::CreateVideoStreams(width - width % 4, height - height % 4,
encoder_config);
for (VideoStream& stream : streams) {
stream.temporal_layer_thresholds_bps.resize(num_temporal_layers_ - 1);
stream.max_framerate = framerate_;
}
return streams;
}
const size_t num_temporal_layers_;
const int framerate_;
};
const int kFrameWidth = 1920;
const int kFrameHeight = 1080;
// 3/4 of 1920.
const int kAdaptedFrameWidth = 1440;
// 3/4 of 1080 rounded down to multiple of 4.
const int kAdaptedFrameHeight = 808;
const int kFramerate = 24;
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
// Trigger reconfigure encoder (without resetting the entire instance).
VideoEncoderConfig video_encoder_config;
video_encoder_config.max_bitrate_bps = kTargetBitrateBps;
video_encoder_config.number_of_streams = 1;
video_encoder_config.video_stream_factory =
new rtc::RefCountedObject<CroppingVideoStreamFactory>(1, kFramerate);
vie_encoder_->ConfigureEncoder(std::move(video_encoder_config),
kMaxPayloadLength, false);
vie_encoder_->WaitUntilTaskQueueIsIdle();
video_source_.set_adaptation_enabled(true);
video_source_.IncomingCapturedFrame(
CreateFrame(1, kFrameWidth, kFrameHeight));
sink_.WaitForEncodedFrame(kFrameWidth, kFrameHeight);
// Trigger CPU overuse, downscale by 3/4.
vie_encoder_->TriggerCpuOveruse();
video_source_.IncomingCapturedFrame(
CreateFrame(2, kFrameWidth, kFrameHeight));
sink_.WaitForEncodedFrame(kAdaptedFrameWidth, kAdaptedFrameHeight);
vie_encoder_->Stop();
}
} // namespace webrtc