[VP9 SVC] Round spatial layers dimensions to ensure integer scaling factors are used

Bug: webrtc:11652
Change-Id: Id3642d607f62b72a567d521d9874b8588c2ce429
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/176517
Reviewed-by: Erik Språng <sprang@webrtc.org>
Commit-Queue: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#31465}
This commit is contained in:
Ilya Nikolaevskiy
2020-06-05 12:36:32 +02:00
committed by Commit Bot
parent 9766b890a8
commit 09eb6e249d
4 changed files with 50 additions and 0 deletions

View File

@ -79,6 +79,11 @@ std::vector<SpatialLayer> ConfigureSvcNormalVideo(size_t input_width,
// First active layer must be configured.
num_spatial_layers = std::max(num_spatial_layers, first_active_layer + 1);
// Ensure top layer is even enough.
int required_divisiblity = 1 << num_spatial_layers;
input_width = input_width - input_width % required_divisiblity;
input_height = input_height - input_height % required_divisiblity;
for (size_t sl_idx = first_active_layer; sl_idx < num_spatial_layers;
++sl_idx) {
SpatialLayer spatial_layer = {0};

View File

@ -219,6 +219,14 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec(
video_codec.spatialLayers[i] = spatial_layers[i];
}
// The top spatial layer dimensions may not be equal to the input
// resolution because of the rounding or explicit configuration.
// This difference must be propagated to the stream configuration.
video_codec.width = spatial_layers.back().width;
video_codec.height = spatial_layers.back().height;
video_codec.simulcastStream[0].width = spatial_layers.back().width;
video_codec.simulcastStream[0].height = spatial_layers.back().height;
// Update layering settings.
video_codec.VP9()->numberOfSpatialLayers =
static_cast<unsigned char>(spatial_layers.size());

View File

@ -588,6 +588,13 @@ void VideoStreamEncoder::ReconfigureEncoder() {
RTC_LOG(LS_ERROR) << "Failed to create encoder configuration.";
}
if (encoder_config_.codec_type == kVideoCodecVP9) {
// Spatial layers configuration might impose some parity restrictions,
// thus some cropping might be needed.
crop_width_ = last_frame_info_->width - codec.width;
crop_height_ = last_frame_info_->height - codec.height;
}
char log_stream_buf[4 * 1024];
rtc::SimpleStringBuilder log_stream(log_stream_buf);
log_stream << "ReconfigureEncoder:\n";

View File

@ -5917,4 +5917,34 @@ TEST_F(VideoStreamEncoderTest, AutomaticAnimationDetection) {
video_stream_encoder_->Stop();
}
TEST_F(VideoStreamEncoderTest, ConfiguresVp9SvcAtOddResolutions) {
  // 720x405 (an odd-height, 540p-style adapted resolution) exercises the
  // VP9 SVC dimension-rounding path with two spatial layers.
  const int kFrameWidth = 720;
  const int kFrameHeight = 405;
  const int kFrameCount = 3;

  // Screenshare mode is used so the two-spatial-layer VP9 config applies.
  ResetEncoder("VP9", /*num_streams=*/1, /*num_temporal_layers=*/1,
               /*num_spatial_layers=*/2, /*screenshare=*/true);
  video_source_.set_adaptation_enabled(true);

  video_stream_encoder_->OnBitrateUpdatedAndWaitForManagedResources(
      DataRate::BitsPerSec(kTargetBitrateBps),
      DataRate::BitsPerSec(kTargetBitrateBps),
      DataRate::BitsPerSec(kTargetBitrateBps), 0, 0, 0);

  VideoFrame frame = CreateFrame(1, kFrameWidth, kFrameHeight);

  // Feed several frames and require that every one of them is encoded;
  // a misconfigured SVC layout at this resolution would stall encoding.
  for (int i = 0; i < kFrameCount; ++i) {
    const int64_t timestamp_ms =
        fake_clock_.TimeNanos() / rtc::kNumNanosecsPerMillisec;
    frame.set_ntp_time_ms(timestamp_ms);
    frame.set_timestamp_us(timestamp_ms * 1000);
    video_source_.IncomingCapturedFrame(frame);
    WaitForEncodedFrame(timestamp_ms);
  }

  video_stream_encoder_->Stop();
}
} // namespace webrtc