Change rtc::VideoSinkWants to have a target and a max pixel count

The current approach, with max_pixel_count and max_pixel_count_step_up,
where only one should be used at a time and where the first signals an
inclusive upper bound and the other an exclusive lower bound, makes for a
lot of confusion.

I've updated this to have a desired target and a maximum instead. The
source should select a resolution as close to the target as possible,
but no higher than the maximum.
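
To make the new contract concrete, here is a minimal sketch of the two
signaling patterns. The helper names are hypothetical, the pixel values are
simply the ones used in the unit tests below, and the include paths are
approximate for the WebRTC tree at the time of this CL.

#include "webrtc/base/optional.h"
#include "webrtc/media/base/videosourceinterface.h"  // rtc::VideoSinkWants

// Adapting down: only the upper bound matters, so the target stays unset.
rtc::VideoSinkWants MakeAdaptDownWants() {
  rtc::VideoSinkWants wants;
  wants.max_pixel_count = rtc::Optional<int>(640 * 360 - 1);
  return wants;
}

// Adapting up: ask for a resolution near the target, but never above the max.
rtc::VideoSinkWants MakeAdaptUpWants() {
  rtc::VideoSinkWants wants;
  wants.target_pixel_count = rtc::Optional<int>(960 * 540);
  wants.max_pixel_count = rtc::Optional<int>(1280 * 720);
  return wants;
}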

I intend to also add similar frame rate settings in an upcoming CL.

BUG=webrtc:4172,webrtc:6850

Review-Url: https://codereview.webrtc.org/2672793002
Cr-Commit-Position: refs/heads/master@{#16533}
Author: sprang
Date: 2017-02-10 07:04:27 -08:00
Committed by: Commit bot
Parent: e9ad271db4
Commit: 84a3759825
14 changed files with 258 additions and 177 deletions

View File

@ -490,13 +490,20 @@ TEST_F(CallPerfTest, ReceivesCpuOveruseAndUnderuse) {
const rtc::VideoSinkWants& wants) override {
// First expect CPU overuse. Then expect CPU underuse when the encoder
// delay has been decreased.
if (wants.max_pixel_count) {
if (wants.target_pixel_count &&
*wants.target_pixel_count <
wants.max_pixel_count.value_or(std::numeric_limits<int>::max())) {
// On adapting up, ViEEncoder::VideoSourceProxy will set the target
// pixel count to a step up from the current and the max value to
// something higher than the target.
EXPECT_FALSE(expect_lower_resolution_wants_);
observation_complete_.Set();
} else if (wants.max_pixel_count) {
// On adapting down, ViEEncoder::VideoSourceProxy will set only the max
// pixel count, leaving the target unset.
EXPECT_TRUE(expect_lower_resolution_wants_);
expect_lower_resolution_wants_ = false;
encoder_.SetDelay(2);
} else if (wants.max_pixel_count_step_up) {
EXPECT_FALSE(expect_lower_resolution_wants_);
observation_complete_.Set();
}
}

View File

@ -81,8 +81,8 @@ bool AdaptedVideoTrackSource::apply_rotation() {
void AdaptedVideoTrackSource::OnSinkWantsChanged(
const rtc::VideoSinkWants& wants) {
RTC_DCHECK(thread_checker_.CalledOnValidThread());
video_adapter_.OnResolutionRequest(wants.max_pixel_count,
wants.max_pixel_count_step_up);
video_adapter_.OnResolutionRequest(wants.target_pixel_count,
wants.max_pixel_count);
}
bool AdaptedVideoTrackSource::AdaptFrame(int width,

View File

@ -26,6 +26,12 @@ namespace {
struct Fraction {
int numerator;
int denominator;
// Determines number of output pixels if both width and height of an input of
// |input_pixels| pixels are scaled with the fraction numerator / denominator.
int scale_pixel_count(int input_pixels) {
return (numerator * numerator * input_pixels) / (denominator * denominator);
}
};
// Round |value_to_round| to a multiple of |multiple|. Prefer rounding upwards,
@ -37,29 +43,54 @@ int roundUp(int value_to_round, int multiple, int max_value) {
: (max_value / multiple * multiple);
}
// Generates a scale factor that makes |input_num_pixels| smaller or
// larger than |target_num_pixels|, depending on the value of |step_up|.
Fraction FindScale(int input_num_pixels, int target_num_pixels, bool step_up) {
// Generates a scale factor that makes |input_pixels| close to |target_pixels|,
// but no higher than |max_pixels|.
Fraction FindScale(int input_pixels, int target_pixels, int max_pixels) {
// This function only makes sense for a positive target.
RTC_DCHECK_GT(target_num_pixels, 0);
RTC_DCHECK_GT(target_pixels, 0);
RTC_DCHECK_GT(max_pixels, 0);
RTC_DCHECK_GE(max_pixels, target_pixels);
// Don't scale up original.
if (target_pixels >= input_pixels)
return Fraction{1, 1};
Fraction current_scale = Fraction{1, 1};
Fraction best_scale = Fraction{1, 1};
Fraction last_scale = Fraction{1, 1};
const float target_scale =
sqrt(target_num_pixels / static_cast<float>(input_num_pixels));
while (best_scale.numerator > (target_scale * best_scale.denominator)) {
last_scale = best_scale;
if (best_scale.numerator % 3 == 0 && best_scale.denominator % 2 == 0) {
// Multiply by 2/3
best_scale.numerator /= 3;
best_scale.denominator /= 2;
// The minimum (absolute) difference between the number of output pixels and
// the target pixel count.
int min_pixel_diff = std::numeric_limits<int>::max();
if (input_pixels < max_pixels) {
// Start condition for 1/1 case, if it is less than max.
min_pixel_diff = std::abs(input_pixels - target_pixels);
}
// Alternately scale down by 2/3 and 3/4. This results in fractions which are
// effectively scalable. For instance, starting at 1280x720 will result in
// the series (3/4) => 960x540, (1/2) => 640x360, (3/8) => 480x270,
// (1/4) => 320x180, (3/16) => 240x135, (1/8) => 160x90.
while (current_scale.scale_pixel_count(input_pixels) > target_pixels) {
if (current_scale.numerator % 3 == 0 &&
current_scale.denominator % 2 == 0) {
// Multiply by 2/3.
current_scale.numerator /= 3;
current_scale.denominator /= 2;
} else {
// Multiply by 3/4
best_scale.numerator *= 3;
best_scale.denominator *= 4;
// Multiply by 3/4.
current_scale.numerator *= 3;
current_scale.denominator *= 4;
}
int output_pixels = current_scale.scale_pixel_count(input_pixels);
if (output_pixels <= max_pixels) {
int diff = std::abs(target_pixels - output_pixels);
if (diff < min_pixel_diff) {
min_pixel_diff = diff;
best_scale = current_scale;
}
}
}
if (step_up)
return last_scale;
return best_scale;
}
} // namespace
@ -74,8 +105,8 @@ VideoAdapter::VideoAdapter(int required_resolution_alignment)
previous_width_(0),
previous_height_(0),
required_resolution_alignment_(required_resolution_alignment),
resolution_request_max_pixel_count_(std::numeric_limits<int>::max()),
step_up_(false) {}
resolution_request_target_pixel_count_(std::numeric_limits<int>::max()),
resolution_request_max_pixel_count_(std::numeric_limits<int>::max()) {}
VideoAdapter::VideoAdapter() : VideoAdapter(1) {}
@ -124,14 +155,11 @@ bool VideoAdapter::AdaptFrameResolution(int in_width,
// OnOutputFormatRequest and OnResolutionRequest.
int max_pixel_count = resolution_request_max_pixel_count_;
if (requested_format_) {
// TODO(kthelgason): remove the - |step_up_| hack when we change how
// resolution is requested from VideoSourceProxy.
// This is required because we must not scale above the requested
// format so we subtract one when scaling up.
max_pixel_count = std::min(
max_pixel_count, requested_format_->width * requested_format_->height -
static_cast<int>(step_up_));
max_pixel_count, requested_format_->width * requested_format_->height);
}
int target_pixel_count =
std::min(resolution_request_target_pixel_count_, max_pixel_count);
// Drop the input frame if necessary.
if (max_pixel_count <= 0 || !KeepFrame(in_timestamp_ns)) {
@ -173,8 +201,8 @@ bool VideoAdapter::AdaptFrameResolution(int in_width,
*cropped_height =
std::min(in_height, static_cast<int>(in_width / requested_aspect));
}
const Fraction scale =
FindScale(*cropped_width * *cropped_height, max_pixel_count, step_up_);
const Fraction scale = FindScale((*cropped_width) * (*cropped_height),
target_pixel_count, max_pixel_count);
// Adjust cropping slightly to get even integer output size and a perfect
// scale factor. Make sure the resulting dimensions are aligned correctly
// to be nice to hardware encoders.
@ -222,12 +250,13 @@ void VideoAdapter::OnOutputFormatRequest(const VideoFormat& format) {
}
void VideoAdapter::OnResolutionRequest(
rtc::Optional<int> max_pixel_count,
rtc::Optional<int> max_pixel_count_step_up) {
const rtc::Optional<int>& target_pixel_count,
const rtc::Optional<int>& max_pixel_count) {
rtc::CritScope cs(&critical_section_);
resolution_request_max_pixel_count_ = max_pixel_count.value_or(
max_pixel_count_step_up.value_or(std::numeric_limits<int>::max()));
step_up_ = static_cast<bool>(max_pixel_count_step_up);
resolution_request_max_pixel_count_ =
max_pixel_count.value_or(std::numeric_limits<int>::max());
resolution_request_target_pixel_count_ =
target_pixel_count.value_or(resolution_request_max_pixel_count_);
}
} // namespace cricket
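
The alternating 2/3 and 3/4 ladder described in the FindScale comment above
can be reproduced with a tiny standalone program (illustrative only, not part
of the CL); FindScale walks this same ladder and keeps the rung whose pixel
count is closest to the target without exceeding the max.

#include <cstdio>

int main() {
  // Accumulate alternating 3/4 and 2/3 scale steps, starting from 1280x720,
  // and print the resulting ladder: 960x540, 640x360, 480x270, 320x180,
  // 240x135, 160x90.
  int num = 1, den = 1;
  const int width = 1280, height = 720;
  for (int step = 0; step < 6; ++step) {
    if (num % 3 == 0 && den % 2 == 0) {
      num /= 3;  // Multiply the accumulated fraction by 2/3.
      den /= 2;
    } else {
      num *= 3;  // Multiply the accumulated fraction by 3/4.
      den *= 4;
    }
    std::printf("%d/%d -> %dx%d\n", num, den, (width * num) / den,
                (height * num) / den);
  }
  return 0;
}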

View File

@ -48,11 +48,13 @@ class VideoAdapter {
// 720x1280 is requested.
void OnOutputFormatRequest(const VideoFormat& format);
// Requests the output frame size from |AdaptFrameResolution| to not have
// more than |max_pixel_count| pixels and have "one step" up more pixels than
// max_pixel_count_step_up.
void OnResolutionRequest(rtc::Optional<int> max_pixel_count,
rtc::Optional<int> max_pixel_count_step_up);
// Requests the output frame size from |AdaptFrameResolution| to be as close
// as possible to |target_pixel_count|, but no more than |max_pixel_count|
// pixels. If |target_pixel_count| is not set, treat it as being equal to
// |max_pixel_count|. If |max_pixel_count| is not set, treat it as being the
// highest resolution available.
void OnResolutionRequest(const rtc::Optional<int>& target_pixel_count,
const rtc::Optional<int>& max_pixel_count);
private:
// Determine if frame should be dropped based on input fps and requested fps.
@ -73,8 +75,8 @@ class VideoAdapter {
// OnResolutionRequest respectively.
// The adapted output format is the minimum of these.
rtc::Optional<VideoFormat> requested_format_ GUARDED_BY(critical_section_);
int resolution_request_target_pixel_count_ GUARDED_BY(critical_section_);
int resolution_request_max_pixel_count_ GUARDED_BY(critical_section_);
bool step_up_ GUARDED_BY(critical_section_);
// The critical section to protect the above variables.
rtc::CriticalSection critical_section_;

View File

@ -57,10 +57,13 @@ class VideoAdapterTest : public testing::Test {
explicit VideoCapturerListener(VideoAdapter* adapter)
: video_adapter_(adapter),
cropped_width_(0),
cropped_height_(0),
out_width_(0),
out_height_(0),
captured_frames_(0),
dropped_frames_(0),
last_adapt_was_no_op_(false) {
}
last_adapt_was_no_op_(false) {}
void OnFrame(const webrtc::VideoFrame& frame) {
rtc::CritScope lock(&crit_);
@ -693,8 +696,8 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestInSmallSteps) {
EXPECT_EQ(720, out_height_);
// Adapt down one step.
adapter_.OnResolutionRequest(rtc::Optional<int>(1280 * 720 - 1),
rtc::Optional<int>());
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(1280 * 720 - 1));
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -704,8 +707,8 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestInSmallSteps) {
EXPECT_EQ(540, out_height_);
// Adapt down one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(960 * 540 - 1),
rtc::Optional<int>());
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(960 * 540 - 1));
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -715,8 +718,8 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestInSmallSteps) {
EXPECT_EQ(360, out_height_);
// Adapt down one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360 - 1),
rtc::Optional<int>());
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 360 - 1));
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -726,8 +729,8 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestInSmallSteps) {
EXPECT_EQ(270, out_height_);
// Adapt up one step.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(480 * 270));
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360),
rtc::Optional<int>(960 * 540));
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -737,8 +740,8 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestInSmallSteps) {
EXPECT_EQ(360, out_height_);
// Adapt up one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 360));
adapter_.OnResolutionRequest(rtc::Optional<int>(960 * 540),
rtc::Optional<int>(1280 * 720));
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -748,8 +751,8 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestInSmallSteps) {
EXPECT_EQ(540, out_height_);
// Adapt up one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(960 * 720));
adapter_.OnResolutionRequest(rtc::Optional<int>(1280 * 720),
rtc::Optional<int>(1920 * 1080));
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -768,15 +771,16 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestMaxZero) {
EXPECT_EQ(1280, out_width_);
EXPECT_EQ(720, out_height_);
adapter_.OnResolutionRequest(rtc::Optional<int>(0), rtc::Optional<int>());
adapter_.OnResolutionRequest(rtc::Optional<int>(), rtc::Optional<int>(0));
EXPECT_FALSE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
}
TEST_F(VideoAdapterTest, TestOnResolutionRequestInLargeSteps) {
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360 - 1),
rtc::Optional<int>());
// Large step down.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 360 - 1));
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -785,8 +789,9 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestInLargeSteps) {
EXPECT_EQ(480, out_width_);
EXPECT_EQ(270, out_height_);
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(960 * 720));
// Large step up.
adapter_.OnResolutionRequest(rtc::Optional<int>(1280 * 720),
rtc::Optional<int>(1920 * 1080));
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -797,8 +802,8 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestInLargeSteps) {
}
TEST_F(VideoAdapterTest, TestOnOutputFormatRequestCapsMaxResolution) {
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360 - 1),
rtc::Optional<int>());
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 360 - 1));
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -837,8 +842,8 @@ TEST_F(VideoAdapterTest, TestOnResolutionRequestReset) {
EXPECT_EQ(1280, out_width_);
EXPECT_EQ(720, out_height_);
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360 - 1),
rtc::Optional<int>());
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 360 - 1));
EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
@ -871,8 +876,8 @@ TEST_F(VideoAdapterTest, TestCroppingWithResolutionRequest) {
EXPECT_EQ(360, out_height_);
// Adapt down one step.
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360 - 1),
rtc::Optional<int>());
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 360 - 1));
// Expect cropping to 16:9 format and 3/4 scaling.
EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
&cropped_width_, &cropped_height_,
@ -883,8 +888,8 @@ TEST_F(VideoAdapterTest, TestCroppingWithResolutionRequest) {
EXPECT_EQ(270, out_height_);
// Adapt down one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(480 * 270 - 1),
rtc::Optional<int>());
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(480 * 270 - 1));
// Expect cropping to 16:9 format and 1/2 scaling.
EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
&cropped_width_, &cropped_height_,
@ -895,8 +900,8 @@ TEST_F(VideoAdapterTest, TestCroppingWithResolutionRequest) {
EXPECT_EQ(180, out_height_);
// Adapt up one step.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(320 * 180));
adapter_.OnResolutionRequest(rtc::Optional<int>(480 * 270),
rtc::Optional<int>(640 * 360));
// Expect cropping to 16:9 format and 3/4 scaling.
EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
&cropped_width_, &cropped_height_,
@ -907,8 +912,8 @@ TEST_F(VideoAdapterTest, TestCroppingWithResolutionRequest) {
EXPECT_EQ(270, out_height_);
// Adapt up one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(480 * 270));
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360),
rtc::Optional<int>(960 * 540));
// Expect cropping to 16:9 format and no scaling.
EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
&cropped_width_, &cropped_height_,
@ -919,8 +924,8 @@ TEST_F(VideoAdapterTest, TestCroppingWithResolutionRequest) {
EXPECT_EQ(360, out_height_);
// Try to adapt up one step more.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 360));
adapter_.OnResolutionRequest(rtc::Optional<int>(960 * 540),
rtc::Optional<int>(1280 * 720));
// Expect cropping to 16:9 format and no scaling.
EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
&cropped_width_, &cropped_height_,
@ -935,8 +940,8 @@ TEST_F(VideoAdapterTest, TestCroppingOddResolution) {
// Ask for 640x360 (16:9 aspect), with 3/16 scaling.
adapter_.OnOutputFormatRequest(
VideoFormat(640, 360, 0, FOURCC_I420));
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 360 * 3 / 16 * 3 / 16),
rtc::Optional<int>());
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 360 * 3 / 16 * 3 / 16));
// Send 640x480 (4:3 aspect).
EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
@ -956,8 +961,8 @@ TEST_F(VideoAdapterTest, TestAdaptToVerySmallResolution) {
const int w = 1920;
const int h = 1080;
adapter_.OnOutputFormatRequest(VideoFormat(w, h, 0, FOURCC_I420));
adapter_.OnResolutionRequest(rtc::Optional<int>(w * h * 1 / 16 * 1 / 16),
rtc::Optional<int>());
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(w * h * 1 / 16 * 1 / 16));
// Send 1920x1080 (16:9 aspect).
EXPECT_TRUE(adapter_.AdaptFrameResolution(
@ -971,8 +976,8 @@ TEST_F(VideoAdapterTest, TestAdaptToVerySmallResolution) {
EXPECT_EQ(67, out_height_);
// Adapt back up one step to 3/32.
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(w * h * 1 / 16 * 1 / 16));
adapter_.OnResolutionRequest(rtc::Optional<int>(w * h * 3 / 32 * 3 / 32),
rtc::Optional<int>(w * h * 1 / 8 * 1 / 8));
// Send 1920x1080 (16:9 aspect).
EXPECT_TRUE(adapter_.AdaptFrameResolution(
@ -992,8 +997,8 @@ TEST_F(VideoAdapterTest, AdaptFrameResolutionDropWithResolutionRequest) {
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 480));
adapter_.OnResolutionRequest(rtc::Optional<int>(960 * 540),
rtc::Optional<int>());
// Still expect all frames to be dropped
EXPECT_FALSE(adapter_.AdaptFrameResolution(
@ -1001,8 +1006,8 @@ TEST_F(VideoAdapterTest, AdaptFrameResolutionDropWithResolutionRequest) {
&cropped_width_, &cropped_height_,
&out_width_, &out_height_));
adapter_.OnResolutionRequest(rtc::Optional<int>(640 * 480 - 1),
rtc::Optional<int>());
adapter_.OnResolutionRequest(rtc::Optional<int>(),
rtc::Optional<int>(640 * 480 - 1));
// Still expect all frames to be dropped
EXPECT_FALSE(adapter_.AdaptFrameResolution(

View File

@ -89,18 +89,20 @@ void VideoBroadcaster::UpdateWants() {
(*sink.wants.max_pixel_count < *wants.max_pixel_count))) {
wants.max_pixel_count = sink.wants.max_pixel_count;
}
// wants.max_pixel_count_step_up == MIN(sink.wants.max_pixel_count_step_up)
if (sink.wants.max_pixel_count_step_up &&
(!wants.max_pixel_count_step_up ||
(*sink.wants.max_pixel_count_step_up <
*wants.max_pixel_count_step_up))) {
wants.max_pixel_count_step_up = sink.wants.max_pixel_count_step_up;
// Select the minimum requested target_pixel_count, if any, of all sinks so
// that we don't over-utilize the resources for any one of them.
// TODO(sprang): Consider using the median instead, since the limit can be
// expressed by max_pixel_count.
if (sink.wants.target_pixel_count &&
(!wants.target_pixel_count ||
(*sink.wants.target_pixel_count < *wants.target_pixel_count))) {
wants.target_pixel_count = sink.wants.target_pixel_count;
}
}
if (wants.max_pixel_count && wants.max_pixel_count_step_up &&
*wants.max_pixel_count_step_up >= *wants.max_pixel_count) {
wants.max_pixel_count_step_up = Optional<int>();
if (wants.max_pixel_count && wants.target_pixel_count &&
*wants.target_pixel_count >= *wants.max_pixel_count) {
wants.target_pixel_count = wants.max_pixel_count;
}
current_wants_ = wants;
}

View File

@ -106,25 +106,25 @@ TEST(VideoBroadcasterTest, AppliesMinOfSinkWantsMaxPixelCount) {
EXPECT_EQ(1280 * 720, *broadcaster.wants().max_pixel_count);
}
TEST(VideoBroadcasterTest, AppliesMinOfSinkWantsMaxPixelCountStepUp) {
TEST(VideoBroadcasterTest, AppliesMinOfSinkWantsMaxAndTargetPixelCount) {
VideoBroadcaster broadcaster;
EXPECT_TRUE(!broadcaster.wants().max_pixel_count_step_up);
EXPECT_TRUE(!broadcaster.wants().target_pixel_count);
FakeVideoRenderer sink1;
VideoSinkWants wants1;
wants1.max_pixel_count_step_up = rtc::Optional<int>(1280 * 720);
wants1.target_pixel_count = rtc::Optional<int>(1280 * 720);
broadcaster.AddOrUpdateSink(&sink1, wants1);
EXPECT_EQ(1280 * 720, *broadcaster.wants().max_pixel_count_step_up);
EXPECT_EQ(1280 * 720, *broadcaster.wants().target_pixel_count);
FakeVideoRenderer sink2;
VideoSinkWants wants2;
wants2.max_pixel_count_step_up = rtc::Optional<int>(640 * 360);
wants2.target_pixel_count = rtc::Optional<int>(640 * 360);
broadcaster.AddOrUpdateSink(&sink2, wants2);
EXPECT_EQ(640 * 360, *broadcaster.wants().max_pixel_count_step_up);
EXPECT_EQ(640 * 360, *broadcaster.wants().target_pixel_count);
broadcaster.RemoveSink(&sink2);
EXPECT_EQ(1280 * 720, *broadcaster.wants().max_pixel_count_step_up);
EXPECT_EQ(1280 * 720, *broadcaster.wants().target_pixel_count);
}
TEST(VideoBroadcasterTest, SinkWantsBlackFrames) {

View File

@ -149,8 +149,8 @@ void VideoCapturer::OnSinkWantsChanged(const rtc::VideoSinkWants& wants) {
apply_rotation_ = wants.rotation_applied;
if (video_adapter()) {
video_adapter()->OnResolutionRequest(wants.max_pixel_count,
wants.max_pixel_count_step_up);
video_adapter()->OnResolutionRequest(wants.target_pixel_count,
wants.max_pixel_count);
}
}

View File

@ -275,7 +275,7 @@ TEST_F(VideoCapturerTest, SinkWantsMaxPixelAndMaxPixelCountStepUp) {
// Request a lower resolution.
wants.max_pixel_count =
rtc::Optional<int>(renderer_.width() * renderer_.height() * 3 / 5);
rtc::Optional<int>((renderer_.width() * renderer_.height() * 3) / 5);
capturer_->AddOrUpdateSink(&renderer_, wants);
EXPECT_TRUE(capturer_->CaptureFrame());
EXPECT_EQ(3, renderer_.num_rendered_frames());
@ -294,8 +294,8 @@ TEST_F(VideoCapturerTest, SinkWantsMaxPixelAndMaxPixelCountStepUp) {
EXPECT_EQ(360, renderer2.height());
// Request higher resolution.
wants.max_pixel_count_step_up = wants.max_pixel_count;
wants.max_pixel_count = rtc::Optional<int>();
wants.target_pixel_count.emplace((*wants.max_pixel_count * 5) / 3);
wants.max_pixel_count.emplace(*wants.max_pixel_count * 4);
capturer_->AddOrUpdateSink(&renderer_, wants);
EXPECT_TRUE(capturer_->CaptureFrame());
EXPECT_EQ(5, renderer_.num_rendered_frames());

View File

@ -28,12 +28,12 @@ struct VideoSinkWants {
// Tells the source the maximum number of pixels the sink wants.
rtc::Optional<int> max_pixel_count;
// Like |max_pixel_count| but relative to the given value. The source is
// requested to produce frames with a resolution one "step up" from the given
// value. In practice, this means that the sink can consume this amount of
// pixels but wants more and the source should produce a resolution one
// "step" higher than this but not higher.
rtc::Optional<int> max_pixel_count_step_up;
// Tells the source the desired number of pixels the sink wants. This will
// typically be used when stepping the resolution up again when conditions
// have improved after an earlier downgrade. The source should select the
// closest resolution to this pixel count, but if max_pixel_count is set, it
// still sets the absolute upper bound.
rtc::Optional<int> target_pixel_count;
};
template <typename VideoFrameT>
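
As a usage sketch of the new field on the sink side, something like the
following would request one adaptation step up. RequestStepUp is a
hypothetical helper; the 5/3 target and 4x cap mirror what
ViEEncoder::VideoSourceProxy does later in this CL, and the include paths are
approximate for the tree at the time.

#include "webrtc/base/optional.h"
#include "webrtc/media/base/videosinkinterface.h"
#include "webrtc/media/base/videosourceinterface.h"
#include "webrtc/video_frame.h"

// Hypothetical helper: ask |source| for roughly one resolution step up from
// |current_pixel_count|, but never more than 4x the pixels (2x per dimension).
void RequestStepUp(rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
                   rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
                   int current_pixel_count) {
  rtc::VideoSinkWants wants;
  wants.target_pixel_count = rtc::Optional<int>((current_pixel_count * 5) / 3);
  wants.max_pixel_count = rtc::Optional<int>(current_pixel_count * 4);
  source->AddOrUpdateSink(sink, wants);
}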

View File

@ -2124,19 +2124,24 @@ TEST_F(WebRtcVideoChannel2Test, AdaptsOnOveruseAndChangeResolution) {
EXPECT_EQ(724 / 2, send_stream->GetLastHeight());
// Trigger underuse which should go back up in resolution.
wants.max_pixel_count = rtc::Optional<int>();
wants.max_pixel_count_step_up = rtc::Optional<int>(
send_stream->GetLastWidth() * send_stream->GetLastHeight());
int current_pixel_count =
send_stream->GetLastWidth() * send_stream->GetLastHeight();
// Cap the max to 4x the pixel count (assuming max 1/2 x 1/2 scale downs)
// of the current stream, so we don't take too large steps.
wants.max_pixel_count = rtc::Optional<int>(current_pixel_count * 4);
// Default step down is 3/5 pixel count, so go up by 5/3.
wants.target_pixel_count = rtc::Optional<int>((current_pixel_count * 5) / 3);
send_stream->InjectVideoSinkWants(wants);
EXPECT_TRUE(capturer.CaptureCustomFrame(1284, 724, cricket::FOURCC_I420));
EXPECT_EQ(5, send_stream->GetNumberOfSwappedFrames());
EXPECT_EQ(1284 * 3 / 4, send_stream->GetLastWidth());
EXPECT_EQ(724 * 3 / 4, send_stream->GetLastHeight());
// Trigger underuse which should go back up in resolution.
wants.max_pixel_count = rtc::Optional<int>();
wants.max_pixel_count_step_up = rtc::Optional<int>(
send_stream->GetLastWidth() * send_stream->GetLastHeight());
// Trigger underuse again, should go back up to full resolution.
current_pixel_count =
send_stream->GetLastWidth() * send_stream->GetLastHeight();
wants.max_pixel_count = rtc::Optional<int>(current_pixel_count * 4);
wants.target_pixel_count = rtc::Optional<int>((current_pixel_count * 5) / 3);
send_stream->InjectVideoSinkWants(wants);
EXPECT_TRUE(capturer.CaptureCustomFrame(1284, 724, cricket::FOURCC_I420));
EXPECT_EQ(6, send_stream->GetNumberOfSwappedFrames());
@ -2272,9 +2277,10 @@ void WebRtcVideoChannel2Test::TestCpuAdaptation(bool enable_overuse,
EXPECT_LT(send_stream->GetLastHeight(), capture_format.height);
// Trigger underuse which should go back to normal resolution.
wants.max_pixel_count = rtc::Optional<int>();
wants.max_pixel_count_step_up = rtc::Optional<int>(
send_stream->GetLastWidth() * send_stream->GetLastHeight());
int last_pixel_count =
send_stream->GetLastWidth() * send_stream->GetLastHeight();
wants.max_pixel_count = rtc::Optional<int>(last_pixel_count * 4);
wants.target_pixel_count = rtc::Optional<int>((last_pixel_count * 5) / 3);
send_stream->InjectVideoSinkWants(wants);
EXPECT_TRUE(capturer.CaptureFrame());

View File

@ -204,7 +204,7 @@ class ViEEncoder::VideoSourceProxy {
if (pixels_wanted < kMinPixelsPerFrame)
return;
sink_wants_.max_pixel_count = rtc::Optional<int>(pixels_wanted);
sink_wants_.max_pixel_count_step_up = rtc::Optional<int>();
sink_wants_.target_pixel_count = rtc::Optional<int>();
if (source_)
source_->AddOrUpdateSink(vie_encoder_, sink_wants_);
}
@ -219,9 +219,10 @@ class ViEEncoder::VideoSourceProxy {
}
// The input video frame size will have a resolution with "one step up"
// pixels than |max_pixel_count_step_up| where "one step up" depends on
// how the source can scale the input frame size.
sink_wants_.max_pixel_count = rtc::Optional<int>();
sink_wants_.max_pixel_count_step_up = rtc::Optional<int>(pixel_count);
// how the source can scale the input frame size. We still cap the step up
// so the pixel count grows by at most a factor of four (2x per dimension).
sink_wants_.target_pixel_count = rtc::Optional<int>((pixel_count * 5) / 3);
sink_wants_.max_pixel_count = rtc::Optional<int>(pixel_count * 4);
if (source_)
source_->AddOrUpdateSink(vie_encoder_, sink_wants_);
}
@ -651,7 +652,7 @@ EncodedImageCallback::Result ViEEncoder::OnEncodedImage(
encoder_queue_.PostTask([this, timestamp, time_sent_us, qp] {
RTC_DCHECK_RUN_ON(&encoder_queue_);
overuse_detector_.FrameSent(timestamp, time_sent_us);
if (quality_scaler_)
if (quality_scaler_ && qp >= 0)
quality_scaler_->ReportQP(qp);
});
@ -740,14 +741,21 @@ void ViEEncoder::OnBitrateUpdated(uint32_t bitrate_bps,
void ViEEncoder::AdaptDown(AdaptReason reason) {
RTC_DCHECK_RUN_ON(&encoder_queue_);
if (degradation_preference_ != DegradationPreference::kBalanced)
if (degradation_preference_ != DegradationPreference::kBalanced ||
!last_frame_info_) {
return;
// Request lower resolution if the current resolution is lower than last time
// we asked for the resolution to be lowered.
int current_pixel_count =
last_frame_info_ ? last_frame_info_->pixel_count() : 0;
if (max_pixel_count_ && current_pixel_count >= *max_pixel_count_)
}
int current_pixel_count = last_frame_info_->pixel_count();
if (last_adaptation_request_ &&
last_adaptation_request_->mode_ == AdaptationRequest::Mode::kAdaptDown &&
current_pixel_count >= last_adaptation_request_->input_pixel_count_) {
// Don't request lower resolution if the current resolution is not lower
// than the last time we asked for the resolution to be lowered.
return;
}
last_adaptation_request_.emplace(AdaptationRequest{
current_pixel_count, AdaptationRequest::Mode::kAdaptDown});
switch (reason) {
case kQuality:
stats_proxy_->OnQualityRestrictedResolutionChanged(
@ -760,8 +768,6 @@ void ViEEncoder::AdaptDown(AdaptReason reason) {
stats_proxy_->OnCpuRestrictedResolutionChanged(true);
break;
}
max_pixel_count_ = rtc::Optional<int>(current_pixel_count);
max_pixel_count_step_up_ = rtc::Optional<int>();
++scale_counter_[reason];
source_proxy_->RequestResolutionLowerThan(current_pixel_count);
LOG(LS_INFO) << "Scaling down resolution.";
@ -774,15 +780,23 @@ void ViEEncoder::AdaptDown(AdaptReason reason) {
void ViEEncoder::AdaptUp(AdaptReason reason) {
RTC_DCHECK_RUN_ON(&encoder_queue_);
if (scale_counter_[reason] == 0 ||
degradation_preference_ != DegradationPreference::kBalanced) {
degradation_preference_ != DegradationPreference::kBalanced ||
!last_frame_info_) {
return;
}
// Only scale if resolution is higher than last time
// we requested higher resolution.
int current_pixel_count =
last_frame_info_ ? last_frame_info_->pixel_count() : 0;
if (current_pixel_count <= max_pixel_count_step_up_.value_or(0))
// Only scale if resolution is higher than last time we requested higher
// resolution.
int current_pixel_count = last_frame_info_->pixel_count();
if (last_adaptation_request_ &&
last_adaptation_request_->mode_ == AdaptationRequest::Mode::kAdaptUp &&
current_pixel_count <= last_adaptation_request_->input_pixel_count_) {
// Don't request higher resolution if the current resolution is not higher
// than the last time we asked for the resolution to be higher.
return;
}
last_adaptation_request_.emplace(AdaptationRequest{
current_pixel_count, AdaptationRequest::Mode::kAdaptUp});
switch (reason) {
case kQuality:
stats_proxy_->OnQualityRestrictedResolutionChanged(
@ -794,8 +808,6 @@ void ViEEncoder::AdaptUp(AdaptReason reason) {
1);
break;
}
max_pixel_count_ = rtc::Optional<int>();
max_pixel_count_step_up_ = rtc::Optional<int>(current_pixel_count);
--scale_counter_[reason];
source_proxy_->RequestHigherResolutionThan(current_pixel_count);
LOG(LS_INFO) << "Scaling up resolution.";

View File

@ -222,10 +222,16 @@ class ViEEncoder : public rtc::VideoSinkInterface<VideoFrame>,
VideoSendStream::DegradationPreference degradation_preference_
ACCESS_ON(&encoder_queue_);
// Pixel count last time the resolution was requested to be changed down.
rtc::Optional<int> max_pixel_count_ ACCESS_ON(&encoder_queue_);
// Pixel count last time the resolution was requested to be changed up.
rtc::Optional<int> max_pixel_count_step_up_ ACCESS_ON(&encoder_queue_);
struct AdaptationRequest {
// The pixel count produced by the source at the time of the adaptation.
int input_pixel_count_;
// Indicates if request was to adapt up or down.
enum class Mode { kAdaptUp, kAdaptDown } mode_;
};
// Stores a snapshot of the last adaptation request triggered by an AdaptUp
// or AdaptDown signal.
rtc::Optional<AdaptationRequest> last_adaptation_request_
ACCESS_ON(&encoder_queue_);
rtc::RaceChecker incoming_frame_race_checker_
GUARDED_BY(incoming_frame_race_checker_);

View File

@ -151,8 +151,8 @@ class AdaptingFrameForwarder : public test::FrameForwarder {
void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
const rtc::VideoSinkWants& wants) override {
rtc::CritScope cs(&crit_);
adapter_.OnResolutionRequest(wants.max_pixel_count,
wants.max_pixel_count_step_up);
adapter_.OnResolutionRequest(wants.target_pixel_count,
wants.max_pixel_count);
test::FrameForwarder::AddOrUpdateSink(sink, wants);
}
@ -616,8 +616,8 @@ TEST_F(ViEEncoderTest, SinkWantsRotationApplied) {
TEST_F(ViEEncoderTest, SinkWantsFromOveruseDetector) {
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
EXPECT_FALSE(video_source_.sink_wants().max_pixel_count);
EXPECT_FALSE(video_source_.sink_wants().max_pixel_count_step_up);
int frame_width = 1280;
int frame_height = 720;
@ -631,10 +631,10 @@ TEST_F(ViEEncoderTest, SinkWantsFromOveruseDetector) {
vie_encoder_->TriggerCpuOveruse();
EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
EXPECT_LT(video_source_.sink_wants().max_pixel_count.value_or(
std::numeric_limits<int>::max()),
frame_width * frame_height);
EXPECT_FALSE(video_source_.sink_wants().max_pixel_count_step_up);
frame_width /= 2;
frame_height /= 2;
@ -647,16 +647,17 @@ TEST_F(ViEEncoderTest, SinkWantsFromOveruseDetector) {
ViEEncoder::kMaxCpuDowngrades + 1, frame_width, frame_height));
sink_.WaitForEncodedFrame(ViEEncoder::kMaxCpuDowngrades + 1);
vie_encoder_->TriggerCpuOveruse();
EXPECT_EQ(video_source_.sink_wants().target_pixel_count,
current_wants.target_pixel_count);
EXPECT_EQ(video_source_.sink_wants().max_pixel_count,
current_wants.max_pixel_count);
EXPECT_EQ(video_source_.sink_wants().max_pixel_count_step_up,
current_wants.max_pixel_count_step_up);
// Trigger CPU normal use.
vie_encoder_->TriggerCpuNormalUsage();
EXPECT_FALSE(video_source_.sink_wants().max_pixel_count);
EXPECT_EQ(video_source_.sink_wants().max_pixel_count_step_up.value_or(0),
frame_width * frame_height);
EXPECT_EQ(frame_width * frame_height * 5 / 3,
video_source_.sink_wants().target_pixel_count.value_or(0));
EXPECT_EQ(frame_width * frame_height * 4,
video_source_.sink_wants().max_pixel_count.value_or(0));
vie_encoder_->Stop();
}
@ -665,8 +666,8 @@ TEST_F(ViEEncoderTest,
ResolutionSinkWantsResetOnSetSourceWithDisabledResolutionScaling) {
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
EXPECT_FALSE(video_source_.sink_wants().max_pixel_count);
EXPECT_FALSE(video_source_.sink_wants().max_pixel_count_step_up);
int frame_width = 1280;
int frame_height = 720;
@ -680,10 +681,10 @@ TEST_F(ViEEncoderTest,
video_source_.IncomingCapturedFrame(
CreateFrame(2, frame_width, frame_height));
sink_.WaitForEncodedFrame(2);
EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
EXPECT_LT(video_source_.sink_wants().max_pixel_count.value_or(
std::numeric_limits<int>::max()),
frame_width * frame_height);
EXPECT_FALSE(video_source_.sink_wants().max_pixel_count_step_up);
// Set new source.
test::FrameForwarder new_video_source;
@ -691,14 +692,14 @@ TEST_F(ViEEncoderTest,
&new_video_source,
VideoSendStream::DegradationPreference::kMaintainResolution);
EXPECT_FALSE(new_video_source.sink_wants().target_pixel_count);
EXPECT_FALSE(new_video_source.sink_wants().max_pixel_count);
EXPECT_FALSE(new_video_source.sink_wants().max_pixel_count_step_up);
new_video_source.IncomingCapturedFrame(
CreateFrame(3, frame_width, frame_height));
sink_.WaitForEncodedFrame(3);
EXPECT_FALSE(new_video_source.sink_wants().target_pixel_count);
EXPECT_FALSE(new_video_source.sink_wants().max_pixel_count);
EXPECT_FALSE(new_video_source.sink_wants().max_pixel_count_step_up);
// Calling SetSource with resolution scaling enabled apply the old SinkWants.
vie_encoder_->SetSource(&new_video_source,
@ -706,7 +707,7 @@ TEST_F(ViEEncoderTest,
EXPECT_LT(new_video_source.sink_wants().max_pixel_count.value_or(
std::numeric_limits<int>::max()),
frame_width * frame_height);
EXPECT_FALSE(new_video_source.sink_wants().max_pixel_count_step_up);
EXPECT_FALSE(new_video_source.sink_wants().target_pixel_count);
vie_encoder_->Stop();
}
@ -880,16 +881,27 @@ TEST_F(ViEEncoderTest, SwitchingSourceKeepsQualityAdaptation) {
TEST_F(ViEEncoderTest, StatsTracksAdaptationStatsWhenSwitchingSource) {
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
// Trigger CPU overuse.
vie_encoder_->TriggerCpuOveruse();
int frame_width = 1280;
int frame_height = 720;
int sequence = 1;
// Trigger CPU overuse, won't bite before first frame.
vie_encoder_->TriggerCpuOveruse();
video_source_.IncomingCapturedFrame(
CreateFrame(1, frame_width, frame_height));
sink_.WaitForEncodedFrame(1);
CreateFrame(sequence, frame_width, frame_height));
sink_.WaitForEncodedFrame(sequence++);
VideoSendStream::Stats stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.cpu_limited_resolution);
EXPECT_EQ(0, stats.number_of_cpu_adapt_changes);
// Trigger CPU overuse again, should now adapt down.
vie_encoder_->TriggerCpuOveruse();
video_source_.IncomingCapturedFrame(
CreateFrame(sequence, frame_width, frame_height));
sink_.WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
EXPECT_TRUE(stats.cpu_limited_resolution);
EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
@ -899,8 +911,8 @@ TEST_F(ViEEncoderTest, StatsTracksAdaptationStatsWhenSwitchingSource) {
VideoSendStream::DegradationPreference::kBalanced);
new_video_source.IncomingCapturedFrame(
CreateFrame(2, frame_width, frame_height));
sink_.WaitForEncodedFrame(2);
CreateFrame(sequence, frame_width, frame_height));
sink_.WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
EXPECT_TRUE(stats.cpu_limited_resolution);
EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
@ -910,8 +922,8 @@ TEST_F(ViEEncoderTest, StatsTracksAdaptationStatsWhenSwitchingSource) {
&new_video_source,
VideoSendStream::DegradationPreference::kMaintainResolution);
new_video_source.IncomingCapturedFrame(
CreateFrame(3, frame_width, frame_height));
sink_.WaitForEncodedFrame(3);
CreateFrame(sequence, frame_width, frame_height));
sink_.WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.cpu_limited_resolution);
EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
@ -920,8 +932,8 @@ TEST_F(ViEEncoderTest, StatsTracksAdaptationStatsWhenSwitchingSource) {
vie_encoder_->SetSource(&video_source_,
VideoSendStream::DegradationPreference::kBalanced);
video_source_.IncomingCapturedFrame(
CreateFrame(4, frame_width, frame_height));
sink_.WaitForEncodedFrame(4);
CreateFrame(sequence, frame_width, frame_height));
sink_.WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
EXPECT_TRUE(stats.cpu_limited_resolution);
EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
@ -929,8 +941,8 @@ TEST_F(ViEEncoderTest, StatsTracksAdaptationStatsWhenSwitchingSource) {
// Trigger CPU normal usage.
vie_encoder_->TriggerCpuNormalUsage();
video_source_.IncomingCapturedFrame(
CreateFrame(5, frame_width, frame_height));
sink_.WaitForEncodedFrame(5);
CreateFrame(sequence, frame_width, frame_height));
sink_.WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
EXPECT_FALSE(stats.cpu_limited_resolution);
EXPECT_EQ(2, stats.number_of_cpu_adapt_changes);
@ -957,8 +969,8 @@ TEST_F(ViEEncoderTest, ScalingUpAndDownDoesNothingWithMaintainResolution) {
vie_encoder_->OnBitrateUpdated(kTargetBitrateBps, 0, 0);
// Expect no scaling to begin with
EXPECT_FALSE(video_source_.sink_wants().target_pixel_count);
EXPECT_FALSE(video_source_.sink_wants().max_pixel_count);
EXPECT_FALSE(video_source_.sink_wants().max_pixel_count_step_up);
video_source_.IncomingCapturedFrame(
CreateFrame(1, frame_width, frame_height));