[Adaptation] Add OnAdaptationApplied(), remove ResourceListenerResponse.

This CL is part of the Call-Level Adaptation Processing design doc:
https://docs.google.com/document/d/1ZyC26yOCknrrcYa839ZWLxD6o6Gig5A3lVTh4E41074/edit?usp=sharing

The ResourceListenerResponse was used to make the QualityScaler
not clear QP samples and instead increase its frequency of checking for
QP under certain circumstances, see enum description:
https://webrtc.googlesource.com/src.git/+/c70b1028d47c1aee4892545190cd66e97d09cd55/call/adaptation/resource.h#33

Because the QualityScaler depends on whether and how adaptation
happened, it should listen to adaptation happening.

This CL moves the logic that was previously in VideoStreamAdapter closer
to the QualityScaler: QualityScalerResource::OnAdaptationApplied().

This would allow the VideoStreamAdapter to operate on a separate task
queue in the future, with no dependencies on any stream-specific
resources that might operate on other task queues.

Bug: webrtc:11172, webrtc:11521
Change-Id: I07971a8a5fab5715f4ccb7d2c63f1b92bd47170f
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/173090
Commit-Queue: Henrik Boström <hbos@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Reviewed-by: Evan Shrubsole <eshr@google.com>
Cr-Commit-Position: refs/heads/master@{#31143}
This commit is contained in:
Henrik Boström
2020-04-28 12:24:33 +02:00
committed by Commit Bot
parent 4381af48b4
commit 91aa73255e
18 changed files with 269 additions and 184 deletions

View File

@ -44,12 +44,17 @@ bool Resource::IsAdaptationUpAllowed(
return true;
}
ResourceListenerResponse Resource::OnResourceUsageStateMeasured(
ResourceUsageState usage_state) {
void Resource::OnAdaptationApplied(
const VideoStreamInputState& input_state,
const VideoSourceRestrictions& restrictions_before,
const VideoSourceRestrictions& restrictions_after,
const Resource& reason_resource) {}
void Resource::OnResourceUsageStateMeasured(ResourceUsageState usage_state) {
usage_state_ = usage_state;
if (!listener_)
return ResourceListenerResponse::kNothing;
return listener_->OnResourceUsageStateMeasured(*this);
return;
listener_->OnResourceUsageStateMeasured(*this);
}
} // namespace webrtc

View File

@ -29,41 +29,13 @@ enum class ResourceUsageState {
kUnderuse,
};
enum class ResourceListenerResponse {
kNothing,
// This response is only applicable to QualityScaler-based resources.
// It tells the QualityScaler to increase its QP measurement frequency.
//
// This is modelled after AdaptationObserverInterface::AdaptDown()'s return
// value. The method comment says "Returns false if a downgrade was requested
// but the request did not result in a new limiting resolution or fps."
// However the actual implementation seems to be: Return false if
// !has_input_video_ or if we use balanced degradation preference and we DID
// adapt frame rate but the difference between input frame rate and balanced
// settings' min fps is less than the balanced settings' min fps diff - in all
// other cases, return true whether or not adaptation happened.
//
// For QualityScaler-based resources, kQualityScalerShouldIncreaseFrequency
// maps to "return false" and kNothing maps to "return true".
//
// TODO(https://crbug.com/webrtc/11222): Remove this enum. Resource
// measurements and adaptation decisions need to be separated in order to
// support injectable adaptation modules, multi-stream aware adaptation and
// decision-making logic based on multiple resources.
kQualityScalerShouldIncreaseFrequency,
};
class ResourceListener {
public:
virtual ~ResourceListener();
// Informs the listener of a new measurement of resource usage. This means
// that |resource.usage_state()| is now up-to-date.
//
// The listener may influence the resource that signaled the measurement
// according to the returned ResourceListenerResponse enum.
virtual ResourceListenerResponse OnResourceUsageStateMeasured(
const Resource& resource) = 0;
virtual void OnResourceUsageStateMeasured(const Resource& resource) = 0;
};
class Resource {
@ -85,15 +57,17 @@ class Resource {
const VideoSourceRestrictions& restrictions_before,
const VideoSourceRestrictions& restrictions_after,
const Resource& reason_resource) const;
virtual void OnAdaptationApplied(
const VideoStreamInputState& input_state,
const VideoSourceRestrictions& restrictions_before,
const VideoSourceRestrictions& restrictions_after,
const Resource& reason_resource);
virtual std::string name() const = 0;
protected:
// Updates the usage state and informs all registered listeners.
// Returns the result of the last listener's OnResourceUsageStateMeasured()
// call that was not kNothing, else kNothing.
ResourceListenerResponse OnResourceUsageStateMeasured(
ResourceUsageState usage_state);
void OnResourceUsageStateMeasured(ResourceUsageState usage_state);
private:
absl::optional<ResourceUsageState> usage_state_;

View File

@ -26,7 +26,8 @@ ResourceAdaptationProcessor::ResourceAdaptationProcessor(
effective_degradation_preference_(DegradationPreference::DISABLED),
is_screenshare_(false),
stream_adapter_(std::make_unique<VideoStreamAdapter>()),
last_reported_source_restrictions_() {}
last_reported_source_restrictions_(),
processing_in_progress_(false) {}
ResourceAdaptationProcessor::~ResourceAdaptationProcessor() {}
@ -103,16 +104,16 @@ void ResourceAdaptationProcessor::MaybeUpdateVideoSourceRestrictions(
}
}
ResourceListenerResponse
ResourceAdaptationProcessor::OnResourceUsageStateMeasured(
void ResourceAdaptationProcessor::OnResourceUsageStateMeasured(
const Resource& resource) {
RTC_DCHECK(resource.usage_state().has_value());
switch (resource.usage_state().value()) {
case ResourceUsageState::kOveruse:
return OnResourceOveruse(resource);
OnResourceOveruse(resource);
break;
case ResourceUsageState::kUnderuse:
OnResourceUnderuse(resource);
return ResourceListenerResponse::kNothing;
break;
}
}
@ -126,6 +127,8 @@ bool ResourceAdaptationProcessor::HasSufficientInputForAdaptation(
void ResourceAdaptationProcessor::OnResourceUnderuse(
const Resource& reason_resource) {
RTC_DCHECK(!processing_in_progress_);
processing_in_progress_ = true;
// Clear all usage states. In order to re-run adaptation logic, resources need
// to provide new resource usage measurements.
// TODO(hbos): Support not unconditionally clearing usage states by having the
@ -136,14 +139,17 @@ void ResourceAdaptationProcessor::OnResourceUnderuse(
VideoStreamInputState input_state = input_state_provider_->InputState();
if (effective_degradation_preference_ == DegradationPreference::DISABLED ||
!HasSufficientInputForAdaptation(input_state)) {
processing_in_progress_ = false;
return;
}
// Update video input states and encoder settings for accurate adaptation.
stream_adapter_->SetInput(input_state);
// How can this stream be adapted up?
Adaptation adaptation = stream_adapter_->GetAdaptationUp();
if (adaptation.status() != Adaptation::Status::kValid)
if (adaptation.status() != Adaptation::Status::kValid) {
processing_in_progress_ = false;
return;
}
// Are all resources OK with this adaptation being applied?
VideoSourceRestrictions restrictions_before =
stream_adapter_->source_restrictions();
@ -156,17 +162,25 @@ void ResourceAdaptationProcessor::OnResourceUnderuse(
restrictions_after,
reason_resource);
})) {
processing_in_progress_ = false;
return;
}
// Apply adaptation.
stream_adapter_->ApplyAdaptation(adaptation);
for (Resource* resource : resources_) {
resource->OnAdaptationApplied(input_state, restrictions_before,
restrictions_after, reason_resource);
}
// Update VideoSourceRestrictions based on adaptation. This also informs the
// |adaptation_listeners_|.
MaybeUpdateVideoSourceRestrictions(&reason_resource);
processing_in_progress_ = false;
}
ResourceListenerResponse ResourceAdaptationProcessor::OnResourceOveruse(
void ResourceAdaptationProcessor::OnResourceOveruse(
const Resource& reason_resource) {
RTC_DCHECK(!processing_in_progress_);
processing_in_progress_ = true;
// Clear all usage states. In order to re-run adaptation logic, resources need
// to provide new resource usage measurements.
// TODO(hbos): Support not unconditionally clearing usage states by having the
@ -176,11 +190,13 @@ ResourceListenerResponse ResourceAdaptationProcessor::OnResourceOveruse(
}
VideoStreamInputState input_state = input_state_provider_->InputState();
if (!input_state.has_input()) {
return ResourceListenerResponse::kQualityScalerShouldIncreaseFrequency;
processing_in_progress_ = false;
return;
}
if (effective_degradation_preference_ == DegradationPreference::DISABLED ||
!HasSufficientInputForAdaptation(input_state)) {
return ResourceListenerResponse::kNothing;
processing_in_progress_ = false;
return;
}
// Update video input states and encoder settings for accurate adaptation.
stream_adapter_->SetInput(input_state);
@ -188,15 +204,24 @@ ResourceListenerResponse ResourceAdaptationProcessor::OnResourceOveruse(
Adaptation adaptation = stream_adapter_->GetAdaptationDown();
if (adaptation.min_pixel_limit_reached())
encoder_stats_observer_->OnMinPixelLimitReached();
if (adaptation.status() != Adaptation::Status::kValid)
return ResourceListenerResponse::kNothing;
if (adaptation.status() != Adaptation::Status::kValid) {
processing_in_progress_ = false;
return;
}
// Apply adaptation.
ResourceListenerResponse response =
stream_adapter_->ApplyAdaptation(adaptation);
VideoSourceRestrictions restrictions_before =
stream_adapter_->source_restrictions();
VideoSourceRestrictions restrictions_after =
stream_adapter_->PeekNextRestrictions(adaptation);
stream_adapter_->ApplyAdaptation(adaptation);
for (Resource* resource : resources_) {
resource->OnAdaptationApplied(input_state, restrictions_before,
restrictions_after, reason_resource);
}
// Update VideoSourceRestrictions based on adaptation. This also informs the
// |adaptation_listeners_|.
MaybeUpdateVideoSourceRestrictions(&reason_resource);
return response;
processing_in_progress_ = false;
}
void ResourceAdaptationProcessor::TriggerAdaptationDueToFrameDroppedDueToSize(

View File

@ -52,8 +52,7 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface,
// ResourceListener implementation.
// Triggers OnResourceUnderuse() or OnResourceOveruse().
ResourceListenerResponse OnResourceUsageStateMeasured(
const Resource& resource) override;
void OnResourceUsageStateMeasured(const Resource& resource) override;
// May trigger 1-2 adaptations. It is meant to reduce resolution - useful if a
// frame was dropped due to its size - but if you look at the implementation
@ -72,7 +71,7 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface,
// informing listeners of the new VideoSourceRestriction and adaptation
// counters.
void OnResourceUnderuse(const Resource& reason_resource);
ResourceListenerResponse OnResourceOveruse(const Resource& reason_resource);
void OnResourceOveruse(const Resource& reason_resource);
// Needs to be invoked any time |degradation_preference_| or |is_screenshare_|
// changes to ensure |effective_degradation_preference_| is up-to-date.
@ -93,6 +92,18 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface,
// Responsible for generating and applying possible adaptations.
const std::unique_ptr<VideoStreamAdapter> stream_adapter_;
VideoSourceRestrictions last_reported_source_restrictions_;
// Prevents recursion.
//
// This is used to prevent triggering resource adaptation in the process of
// already handling resource adaptation, since that could cause the same states
// to be modified in unexpected ways. Example:
//
// Resource::OnResourceUsageStateMeasured() ->
// ResourceAdaptationProcessor::OnResourceOveruse() ->
// Resource::OnAdaptationApplied() ->
// Resource::OnResourceUsageStateMeasured() ->
// ResourceAdaptationProcessor::OnResourceOveruse() // Boom, not allowed.
bool processing_in_progress_;
};
} // namespace webrtc

View File

@ -21,9 +21,7 @@ using ::testing::StrictMock;
class MockResourceListener : public ResourceListener {
public:
MOCK_METHOD(ResourceListenerResponse,
OnResourceUsageStateMeasured,
(const Resource& resource));
MOCK_METHOD(void, OnResourceUsageStateMeasured, (const Resource& resource));
};
TEST(ResourceTest, RegisteringListenerReceivesCallbacks) {
@ -34,7 +32,6 @@ TEST(ResourceTest, RegisteringListenerReceivesCallbacks) {
.Times(1)
.WillOnce([](const Resource& resource) {
EXPECT_EQ(ResourceUsageState::kOveruse, resource.usage_state());
return ResourceListenerResponse::kNothing;
});
fake_resource.set_usage_state(ResourceUsageState::kOveruse);
fake_resource.SetResourceListener(nullptr);

View File

@ -20,7 +20,7 @@ FakeResource::FakeResource(std::string name)
FakeResource::~FakeResource() {}
void FakeResource::set_usage_state(ResourceUsageState usage_state) {
last_response_ = OnResourceUsageStateMeasured(usage_state);
OnResourceUsageStateMeasured(usage_state);
}
} // namespace webrtc

View File

@ -25,14 +25,9 @@ class FakeResource : public Resource {
void set_usage_state(ResourceUsageState usage_state);
absl::optional<ResourceListenerResponse> last_response() const {
return last_response_;
}
std::string name() const override { return name_; }
private:
absl::optional<ResourceListenerResponse> last_response_;
const std::string name_;
};

View File

@ -67,13 +67,22 @@ void VideoSourceRestrictions::set_max_frame_rate(
bool DidIncreaseResolution(VideoSourceRestrictions restrictions_before,
VideoSourceRestrictions restrictions_after) {
if (!restrictions_before.max_pixels_per_frame().has_value()) {
if (!restrictions_before.max_pixels_per_frame().has_value())
return false;
}
if (!restrictions_after.max_pixels_per_frame().has_value())
return true;
return restrictions_after.max_pixels_per_frame().value() >
restrictions_before.max_pixels_per_frame().value();
}
bool DidDecreaseFrameRate(VideoSourceRestrictions restrictions_before,
VideoSourceRestrictions restrictions_after) {
if (!restrictions_after.max_frame_rate().has_value())
return false;
if (!restrictions_before.max_frame_rate().has_value())
return true;
return restrictions_after.max_frame_rate().value() <
restrictions_before.max_frame_rate().value();
}
} // namespace webrtc

View File

@ -68,6 +68,9 @@ class VideoSourceRestrictions {
bool DidIncreaseResolution(VideoSourceRestrictions restrictions_before,
VideoSourceRestrictions restrictions_after);
bool DidDecreaseFrameRate(VideoSourceRestrictions restrictions_before,
VideoSourceRestrictions restrictions_after);
} // namespace webrtc
#endif // CALL_ADAPTATION_VIDEO_SOURCE_RESTRICTIONS_H_

View File

@ -534,12 +534,10 @@ VideoSourceRestrictions VideoStreamAdapter::PeekNextRestrictions(
return restrictor_copy.source_restrictions();
}
ResourceListenerResponse VideoStreamAdapter::ApplyAdaptation(
const Adaptation& adaptation) {
void VideoStreamAdapter::ApplyAdaptation(const Adaptation& adaptation) {
RTC_DCHECK_EQ(adaptation.validation_id_, adaptation_validation_id_);
if (adaptation.status() != Adaptation::Status::kValid) {
return ResourceListenerResponse::kNothing;
}
if (adaptation.status() != Adaptation::Status::kValid)
return;
// Remember the input pixels and fps of this adaptation. Used to avoid
// adapting again before this adaptation has had an effect.
last_adaptation_request_.emplace(AdaptationRequest{
@ -549,25 +547,6 @@ ResourceListenerResponse VideoStreamAdapter::ApplyAdaptation(
// Adapt!
source_restrictor_->ApplyAdaptationStep(adaptation.step(),
degradation_preference_);
// In BALANCED, if requested FPS is higher or close to input FPS to the target
// we tell the QualityScaler to increase its frequency.
// TODO(hbos): Don't have QualityScaler-specific logic here. If the
// QualityScaler wants to add special logic depending on what effects
// adaptation had, it should listen to changes to the VideoSourceRestrictions
// instead.
if (degradation_preference_ == DegradationPreference::BALANCED &&
adaptation.step().type == Adaptation::StepType::kDecreaseFrameRate) {
absl::optional<int> min_diff =
balanced_settings_.MinFpsDiff(input_state_.frame_size_pixels().value());
if (min_diff && input_state_.frames_per_second().value() > 0) {
int fps_diff =
input_state_.frames_per_second().value() - adaptation.step().target;
if (fps_diff < min_diff.value()) {
return ResourceListenerResponse::kQualityScalerShouldIncreaseFrequency;
}
}
}
return ResourceListenerResponse::kNothing;
}
} // namespace webrtc

View File

@ -138,8 +138,7 @@ class VideoStreamAdapter {
VideoSourceRestrictions PeekNextRestrictions(
const Adaptation& adaptation) const;
// Updates source_restrictions() based according to the Adaptation.
// TODO(hbos): Delete ResourceListenerResponse!
ResourceListenerResponse ApplyAdaptation(const Adaptation& adaptation);
void ApplyAdaptation(const Adaptation& adaptation);
private:
class VideoSourceRestrictor;

View File

@ -12,9 +12,15 @@
#include <utility>
#include "call/adaptation/resource_adaptation_processor.h"
namespace webrtc {
QualityScalerResource::QualityScalerResource() : quality_scaler_(nullptr) {}
QualityScalerResource::QualityScalerResource(
ResourceAdaptationProcessor* adaptation_processor)
: adaptation_processor_(adaptation_processor),
quality_scaler_(nullptr),
pending_qp_usage_callback_(nullptr) {}
bool QualityScalerResource::is_started() const {
return quality_scaler_.get();
@ -64,16 +70,62 @@ void QualityScalerResource::OnFrameDropped(
void QualityScalerResource::OnReportQpUsageHigh(
rtc::scoped_refptr<QualityScalerQpUsageHandlerCallbackInterface> callback) {
bool clear_qp_samples =
OnResourceUsageStateMeasured(ResourceUsageState::kOveruse) !=
ResourceListenerResponse::kQualityScalerShouldIncreaseFrequency;
callback->OnQpUsageHandled(clear_qp_samples);
RTC_DCHECK(!pending_qp_usage_callback_);
pending_qp_usage_callback_ = std::move(callback);
// If this triggers adaptation, OnAdaptationApplied() is called by the
// processor where we determine if QP should be cleared and we invoke and null
// the |pending_qp_usage_callback_|.
OnResourceUsageStateMeasured(ResourceUsageState::kOveruse);
// If |pending_qp_usage_callback_| has not been nulled yet then we did not
// just trigger an adaptation and should not clear the QP samples.
if (pending_qp_usage_callback_) {
pending_qp_usage_callback_->OnQpUsageHandled(false);
pending_qp_usage_callback_ = nullptr;
}
}
void QualityScalerResource::OnReportQpUsageLow(
rtc::scoped_refptr<QualityScalerQpUsageHandlerCallbackInterface> callback) {
RTC_DCHECK(!pending_qp_usage_callback_);
OnResourceUsageStateMeasured(ResourceUsageState::kUnderuse);
callback->OnQpUsageHandled(true);
}
void QualityScalerResource::OnAdaptationApplied(
const VideoStreamInputState& input_state,
const VideoSourceRestrictions& restrictions_before,
const VideoSourceRestrictions& restrictions_after,
const Resource& reason_resource) {
// We only clear QP samples on adaptations triggered by the QualityScaler.
if (!pending_qp_usage_callback_)
return;
bool clear_qp_samples = true;
// If we're in "balanced" and the frame rate before and after adaptation did
// not differ that much, don't clear the QP samples and instead check for QP
// again in a short amount of time. This may trigger adapting down again soon.
// TODO(hbos): Can this be simplified by getting rid of special casing logic?
// For example, we could decide whether or not to clear QP samples based on
// how big the adaptation step was alone (regardless of degradation preference
// or what resource triggered the adaptation) and the QualityScaler could
// check for QP when it had enough QP samples rather than at a variable
// interval whose delay is calculated based on events such as these. Now there
// is much dependency on a specific OnReportQpUsageHigh() event and "balanced"
// but adaptations happening might not align with QualityScaler's CheckQpTask.
if (adaptation_processor_->effective_degradation_preference() ==
DegradationPreference::BALANCED &&
DidDecreaseFrameRate(restrictions_before, restrictions_after)) {
absl::optional<int> min_diff = BalancedDegradationSettings().MinFpsDiff(
input_state.frame_size_pixels().value());
if (min_diff && input_state.frames_per_second().value() > 0) {
int fps_diff = input_state.frames_per_second().value() -
restrictions_after.max_frame_rate().value();
if (fps_diff < min_diff.value()) {
clear_qp_samples = false;
}
}
}
pending_qp_usage_callback_->OnQpUsageHandled(clear_qp_samples);
pending_qp_usage_callback_ = nullptr;
}
} // namespace webrtc

View File

@ -21,6 +21,8 @@
namespace webrtc {
class ResourceAdaptationProcessor;
// Handles interaction with the QualityScaler.
// TODO(hbos): Add unittests specific to this class, it is currently only tested
// indirectly by usage in the ResourceAdaptationProcessor (which is only tested
@ -29,7 +31,8 @@ namespace webrtc {
class QualityScalerResource : public Resource,
public QualityScalerQpUsageHandlerInterface {
public:
QualityScalerResource();
explicit QualityScalerResource(
ResourceAdaptationProcessor* adaptation_processor);
bool is_started() const;
@ -52,8 +55,17 @@ class QualityScalerResource : public Resource,
std::string name() const override { return "QualityScalerResource"; }
// Resource implementation.
void OnAdaptationApplied(const VideoStreamInputState& input_state,
const VideoSourceRestrictions& restrictions_before,
const VideoSourceRestrictions& restrictions_after,
const Resource& reason_resource) override;
private:
ResourceAdaptationProcessor* const adaptation_processor_;
std::unique_ptr<QualityScaler> quality_scaler_;
rtc::scoped_refptr<QualityScalerQpUsageHandlerCallbackInterface>
pending_qp_usage_callback_;
};
} // namespace webrtc

View File

@ -249,7 +249,7 @@ VideoStreamEncoderResourceManager::VideoStreamEncoderResourceManager(
prevent_increase_resolution_due_to_bitrate_resource_(this),
prevent_adapt_up_in_balanced_resource_(this),
encode_usage_resource_(std::move(overuse_detector)),
quality_scaler_resource_(),
quality_scaler_resource_(adaptation_processor),
input_state_provider_(input_state_provider),
adaptation_processor_(adaptation_processor),
encoder_stats_observer_(encoder_stats_observer),
@ -322,6 +322,11 @@ std::vector<Resource*> VideoStreamEncoderResourceManager::MappedResources()
return resources;
}
QualityScalerResource*
VideoStreamEncoderResourceManager::quality_scaler_resource_for_testing() {
return &quality_scaler_resource_;
}
void VideoStreamEncoderResourceManager::SetEncoderSettings(
EncoderSettings encoder_settings) {
encoder_settings_ = std::move(encoder_settings);

View File

@ -106,6 +106,7 @@ class VideoStreamEncoderResourceManager
// TODO(hbos): Can we get rid of this?
void MapResourceToReason(Resource* resource, VideoAdaptationReason reason);
std::vector<Resource*> MappedResources() const;
QualityScalerResource* quality_scaler_resource_for_testing();
// If true, the VideoStreamEncoder should execute its logic to maybe drop
// frames based on size and bitrate.
bool DropInitialFrames() const;
@ -134,11 +135,6 @@ class VideoStreamEncoderResourceManager
VideoAdaptationReason GetReasonFromResource(const Resource& resource) const;
// Performs the adaptation by getting the next target, applying it and
// informing listeners of the new VideoSourceRestriction and adapt counters.
void OnResourceUnderuse(const Resource& reason_resource);
ResourceListenerResponse OnResourceOveruse(const Resource& reason_resource);
CpuOveruseOptions GetCpuOveruseOptions() const;
int LastInputFrameSizeOrDefault() const;

View File

@ -1950,4 +1950,10 @@ void VideoStreamEncoder::InjectAdaptationResource(
resource_adaptation_processor_.AddResource(resource);
}
QualityScalerResource*
VideoStreamEncoder::quality_scaler_resource_for_testing() {
RTC_DCHECK_RUN_ON(&encoder_queue_);
return stream_resource_manager_.quality_scaler_resource_for_testing();
}
} // namespace webrtc

View File

@ -118,6 +118,8 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
VideoAdaptationReason reason)
RTC_RUN_ON(&encoder_queue_);
QualityScalerResource* quality_scaler_resource_for_testing();
private:
class VideoFrameInfo {
public:

View File

@ -31,6 +31,7 @@
#include "common_video/include/video_frame_buffer.h"
#include "media/base/video_adapter.h"
#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
#include "modules/video_coding/utility/quality_scaler.h"
#include "modules/video_coding/utility/simulcast_rate_allocator.h"
#include "rtc_base/fake_clock.h"
#include "rtc_base/logging.h"
@ -49,7 +50,6 @@
namespace webrtc {
using ScaleReason = VideoAdaptationReason;
using ::testing::_;
using ::testing::AllOf;
using ::testing::Field;
@ -146,6 +146,25 @@ class CpuOveruseDetectorProxy : public OveruseFrameDetector {
int last_target_framerate_fps_ RTC_GUARDED_BY(lock_);
};
class FakeQualityScalerQpUsageHandlerCallback
: public QualityScalerQpUsageHandlerCallbackInterface {
public:
FakeQualityScalerQpUsageHandlerCallback()
: QualityScalerQpUsageHandlerCallbackInterface() {}
~FakeQualityScalerQpUsageHandlerCallback() override {}
void OnQpUsageHandled(bool clear_qp_samples) override {
clear_qp_samples_result_ = clear_qp_samples;
}
absl::optional<bool> clear_qp_samples_result() const {
return clear_qp_samples_result_;
}
private:
absl::optional<bool> clear_qp_samples_result_;
};
class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
public:
VideoStreamEncoderUnderTest(SendStatisticsProxy* stats_proxy,
@ -168,47 +187,6 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
VideoAdaptationReason::kCpu);
}
void PostTaskAndWait(bool down, VideoAdaptationReason reason) {
PostTaskAndWait(down, reason, /*expected_results=*/true);
}
void PostTaskAndWait(bool down,
VideoAdaptationReason reason,
bool expected_results) {
rtc::Event event;
encoder_queue()->PostTask([this, &event, reason, down, expected_results] {
ResourceUsageState usage_state =
down ? ResourceUsageState::kOveruse : ResourceUsageState::kUnderuse;
FakeResource* resource = nullptr;
switch (reason) {
case VideoAdaptationReason::kQuality:
resource = fake_quality_resource_.get();
break;
case VideoAdaptationReason::kCpu:
resource = fake_cpu_resource_.get();
break;
default:
RTC_NOTREACHED();
}
resource->set_usage_state(usage_state);
if (!expected_results) {
ASSERT_EQ(VideoAdaptationReason::kQuality, reason)
<< "We can only assert adaptation result for quality resources";
EXPECT_EQ(
ResourceListenerResponse::kQualityScalerShouldIncreaseFrequency,
resource->last_response());
} else {
EXPECT_EQ(ResourceListenerResponse::kNothing,
resource->last_response());
}
event.Set();
});
ASSERT_TRUE(event.Wait(5000));
}
// This is used as a synchronisation mechanism, to make sure that the
// encoder queue is not blocked before we start sending it frames.
void WaitUntilTaskQueueIsIdle() {
@ -217,25 +195,56 @@ class VideoStreamEncoderUnderTest : public VideoStreamEncoder {
ASSERT_TRUE(event.Wait(5000));
}
// Triggers resource usage measurements on the fake CPU resource.
void TriggerCpuOveruse() {
PostTaskAndWait(/*down=*/true, VideoAdaptationReason::kCpu);
}
void TriggerCpuNormalUsage() {
PostTaskAndWait(/*down=*/false, VideoAdaptationReason::kCpu);
rtc::Event event;
encoder_queue()->PostTask([this, &event] {
fake_cpu_resource_->set_usage_state(ResourceUsageState::kOveruse);
event.Set();
});
ASSERT_TRUE(event.Wait(5000));
}
void TriggerCpuUnderuse() {
rtc::Event event;
encoder_queue()->PostTask([this, &event] {
fake_cpu_resource_->set_usage_state(ResourceUsageState::kUnderuse);
event.Set();
});
ASSERT_TRUE(event.Wait(5000));
}
// Triggers resource usage measurements on the fake quality resource.
void TriggerQualityLow() {
PostTaskAndWait(/*down=*/true, VideoAdaptationReason::kQuality);
rtc::Event event;
encoder_queue()->PostTask([this, &event] {
fake_quality_resource_->set_usage_state(ResourceUsageState::kOveruse);
event.Set();
});
ASSERT_TRUE(event.Wait(5000));
}
void TriggerQualityLowExpectFalse() {
PostTaskAndWait(/*down=*/true, VideoAdaptationReason::kQuality,
/*expected_results=*/false);
}
void TriggerQualityHigh() {
PostTaskAndWait(/*down=*/false, VideoAdaptationReason::kQuality);
rtc::Event event;
encoder_queue()->PostTask([this, &event] {
fake_quality_resource_->set_usage_state(ResourceUsageState::kUnderuse);
event.Set();
});
ASSERT_TRUE(event.Wait(5000));
}
// Fakes high QP resource usage measurements on the real
// QualityScalerResource. Returns whether or not QP samples would have been
// cleared if this had been a real signal from the QualityScaler.
bool TriggerQualityScalerHighQpAndReturnIfQpSamplesShouldBeCleared() {
rtc::Event event;
rtc::scoped_refptr<FakeQualityScalerQpUsageHandlerCallback> callback =
new FakeQualityScalerQpUsageHandlerCallback();
encoder_queue()->PostTask([this, &event, callback] {
quality_scaler_resource_for_testing()->OnReportQpUsageHigh(callback);
event.Set();
});
EXPECT_TRUE(event.Wait(5000));
EXPECT_TRUE(callback->clear_qp_samples_result().has_value());
return callback->clear_qp_samples_result().value();
}
CpuOveruseDetectorProxy* overuse_detector_proxy_;
@ -1830,7 +1839,7 @@ TEST_F(VideoStreamEncoderTest, TestCpuDowngrades_BalancedMode) {
sink_.WaitForEncodedFrame(t);
t += frame_interval_ms;
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
VerifyBalancedModeFpsRange(
video_source_.sink_wants(),
*video_source_.last_sent_width() * *video_source_.last_sent_height());
@ -2024,7 +2033,7 @@ TEST_F(VideoStreamEncoderTest, StatsTracksCpuAdaptationStats) {
EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
// Trigger CPU normal use.
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
video_source_.IncomingCapturedFrame(CreateFrame(3, kWidth, kHeight));
WaitForEncodedFrame(3);
@ -2095,7 +2104,7 @@ TEST_F(VideoStreamEncoderTest, SwitchingSourceKeepsCpuAdaptation) {
EXPECT_EQ(1, stats.number_of_cpu_adapt_changes);
// Trigger CPU normal use.
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
new_video_source.IncomingCapturedFrame(CreateFrame(6, kWidth, kHeight));
WaitForEncodedFrame(6);
stats = stats_proxy_->GetStats();
@ -2397,7 +2406,7 @@ TEST_F(VideoStreamEncoderTest,
EXPECT_EQ(2, stats.number_of_cpu_adapt_changes);
// Trigger CPU normal usage.
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
video_source_.IncomingCapturedFrame(CreateFrame(sequence, kWidth, kHeight));
WaitForEncodedFrame(sequence++);
stats = stats_proxy_->GetStats();
@ -2418,7 +2427,7 @@ TEST_F(VideoStreamEncoderTest,
EXPECT_EQ(3, stats.number_of_cpu_adapt_changes);
// Trigger CPU normal usage.
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
new_video_source.IncomingCapturedFrame(
CreateFrame(sequence, kWidth, kHeight));
WaitForEncodedFrame(sequence++);
@ -2581,7 +2590,7 @@ TEST_F(VideoStreamEncoderTest,
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_resolution);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@ -2610,7 +2619,7 @@ TEST_F(VideoStreamEncoderTest,
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
// Trigger adapt up, expect no change.
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
VerifyFpsMaxResolutionMax(source.sink_wants());
EXPECT_FALSE(stats_proxy_->GetStats().cpu_limited_framerate);
EXPECT_EQ(0, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
@ -2836,7 +2845,7 @@ TEST_F(VideoStreamEncoderTest,
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
// Trigger adapt up, expect no restriction.
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
@ -2854,7 +2863,7 @@ TEST_F(VideoStreamEncoderTest,
EXPECT_EQ(3, stats_proxy_->GetStats().number_of_cpu_adapt_changes);
// Trigger adapt up, expect no restriction.
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
sink_.WaitForEncodedFrame(kWidth, kHeight);
@ -3062,8 +3071,11 @@ TEST_F(BalancedDegradationTest, AdaptDownReturnsFalseIfFpsDiffLtThreshold) {
VerifyFpsMaxResolutionMax(source_.sink_wants());
// Trigger adapt down, expect scaled down framerate (640x360@24fps).
// Fps diff (input-requested:0) < threshold, expect AdaptDown to return false.
video_stream_encoder_->TriggerQualityLowExpectFalse();
// Fps diff (input-requested:0) < threshold, expect adapting down not to clear
// QP samples.
EXPECT_FALSE(
video_stream_encoder_
->TriggerQualityScalerHighQpAndReturnIfQpSamplesShouldBeCleared());
VerifyFpsEqResolutionMax(source_.sink_wants(), 24);
video_stream_encoder_->Stop();
@ -3085,8 +3097,11 @@ TEST_F(BalancedDegradationTest, AdaptDownReturnsTrueIfFpsDiffGeThreshold) {
VerifyFpsMaxResolutionMax(source_.sink_wants());
// Trigger adapt down, expect scaled down framerate (640x360@24fps).
// Fps diff (input-requested:1) == threshold, expect AdaptDown to return true.
video_stream_encoder_->TriggerQualityLow();
// Fps diff (input-requested:1) == threshold, expect adapting down to clear QP
// samples.
EXPECT_TRUE(
video_stream_encoder_
->TriggerQualityScalerHighQpAndReturnIfQpSamplesShouldBeCleared());
VerifyFpsEqResolutionMax(source_.sink_wants(), 24);
video_stream_encoder_->Stop();
@ -3387,7 +3402,7 @@ TEST_F(VideoStreamEncoderTest,
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger cpu adapt up, expect upscaled resolution (480x270).
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
@ -3398,7 +3413,7 @@ TEST_F(VideoStreamEncoderTest,
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger cpu adapt up, expect upscaled resolution (640x360).
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
@ -3409,7 +3424,7 @@ TEST_F(VideoStreamEncoderTest,
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger cpu adapt up, expect upscaled resolution (960x540).
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
@ -3421,7 +3436,7 @@ TEST_F(VideoStreamEncoderTest,
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger cpu adapt up, no cpu downgrades, expect no change (960x540).
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
@ -3670,7 +3685,7 @@ TEST_F(VideoStreamEncoderTest, OveruseDetectorUpdatedOnReconfigureAndAdaption) {
stats = stats_proxy_->GetStats();
stats.input_frame_rate = adapted_framerate / 2;
stats_proxy_->SetMockStats(stats);
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
video_stream_encoder_->WaitUntilTaskQueueIsIdle();
EXPECT_EQ(
video_stream_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(),
@ -3737,7 +3752,7 @@ TEST_F(VideoStreamEncoderTest,
stats = stats_proxy_->GetStats();
stats.input_frame_rate = adapted_framerate;
stats_proxy_->SetMockStats(stats);
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
video_stream_encoder_->WaitUntilTaskQueueIsIdle();
EXPECT_EQ(
video_stream_encoder_->overuse_detector_proxy_->GetLastTargetFramerate(),
@ -4113,7 +4128,7 @@ TEST_F(VideoStreamEncoderTest,
WaitForEncodedFrame((kFrameWidth * 3) / 4, (kFrameHeight * 3) / 4);
// Trigger CPU normal use, return to original resolution.
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
video_source_.IncomingCapturedFrame(
CreateFrame(3 * kFrameIntervalMs, kFrameWidth, kFrameHeight));
WaitForEncodedFrame(kFrameWidth, kFrameHeight);
@ -4187,7 +4202,7 @@ TEST_F(VideoStreamEncoderTest,
kErrorMargin);
// Go back up one step.
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
num_frames_dropped = 0;
for (int i = 0; i < max_framerate_; ++i) {
timestamp_ms += kFrameIntervalMs;
@ -4203,7 +4218,7 @@ TEST_F(VideoStreamEncoderTest,
kErrorMargin);
// Go back up to original mode.
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
num_frames_dropped = 0;
for (int i = 0; i < max_framerate_; ++i) {
timestamp_ms += kFrameIntervalMs;
@ -4515,7 +4530,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) {
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger cpu adapt up, expect increased fps (640x360@30fps).
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);
@ -4541,7 +4556,7 @@ TEST_F(VideoStreamEncoderTest, AdaptWithTwoReasonsAndDifferentOrder_Framerate) {
EXPECT_EQ(2, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger cpu adapt up, expect no restriction (1280x720fps@30fps).
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(kWidth, kHeight);
@ -4618,7 +4633,7 @@ TEST_F(VideoStreamEncoderTest,
EXPECT_EQ(1, stats_proxy_->GetStats().number_of_quality_adapt_changes);
// Trigger cpu adapt up, expect upscaled resolution (640x360@15fps).
video_stream_encoder_->TriggerCpuNormalUsage();
video_stream_encoder_->TriggerCpuUnderuse();
timestamp_ms += kFrameIntervalMs;
source.IncomingCapturedFrame(CreateFrame(timestamp_ms, kWidth, kHeight));
WaitForEncodedFrame(timestamp_ms);