Fix potential memory exhaustion in DefaultVideoQualityAnalyzer

DefaultVideoQualityAnalyzer accumulates in-flight frames in an internal
queue in order to compute PSNR/SSIM. This queue can grow large when the
test experiences high frame loss. As a result, the analyzer can use a
lot of memory and cause OOM crashes.

This CL limits the size of the queue, based on the assumption that after
a certain point a frame can be considered lost and PSNR/SSIM can no
longer be calculated for it.
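
For illustration, below is a minimal self-contained sketch of the
bounding idea (hypothetical names and a simplified data model, not the
analyzer's actual API; the real change is in the diffs that follow):

#include <cstddef>
#include <cstdint>
#include <deque>
#include <map>
#include <utility>
#include <vector>

// Hypothetical, simplified per-stream queue: ids of all in-flight frames
// are kept, but only the newest |max_stored| frames keep their pixel data.
class BoundedFrameQueue {
 public:
  explicit BoundedFrameQueue(std::size_t max_stored)
      : max_stored_(max_stored) {}

  // Remembers a captured frame. If too many frames are stored, the oldest
  // one loses its pixels but keeps its id, so it can still be counted as
  // dropped later (just without PSNR/SSIM).
  void OnCaptured(uint16_t id, std::vector<uint8_t> pixels) {
    ids_.push_back(id);
    stored_[id] = std::move(pixels);
    if (stored_.size() > max_stored_) {
      // Oldest stored id, assuming ids grow monotonically (no wrap-around).
      stored_.erase(stored_.begin());
    }
  }

  // Pops ids up to and including |id|; ids before it count as dropped.
  // Returns true if the captured pixels for |id| were still stored, i.e. a
  // full PSNR/SSIM comparison is possible; false means "memory overloaded".
  bool OnRendered(uint16_t id) {
    while (!ids_.empty() && ids_.front() != id) {
      stored_.erase(ids_.front());
      ids_.pop_front();
    }
    if (!ids_.empty()) {
      ids_.pop_front();
    }
    return stored_.erase(id) > 0;
  }

 private:
  const std::size_t max_stored_;
  std::deque<uint16_t> ids_;                         // In-flight ids, in order.
  std::map<uint16_t, std::vector<uint8_t>> stored_;  // Ids still holding pixels.
};

In the actual change the evicted ids are tracked as "dead" entries inside
StreamState, and comparisons whose captured frame was evicted are reported
as memory_overloaded_comparisons_done.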

Bug: webrtc:11373
Change-Id: Iaabcc8d1c3c9142dc58ea5f2f30f599864b088e8
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/168951
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Commit-Queue: Artem Titov <titovartem@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#30602}
Author: Artem Titov
Date: 2020-02-25 10:56:26 +01:00
Committed by: Commit Bot
Commit: fcf4e2cd67 (parent 2c35da4c00)
4 changed files with 365 additions and 50 deletions

test/pc/e2e/BUILD.gn

@@ -35,6 +35,7 @@ if (rtc_include_tests) {
     deps = [
       ":default_encoded_image_data_injector_unittest",
+      ":default_video_quality_analyzer_test",
       ":peer_connection_e2e_smoke_test",
       ":single_process_encoded_image_data_injector_unittest",
     ]
@@ -394,6 +395,22 @@ if (rtc_include_tests) {
       "../../../rtc_base:logging",
     ]
   }
+
+  rtc_library("default_video_quality_analyzer_test") {
+    testonly = true
+    sources = [ "analyzer/video/default_video_quality_analyzer_test.cc" ]
+    deps = [
+      ":default_video_quality_analyzer",
+      "../..:test_support",
+      "../../../api:create_frame_generator",
+      "../../../api:rtp_packet_info",
+      "../../../api/video:encoded_image",
+      "../../../api/video:video_frame",
+      "../../../api/video:video_frame_i420",
+      "../../../modules/rtp_rtcp:rtp_rtcp_format",
+      "../../../system_wrappers",
+    ]
+  }
 }
 
 rtc_library("analyzer_helper") {

test/pc/e2e/analyzer/video/default_video_quality_analyzer.cc

@@ -64,8 +64,11 @@ double RateCounter::GetEventsPerSecond() const {
 }
 
 DefaultVideoQualityAnalyzer::DefaultVideoQualityAnalyzer(
-    bool heavy_metrics_computation_enabled)
+    bool heavy_metrics_computation_enabled,
+    int max_frames_in_flight_per_stream_count)
     : heavy_metrics_computation_enabled_(heavy_metrics_computation_enabled),
+      max_frames_in_flight_per_stream_count_(
+          max_frames_in_flight_per_stream_count),
       clock_(Clock::GetRealTimeClock()) {}
 
 DefaultVideoQualityAnalyzer::~DefaultVideoQualityAnalyzer() {
   Stop();
@@ -120,7 +123,7 @@ uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured(
     stream_frame_counters_[stream_label].captured++;
     StreamState* state = &stream_states_[stream_label];
-    state->frame_ids.push_back(frame_id);
+    state->PushBack(frame_id);
     // Update frames in flight info.
     auto it = captured_frames_in_flight_.find(frame_id);
     if (it != captured_frames_in_flight_.end()) {
@@ -130,8 +133,8 @@ uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured(
       auto stats_it = frame_stats_.find(frame_id);
       RTC_DCHECK(stats_it != frame_stats_.end());
-      RTC_DCHECK(frame_id == state->frame_ids.front());
-      state->frame_ids.pop_front();
+      uint16_t oldest_frame_id = state->PopFront();
+      RTC_DCHECK_EQ(frame_id, oldest_frame_id);
       frame_counters_.dropped++;
       stream_frame_counters_[stream_label].dropped++;
       AddComparison(it->second, absl::nullopt, true, stats_it->second);
@@ -152,6 +155,15 @@ uint16_t DefaultVideoQualityAnalyzer::OnFrameCaptured(
       it->second.erase(frame_id);
     }
     stream_to_frame_id_history_[stream_label].insert(frame_id);
+    // If state has too many frames that are in flight => remove the oldest
+    // queued frame in order to avoid to use too much memory.
+    if (state->GetAliveFramesCount() > max_frames_in_flight_per_stream_count_) {
+      uint16_t frame_id_to_remove = state->MarkNextAliveFrameAsDead();
+      auto removed_count = captured_frames_in_flight_.erase(frame_id_to_remove);
+      RTC_DCHECK_EQ(removed_count, 1)
+          << "Invalid stream state: alive frame is removed already";
+    }
   }
   return frame_id;
 }
@@ -247,8 +259,10 @@ void DefaultVideoQualityAnalyzer::OnFrameRendered(
 
   // Find corresponding captured frame.
   auto frame_it = captured_frames_in_flight_.find(frame.id());
-  RTC_DCHECK(frame_it != captured_frames_in_flight_.end());
-  const VideoFrame& captured_frame = frame_it->second;
+  absl::optional<VideoFrame> captured_frame =
+      frame_it != captured_frames_in_flight_.end()
+          ? absl::optional<VideoFrame>(frame_it->second)
+          : absl::nullopt;
 
   // After we received frame here we need to check if there are any dropped
   // frames between this one and last one, that was rendered for this video
@@ -257,10 +271,9 @@ void DefaultVideoQualityAnalyzer::OnFrameRendered(
   const std::string& stream_label = frame_stats->stream_label;
   StreamState* state = &stream_states_[stream_label];
   int dropped_count = 0;
-  while (!state->frame_ids.empty() && state->frame_ids.front() != frame.id()) {
+  while (!state->Empty() && state->Front() != frame.id()) {
     dropped_count++;
-    uint16_t dropped_frame_id = state->frame_ids.front();
-    state->frame_ids.pop_front();
+    uint16_t dropped_frame_id = state->PopFront();
     // Frame with id |dropped_frame_id| was dropped. We need:
     // 1. Update global and stream frame counters
     // 2. Extract corresponding frame from |captured_frames_in_flight_|
@@ -273,22 +286,27 @@ void DefaultVideoQualityAnalyzer::OnFrameRendered(
     auto dropped_frame_stats_it = frame_stats_.find(dropped_frame_id);
     RTC_DCHECK(dropped_frame_stats_it != frame_stats_.end());
     auto dropped_frame_it = captured_frames_in_flight_.find(dropped_frame_id);
-    RTC_CHECK(dropped_frame_it != captured_frames_in_flight_.end());
+    absl::optional<VideoFrame> dropped_frame =
+        dropped_frame_it != captured_frames_in_flight_.end()
+            ? absl::optional<VideoFrame>(dropped_frame_it->second)
+            : absl::nullopt;
-    AddComparison(dropped_frame_it->second, absl::nullopt, true,
+    AddComparison(dropped_frame, absl::nullopt, true,
                   dropped_frame_stats_it->second);
     frame_stats_.erase(dropped_frame_stats_it);
-    captured_frames_in_flight_.erase(dropped_frame_it);
+    if (dropped_frame_it != captured_frames_in_flight_.end()) {
+      captured_frames_in_flight_.erase(dropped_frame_it);
+    }
   }
-  RTC_DCHECK(!state->frame_ids.empty());
-  state->frame_ids.pop_front();
+  RTC_DCHECK(!state->Empty());
+  state->PopFront();
-  if (state->last_rendered_frame_time) {
+  if (state->last_rendered_frame_time()) {
     frame_stats->prev_frame_rendered_time =
-        state->last_rendered_frame_time.value();
+        state->last_rendered_frame_time().value();
   }
-  state->last_rendered_frame_time = frame_stats->rendered_time;
+  state->set_last_rendered_frame_time(frame_stats->rendered_time);
   {
     rtc::CritScope cr(&comparison_lock_);
     stream_stats_[stream_label].skipped_between_rendered.AddSample(
@@ -296,7 +314,9 @@ void DefaultVideoQualityAnalyzer::OnFrameRendered(
   }
   AddComparison(captured_frame, frame, false, *frame_stats);
-  captured_frames_in_flight_.erase(frame_it);
+  if (frame_it != captured_frames_in_flight_.end()) {
+    captured_frames_in_flight_.erase(frame_it);
+  }
   frame_stats_.erase(stats_it);
 }
@@ -343,9 +363,9 @@ void DefaultVideoQualityAnalyzer::Stop() {
       // |stream_last_freeze_end_time_| for this stream will be |start_time_|.
       // If there is freeze, then we need add time from last rendered frame
       // to last freeze end as time between freezes.
-      if (state.last_rendered_frame_time) {
+      if (state.last_rendered_frame_time()) {
         item.second.time_between_freezes_ms.AddSample(
-            (state.last_rendered_frame_time.value() -
+            (state.last_rendered_frame_time().value() -
              stream_last_freeze_end_time_.at(item.first))
                 .ms());
       }
@@ -380,7 +400,7 @@ std::set<std::string> DefaultVideoQualityAnalyzer::GetKnownVideoStreams()
   return out;
 }
 
-const FrameCounters& DefaultVideoQualityAnalyzer::GetGlobalCounters() {
+const FrameCounters& DefaultVideoQualityAnalyzer::GetGlobalCounters() const {
   rtc::CritScope crit(&lock_);
   return frame_counters_;
 }
@@ -465,10 +485,15 @@ void DefaultVideoQualityAnalyzer::AddComparison(
   // If there too many computations waiting in the queue, we won't provide
   // frames itself to make future computations lighter.
   if (comparisons_.size() >= kMaxActiveComparisons) {
-    comparisons_.emplace_back(dropped, frame_stats);
+    comparisons_.emplace_back(absl::nullopt, absl::nullopt, dropped,
+                              frame_stats, OverloadReason::kCpu);
   } else {
+    OverloadReason overload_reason = OverloadReason::kNone;
+    if (!captured && !dropped) {
+      overload_reason = OverloadReason::kMemory;
+    }
     comparisons_.emplace_back(std::move(captured), std::move(rendered), dropped,
-                              frame_stats);
+                              frame_stats, overload_reason);
   }
   comparison_available_event_.Set();
 }
@@ -529,8 +554,10 @@ void DefaultVideoQualityAnalyzer::ProcessComparison(
   RTC_CHECK(stats_it != stream_stats_.end());
   StreamStats* stats = &stats_it->second;
   analyzer_stats_.comparisons_done++;
-  if (!comparison.captured) {
-    analyzer_stats_.overloaded_comparisons_done++;
+  if (comparison.overload_reason == OverloadReason::kCpu) {
+    analyzer_stats_.cpu_overloaded_comparisons_done++;
+  } else if (comparison.overload_reason == OverloadReason::kMemory) {
+    analyzer_stats_.memory_overloaded_comparisons_done++;
   }
   if (psnr > 0) {
     stats->psnr.AddSample(psnr);
@@ -612,8 +639,10 @@ void DefaultVideoQualityAnalyzer::ReportResults() {
         << analyzer_stats_.comparisons_queue_size.GetPercentile(0.99);
   }
   RTC_LOG(INFO) << "comparisons_done=" << analyzer_stats_.comparisons_done;
-  RTC_LOG(INFO) << "overloaded_comparisons_done="
-                << analyzer_stats_.overloaded_comparisons_done;
+  RTC_LOG(INFO) << "cpu_overloaded_comparisons_done="
+                << analyzer_stats_.cpu_overloaded_comparisons_done;
+  RTC_LOG(INFO) << "memory_overloaded_comparisons_done="
+                << analyzer_stats_.memory_overloaded_comparisons_done;
 }
 
 void DefaultVideoQualityAnalyzer::ReportVideoBweResults(
@@ -737,19 +766,28 @@ DefaultVideoQualityAnalyzer::FrameComparison::FrameComparison(
     absl::optional<VideoFrame> captured,
     absl::optional<VideoFrame> rendered,
     bool dropped,
-    FrameStats frame_stats)
+    FrameStats frame_stats,
+    OverloadReason overload_reason)
     : captured(std::move(captured)),
       rendered(std::move(rendered)),
       dropped(dropped),
-      frame_stats(std::move(frame_stats)) {}
+      frame_stats(std::move(frame_stats)),
+      overload_reason(overload_reason) {}
 
-DefaultVideoQualityAnalyzer::FrameComparison::FrameComparison(
-    bool dropped,
-    FrameStats frame_stats)
-    : captured(absl::nullopt),
-      rendered(absl::nullopt),
-      dropped(dropped),
-      frame_stats(std::move(frame_stats)) {}
+uint16_t DefaultVideoQualityAnalyzer::StreamState::PopFront() {
+  uint16_t frame_id = frame_ids_.front();
+  frame_ids_.pop_front();
+  if (dead_frames_count_ > 0) {
+    dead_frames_count_--;
+  }
+  return frame_id;
+}
+
+uint16_t DefaultVideoQualityAnalyzer::StreamState::MarkNextAliveFrameAsDead() {
+  uint16_t frame_id = frame_ids_[dead_frames_count_];
+  dead_frames_count_++;
+  return frame_id;
+}
 
 }  // namespace webrtc_pc_e2e
 }  // namespace webrtc

test/pc/e2e/analyzer/video/default_video_quality_analyzer.h

@@ -33,6 +33,11 @@
 namespace webrtc {
 namespace webrtc_pc_e2e {
 
+// WebRTC will request a key frame after 3 seconds if no frames were received.
+// We assume max frame rate ~60 fps, so 270 frames will cover max freeze without
+// key frame request.
+constexpr int kDefaultMaxFramesInFlightPerStream = 270;
+
 class RateCounter {
  public:
   void AddEvent(Timestamp event_time);
@@ -105,14 +110,18 @@ struct AnalyzerStats {
   // Size of analyzer internal comparisons queue, measured when new element
   // id added to the queue.
   SamplesStatsCounter comparisons_queue_size;
-  // Amount of performed comparisons of 2 video frames from captured and
+  // Number of performed comparisons of 2 video frames from captured and
   // rendered streams.
   int64_t comparisons_done = 0;
-  // Amount of overloaded comparisons. Comparison is overloaded if it is queued
-  // when there are too many not processed comparisons in the queue. Overloaded
-  // comparison doesn't include metrics, that require heavy computations like
-  // SSIM and PSNR.
-  int64_t overloaded_comparisons_done = 0;
+  // Number of cpu overloaded comparisons. Comparison is cpu overloaded if it is
+  // queued when there are too many not processed comparisons in the queue.
+  // Overloaded comparison doesn't include metrics like SSIM and PSNR that
+  // require heavy computations.
+  int64_t cpu_overloaded_comparisons_done = 0;
+  // Number of memory overloaded comparisons. Comparison is memory overloaded if
+  // it is queued when its captured frame was already removed due to high memory
+  // usage for that video stream.
+  int64_t memory_overloaded_comparisons_done = 0;
 };
 
 struct VideoBweStats {
@@ -126,7 +135,9 @@ struct VideoBweStats {
 class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
  public:
   explicit DefaultVideoQualityAnalyzer(
-      bool heavy_metrics_computation_enabled = true);
+      bool heavy_metrics_computation_enabled = true,
+      int max_frames_in_flight_per_stream_count =
+          kDefaultMaxFramesInFlightPerStream);
   ~DefaultVideoQualityAnalyzer() override;
 
   void Start(std::string test_case_name, int max_threads_count) override;
@@ -149,7 +160,7 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
   // Returns set of stream labels, that were met during test call.
   std::set<std::string> GetKnownVideoStreams() const;
 
-  const FrameCounters& GetGlobalCounters();
+  const FrameCounters& GetGlobalCounters() const;
   // Returns frame counter per stream label. Valid stream labels can be obtained
   // by calling GetKnownVideoStreams()
   const std::map<std::string, FrameCounters>& GetPerStreamCounters() const;
@@ -186,6 +197,16 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
     absl::optional<int> rendered_frame_height = absl::nullopt;
   };
 
+  // Describes why comparison was done in overloaded mode (without calculating
+  // PSNR and SSIM).
+  enum class OverloadReason {
+    kNone,
+    // Not enough CPU to process all incoming comparisons.
+    kCpu,
+    // Not enough memory to store captured frames for all comparisons.
+    kMemory
+  };
+
   // Represents comparison between two VideoFrames. Contains video frames itself
   // and stats. Can be one of two types:
   // 1. Normal - in this case |captured| is presented and either |rendered| is
@@ -198,8 +219,8 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
     FrameComparison(absl::optional<VideoFrame> captured,
                     absl::optional<VideoFrame> rendered,
                     bool dropped,
-                    FrameStats frame_stats);
-    FrameComparison(bool dropped, FrameStats frameStats);
+                    FrameStats frame_stats,
+                    OverloadReason overload_reason);
 
     // Frames can be omitted if there too many computations waiting in the
     // queue.
@@ -210,10 +231,32 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
     // will be |absl::nullopt|.
     bool dropped;
     FrameStats frame_stats;
+    OverloadReason overload_reason;
   };
 
   // Represents a current state of video stream.
-  struct StreamState {
+  class StreamState {
+   public:
+    void PushBack(uint16_t frame_id) { frame_ids_.emplace_back(frame_id); }
+    uint16_t PopFront();
+    bool Empty() { return frame_ids_.empty(); }
+    uint16_t Front() { return frame_ids_.front(); }
+    int GetAliveFramesCount() { return frame_ids_.size() - dead_frames_count_; }
+    uint16_t MarkNextAliveFrameAsDead();
+
+    void set_last_rendered_frame_time(Timestamp time) {
+      last_rendered_frame_time_ = time;
+    }
+    absl::optional<Timestamp> last_rendered_frame_time() const {
+      return last_rendered_frame_time_;
+    }
+
+   private:
     // To correctly determine dropped frames we have to know sequence of frames
     // in each stream so we will keep a list of frame ids inside the stream.
     // When the frame is rendered, we will pop ids from the list for until id
@@ -225,8 +268,10 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
     // If we received frame with id frame_id3, then we will pop frame_id1 and
     // frame_id2 and consider that frames as dropped and then compare received
     // frame with the one from |captured_frames_in_flight_| with id frame_id3.
-    std::deque<uint16_t> frame_ids;
-    absl::optional<Timestamp> last_rendered_frame_time = absl::nullopt;
+    std::deque<uint16_t> frame_ids_;
+    // Count of dead frames in the beginning of the deque.
+    int dead_frames_count_;
+    absl::optional<Timestamp> last_rendered_frame_time_ = absl::nullopt;
   };
 
   enum State { kNew, kActive, kStopped };
@@ -258,6 +303,7 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
   Timestamp Now();
 
   const bool heavy_metrics_computation_enabled_;
+  const int max_frames_in_flight_per_stream_count_;
   webrtc::Clock* const clock_;
 
   std::atomic<uint16_t> next_frame_id_{0};
@@ -267,7 +313,12 @@ class DefaultVideoQualityAnalyzer : public VideoQualityAnalyzerInterface {
   State state_ RTC_GUARDED_BY(lock_) = State::kNew;
   Timestamp start_time_ RTC_GUARDED_BY(lock_) = Timestamp::MinusInfinity();
   // Frames that were captured by all streams and still aren't rendered by any
-  // stream or deemed dropped.
+  // stream or deemed dropped. Frame with id X can be removed from this map if:
+  // 1. The frame with id X was received in OnFrameRendered
+  // 2. The frame with id Y > X was received in OnFrameRendered
+  // 3. Next available frame id for newly captured frame is X
+  // 4. There too many frames in flight for current video stream and X is the
+  //    oldest frame id in this stream.
   std::map<uint16_t, VideoFrame> captured_frames_in_flight_
       RTC_GUARDED_BY(lock_);
   // Global frames count for all video streams.

test/pc/e2e/analyzer/video/default_video_quality_analyzer_test.cc (new file)

@@ -0,0 +1,209 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <map>
#include <memory>
#include <vector>
#include "api/rtp_packet_info.h"
#include "api/rtp_packet_infos.h"
#include "api/test/create_frame_generator.h"
#include "api/video/encoded_image.h"
#include "api/video/i420_buffer.h"
#include "api/video/video_frame.h"
#include "system_wrappers/include/sleep.h"
#include "test/gtest.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h"
namespace webrtc {
namespace webrtc_pc_e2e {
namespace {
constexpr int kAnalyzerMaxThreadsCount = 1;
constexpr int kMaxFramesInFlightPerStream = 10;
constexpr int kFrameWidth = 320;
constexpr int kFrameHeight = 240;
constexpr char kStreamLabel[] = "video-stream";
VideoFrame NextFrame(test::FrameGeneratorInterface* frame_generator,
int64_t timestamp_us) {
test::FrameGeneratorInterface::VideoFrameData frame_data =
frame_generator->NextFrame();
return VideoFrame::Builder()
.set_video_frame_buffer(frame_data.buffer)
.set_update_rect(frame_data.update_rect)
.set_timestamp_us(timestamp_us)
.build();
}
EncodedImage FakeEncode(const VideoFrame& frame) {
EncodedImage image;
std::vector<RtpPacketInfo> packet_infos;
packet_infos.push_back(
RtpPacketInfo(/*ssrc=*/1,
/*csrcs=*/{},
/*rtp_timestamp=*/frame.timestamp(),
/*audio_level=*/absl::nullopt,
/*absolute_capture_time=*/absl::nullopt,
/*receive_time_ms=*/frame.timestamp_us() + 10));
image.SetPacketInfos(RtpPacketInfos(packet_infos));
return image;
}
VideoFrame DeepCopy(const VideoFrame& frame) {
VideoFrame copy = frame;
copy.set_video_frame_buffer(
I420Buffer::Copy(*frame.video_frame_buffer()->ToI420()));
return copy;
}
TEST(DefaultVideoQualityAnalyzerTest,
MemoryOverloadedAndThenAllFramesReceived) {
std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
/*type=*/absl::nullopt,
/*num_squares=*/absl::nullopt);
DefaultVideoQualityAnalyzer analyzer(
/*heavy_metrics_computation_enabled=*/false, kMaxFramesInFlightPerStream);
analyzer.Start("test_case", kAnalyzerMaxThreadsCount);
std::map<uint16_t, VideoFrame> captured_frames;
std::vector<uint16_t> frames_order;
for (int i = 0; i < kMaxFramesInFlightPerStream * 2; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
frame.set_id(analyzer.OnFrameCaptured(kStreamLabel, frame));
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
analyzer.OnFramePreEncode(frame);
analyzer.OnFrameEncoded(frame.id(), FakeEncode(frame));
}
for (const uint16_t& frame_id : frames_order) {
VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
analyzer.OnFramePreDecode(received_frame.id(), FakeEncode(received_frame));
analyzer.OnFrameDecoded(received_frame, /*decode_time_ms=*/absl::nullopt,
/*qp=*/absl::nullopt);
analyzer.OnFrameRendered(received_frame);
}
// Give analyzer some time to process frames on async thread. The computations
// have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it
// means we have an issue!
SleepMs(100);
analyzer.Stop();
AnalyzerStats stats = analyzer.GetAnalyzerStats();
EXPECT_EQ(stats.memory_overloaded_comparisons_done,
kMaxFramesInFlightPerStream);
EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream * 2);
FrameCounters frame_counters = analyzer.GetGlobalCounters();
EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream * 2);
EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream * 2);
EXPECT_EQ(frame_counters.dropped, 0);
}
TEST(DefaultVideoQualityAnalyzerTest,
MemoryOverloadedHalfDroppedAndThenHalfFramesReceived) {
std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
/*type=*/absl::nullopt,
/*num_squares=*/absl::nullopt);
DefaultVideoQualityAnalyzer analyzer(
/*heavy_metrics_computation_enabled=*/false, kMaxFramesInFlightPerStream);
analyzer.Start("test_case", kAnalyzerMaxThreadsCount);
std::map<uint16_t, VideoFrame> captured_frames;
std::vector<uint16_t> frames_order;
for (int i = 0; i < kMaxFramesInFlightPerStream * 2; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
frame.set_id(analyzer.OnFrameCaptured(kStreamLabel, frame));
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
analyzer.OnFramePreEncode(frame);
analyzer.OnFrameEncoded(frame.id(), FakeEncode(frame));
}
for (size_t i = kMaxFramesInFlightPerStream; i < frames_order.size(); ++i) {
uint16_t frame_id = frames_order.at(i);
VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
analyzer.OnFramePreDecode(received_frame.id(), FakeEncode(received_frame));
analyzer.OnFrameDecoded(received_frame, /*decode_time_ms=*/absl::nullopt,
/*qp=*/absl::nullopt);
analyzer.OnFrameRendered(received_frame);
}
// Give analyzer some time to process frames on async thread. The computations
// have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it
// means we have an issue!
SleepMs(100);
analyzer.Stop();
AnalyzerStats stats = analyzer.GetAnalyzerStats();
EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream * 2);
FrameCounters frame_counters = analyzer.GetGlobalCounters();
EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream * 2);
EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream);
EXPECT_EQ(frame_counters.dropped, kMaxFramesInFlightPerStream);
}
TEST(DefaultVideoQualityAnalyzerTest, NormalScenario) {
std::unique_ptr<test::FrameGeneratorInterface> frame_generator =
test::CreateSquareFrameGenerator(kFrameWidth, kFrameHeight,
/*type=*/absl::nullopt,
/*num_squares=*/absl::nullopt);
DefaultVideoQualityAnalyzer analyzer(
/*heavy_metrics_computation_enabled=*/false, kMaxFramesInFlightPerStream);
analyzer.Start("test_case", kAnalyzerMaxThreadsCount);
std::map<uint16_t, VideoFrame> captured_frames;
std::vector<uint16_t> frames_order;
for (int i = 0; i < kMaxFramesInFlightPerStream; ++i) {
VideoFrame frame = NextFrame(frame_generator.get(), i);
frame.set_id(analyzer.OnFrameCaptured(kStreamLabel, frame));
frames_order.push_back(frame.id());
captured_frames.insert({frame.id(), frame});
analyzer.OnFramePreEncode(frame);
analyzer.OnFrameEncoded(frame.id(), FakeEncode(frame));
}
for (size_t i = 1; i < frames_order.size(); i += 2) {
uint16_t frame_id = frames_order.at(i);
VideoFrame received_frame = DeepCopy(captured_frames.at(frame_id));
analyzer.OnFramePreDecode(received_frame.id(), FakeEncode(received_frame));
analyzer.OnFrameDecoded(received_frame, /*decode_time_ms=*/absl::nullopt,
/*qp=*/absl::nullopt);
analyzer.OnFrameRendered(received_frame);
}
// Give analyzer some time to process frames on async thread. The computations
// have to be fast (heavy metrics are disabled!), so if doesn't fit 100ms it
// means we have an issue!
SleepMs(100);
analyzer.Stop();
AnalyzerStats stats = analyzer.GetAnalyzerStats();
EXPECT_EQ(stats.memory_overloaded_comparisons_done, 0);
EXPECT_EQ(stats.comparisons_done, kMaxFramesInFlightPerStream);
FrameCounters frame_counters = analyzer.GetGlobalCounters();
EXPECT_EQ(frame_counters.captured, kMaxFramesInFlightPerStream);
EXPECT_EQ(frame_counters.received, kMaxFramesInFlightPerStream / 2);
EXPECT_EQ(frame_counters.decoded, kMaxFramesInFlightPerStream / 2);
EXPECT_EQ(frame_counters.rendered, kMaxFramesInFlightPerStream / 2);
EXPECT_EQ(frame_counters.dropped, kMaxFramesInFlightPerStream / 2);
}
} // namespace
} // namespace webrtc_pc_e2e
} // namespace webrtc