Extract various DVQA stats-related objects into their own header

Bug: b/196229820
Change-Id: I238e49532a6bfd71eee8b6b60dfbb1645c8234ac
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/228438
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Commit-Queue: Artem Titov <titovartem@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#34736}
This commit is contained in:
Artem Titov
2021-08-12 13:28:26 +02:00
committed by WebRTC LUCI CQ
parent 2a6d48b82f
commit a17ec76351
7 changed files with 280 additions and 223 deletions

View File

@ -617,6 +617,8 @@ if (!build_with_chromium) {
sources = [
"analyzer/video/default_video_quality_analyzer.cc",
"analyzer/video/default_video_quality_analyzer.h",
"analyzer/video/default_video_quality_analyzer_shared_objects.cc",
"analyzer/video/default_video_quality_analyzer_shared_objects.h",
]
deps = [
@ -631,6 +633,7 @@ if (!build_with_chromium) {
"../../../api/video:video_frame",
"../../../api/video:video_rtp_headers",
"../../../common_video",
"../../../rtc_base:checks",
"../../../rtc_base:criticalsection",
"../../../rtc_base:logging",
"../../../rtc_base:rtc_base_approved",

View File

@ -25,6 +25,7 @@
#include "rtc_base/strings/string_builder.h"
#include "rtc_base/time_utils.h"
#include "rtc_tools/frame_analyzer/video_geometry_aligner.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h"
namespace webrtc {
namespace webrtc_pc_e2e {
@ -104,66 +105,6 @@ SamplesStatsCounter::StatsSample StatsSample(double value,
} // namespace
// Registers a single event that happened at `event_time`.
void RateCounter::AddEvent(Timestamp event_time) {
  // Remember the very first event time to anchor the measurement interval.
  if (event_first_time_.IsMinusInfinity()) {
    event_first_time_ = event_time;
  }
  event_last_time_ = event_time;
  event_count_++;
}
// Returns the average number of events per second over the interval between
// the first and the last registered event. Must not be called while IsEmpty()
// returns true (enforced by the DCHECK below).
double RateCounter::GetEventsPerSecond() const {
  RTC_DCHECK(!IsEmpty());
  // Divide by the interval in us and multiply by kMicrosPerSecond to correctly
  // process cases where there were so few events that the difference is less
  // than 1 sec. We can use us here, because Timestamp has us resolution.
  return static_cast<double>(event_count_) /
  (event_last_time_ - event_first_time_).us() * kMicrosPerSecond;
}
// Serializes the key as "<stream_label>_<sender>_<receiver>".
std::string StatsKey::ToString() const {
  rtc::StringBuilder out;
  out << stream_label << "_" << sender << "_" << receiver;
  return out.str();
}
// Lexicographic ordering: stream_label first, then sender, then receiver.
// Provides the strict weak ordering required to use StatsKey as std::map key.
bool operator<(const StatsKey& a, const StatsKey& b) {
  if (a.stream_label != b.stream_label) {
    return a.stream_label < b.stream_label;
  }
  if (a.sender != b.sender) {
    return a.sender < b.sender;
  }
  return a.receiver < b.receiver;
}
// Two keys are equal iff all three fields match.
bool operator==(const StatsKey& a, const StatsKey& b) {
  return a.stream_label == b.stream_label && a.sender == b.sender &&
  a.receiver == b.receiver;
}
// Serializes the key as "stream=<stream>_sender=<sender>_receiver=<receiver>".
std::string InternalStatsKey::ToString() const {
  rtc::StringBuilder out;
  out << "stream=" << stream << "_sender=" << sender
  << "_receiver=" << receiver;
  return out.str();
}
// Lexicographic ordering: stream first, then sender, then receiver.
// Provides the strict weak ordering required for std::map usage.
bool operator<(const InternalStatsKey& a, const InternalStatsKey& b) {
  if (a.stream != b.stream) {
    return a.stream < b.stream;
  }
  if (a.sender != b.sender) {
    return a.sender < b.sender;
  }
  return a.receiver < b.receiver;
}
// Two keys are equal iff all three indices match.
bool operator==(const InternalStatsKey& a, const InternalStatsKey& b) {
  return a.stream == b.stream && a.sender == b.sender &&
  a.receiver == b.receiver;
}
DefaultVideoQualityAnalyzer::DefaultVideoQualityAnalyzer(
webrtc::Clock* clock,
DefaultVideoQualityAnalyzerOptions options)
@ -523,13 +464,9 @@ void DefaultVideoQualityAnalyzer::OnFrameRendered(
state->SetLastRenderedFrameTime(peer_index,
frame_in_flight->rendered_time(peer_index));
{
MutexLock cr(&comparison_lock_);
MutexLock lock(&comparison_lock_);
stream_stats_.at(stats_key).skipped_between_rendered.AddSample(
StatsSample(dropped_count, Now()));
}
{
MutexLock lock(&comparison_lock_);
analyzer_stats_.frames_in_flight_left_count.AddSample(
StatsSample(captured_frames_in_flight_.size(), Now()));
AddComparison(stats_key, captured_frame, frame, false,

View File

@ -29,169 +29,13 @@
#include "rtc_base/platform_thread.h"
#include "rtc_base/synchronization/mutex.h"
#include "system_wrappers/include/clock.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h"
#include "test/pc/e2e/analyzer/video/multi_head_queue.h"
#include "test/testsupport/perf_test.h"
namespace webrtc {
namespace webrtc_pc_e2e {
// WebRTC will request a key frame after 3 seconds if no frames were received.
// We assume max frame rate ~60 fps, so 270 frames will cover the longest
// freeze that can happen without triggering a key frame request.
constexpr size_t kDefaultMaxFramesInFlightPerStream = 270;

// Counts events together with the timestamps of the first and last one, so
// that the average event rate over the observed interval can be computed.
class RateCounter {
 public:
  // Registers an event that happened at `event_time`.
  void AddEvent(Timestamp event_time);

  // True when no events were added, or when every added event carried the
  // exact same timestamp (first == last).
  bool IsEmpty() const { return event_first_time_ == event_last_time_; }

  // Average number of events per second over the observed interval.
  // Must not be called while IsEmpty() returns true.
  double GetEventsPerSecond() const;

 private:
  Timestamp event_first_time_ = Timestamp::MinusInfinity();
  Timestamp event_last_time_ = Timestamp::MinusInfinity();
  int64_t event_count_ = 0;
};
// Per-stage frame counters for the WebRTC video pipeline, from capture
// through rendering.
struct FrameCounters {
  // Count of frames that were passed into the WebRTC pipeline by the video
  // stream source.
  int64_t captured = 0;
  // Count of frames that reached the video encoder.
  int64_t pre_encoded = 0;
  // Count of encoded images that were produced by the encoder for all
  // requested spatial layers and simulcast streams.
  int64_t encoded = 0;
  // Count of encoded images received in the decoder for all requested spatial
  // layers and simulcast streams.
  int64_t received = 0;
  // Count of frames that were produced by the decoder.
  int64_t decoded = 0;
  // Count of frames that went out from the WebRTC pipeline to the video sink.
  int64_t rendered = 0;
  // Count of frames that were dropped at any point between capturing and
  // rendering.
  int64_t dropped = 0;
};
// Contains information about the codec that was used for encoding or decoding
// the stream.
struct StreamCodecInfo {
  // Codec implementation name.
  std::string codec_name;
  // Id of the first frame for which this codec was used.
  uint16_t first_frame_id;
  // Id of the last frame for which this codec was used.
  uint16_t last_frame_id;
  // Timestamp when the first frame was handled by the encoder/decoder.
  Timestamp switched_on_at = Timestamp::PlusInfinity();
  // Timestamp when this codec was used for the last time.
  Timestamp switched_from_at = Timestamp::PlusInfinity();
};
// Accumulated quality statistics for a single video stream as seen by one
// receiver.
struct StreamStats {
  explicit StreamStats(Timestamp stream_started_time)
      : stream_started_time(stream_started_time) {}

  // The time when the first frame of this stream was captured.
  Timestamp stream_started_time;

  SamplesStatsCounter psnr;
  SamplesStatsCounter ssim;
  // Time from frame encoded (time point on exit from encoder) to the
  // encoded image received in decoder (time point on entrance to decoder).
  SamplesStatsCounter transport_time_ms;
  // Time from when the frame was captured on the device to when it was
  // displayed on the device.
  SamplesStatsCounter total_delay_incl_transport_ms;
  // Time between frames coming out of the renderer.
  SamplesStatsCounter time_between_rendered_frames_ms;
  RateCounter encode_frame_rate;
  SamplesStatsCounter encode_time_ms;
  SamplesStatsCounter decode_time_ms;
  // Time from when the last packet of a frame is received until the frame is
  // sent to the renderer.
  SamplesStatsCounter receive_to_render_time_ms;
  // Max frames skipped between two nearest rendered frames.
  SamplesStatsCounter skipped_between_rendered;
  // In the next 2 metrics a freeze is a pause that is longer than the maximum
  // of:
  //  1. 150 ms
  //  2. 3 * average time between two sequential frames.
  // Item 1 will cover high fps video and is a duration that is noticeable by
  // the human eye. Item 2 will cover low fps video like screen sharing.
  // Freeze duration.
  SamplesStatsCounter freeze_time_ms;
  // Mean time between one freeze end and the next freeze start.
  SamplesStatsCounter time_between_freezes_ms;
  SamplesStatsCounter resolution_of_rendered_frame;
  SamplesStatsCounter target_encode_bitrate;

  int64_t total_encoded_images_payload = 0;
  int64_t dropped_by_encoder = 0;
  int64_t dropped_before_encoder = 0;

  // Vector of encoders used for this stream by the sending client.
  std::vector<StreamCodecInfo> encoders;
  // Vector of decoders used for this stream by the receiving client.
  std::vector<StreamCodecInfo> decoders;
};
// Statistics about the analyzer's own operation rather than video quality.
struct AnalyzerStats {
  // Size of the analyzer's internal comparisons queue, measured when a new
  // element is added to the queue.
  SamplesStatsCounter comparisons_queue_size;
  // Number of performed comparisons of 2 video frames from captured and
  // rendered streams.
  int64_t comparisons_done = 0;
  // Number of cpu overloaded comparisons. A comparison is cpu overloaded if it
  // is queued when there are too many not yet processed comparisons in the
  // queue. An overloaded comparison doesn't include metrics like SSIM and PSNR
  // that require heavy computations.
  int64_t cpu_overloaded_comparisons_done = 0;
  // Number of memory overloaded comparisons. A comparison is memory overloaded
  // if it is queued when its captured frame was already removed due to high
  // memory usage for that video stream.
  int64_t memory_overloaded_comparisons_done = 0;
  // Count of frames in flight in the analyzer, measured when a new comparison
  // is added and after the analyzer was stopped.
  SamplesStatsCounter frames_in_flight_left_count;
};
// Public identifier of a stream's stats: which stream, sent by which peer,
// as received by which peer.
struct StatsKey {
  StatsKey(std::string stream_label, std::string sender, std::string receiver)
      : stream_label(std::move(stream_label)),
        sender(std::move(sender)),
        receiver(std::move(receiver)) {}

  // Returns the key formatted as "<stream_label>_<sender>_<receiver>".
  std::string ToString() const;

  // Label of the video stream to which these stats belong.
  std::string stream_label;
  // Name of the peer which sends this stream.
  std::string sender;
  // Name of the peer on which the stream was received.
  std::string receiver;
};

// Required to use StatsKey as std::map key.
bool operator<(const StatsKey& a, const StatsKey& b);
bool operator==(const StatsKey& a, const StatsKey& b);
// Analyzer-internal counterpart of StatsKey where the stream and the peers
// are identified by numeric indices instead of names (presumably indices into
// the analyzer's internal tables — confirm against the analyzer code).
struct InternalStatsKey {
  InternalStatsKey(size_t stream, size_t sender, size_t receiver)
      : stream(stream), sender(sender), receiver(receiver) {}

  // Returns "stream=<stream>_sender=<sender>_receiver=<receiver>".
  std::string ToString() const;

  size_t stream;
  size_t sender;
  size_t receiver;
};

// Required to use InternalStatsKey as std::map key.
bool operator<(const InternalStatsKey& a, const InternalStatsKey& b);
bool operator==(const InternalStatsKey& a, const InternalStatsKey& b);
struct DefaultVideoQualityAnalyzerOptions {
// Tells DefaultVideoQualityAnalyzer if heavy metrics like PSNR and SSIM have
// to be computed or not.

View File

@ -0,0 +1,85 @@
/*
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h"
#include "api/units/timestamp.h"
#include "rtc_base/checks.h"
#include "rtc_base/strings/string_builder.h"
namespace webrtc {
namespace webrtc_pc_e2e {
namespace {
constexpr int kMicrosPerSecond = 1000000;
} // namespace
// Registers a single event that happened at `event_time`.
void RateCounter::AddEvent(Timestamp event_time) {
  // The first event anchors the start of the measurement interval.
  if (event_first_time_.IsMinusInfinity())
    event_first_time_ = event_time;
  event_last_time_ = event_time;
  ++event_count_;
}
// Returns the average number of events per second over the interval between
// the first and the last registered event. Must not be called while IsEmpty()
// returns true.
double RateCounter::GetEventsPerSecond() const {
  RTC_DCHECK(!IsEmpty());
  // Work in microseconds (Timestamp resolution) so that intervals shorter
  // than one second are still handled correctly, then scale back to seconds.
  const double interval_us = (event_last_time_ - event_first_time_).us();
  return static_cast<double>(event_count_) / interval_us * kMicrosPerSecond;
}
std::string StatsKey::ToString() const {
rtc::StringBuilder out;
out << stream_label << "_" << sender << "_" << receiver;
return out.str();
}
// Strict weak ordering for std::map: compare stream_label, then sender, then
// receiver.
bool operator<(const StatsKey& a, const StatsKey& b) {
  if (a.stream_label != b.stream_label)
    return a.stream_label < b.stream_label;
  if (a.sender != b.sender)
    return a.sender < b.sender;
  return a.receiver < b.receiver;
}
// Two keys are equal iff all three fields match.
bool operator==(const StatsKey& a, const StatsKey& b) {
  if (a.stream_label != b.stream_label)
    return false;
  if (a.sender != b.sender)
    return false;
  return a.receiver == b.receiver;
}
std::string InternalStatsKey::ToString() const {
rtc::StringBuilder out;
out << "stream=" << stream << "_sender=" << sender
<< "_receiver=" << receiver;
return out.str();
}
// Strict weak ordering for std::map: compare stream, then sender, then
// receiver.
bool operator<(const InternalStatsKey& a, const InternalStatsKey& b) {
  if (a.stream != b.stream)
    return a.stream < b.stream;
  if (a.sender != b.sender)
    return a.sender < b.sender;
  return a.receiver < b.receiver;
}
// Two keys are equal iff all three indices match.
bool operator==(const InternalStatsKey& a, const InternalStatsKey& b) {
  if (a.stream != b.stream)
    return false;
  if (a.sender != b.sender)
    return false;
  return a.receiver == b.receiver;
}
} // namespace webrtc_pc_e2e
} // namespace webrtc

View File

@ -0,0 +1,185 @@
/*
* Copyright (c) 2021 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_SHARED_OBJECTS_H_
#define TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_SHARED_OBJECTS_H_
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "api/numerics/samples_stats_counter.h"
#include "api/units/timestamp.h"
namespace webrtc {
namespace webrtc_pc_e2e {
// WebRTC will request a key frame after 3 seconds if no frames were received.
// We assume max frame rate ~60 fps, so 270 frames will cover the longest
// freeze that can happen without triggering a key frame request.
constexpr size_t kDefaultMaxFramesInFlightPerStream = 270;

// Counts events together with the timestamps of the first and last one, so
// that the average event rate over the observed interval can be computed.
class RateCounter {
 public:
  // Registers an event that happened at `event_time`.
  void AddEvent(Timestamp event_time);

  // True when no events were added, or when every added event carried the
  // exact same timestamp (first == last).
  bool IsEmpty() const { return event_first_time_ == event_last_time_; }

  // Average number of events per second over the observed interval.
  // Must not be called while IsEmpty() returns true.
  double GetEventsPerSecond() const;

 private:
  Timestamp event_first_time_ = Timestamp::MinusInfinity();
  Timestamp event_last_time_ = Timestamp::MinusInfinity();
  int64_t event_count_ = 0;
};
// Per-stage frame counters for the WebRTC video pipeline, from capture
// through rendering.
struct FrameCounters {
  // Count of frames that were passed into the WebRTC pipeline by the video
  // stream source.
  int64_t captured = 0;
  // Count of frames that reached the video encoder.
  int64_t pre_encoded = 0;
  // Count of encoded images that were produced by the encoder for all
  // requested spatial layers and simulcast streams.
  int64_t encoded = 0;
  // Count of encoded images received in the decoder for all requested spatial
  // layers and simulcast streams.
  int64_t received = 0;
  // Count of frames that were produced by the decoder.
  int64_t decoded = 0;
  // Count of frames that went out from the WebRTC pipeline to the video sink.
  int64_t rendered = 0;
  // Count of frames that were dropped at any point between capturing and
  // rendering.
  int64_t dropped = 0;
};
// Contains information about the codec that was used for encoding or decoding
// the stream.
struct StreamCodecInfo {
  // Codec implementation name.
  std::string codec_name;
  // Id of the first frame for which this codec was used.
  // Zero-initialized so a default-constructed instance has no indeterminate
  // values (uint16_t members were previously left uninitialized).
  uint16_t first_frame_id = 0;
  // Id of the last frame for which this codec was used.
  uint16_t last_frame_id = 0;
  // Timestamp when the first frame was handled by the encoder/decoder.
  Timestamp switched_on_at = Timestamp::PlusInfinity();
  // Timestamp when this codec was used for the last time.
  Timestamp switched_from_at = Timestamp::PlusInfinity();
};
// Accumulated quality statistics for a single video stream as seen by one
// receiver.
struct StreamStats {
  explicit StreamStats(Timestamp stream_started_time)
      : stream_started_time(stream_started_time) {}

  // The time when the first frame of this stream was captured.
  Timestamp stream_started_time;

  SamplesStatsCounter psnr;
  SamplesStatsCounter ssim;
  // Time from frame encoded (time point on exit from encoder) to the
  // encoded image received in decoder (time point on entrance to decoder).
  SamplesStatsCounter transport_time_ms;
  // Time from when the frame was captured on the device to when it was
  // displayed on the device.
  SamplesStatsCounter total_delay_incl_transport_ms;
  // Time between frames coming out of the renderer.
  SamplesStatsCounter time_between_rendered_frames_ms;
  RateCounter encode_frame_rate;
  SamplesStatsCounter encode_time_ms;
  SamplesStatsCounter decode_time_ms;
  // Time from when the last packet of a frame is received until the frame is
  // sent to the renderer.
  SamplesStatsCounter receive_to_render_time_ms;
  // Max frames skipped between two nearest rendered frames.
  SamplesStatsCounter skipped_between_rendered;
  // In the next 2 metrics a freeze is a pause that is longer than the maximum
  // of:
  //  1. 150 ms
  //  2. 3 * average time between two sequential frames.
  // Item 1 will cover high fps video and is a duration that is noticeable by
  // the human eye. Item 2 will cover low fps video like screen sharing.
  // Freeze duration.
  SamplesStatsCounter freeze_time_ms;
  // Mean time between one freeze end and the next freeze start.
  SamplesStatsCounter time_between_freezes_ms;
  SamplesStatsCounter resolution_of_rendered_frame;
  SamplesStatsCounter target_encode_bitrate;

  int64_t total_encoded_images_payload = 0;
  int64_t dropped_by_encoder = 0;
  int64_t dropped_before_encoder = 0;

  // Vector of encoders used for this stream by the sending client.
  std::vector<StreamCodecInfo> encoders;
  // Vector of decoders used for this stream by the receiving client.
  std::vector<StreamCodecInfo> decoders;
};
// Statistics about the analyzer's own operation rather than video quality.
struct AnalyzerStats {
  // Size of the analyzer's internal comparisons queue, measured when a new
  // element is added to the queue.
  SamplesStatsCounter comparisons_queue_size;
  // Number of performed comparisons of 2 video frames from captured and
  // rendered streams.
  int64_t comparisons_done = 0;
  // Number of cpu overloaded comparisons. A comparison is cpu overloaded if it
  // is queued when there are too many not yet processed comparisons in the
  // queue. An overloaded comparison doesn't include metrics like SSIM and PSNR
  // that require heavy computations.
  int64_t cpu_overloaded_comparisons_done = 0;
  // Number of memory overloaded comparisons. A comparison is memory overloaded
  // if it is queued when its captured frame was already removed due to high
  // memory usage for that video stream.
  int64_t memory_overloaded_comparisons_done = 0;
  // Count of frames in flight in the analyzer, measured when a new comparison
  // is added and after the analyzer was stopped.
  SamplesStatsCounter frames_in_flight_left_count;
};
// Public identifier of a stream's stats: which stream, sent by which peer,
// as received by which peer.
struct StatsKey {
  StatsKey(std::string stream_label, std::string sender, std::string receiver)
      : stream_label(std::move(stream_label)),
        sender(std::move(sender)),
        receiver(std::move(receiver)) {}

  // Returns the key formatted as "<stream_label>_<sender>_<receiver>".
  std::string ToString() const;

  // Label of the video stream to which these stats belong.
  std::string stream_label;
  // Name of the peer which sends this stream.
  std::string sender;
  // Name of the peer on which the stream was received.
  std::string receiver;
};

// Required to use StatsKey as std::map key.
bool operator<(const StatsKey& a, const StatsKey& b);
bool operator==(const StatsKey& a, const StatsKey& b);
// Analyzer-internal counterpart of StatsKey where the stream and the peers
// are identified by numeric indices instead of names (presumably indices into
// the analyzer's internal tables — confirm against the analyzer code).
struct InternalStatsKey {
  InternalStatsKey(size_t stream, size_t sender, size_t receiver)
      : stream(stream), sender(sender), receiver(receiver) {}

  // Returns "stream=<stream>_sender=<sender>_receiver=<receiver>".
  std::string ToString() const;

  size_t stream;
  size_t sender;
  size_t receiver;
};

// Required to use InternalStatsKey as std::map key.
bool operator<(const InternalStatsKey& a, const InternalStatsKey& b);
bool operator==(const InternalStatsKey& a, const InternalStatsKey& b);
} // namespace webrtc_pc_e2e
} // namespace webrtc
#endif // TEST_PC_E2E_ANALYZER_VIDEO_DEFAULT_VIDEO_QUALITY_ANALYZER_SHARED_OBJECTS_H_

View File

@ -8,6 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h"
#include <algorithm>
#include <map>
#include <memory>
@ -24,7 +26,7 @@
#include "rtc_tools/frame_analyzer/video_geometry_aligner.h"
#include "system_wrappers/include/sleep.h"
#include "test/gtest.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h"
namespace webrtc {
namespace webrtc_pc_e2e {

View File

@ -23,6 +23,7 @@
#include "test/gtest.h"
#include "test/pc/e2e/analyzer/audio/default_audio_quality_analyzer.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer.h"
#include "test/pc/e2e/analyzer/video/default_video_quality_analyzer_shared_objects.h"
#include "test/pc/e2e/stats_based_network_quality_metrics_reporter.h"
#include "test/testsupport/file_utils.h"