Add ability to export internal state of SamplesStatsCounter.

Add the ability to export the internal state of SamplesStatsCounter so
that the data can later be plotted.

Bug: webrtc:10138
Change-Id: I5aae5b7dea2989e9f82820933a9ab6f21db17556
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/152542
Commit-Queue: Artem Titov <titovartem@webrtc.org>
Reviewed-by: Patrik Höglund <phoglund@webrtc.org>
Reviewed-by: Ilya Nikolaevskiy <ilnik@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#29173}
Author: Artem Titov
Date: 2019-09-12 20:30:54 +02:00
Committed by: Commit Bot
Parent: cc46b10cd0
Commit: 087be5cfd4

5 changed files with 107 additions and 5 deletions

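Taken together, the change lets a test hand the whole SamplesStatsCounter to PrintResult() instead of a precomputed mean and error, and the logger keeps the counter so its individual timed samples can be dumped later. A minimal sketch of the intended usage (assuming SamplesStatsCounter::AddSample(double) from rtc_numerics and the usual header paths; the metric and trace names are made up):

  #include "rtc_base/numerics/samples_stats_counter.h"
  #include "test/testsupport/perf_test.h"

  void ReportEncodeTime() {
    webrtc::SamplesStatsCounter counter;
    counter.AddSample(11.0);  // Each sample is timestamped internally;
    counter.AddSample(12.5);  // GetTimedSamples() exposes these timed
    counter.AddSample(10.8);  // values for the plottable export.
    // Prints mean and standard deviation as before, and additionally
    // registers the counter for PrintPlottableResults().
    webrtc::test::PrintResult("encode_time", /*modifier=*/"", "alice",
                              counter, "ms", /*important=*/false);
  }
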
View File

@@ -170,6 +170,7 @@ rtc_source_set("perf_test") {
     "../api:array_view",
     "../rtc_base:checks",
     "../rtc_base:rtc_base_approved",
+    "../rtc_base:rtc_numerics",
   ]
 }

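The new ../rtc_base:rtc_numerics dependency is what brings webrtc::SamplesStatsCounter into the perf_test target.
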
View File

@@ -660,11 +660,8 @@ void DefaultVideoQualityAnalyzer::ReportResult(
     const std::string& test_case_name,
     const SamplesStatsCounter& counter,
     const std::string& unit) {
-  test::PrintResultMeanAndError(
-      metric_name, /*modifier=*/"", test_case_name,
-      counter.IsEmpty() ? 0 : counter.GetAverage(),
-      counter.IsEmpty() ? 0 : counter.GetStandardDeviation(), unit,
-      /*important=*/false);
+  test::PrintResult(metric_name, /*modifier=*/"", test_case_name, counter, unit,
+                    /*important=*/false);
 }
 
 std::string DefaultVideoQualityAnalyzer::GetTestCaseName(

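The analyzer no longer flattens the counter to a mean and standard deviation itself: the new PrintResult() overload (added in perf_test.cc below) reports the same aggregates and, in addition, retains the counter so its raw samples can be exported for plotting.
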
View File

@@ -70,6 +70,14 @@ ABSL_FLAG(
     "https://github.com/catapult-project/catapult/blob/master/dashboard/docs/"
     "data-format.md.");
+
+constexpr char kPlotAllMetrics[] = "all";
+ABSL_FLAG(std::vector<std::string>,
+          plot,
+          {},
+          "List of metrics that should be exported for plotting (if they are "
+          "available). Example: psnr,ssim,encode_time. To plot all available "
+          "metrics pass 'all' as flag value.");
 #endif
 
 ABSL_FLAG(bool, logs, true, "print logs to stderr");
@@ -166,6 +174,14 @@ class TestMainImpl : public TestMain {
     if (!chartjson_result_file.empty()) {
       webrtc::test::WritePerfResults(chartjson_result_file);
     }
+    std::vector<std::string> metrics_to_plot = absl::GetFlag(FLAGS_plot);
+    if (!metrics_to_plot.empty()) {
+      if (metrics_to_plot.size() == 1 &&
+          metrics_to_plot[0] == kPlotAllMetrics) {
+        metrics_to_plot.clear();
+      }
+      webrtc::test::PrintPlottableResults(metrics_to_plot);
+    }
 
     std::string result_filename =
         absl::GetFlag(FLAGS_isolated_script_test_output);

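With this wiring, running a test binary with --plot=psnr,ssim,encode_time exports exactly those metrics once the tests finish, while --plot=all clears the filter so that every available metric is exported.
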
View File

@@ -15,6 +15,7 @@
 #include <cmath>
 #include <fstream>
 #include <map>
+#include <set>
 #include <sstream>
 #include <vector>
@@ -32,6 +33,13 @@ void OutputListToStream(std::ostream* ostream, const Container& values) {
   }
 }
 
+struct PlottableCounter {
+  std::string graph_name;
+  std::string trace_name;
+  webrtc::SamplesStatsCounter counter;
+  std::string units;
+};
+
 class PerfResultsLogger {
  public:
   PerfResultsLogger() : crit_(), output_(stdout), graphs_() {}
@@ -43,6 +51,19 @@ class PerfResultsLogger {
     rtc::CritScope lock(&crit_);
     output_ = output;
   }
+  void LogResult(const std::string& graph_name,
+                 const std::string& trace_name,
+                 const webrtc::SamplesStatsCounter& counter,
+                 const std::string& units,
+                 const bool important) {
+    LogResultMeanAndError(
+        graph_name, trace_name, counter.IsEmpty() ? 0 : counter.GetAverage(),
+        counter.IsEmpty() ? 0 : counter.GetStandardDeviation(), units,
+        important);
+    rtc::CritScope lock(&crit_);
+    plottable_counters_.push_back({graph_name, trace_name, counter, units});
+  }
+
   void LogResult(const std::string& graph_name,
                  const std::string& trace_name,
                  const double value,
@@ -116,6 +137,41 @@ class PerfResultsLogger {
     graphs_[graph_name].push_back(json_stream.str());
   }
   std::string ToJSON() const;
+
+  void PrintPlottableCounters(
+      const std::vector<std::string>& desired_graphs_raw) const {
+    std::set<std::string> desired_graphs(desired_graphs_raw.begin(),
+                                         desired_graphs_raw.end());
+    rtc::CritScope lock(&crit_);
+    for (auto& counter : plottable_counters_) {
+      if (!desired_graphs.empty()) {
+        auto it = desired_graphs.find(counter.graph_name);
+        if (it == desired_graphs.end()) {
+          continue;
+        }
+      }
+      std::ostringstream value_stream;
+      value_stream.precision(8);
+      value_stream << R"({"graph_name":")" << counter.graph_name << R"(",)";
+      value_stream << R"("trace_name":")" << counter.trace_name << R"(",)";
+      value_stream << R"("units":")" << counter.units << R"(",)";
+      if (!counter.counter.IsEmpty()) {
+        value_stream << R"("mean":)" << counter.counter.GetAverage() << ',';
+        value_stream << R"("std":)" << counter.counter.GetStandardDeviation()
+                     << ',';
+      }
+      value_stream << R"("samples":[)";
+      const char* sep = "";
+      for (const auto& sample : counter.counter.GetTimedSamples()) {
+        value_stream << sep << R"({"time":)" << sample.time.us() << ','
+                     << R"("value":)" << sample.value << '}';
+        sep = ",";
+      }
+      value_stream << "]}";
+      fprintf(output_, "PLOTTABLE_DATA: %s\n", value_stream.str().c_str());
+    }
+  }
+
  private:
   void LogResultsImpl(const std::string& graph_name,
@@ -139,6 +195,7 @@ class PerfResultsLogger {
   FILE* output_ RTC_GUARDED_BY(&crit_);
   std::map<std::string, std::vector<std::string>> graphs_
       RTC_GUARDED_BY(&crit_);
+  std::vector<PlottableCounter> plottable_counters_ RTC_GUARDED_BY(&crit_);
 };
 
 std::string PerfResultsLogger::ToJSON() const {
@@ -181,6 +238,10 @@ std::string GetPerfResultsJSON() {
   return GetPerfResultsLogger().ToJSON();
 }
 
+void PrintPlottableResults(const std::vector<std::string>& desired_graphs) {
+  GetPerfResultsLogger().PrintPlottableCounters(desired_graphs);
+}
+
 void WritePerfResults(const std::string& output_path) {
   std::string json_results = GetPerfResultsJSON();
   std::fstream json_file(output_path, std::fstream::out);
@@ -198,6 +259,16 @@ void PrintResult(const std::string& measurement,
                  important);
 }
 
+void PrintResult(const std::string& measurement,
+                 const std::string& modifier,
+                 const std::string& trace,
+                 const SamplesStatsCounter& counter,
+                 const std::string& units,
+                 const bool important) {
+  GetPerfResultsLogger().LogResult(measurement + modifier, trace, counter,
+                                   units, important);
+}
+
 void PrintResultMeanAndError(const std::string& measurement,
                              const std::string& modifier,
                              const std::string& trace,

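Each counter registered through the new LogResult() thus becomes a single PLOTTABLE_DATA line on stdout. An illustrative line (all numbers invented) would be:

  PLOTTABLE_DATA: {"graph_name":"psnr","trace_name":"alice","units":"dB","mean":36.5,"std":1.25,"samples":[{"time":1568314254000000,"value":36.1},{"time":1568314254033000,"value":37}]}
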
View File

@@ -13,8 +13,10 @@
 #include <sstream>
 #include <string>
 #include <vector>
 
+#include "api/array_view.h"
+#include "rtc_base/numerics/samples_stats_counter.h"
 
 namespace webrtc {
 namespace test {
@@ -61,10 +63,25 @@ void PrintResultList(const std::string& measurement,
                      const std::string& units,
                      bool important);
 
+// Like PrintResult(), but prints the (mean, standard deviation) of the stats
+// counter. Also adds the specified metric to the plottable metrics output.
+void PrintResult(const std::string& measurement,
+                 const std::string& modifier,
+                 const std::string& trace,
+                 const SamplesStatsCounter& counter,
+                 const std::string& units,
+                 const bool important);
+
 // Returns all perf results to date in a JSON string formatted as described in
 // https://github.com/catapult-project/catapult/blob/master/dashboard/docs/data-format.md
 std::string GetPerfResultsJSON();
 
+// Prints plottable metrics to stdout for further post-processing.
+// |desired_graphs| is the list of metrics that should be plotted. If it is
+// empty, all available metrics will be plotted. Entries in |desired_graphs|
+// that are not available are skipped.
+void PrintPlottableResults(const std::vector<std::string>& desired_graphs);
+
 // Writes the JSON representation of the perf results returned by
 // GetPerfResultsJSON() to the file in output_path.
 void WritePerfResults(const std::string& output_path);
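
Since every exported line carries the PLOTTABLE_DATA: prefix and a self-contained JSON payload, a downstream plotting script presumably only needs to filter stdout for that prefix and parse the remainder; the existing chartjson output written by WritePerfResults() is unaffected.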