Implement histogram perf results writer.

This will be used by WebRTC tests. It converts results in exactly the
same way as our downstream implementation (histogram_util).

This implementation should be pretty feature complete, or at least
enough to start testing the end-to-end flow. I will set up some
experimental recipe code and see if this actually makes it into the
dashboard.

Note: needs some catapult changes to land first and be rolled
into Chromium, and then WebRTC.

Bug: chromium:1029452
Change-Id: I939046929652fc27b8fcb18af54bde22886d9228
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/166172
Commit-Queue: Patrik Höglund <phoglund@webrtc.org>
Reviewed-by: Artem Titov <titovartem@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#30436}
This commit is contained in:
Patrik Höglund
2020-01-31 11:22:42 +01:00
committed by Commit Bot
parent bfe3ef8feb
commit 7f585b3c12
10 changed files with 491 additions and 21 deletions

View File

@ -226,7 +226,6 @@ rtc_library("perf_test") {
"testsupport/perf_test.h", "testsupport/perf_test.h",
"testsupport/perf_test_graphjson_writer.cc", "testsupport/perf_test_graphjson_writer.cc",
"testsupport/perf_test_graphjson_writer.h", "testsupport/perf_test_graphjson_writer.h",
"testsupport/perf_test_histogram_writer.cc",
"testsupport/perf_test_histogram_writer.h", "testsupport/perf_test_histogram_writer.h",
"testsupport/perf_test_result_writer.h", "testsupport/perf_test_result_writer.h",
] ]
@ -234,9 +233,19 @@ rtc_library("perf_test") {
"../api:array_view", "../api:array_view",
"../rtc_base:checks", "../rtc_base:checks",
"../rtc_base:criticalsection", "../rtc_base:criticalsection",
"../rtc_base:logging",
"../rtc_base:rtc_numerics", "../rtc_base:rtc_numerics",
"//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:flag",
] ]
if (rtc_enable_protobuf) {
sources += [ "testsupport/perf_test_histogram_writer.cc" ]
deps += [
"//third_party/catapult/tracing/tracing:histogram",
"//third_party/catapult/tracing/tracing:reserved_infos",
]
} else {
sources += [ "testsupport/perf_test_histogram_writer_no_protobuf.cc" ]
}
} }
if (is_ios) { if (is_ios) {
@ -502,6 +511,11 @@ if (rtc_include_tests) {
"testsupport/yuv_frame_writer_unittest.cc", "testsupport/yuv_frame_writer_unittest.cc",
] ]
if (rtc_enable_protobuf) {
sources += [ "testsupport/perf_test_histogram_writer_unittest.cc" ]
deps += [ "//third_party/catapult/tracing/tracing:histogram" ]
}
data = test_support_unittests_resources data = test_support_unittests_resources
if (is_android) { if (is_android) {
deps += [ "//testing/android/native_test:native_test_support" ] deps += [ "//testing/android/native_test:native_test_support" ]

4
test/testsupport/DEPS Normal file
View File

@ -0,0 +1,4 @@
include_rules = [
# Histogram C++ API, used by perf tests.
"+third_party/catapult/tracing/tracing/value"
]

View File

@ -17,7 +17,6 @@
#include <sstream> #include <sstream>
#include <vector> #include <vector>
#include "absl/flags/flag.h"
#include "rtc_base/checks.h" #include "rtc_base/checks.h"
#include "rtc_base/critical_section.h" #include "rtc_base/critical_section.h"
#include "test/testsupport/perf_test_graphjson_writer.h" #include "test/testsupport/perf_test_graphjson_writer.h"
@ -213,8 +212,8 @@ void SetPerfResultsOutput(FILE* output) {
GetResultsLinePrinter().SetOutput(output); GetResultsLinePrinter().SetOutput(output);
} }
std::string GetPerfResultsJSON() { std::string GetPerfResults() {
return GetPerfWriter().ToJSON(); return GetPerfWriter().Serialize();
} }
void PrintPlottableResults(const std::vector<std::string>& desired_graphs) { void PrintPlottableResults(const std::vector<std::string>& desired_graphs) {
@ -222,10 +221,10 @@ void PrintPlottableResults(const std::vector<std::string>& desired_graphs) {
} }
void WritePerfResults(const std::string& output_path) { void WritePerfResults(const std::string& output_path) {
std::string json_results = GetPerfResultsJSON(); std::string results = GetPerfResults();
std::fstream json_file(output_path, std::fstream::out); std::fstream output(output_path, std::fstream::out);
json_file << json_results; output << results;
json_file.close(); output.close();
} }
void PrintResult(const std::string& measurement, void PrintResult(const std::string& measurement,

View File

@ -15,6 +15,7 @@
#include <string> #include <string>
#include <vector> #include <vector>
#include "absl/flags/flag.h"
#include "api/array_view.h" #include "api/array_view.h"
#include "rtc_base/numerics/samples_stats_counter.h" #include "rtc_base/numerics/samples_stats_counter.h"
@ -88,9 +89,13 @@ void PrintResult(const std::string& measurement,
const bool important, const bool important,
ImproveDirection improve_direction = ImproveDirection::kNone); ImproveDirection improve_direction = ImproveDirection::kNone);
// Returns all perf results to date in a JSON string formatted as described in // If --write_histogram_proto_json=false, this returns all perf results to date
// https://github.com/catapult-project/catapult/blob/master/dashboard/docs/data-format.md // in a JSON string formatted as described in dashboard/docs/data-format.md
std::string GetPerfResultsJSON(); // in https://github.com/catapult-project/catapult/blob/master/. If
// --write_histogram_proto_json=true, returns a string-encoded proto as
// described in tracing/tracing/proto/histogram.proto in
// https://github.com/catapult-project/catapult/blob/master/.
std::string GetPerfResults();
// Print into stdout plottable metrics for further post processing. // Print into stdout plottable metrics for further post processing.
// |desired_graphs| - list of metrics, that should be plotted. If empty - all // |desired_graphs| - list of metrics, that should be plotted. If empty - all
@ -98,18 +103,20 @@ std::string GetPerfResultsJSON();
// they will be skipped. // they will be skipped.
void PrintPlottableResults(const std::vector<std::string>& desired_graphs); void PrintPlottableResults(const std::vector<std::string>& desired_graphs);
// Writes the JSON representation of the perf results returned by // Call GetPerfResults() and write its output to a file.
// GetPerfResultsJSON() to the file in output_path.
void WritePerfResults(const std::string& output_path); void WritePerfResults(const std::string& output_path);
// By default, perf results are printed to stdout. Set the FILE* to where they // By default, perf results are printed to stdout. Set the FILE* to where they
// should be printing instead. // should be printing instead.
void SetPerfResultsOutput(FILE* output); void SetPerfResultsOutput(FILE* output);
// You shouldn't use this function. It's only used to test the functions above. // Only for use by tests.
void ClearPerfResults(); void ClearPerfResults();
} // namespace test } // namespace test
} // namespace webrtc } // namespace webrtc
// Only for use by tests.
ABSL_DECLARE_FLAG(bool, write_histogram_proto_json);
#endif // TEST_TESTSUPPORT_PERF_TEST_H_ #endif // TEST_TESTSUPPORT_PERF_TEST_H_

View File

@ -110,7 +110,7 @@ class PerfTestGraphJsonWriter : public PerfTestResultWriter {
graphs_[graph_name].push_back(json_stream.str()); graphs_[graph_name].push_back(json_stream.str());
} }
std::string ToJSON() const { std::string Serialize() const {
std::ostringstream json_stream; std::ostringstream json_stream;
json_stream << R"({"format_version":"1.0",)"; json_stream << R"({"format_version":"1.0",)";
json_stream << R"("charts":{)"; json_stream << R"("charts":{)";

View File

@ -13,15 +13,180 @@
#include <stdlib.h> #include <stdlib.h>
#include <map> #include <map>
#include <memory>
#include "rtc_base/critical_section.h"
#include "rtc_base/logging.h"
#include "third_party/catapult/tracing/tracing/value/diagnostics/reserved_infos.h"
#include "third_party/catapult/tracing/tracing/value/histogram.h"
namespace webrtc { namespace webrtc {
namespace test { namespace test {
namespace {} // namespace namespace {
namespace proto = catapult::tracing::tracing::proto;
// Wraps |string| in double quotes so it can be embedded as a JSON string
// value (the stories diagnostic stores JSON-encoded values).
// NOTE: does not escape embedded quotes or backslashes; callers must pass
// strings free of such characters (true for WebRTC story names).
// Takes the argument by const reference to avoid a needless copy.
std::string AsJsonString(const std::string& string) {
  return "\"" + string + "\"";
}
// Accumulates perf results as catapult histograms, one per unique
// (measurement, story) pair, and serializes them as a binary HistogramSet
// proto. All public methods are thread-safe (state guarded by |crit_|).
class PerfTestHistogramWriter : public PerfTestResultWriter {
 public:
  PerfTestHistogramWriter() : crit_() {}

  void ClearResults() override {
    rtc::CritScope lock(&crit_);
    histograms_.clear();
  }

  void LogResult(const std::string& graph_name,
                 const std::string& trace_name,
                 const double value,
                 const std::string& units,
                 const bool important,
                 ImproveDirection improve_direction) override {
    // |important| has no counterpart in the histogram format; drop it.
    (void)important;
    AddSample(graph_name, trace_name, value, units, improve_direction);
  }

  void LogResultMeanAndError(const std::string& graph_name,
                             const std::string& trace_name,
                             const double mean,
                             const double error,
                             const std::string& units,
                             const bool important,
                             ImproveDirection improve_direction) override {
    // Histograms derive their statistics from raw samples, so an externally
    // computed stddev cannot be represented; only the mean is recorded as a
    // single sample.
    RTC_LOG(LS_WARNING) << "Discarding stddev, not supported by histograms";
    (void)error;
    (void)important;
    AddSample(graph_name, trace_name, mean, units, improve_direction);
  }

  void LogResultList(const std::string& graph_name,
                     const std::string& trace_name,
                     const rtc::ArrayView<const double> values,
                     const std::string& units,
                     const bool important,
                     ImproveDirection improve_direction) override {
    (void)important;
    for (double value : values) {
      AddSample(graph_name, trace_name, value, units, improve_direction);
    }
  }

  // Returns all accumulated histograms serialized as a binary
  // proto::HistogramSet string.
  std::string Serialize() const override {
    proto::HistogramSet histogram_set;
    rtc::CritScope lock(&crit_);
    for (const auto& histogram : histograms_) {
      std::unique_ptr<proto::Histogram> proto = histogram.second->toProto();
      histogram_set.mutable_histograms()->AddAllocated(proto.release());
    }
    std::string output;
    bool ok = histogram_set.SerializeToString(&output);
    RTC_DCHECK(ok) << "Failed to serialize histogram set to string";
    return output;
  }

 private:
  // Appends one sample, creating the histogram for this
  // (measurement, story) pair on first use.
  void AddSample(const std::string& original_graph_name,
                 const std::string& trace_name,
                 const double value,
                 const std::string& units,
                 ImproveDirection improve_direction) {
    // WebRTC annotates the units into the metric name when they are not
    // supported by the Histogram API.
    std::string graph_name = original_graph_name;
    if (units == "dB") {
      graph_name += "_dB";
    } else if (units == "fps") {
      graph_name += "_fps";
    } else if (units == "%") {
      graph_name += "_%";
    }

    // Lookup on graph name + trace name (or measurement + story in catapult
    // parlance). There should be several histograms with the same measurement
    // if they're for different stories.
    // NOTE(review): plain concatenation is ambiguous ("ab"+"c" == "a"+"bc");
    // acceptable for current WebRTC metric names, but worth a separator if
    // that ever changes.
    std::string measurement_and_story = graph_name + trace_name;
    rtc::CritScope lock(&crit_);
    auto it = histograms_.find(measurement_and_story);
    if (it == histograms_.end()) {
      proto::UnitAndDirection unit = ParseUnit(units, improve_direction);
      std::unique_ptr<catapult::HistogramBuilder> builder =
          std::make_unique<catapult::HistogramBuilder>(graph_name, unit);
      // Tag the histogram with its story so the dashboard can group on it.
      proto::Diagnostic stories;
      proto::GenericSet* generic_set = stories.mutable_generic_set();
      generic_set->add_values(AsJsonString(trace_name));
      builder->AddDiagnostic(catapult::kStoriesDiagnostic, stories);
      it = histograms_.emplace(measurement_and_story, std::move(builder))
               .first;
    }
    if (units == "bps") {
      // Bps has been interpreted as bits per second in WebRTC tests, but the
      // catapult unit is bytes per second.
      it->second->AddSample(value / 8);
    } else {
      it->second->AddSample(value);
    }
  }

  // Maps WebRTC unit strings onto catapult units; unrecognized units fall
  // back to UNITLESS with a warning. Does not touch member state.
  static proto::UnitAndDirection ParseUnit(const std::string& units,
                                           ImproveDirection improve_direction) {
    RTC_DCHECK(units.find('_') == std::string::npos)
        << "The unit_bigger|smallerIsBetter syntax isn't supported in WebRTC, "
           "use the enum instead.";
    proto::UnitAndDirection result;
    result.set_improvement_direction(ParseDirection(improve_direction));
    if (units == "bps") {
      result.set_unit(proto::BYTES_PER_SECOND);
    } else if (units == "dB") {
      result.set_unit(proto::UNITLESS);
    } else if (units == "fps") {
      result.set_unit(proto::HERTZ);
    } else if (units == "frames") {
      result.set_unit(proto::COUNT);
    } else if (units == "ms") {
      result.set_unit(proto::MS_BEST_FIT_FORMAT);
    } else if (units == "%") {
      result.set_unit(proto::UNITLESS);
    } else {
      proto::Unit unit = catapult::UnitFromJsonUnit(units);
      // UnitFromJsonUnit returns UNITLESS if it doesn't recognize the unit.
      if (unit == proto::UNITLESS && units != "unitless") {
        RTC_LOG(LS_WARNING) << "Unit " << units << " is unsupported.";
      }
      result.set_unit(unit);
    }
    return result;
  }

  static proto::ImprovementDirection ParseDirection(
      ImproveDirection improve_direction) {
    switch (improve_direction) {
      case ImproveDirection::kNone:
        return proto::NOT_SPECIFIED;
      case ImproveDirection::kSmallerIsBetter:
        return proto::SMALLER_IS_BETTER;
      case ImproveDirection::kBiggerIsBetter:
        return proto::BIGGER_IS_BETTER;
      default:
        RTC_NOTREACHED() << "Invalid enum value "
                         << static_cast<int>(improve_direction);
        // Bug fix: this function previously fell off the end of a non-void
        // function here in builds where RTC_NOTREACHED() compiles to a
        // no-op, which is undefined behavior.
        return proto::NOT_SPECIFIED;
    }
  }

  rtc::CriticalSection crit_;
  // Keyed on measurement + story concatenation; see AddSample().
  std::map<std::string, std::unique_ptr<catapult::HistogramBuilder>>
      histograms_ RTC_GUARDED_BY(&crit_);
};
} // namespace
// Factory for the protobuf-backed histogram writer; ownership of the
// returned writer passes to the caller. (Reconstructed from a garbled
// side-by-side diff render: the post-change implementation replaces the
// old "Not implemented" RTC_CHECK stub.)
PerfTestResultWriter* CreateHistogramWriter() {
  return new PerfTestHistogramWriter();
}
} // namespace test } // namespace test

View File

@ -0,0 +1,22 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "test/testsupport/perf_test_histogram_writer.h"
namespace webrtc {
namespace test {
// Stub used when rtc_enable_protobuf = false: histogram output requires
// protobuf serialization, so calling this is always a programming error.
PerfTestResultWriter* CreateHistogramWriter() {
  RTC_NOTREACHED() << "Cannot run perf tests with rtc_enable_protobuf = "
                      "false. Perf tests write results as protobufs.";
  // Bug fix: RTC_NOTREACHED() is a no-op in non-debug builds, so without
  // this return the function fell off the end of a non-void function
  // (undefined behavior). Also fixed the garbled error message ("Perf
  // write results").
  return nullptr;
}
} // namespace test
} // namespace webrtc

View File

@ -0,0 +1,197 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "test/testsupport/perf_test_histogram_writer.h"
#include <memory>
#include <string>
#include "test/gtest.h"
#include "third_party/catapult/tracing/tracing/value/histogram.h"
namespace webrtc {
namespace test {
namespace proto = catapult::tracing::tracing::proto;
TEST(PerfHistogramWriterUnittest, TestSimpleHistogram) {
  // Logging a single value must produce a parseable histogram set that
  // contains exactly one histogram.
  std::unique_ptr<PerfTestResultWriter> writer(CreateHistogramWriter());
  writer->LogResult("-", "-", 0, "ms", false, ImproveDirection::kNone);

  proto::HistogramSet parsed_set;
  EXPECT_TRUE(parsed_set.ParseFromString(writer->Serialize()))
      << "Expected valid histogram set";
  ASSERT_EQ(parsed_set.histograms_size(), 1);
}
TEST(PerfHistogramWriterUnittest, WritesSamplesAndUserStory) {
  // Log one result and verify that name, unit, direction, sample value and
  // the "stories" diagnostic all round-trip through serialization.
  std::unique_ptr<PerfTestResultWriter> writer(CreateHistogramWriter());
  writer->LogResult("measurement", "user_story", 15e7, "Hz", false,
                    ImproveDirection::kBiggerIsBetter);

  proto::HistogramSet parsed_set;
  parsed_set.ParseFromString(writer->Serialize());
  const proto::Histogram& hist = parsed_set.histograms(0);

  // Name, unit and improvement direction come straight from the call.
  EXPECT_EQ(hist.name(), "measurement");
  EXPECT_EQ(hist.unit().unit(), proto::HERTZ);
  EXPECT_EQ(hist.unit().improvement_direction(), proto::BIGGER_IS_BETTER);

  // The single sample is stored verbatim.
  EXPECT_EQ(hist.sample_values_size(), 1);
  EXPECT_EQ(hist.sample_values(0), 15e7);

  // The story lands in the "stories" diagnostic, JSON-quoted.
  EXPECT_EQ(hist.diagnostics().diagnostic_map().count("stories"), 1u);
  const proto::Diagnostic& stories_diagnostic =
      hist.diagnostics().diagnostic_map().at("stories");
  ASSERT_EQ(stories_diagnostic.generic_set().values_size(), 1);
  EXPECT_EQ(stories_diagnostic.generic_set().values(0), "\"user_story\"");
}
// Samples sharing both measurement and story accumulate in one histogram;
// a different story under the same measurement creates a second histogram.
TEST(PerfHistogramWriterUnittest, WritesOneHistogramPerMeasurementAndStory) {
  std::unique_ptr<PerfTestResultWriter> writer =
      std::unique_ptr<PerfTestResultWriter>(CreateHistogramWriter());
  // Two samples for (measurement, story1), one for (measurement, story2).
  writer->LogResult("measurement", "story1", 1, "ms", false,
                    ImproveDirection::kNone);
  writer->LogResult("measurement", "story1", 2, "ms", false,
                    ImproveDirection::kNone);
  writer->LogResult("measurement", "story2", 2, "ms", false,
                    ImproveDirection::kNone);
  proto::HistogramSet histogram_set;
  histogram_set.ParseFromString(writer->Serialize());
  ASSERT_EQ(histogram_set.histograms_size(), 2);
  // NOTE(review): indexing assumes the writer serializes histograms in a
  // deterministic order (story1 before story2) — confirm this is guaranteed
  // by the writer's internal ordering.
  const proto::Histogram& hist1 = histogram_set.histograms(0);
  const proto::Histogram& hist2 = histogram_set.histograms(1);
  // Both histograms carry the same measurement name; the story diagnostic
  // is what distinguishes them.
  EXPECT_EQ(hist1.name(), "measurement");
  EXPECT_EQ(hist2.name(), "measurement");
  const proto::Diagnostic& stories1 =
      hist1.diagnostics().diagnostic_map().at("stories");
  EXPECT_EQ(stories1.generic_set().values(0), "\"story1\"");
  EXPECT_EQ(hist1.sample_values_size(), 2);
  const proto::Diagnostic& stories2 =
      hist2.diagnostics().diagnostic_map().at("stories");
  EXPECT_EQ(stories2.generic_set().values(0), "\"story2\"");
  EXPECT_EQ(hist2.sample_values_size(), 1);
}
// LogResultMeanAndError stores only the mean; the error argument is
// discarded because histograms compute their own variance from samples.
TEST(PerfHistogramWriterUnittest, IgnoresError) {
  std::unique_ptr<PerfTestResultWriter> writer =
      std::unique_ptr<PerfTestResultWriter>(CreateHistogramWriter());
  writer->LogResultMeanAndError("-", "-", 17, 12345, "ms", false,
                                ImproveDirection::kNone);
  proto::HistogramSet histogram_set;
  histogram_set.ParseFromString(writer->Serialize());
  const proto::Histogram& hist1 = histogram_set.histograms(0);
  // A single sample of 17 gives mean 17 and zero variance; the supplied
  // error of 12345 must not appear anywhere in the output.
  EXPECT_EQ(hist1.running().mean(), 17);
  EXPECT_EQ(hist1.running().variance(), 0) << "The error should be ignored.";
}
TEST(PerfHistogramWriterUnittest, WritesDecibelIntoMeasurementName) {
  // dB has no catapult unit, so the writer folds it into the measurement
  // name and marks the histogram UNITLESS.
  std::unique_ptr<PerfTestResultWriter> writer(CreateHistogramWriter());
  writer->LogResult("measurement", "-", 0, "dB", false,
                    ImproveDirection::kNone);

  proto::HistogramSet parsed_set;
  parsed_set.ParseFromString(writer->Serialize());

  const proto::Histogram& hist = parsed_set.histograms(0);
  EXPECT_EQ(hist.unit().unit(), proto::UNITLESS)
      << "dB should map to unitless";
  EXPECT_EQ(hist.name(), "measurement_dB") << "measurement should be renamed";
}
// fps maps to the catapult HERTZ unit, and "_fps" is appended to the
// measurement name so the original unit stays visible on the dashboard.
TEST(PerfHistogramWriterUnittest, WritesFpsIntoMeasurementName) {
  std::unique_ptr<PerfTestResultWriter> writer =
      std::unique_ptr<PerfTestResultWriter>(CreateHistogramWriter());
  writer->LogResult("measurement", "-", 0, "fps", false,
                    ImproveDirection::kNone);
  proto::HistogramSet histogram_set;
  histogram_set.ParseFromString(writer->Serialize());
  const proto::Histogram& hist1 = histogram_set.histograms(0);
  EXPECT_EQ(hist1.unit().unit(), proto::HERTZ) << "fps should map to hertz";
  EXPECT_EQ(hist1.name(), "measurement_fps") << "measurement should be renamed";
}
// "%" maps to UNITLESS, and "_%" is appended to the measurement name so the
// original unit stays visible on the dashboard.
TEST(PerfHistogramWriterUnittest, WritesPercentIntoMeasurementName) {
  std::unique_ptr<PerfTestResultWriter> writer =
      std::unique_ptr<PerfTestResultWriter>(CreateHistogramWriter());
  writer->LogResult("measurement", "-", 0, "%", false, ImproveDirection::kNone);
  proto::HistogramSet histogram_set;
  histogram_set.ParseFromString(writer->Serialize());
  const proto::Histogram& hist1 = histogram_set.histograms(0);
  // Bug fix: the failure message previously said "percent should map to
  // hertz" — a copy-paste from the fps test; percent maps to UNITLESS.
  EXPECT_EQ(hist1.unit().unit(), proto::UNITLESS)
      << "percent should map to unitless";
  EXPECT_EQ(hist1.name(), "measurement_%") << "measurement should be renamed";
}
TEST(PerfHistogramWriterUnittest, BitsPerSecondIsConvertedToBytes) {
  // WebRTC logs "bps" as bits/second, but catapult stores bytes/second, so
  // the writer divides samples by eight on the way in.
  std::unique_ptr<PerfTestResultWriter> writer(CreateHistogramWriter());
  writer->LogResult("-", "-", 1024, "bps", false, ImproveDirection::kNone);

  proto::HistogramSet parsed_set;
  parsed_set.ParseFromString(writer->Serialize());

  const proto::Histogram& hist = parsed_set.histograms(0);
  EXPECT_EQ(hist.sample_values(0), 128) << "1024 bits = 128 bytes";
}
// Each ImproveDirection enum value maps onto the corresponding catapult
// improvement direction, independently of the unit.
TEST(PerfHistogramWriterUnittest, ParsesDirection) {
  std::unique_ptr<PerfTestResultWriter> writer =
      std::unique_ptr<PerfTestResultWriter>(CreateHistogramWriter());
  writer->LogResult("measurement1", "-", 0, "bps", false,
                    ImproveDirection::kBiggerIsBetter);
  writer->LogResult("measurement2", "-", 0, "frames", false,
                    ImproveDirection::kSmallerIsBetter);
  writer->LogResult("measurement3", "-", 0, "sigma", false,
                    ImproveDirection::kNone);
  proto::HistogramSet histogram_set;
  histogram_set.ParseFromString(writer->Serialize());
  // NOTE(review): indexing assumes histograms are serialized in the order
  // measurement1, measurement2, measurement3 — confirm the writer's output
  // order is deterministic.
  const proto::Histogram& hist1 = histogram_set.histograms(0);
  const proto::Histogram& hist2 = histogram_set.histograms(1);
  const proto::Histogram& hist3 = histogram_set.histograms(2);
  EXPECT_EQ(hist1.unit().unit(), proto::BYTES_PER_SECOND);
  EXPECT_EQ(hist1.unit().improvement_direction(), proto::BIGGER_IS_BETTER);
  EXPECT_EQ(hist2.unit().unit(), proto::COUNT);
  EXPECT_EQ(hist2.unit().improvement_direction(), proto::SMALLER_IS_BETTER);
  EXPECT_EQ(hist3.unit().unit(), proto::SIGMA);
  EXPECT_EQ(hist3.unit().improvement_direction(), proto::NOT_SPECIFIED);
}
} // namespace test
} // namespace webrtc

View File

@ -47,7 +47,7 @@ class PerfTestResultWriter {
const bool important, const bool important,
webrtc::test::ImproveDirection improve_direction) = 0; webrtc::test::ImproveDirection improve_direction) = 0;
virtual std::string ToJSON() const = 0; virtual std::string Serialize() const = 0;
}; };
} // namespace test } // namespace test

View File

@ -17,6 +17,11 @@
#include "test/gtest.h" #include "test/gtest.h"
#include "test/testsupport/rtc_expect_death.h" #include "test/testsupport/rtc_expect_death.h"
#if WEBRTC_ENABLE_PROTOBUF
#include "third_party/catapult/tracing/tracing/value/histogram.h"
namespace proto = catapult::tracing::tracing::proto;
#endif
namespace { namespace {
const char* kJsonExpected = R"({ const char* kJsonExpected = R"({
@ -98,15 +103,72 @@ TEST_F(PerfTest, TestGetPerfResultsJSON) {
const double kListOfScalars[] = {1, 2, 3}; const double kListOfScalars[] = {1, 2, 3};
PrintResultList("foo", "bar", "baz_vl", kListOfScalars, "units", false); PrintResultList("foo", "bar", "baz_vl", kListOfScalars, "units", false);
EXPECT_EQ(RemoveSpaces(kJsonExpected), GetPerfResultsJSON()); EXPECT_EQ(RemoveSpaces(kJsonExpected), GetPerfResults());
} }
TEST_F(PerfTest, TestClearPerfResults) { TEST_F(PerfTest, TestClearPerfResults) {
PrintResult("measurement", "modifier", "trace", 42, "units", false); PrintResult("measurement", "modifier", "trace", 42, "units", false);
ClearPerfResults(); ClearPerfResults();
EXPECT_EQ(R"({"format_version":"1.0","charts":{}})", GetPerfResultsJSON()); EXPECT_EQ(R"({"format_version":"1.0","charts":{}})", GetPerfResults());
} }
#if WEBRTC_ENABLE_PROTOBUF
// End-to-end: with --write_histogram_proto_json=true, GetPerfResults()
// returns a serialized HistogramSet proto instead of chart JSON.
TEST_F(PerfTest, TestGetPerfResultsHistograms) {
  // Save and restore the flag so this test doesn't leak state into others.
  bool original_flag = absl::GetFlag(FLAGS_write_histogram_proto_json);
  absl::SetFlag(&FLAGS_write_histogram_proto_json, true);
  PrintResult("measurement", "_modifier", "story_1", 42, "ms", false);
  PrintResult("foo", "bar", "story_1", 7, "sigma", true);
  // Note: the error will be ignored, not supported by histograms.
  PrintResultMeanAndError("foo", "bar", "story_1", 1, 2000, "sigma", false);
  const double kListOfScalars[] = {1, 2, 3};
  PrintResultList("foo", "bar", "story_1", kListOfScalars, "sigma", false);
  proto::HistogramSet histogram_set;
  EXPECT_TRUE(histogram_set.ParseFromString(GetPerfResults()))
      << "Expected valid histogram set";
  ASSERT_EQ(histogram_set.histograms_size(), 2)
      << "Should be two histograms: foobar and measurement_modifier";
  const proto::Histogram& hist1 = histogram_set.histograms(0);
  const proto::Histogram& hist2 = histogram_set.histograms(1);
  EXPECT_EQ(hist1.name(), "foobar");
  // Spot check some things in here (there's a more thorough test on the
  // histogram writer itself).
  EXPECT_EQ(hist1.unit().unit(), proto::SIGMA);
  // Five "foobar" samples: the single 7, the mean 1, then the list 1, 2, 3.
  EXPECT_EQ(hist1.sample_values_size(), 5);
  EXPECT_EQ(hist1.sample_values(0), 7);
  EXPECT_EQ(hist1.sample_values(1), 1);
  EXPECT_EQ(hist1.sample_values(2), 1);
  EXPECT_EQ(hist1.sample_values(3), 2);
  EXPECT_EQ(hist1.sample_values(4), 3);
  EXPECT_EQ(hist1.diagnostics().diagnostic_map().count("stories"), 1u);
  const proto::Diagnostic& stories =
      hist1.diagnostics().diagnostic_map().at("stories");
  ASSERT_EQ(stories.generic_set().values_size(), 1);
  EXPECT_EQ(stories.generic_set().values(0), "\"story_1\"");
  EXPECT_EQ(hist2.name(), "measurement_modifier");
  EXPECT_EQ(hist2.unit().unit(), proto::MS_BEST_FIT_FORMAT);
  absl::SetFlag(&FLAGS_write_histogram_proto_json, original_flag);
}
TEST_F(PerfTest, TestClearPerfResultsHistograms) {
  // In histogram mode, clearing all results must yield an empty
  // serialization. Save and restore the flag to avoid leaking state.
  const bool flag_before = absl::GetFlag(FLAGS_write_histogram_proto_json);
  absl::SetFlag(&FLAGS_write_histogram_proto_json, true);

  PrintResult("measurement", "modifier", "trace", 42, "ms", false);
  ClearPerfResults();
  EXPECT_EQ(GetPerfResults(), "");

  absl::SetFlag(&FLAGS_write_histogram_proto_json, flag_before);
}
#endif // WEBRTC_ENABLE_PROTOBUF
#if GTEST_HAS_DEATH_TEST #if GTEST_HAS_DEATH_TEST
using PerfDeathTest = PerfTest; using PerfDeathTest = PerfTest;