diff --git a/audio/test/pc_low_bandwidth_audio_test.cc b/audio/test/pc_low_bandwidth_audio_test.cc
index ed052c03f7..37c80860ff 100644
--- a/audio/test/pc_low_bandwidth_audio_test.cc
+++ b/audio/test/pc_low_bandwidth_audio_test.cc
@@ -110,7 +110,7 @@ std::string PerfResultsOutputFile() {
 
 void LogTestResults() {
   std::string perf_results_output_file = PerfResultsOutputFile();
-  webrtc::test::WritePerfResults(perf_results_output_file);
+  EXPECT_TRUE(webrtc::test::WritePerfResults(perf_results_output_file));
 
   const ::testing::TestInfo* const test_info =
       ::testing::UnitTest::GetInstance()->current_test_info();
diff --git a/modules/audio_coding/codecs/isac/fix/test/kenny.cc b/modules/audio_coding/codecs/isac/fix/test/kenny.cc
index 87d67a873b..a0a2dca118 100644
--- a/modules/audio_coding/codecs/isac/fix/test/kenny.cc
+++ b/modules/audio_coding/codecs/isac/fix/test/kenny.cc
@@ -861,7 +861,7 @@ TEST(IsacFixTest, Kenny) {
                           (runtime * 10000) / length_file, "us", false);
 
   if (chartjson_result_file) {
-    webrtc::test::WritePerfResults(chartjson_result_file);
+    EXPECT_TRUE(webrtc::test::WritePerfResults(chartjson_result_file));
   }
 
   fclose(inp);
diff --git a/rtc_tools/frame_analyzer/frame_analyzer.cc b/rtc_tools/frame_analyzer/frame_analyzer.cc
index 1a22bd6214..70af305e61 100644
--- a/rtc_tools/frame_analyzer/frame_analyzer.cc
+++ b/rtc_tools/frame_analyzer/frame_analyzer.cc
@@ -164,7 +164,9 @@ int main(int argc, char* argv[]) {
   std::string chartjson_result_file =
       absl::GetFlag(FLAGS_chartjson_result_file);
   if (!chartjson_result_file.empty()) {
-    webrtc::test::WritePerfResults(chartjson_result_file);
+    if (!webrtc::test::WritePerfResults(chartjson_result_file)) {
+      return 1;
+    }
   }
   std::string aligned_output_file = absl::GetFlag(FLAGS_aligned_output_file);
   if (!aligned_output_file.empty()) {
diff --git a/test/ios/test_support.mm b/test/ios/test_support.mm
index 1f975f2a7e..79ca3215a8 100644
--- a/test/ios/test_support.mm
+++ b/test/ios/test_support.mm
@@ -33,7 +33,7 @@ static int (*g_test_suite)(void) = NULL;
 
 static int g_argc;
 static char **g_argv;
-static bool g_save_chartjson_result;
+static bool g_write_perf_output;
 static absl::optional<std::vector<std::string>> g_metrics_to_plot;
 
 @interface UIApplication (Testing)
@@ -76,8 +76,10 @@ static absl::optional<std::vector<std::string>> g_metrics_to_plot;
 
   int exitStatus = g_test_suite();
 
-  if (g_save_chartjson_result) {
+  if (g_write_perf_output) {
     // Stores data into a json file under the app's document directory.
+    // TODO(https://crbug.com/1029452): Change ext to .pb when histograms are
+    // the default.
     NSString* fileName = @"perf_result.json";
     NSArray* outputDirectories = NSSearchPathForDirectoriesInDomains(
         NSDocumentDirectory, NSUserDomainMask, YES);
@@ -85,8 +87,9 @@ static absl::optional<std::vector<std::string>> g_metrics_to_plot;
       NSString* outputPath =
          [outputDirectories[0] stringByAppendingPathComponent:fileName];
 
-      webrtc::test::WritePerfResults(
-          [NSString stdStringForString:outputPath]);
+      if (!webrtc::test::WritePerfResults([NSString stdStringForString:outputPath])) {
+        exit(1);
+      }
     }
   }
   if (g_metrics_to_plot) {
@@ -121,7 +124,7 @@ void InitTestSuite(int (*test_suite)(void),
   g_test_suite = test_suite;
   g_argc = argc;
   g_argv = argv;
-  g_save_chartjson_result = save_chartjson_result;
+  g_write_perf_output = save_chartjson_result;
   g_metrics_to_plot = std::move(metrics_to_plot);
 }
 
diff --git a/test/test_main_lib.cc b/test/test_main_lib.cc
index c6a0e10f7b..c342c60255 100644
--- a/test/test_main_lib.cc
+++ b/test/test_main_lib.cc
@@ -181,7 +181,9 @@ class TestMainImpl : public TestMain {
     std::string chartjson_result_file =
        absl::GetFlag(FLAGS_isolated_script_test_perf_output);
     if (!chartjson_result_file.empty()) {
-      webrtc::test::WritePerfResults(chartjson_result_file);
+      if (!webrtc::test::WritePerfResults(chartjson_result_file)) {
+        return 1;
+      }
     }
     if (metrics_to_plot) {
       webrtc::test::PrintPlottableResults(*metrics_to_plot);
diff --git a/test/testsupport/perf_test.cc b/test/testsupport/perf_test.cc
index 2ab91901d1..ff0f0d9b6b 100644
--- a/test/testsupport/perf_test.cc
+++ b/test/testsupport/perf_test.cc
@@ -220,11 +220,29 @@ void PrintPlottableResults(const std::vector<std::string>& desired_graphs) {
   GetPlottableCounterPrinter().Print(desired_graphs);
 }
 
-void WritePerfResults(const std::string& output_path) {
+bool WritePerfResults(const std::string& output_path) {
   std::string results = GetPerfResults();
-  std::fstream output(output_path, std::fstream::out);
-  output << results;
-  output.close();
+  FILE* output;
+  if (absl::GetFlag(FLAGS_write_histogram_proto_json)) {
+    output = fopen(output_path.c_str(), "wb");
+  } else {
+    output = fopen(output_path.c_str(), "w");
+  }
+  if (output == NULL) {
+    printf("Failed to write to %s.\n", output_path.c_str());
+    return false;
+  }
+  size_t written =
+      fwrite(results.c_str(), sizeof(char), results.size(), output);
+  fclose(output);
+
+  if (written != results.size()) {
+    size_t expected = results.size();
+    printf("Wrote %zu, tried to write %zu\n", written, expected);
+    return false;
+  }
+
+  return true;
 }
 
 void PrintResult(const std::string& measurement,
diff --git a/test/testsupport/perf_test.h b/test/testsupport/perf_test.h
index 252a0a0f61..58fa0350f2 100644
--- a/test/testsupport/perf_test.h
+++ b/test/testsupport/perf_test.h
@@ -105,8 +105,9 @@ std::string GetPerfResults();
 // they will be skipped.
 void PrintPlottableResults(const std::vector<std::string>& desired_graphs);
 
-// Call GetPerfResults() and write its output to a file.
-void WritePerfResults(const std::string& output_path);
+// Call GetPerfResults() and write its output to a file. Returns false if we
+// failed to write to the file.
+bool WritePerfResults(const std::string& output_path);
 
 // By default, perf results are printed to stdout. Set the FILE* to where they
 // should be printing instead.
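
For reviewers, a minimal caller sketch (not part of the patch) of the contract this change introduces: webrtc::test::WritePerfResults() now reports write failures instead of silently dropping the perf output, so test callers assert on it and binaries turn it into a non-zero exit code. The helper DumpPerfResults() below is hypothetical; the only WebRTC symbol it assumes is the bool-returning WritePerfResults() declared in test/testsupport/perf_test.h above.

#include <cstdio>
#include <string>

#include "test/testsupport/perf_test.h"

// Hypothetical caller, mirroring the frame_analyzer/test_main_lib pattern in
// this patch: a failed write becomes a non-zero exit code, not a silent no-op.
int DumpPerfResults(const std::string& output_path) {
  if (!webrtc::test::WritePerfResults(output_path)) {
    // WritePerfResults() already printed a diagnostic; just propagate failure.
    std::fprintf(stderr, "Could not persist perf results to %s\n",
                 output_path.c_str());
    return 1;
  }
  return 0;
}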