Store video_quality_loopback_test perf results in Chart JSON format.

Adds a flag to store the perf results in a JSON file using the Chart
JSON format [1].

[1] https://github.com/catapult-project/catapult/blob/master/dashboard/docs/data-format.md

TBR=phoglund@webrtc.org

Bug: chromium:755660
Change-Id: I6a896654a4a558df217ddefa4e8a52a487cdbebd
Reviewed-on: https://webrtc-review.googlesource.com/43180
Commit-Queue: Edward Lemur <ehmaldonado@webrtc.org>
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#21809}
Author:    Edward Lemur <ehmaldonado@webrtc.org>
Date:      2018-01-30 15:33:02 +01:00
Committer: Commit Bot
Parent:    607f464b16
Commit:    2e5966b3d3

8 changed files with 100 additions and 81 deletions
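For context, the Chart JSON layout referenced above looks roughly like the sketch below. This is a minimal illustration based on the catapult data-format doc linked in the commit message; the benchmark name and metric names are illustrative, not taken from this diff.

import json

# Hypothetical Chart JSON ("chartjson") document; field names follow the
# catapult data-format doc, values are made up for illustration.
chartjson = {
    "format_version": "1.0",
    "benchmark_name": "video_quality_loopback_test",
    "charts": {
        "PSNR": {
            "some_label": {
                "type": "list_of_scalar_values",
                "values": [34.2, 33.9, 35.1],
                "units": "dB",
            },
        },
        "Max_skipped": {
            "some_label": {
                "type": "scalar",
                "value": 2,
                "units": "",
            },
        },
    },
}

# Write the document the way a perf dashboard uploader would expect it.
with open("perf_results.json", "w") as f:
    json.dump(chartjson, f, indent=2)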

examples/androidtests/video_quality_loopback_test.py

@@ -86,6 +86,8 @@ def _ParseArgs():
   parser.add_argument('--temp_dir',
                       help='A temporary directory to put the output.')
   parser.add_argument('--adb-path', help='Path to adb binary.', default='adb')
+  parser.add_argument('--chartjson-result-file',
+                      help='Where to store perf results in chartjson format.', default=None)
   args = parser.parse_args()
   return args

@@ -148,7 +150,8 @@ def SetUpTools(android_device, temp_dir, processes):
       '8089']))

-def RunTest(android_device, adb_path, build_dir, temp_dir):
+def RunTest(android_device, adb_path, build_dir, temp_dir,
+            chartjson_result_file):
   ffmpeg_path = os.path.join(TOOLCHAIN_DIR, 'ffmpeg')

   def ConvertVideo(input_video, output_video):
     _RunCommand([ffmpeg_path, '-y', '-i', input_video, output_video])

@@ -181,8 +184,7 @@ def RunTest(android_device, adb_path, build_dir, temp_dir):
   stats_file_ref = os.path.join(temp_dir, 'stats_ref.txt')
   stats_file_test = os.path.join(temp_dir, 'stats_test.txt')

-  _RunCommand([
-      sys.executable, compare_script,
+  args = [
       '--ref_video', reference_video_yuv,
       '--test_video', test_video_yuv,
       '--yuv_frame_width', '640',

@@ -191,7 +193,12 @@ def RunTest(android_device, adb_path, build_dir, temp_dir):
       '--stats_file_test', stats_file_test,
       '--frame_analyzer', frame_analyzer,
       '--ffmpeg_path', ffmpeg_path,
-      '--zxing_path', zxing_path])
+      '--zxing_path', zxing_path,
+  ]
+  if chartjson_result_file:
+    args.extend(['--chartjson_result_file', chartjson_result_file])
+
+  _RunCommand([sys.executable, compare_script] + args)


 def main():

@@ -208,7 +215,8 @@ def main():
   try:
     android_device = SelectAndroidDevice(adb_path)
     SetUpTools(android_device, temp_dir, processes)
-    RunTest(android_device, adb_path, build_dir, temp_dir)
+    RunTest(android_device, adb_path, build_dir, temp_dir,
+            args.chartjson_result_file)
   finally:
     for process in processes:
       if process:
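As a usage sketch, the loopback test would be invoked with the new flag roughly as follows. Only flags visible in this diff are used; the real script has further options (e.g. for the build directory) that are omitted here, and the paths are placeholders.

import subprocess
import sys

# Hypothetical invocation of the loopback test with the new flag; the
# script path and output location are placeholders for a local checkout.
subprocess.check_call([
    sys.executable, 'video_quality_loopback_test.py',
    '--temp_dir', '/tmp/loopback',
    '--adb-path', 'adb',
    '--chartjson-result-file', '/tmp/loopback/perf_results.json',
])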

rtc_tools/BUILD.gn

@@ -66,6 +66,7 @@ rtc_static_library("video_quality_analysis") {
   ]
   deps = [
     "../common_video",
+    "../test:perf_test",
     "//third_party/libyuv",
   ]
 }

@@ -79,6 +80,7 @@ rtc_executable("frame_analyzer") {
   deps = [
     ":command_line_parser",
     ":video_quality_analysis",
+    "../test:perf_test",
     "//build/win:default_exe_manifest",
   ]
 }

rtc_tools/compare_videos.py

@@ -62,6 +62,8 @@ def _ParseArgs():
                     help='Width of the YUV file\'s frames. Default: %default')
   parser.add_option('--yuv_frame_height', type='int', default=480,
                     help='Height of the YUV file\'s frames. Default: %default')
+  parser.add_option('--chartjson_result_file', type='str', default=None,
+                    help='Where to store perf results in chartjson format.')
   options, _ = parser.parse_args()

   if options.stats_file:

@@ -161,6 +163,8 @@ def main():
     '--width=%d' % options.yuv_frame_width,
     '--height=%d' % options.yuv_frame_height,
   ]
+  if options.chartjson_result_file:
+    cmd.append('--chartjson_result_file=%s' % options.chartjson_result_file)
   frame_analyzer = subprocess.Popen(cmd, stdin=_DevNull(),
                                     stdout=sys.stdout, stderr=sys.stderr)
   frame_analyzer.wait()
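The forwarding above follows a common optional-flag pattern: build the base command list, then append the chartjson flag only when it is set. A minimal self-contained sketch of that pattern (the binary path and option values are placeholders):

# Minimal sketch of the optional-flag forwarding pattern used above;
# 'frame_analyzer' and the option values are placeholders.
def build_cmd(frame_analyzer, width, height, chartjson_result_file=None):
    cmd = [
        frame_analyzer,
        '--width=%d' % width,
        '--height=%d' % height,
    ]
    # Only forward the flag when the caller asked for chartjson output.
    if chartjson_result_file:
        cmd.append('--chartjson_result_file=%s' % chartjson_result_file)
    return cmd

print(build_cmd('./frame_analyzer', 640, 480, '/tmp/perf.json'))
print(build_cmd('./frame_analyzer', 640, 480))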

rtc_tools/frame_analyzer/frame_analyzer.cc

@@ -17,6 +17,7 @@
 #include "rtc_tools/frame_analyzer/video_quality_analysis.h"
 #include "rtc_tools/simple_command_line_parser.h"
+#include "test/testsupport/perf_test.h"

 /*
  * A command line tool running PSNR and SSIM on a reference video and a test

@@ -62,7 +63,10 @@ int main(int argc, char* argv[]) {
       " - reference_file(string): The reference YUV file to compare against."
       " Default: ref.yuv\n"
       " - test_file(string): The test YUV file to run the analysis for."
-      " Default: test_file.yuv\n";
+      " Default: test_file.yuv\n"
+      " - chartjson_result_file: Where to store perf result in chartjson"
+      " format. If not present, no perf result will be stored."
+      " Default: None\n";

   webrtc::test::CommandLineParser parser;

@@ -77,6 +81,7 @@ int main(int argc, char* argv[]) {
   parser.SetFlag("stats_file_test", "stats_test.txt");
   parser.SetFlag("reference_file", "ref.yuv");
   parser.SetFlag("test_file", "test.yuv");
+  parser.SetFlag("chartjson_result_file", "");
   parser.SetFlag("help", "false");

   parser.ProcessFlags();

@@ -101,11 +106,16 @@ int main(int argc, char* argv[]) {
                             parser.GetFlag("stats_file_ref").c_str(),
                             parser.GetFlag("stats_file_test").c_str(), width,
                             height, &results);

-  std::string label = parser.GetFlag("label");
-  webrtc::test::PrintAnalysisResults(label, &results);
-  webrtc::test::PrintMaxRepeatedAndSkippedFrames(
-      label, parser.GetFlag("stats_file_ref"),
-      parser.GetFlag("stats_file_test"));
+  webrtc::test::GetMaxRepeatedAndSkippedFrames(
+      parser.GetFlag("stats_file_ref"), parser.GetFlag("stats_file_test"),
+      &results);
+
+  webrtc::test::PrintAnalysisResults(parser.GetFlag("label"), &results);
+
+  std::string chartjson_result_file = parser.GetFlag("chartjson_result_file");
+  if (!chartjson_result_file.empty()) {
+    webrtc::test::WritePerfResults(chartjson_result_file);
+  }
   return 0;
 }
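Once frame_analyzer has run with --chartjson_result_file, the written file can be consumed downstream. A hedged sketch of a consumer, assuming WritePerfResults serializes the accumulated results in the catapult Chart JSON layout with a top-level "charts" dict (an assumption from the data-format doc, not something this diff shows):

import json

# Hypothetical consumer of the file written by WritePerfResults; the path
# and the "charts" layout are assumptions based on the catapult doc.
with open('/tmp/perf.json') as f:
    data = json.load(f)

for chart_name, traces in data.get('charts', {}).items():
    for trace_name, trace in traces.items():
        print(chart_name, trace_name, trace.get('units'))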

rtc_tools/frame_analyzer/video_quality_analysis.cc

@@ -18,6 +18,8 @@
 #include <map>
 #include <utility>

+#include "test/testsupport/perf_test.h"
+
 #define STATS_LINE_LENGTH 32
 #define Y4M_FILE_HEADER_MAX_SIZE 200
 #define Y4M_FRAME_DELIMITER "FRAME"

@@ -318,13 +320,6 @@ void RunAnalysis(const char* reference_file_name,
   delete[] reference_frame;
 }

-void PrintMaxRepeatedAndSkippedFrames(const std::string& label,
-                                      const std::string& stats_file_ref_name,
-                                      const std::string& stats_file_test_name) {
-  PrintMaxRepeatedAndSkippedFrames(stdout, label, stats_file_ref_name,
-                                   stats_file_test_name);
-}
-
 std::vector<std::pair<int, int> > CalculateFrameClusters(
     FILE* file,
     int* num_decode_errors) {

@@ -359,10 +354,9 @@ std::vector<std::pair<int, int> > CalculateFrameClusters(
   return frame_cnt;
 }

-void PrintMaxRepeatedAndSkippedFrames(FILE* output,
-                                      const std::string& label,
-                                      const std::string& stats_file_ref_name,
-                                      const std::string& stats_file_test_name) {
+void GetMaxRepeatedAndSkippedFrames(const std::string& stats_file_ref_name,
+                                    const std::string& stats_file_test_name,
+                                    ResultsContainer* results) {
   FILE* stats_file_ref = fopen(stats_file_ref_name.c_str(), "r");
   FILE* stats_file_test = fopen(stats_file_test_name.c_str(), "r");
   if (stats_file_ref == NULL) {

@@ -460,22 +454,17 @@ void PrintMaxRepeatedAndSkippedFrames(FILE* output,
       }
       continue;
     }
-    fprintf(output,
+    fprintf(stdout,
         "Found barcode %d in test video, which is not in reference video\n",
         it_test->first);
     break;
   }

-  fprintf(output, "RESULT Max_repeated: %s= %d\n", label.c_str(),
-          max_repeated_frames);
-  fprintf(output, "RESULT Max_skipped: %s= %d\n", label.c_str(),
-          max_skipped_frames);
-  fprintf(output, "RESULT Total_skipped: %s= %d\n", label.c_str(),
-          total_skipped_frames);
-  fprintf(output, "RESULT Decode_errors_reference: %s= %d\n", label.c_str(),
-          decode_errors_ref);
-  fprintf(output, "RESULT Decode_errors_test: %s= %d\n", label.c_str(),
-          decode_errors_test);
+  results->max_repeated_frames = max_repeated_frames;
+  results->max_skipped_frames = max_skipped_frames;
+  results->total_skipped_frames = total_skipped_frames;
+  results->decode_errors_ref = decode_errors_ref;
+  results->decode_errors_test = decode_errors_test;
 }

@@ -484,26 +473,32 @@ void PrintAnalysisResults(const std::string& label, ResultsContainer* results) {

 void PrintAnalysisResults(FILE* output, const std::string& label,
                           ResultsContainer* results) {
-  std::vector<AnalysisResult>::iterator iter;
-
-  fprintf(output, "RESULT Unique_frames_count: %s= %u score\n", label.c_str(),
-          static_cast<unsigned int>(results->frames.size()));
+  SetPerfResultsOutput(output);

   if (results->frames.size() > 0u) {
-    fprintf(output, "RESULT PSNR: %s= [", label.c_str());
-    for (iter = results->frames.begin(); iter != results->frames.end() - 1;
-         ++iter) {
-      fprintf(output, "%f,", iter->psnr_value);
-    }
-    fprintf(output, "%f] dB\n", iter->psnr_value);
+    PrintResult("Unique_frames_count", "", label, results->frames.size(),
+                "score", false);

-    fprintf(output, "RESULT SSIM: %s= [", label.c_str());
-    for (iter = results->frames.begin(); iter != results->frames.end() - 1;
-         ++iter) {
-      fprintf(output, "%f,", iter->ssim_value);
-    }
-    fprintf(output, "%f] score\n", iter->ssim_value);
+    std::vector<double> psnr_values;
+    std::vector<double> ssim_values;
+    for (const auto& frame : results->frames) {
+      psnr_values.push_back(frame.psnr_value);
+      ssim_values.push_back(frame.ssim_value);
+    }
+
+    PrintResultList("PSNR", "", label, psnr_values, "dB", false);
+    PrintResultList("SSIM", "", label, ssim_values, "score", false);
   }
+
+  PrintResult("Max_repeated", "", label, results->max_repeated_frames, "",
+              false);
+  PrintResult("Max_skipped", "", label, results->max_skipped_frames, "", false);
+  PrintResult("Total_skipped", "", label, results->total_skipped_frames, "",
+              false);
+  PrintResult("Decode_errors_reference", "", label, results->decode_errors_ref,
+              "", false);
+  PrintResult("Decode_errors_test", "", label, results->decode_errors_test, "",
+              false);
 }

 }  // namespace test

rtc_tools/frame_analyzer/video_quality_analysis.h

@@ -37,6 +37,11 @@ struct ResultsContainer {
   ~ResultsContainer();

   std::vector<AnalysisResult> frames;
+  int max_repeated_frames;
+  int max_skipped_frames;
+  int total_skipped_frames;
+  int decode_errors_ref;
+  int decode_errors_test;
 };

 enum VideoAnalysisMetricsType {kPSNR, kSSIM};

@@ -102,15 +107,9 @@ std::vector<std::pair<int, int> > CalculateFrameClusters(
 // Calculates max repeated and skipped frames and prints them to stdout in a
 // format that is compatible with Chromium performance numbers.
-void PrintMaxRepeatedAndSkippedFrames(const std::string& label,
-                                      const std::string& stats_file_ref_name,
-                                      const std::string& stats_file_test_name);
-
-// Similar to the above, but will print to the specified file handle.
-void PrintMaxRepeatedAndSkippedFrames(FILE* output,
-                                      const std::string& label,
-                                      const std::string& stats_file_ref_name,
-                                      const std::string& stats_file_test_name);
+void GetMaxRepeatedAndSkippedFrames(const std::string& stats_file_ref_name,
+                                    const std::string& stats_file_test_name,
+                                    ResultsContainer* results);

 // Gets the next line from an open stats file.
 bool GetNextStatsLine(FILE* stats_file, char* line);
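The header change captures the shape of the refactor: compute everything into a results struct first, print (or serialize) later. A Python analogue of that flow, where the names mirror the C++ for illustration only, this is not the WebRTC API:

from dataclasses import dataclass, field
from typing import List

# Python analogue of ResultsContainer; illustrates the fill-then-print flow.
@dataclass
class ResultsContainer:
    frames: List[float] = field(default_factory=list)
    max_repeated_frames: int = 0
    max_skipped_frames: int = 0
    total_skipped_frames: int = 0
    decode_errors_ref: int = 0
    decode_errors_test: int = 0

def get_max_repeated_and_skipped_frames(results):
    # Placeholder computation; the real code parses two stats files.
    results.max_repeated_frames = 2
    results.max_skipped_frames = 2

def print_analysis_results(label, results):
    # Trailing space mimics the empty-units field in the new output format.
    print('RESULT Max_repeated: %s= %d ' % (label, results.max_repeated_frames))
    print('RESULT Max_skipped: %s= %d ' % (label, results.max_skipped_frames))

results = ResultsContainer()
get_max_repeated_and_skipped_frames(results)
print_analysis_results('Example', results)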

rtc_tools/frame_analyzer/video_quality_analysis_unittest.cc

@@ -86,24 +86,25 @@ TEST_F(VideoQualityAnalysisTest, PrintAnalysisResultsThreeFrames) {
   PrintAnalysisResults(logfile_, "ThreeFrames", &result);
 }

-TEST_F(VideoQualityAnalysisTest, PrintMaxRepeatedAndSkippedFramesInvalidFile) {
+TEST_F(VideoQualityAnalysisTest, GetMaxRepeatedAndSkippedFramesInvalidFile) {
+  ResultsContainer result;
   remove(stats_filename_.c_str());
-  PrintMaxRepeatedAndSkippedFrames(logfile_, "NonExistingStatsFile",
-                                   stats_filename_ref_, stats_filename_);
+  GetMaxRepeatedAndSkippedFrames(stats_filename_ref_, stats_filename_, &result);
 }

 TEST_F(VideoQualityAnalysisTest,
-       PrintMaxRepeatedAndSkippedFramesEmptyStatsFile) {
+       GetMaxRepeatedAndSkippedFramesEmptyStatsFile) {
+  ResultsContainer result;
   std::ofstream stats_file;

   stats_file.open(stats_filename_ref_.c_str());
   stats_file.close();
   stats_file.open(stats_filename_.c_str());
   stats_file.close();

-  PrintMaxRepeatedAndSkippedFrames(logfile_, "EmptyStatsFile",
-                                   stats_filename_ref_, stats_filename_);
+  GetMaxRepeatedAndSkippedFrames(stats_filename_ref_, stats_filename_, &result);
 }

-TEST_F(VideoQualityAnalysisTest, PrintMaxRepeatedAndSkippedFramesNormalFile) {
+TEST_F(VideoQualityAnalysisTest, GetMaxRepeatedAndSkippedFramesNormalFile) {
+  ResultsContainer result;
   std::ofstream stats_file;

   stats_file.open(stats_filename_ref_.c_str());

@@ -123,8 +124,7 @@ TEST_F(VideoQualityAnalysisTest, PrintMaxRepeatedAndSkippedFramesNormalFile) {
   stats_file << "frame_0004 0106\n";
   stats_file.close();

-  PrintMaxRepeatedAndSkippedFrames(logfile_, "NormalStatsFile",
-                                   stats_filename_ref_, stats_filename_);
+  GetMaxRepeatedAndSkippedFrames(stats_filename_ref_, stats_filename_, &result);
 }

 namespace {

@@ -143,6 +143,7 @@ void VerifyLogOutput(const std::string& log_filename,
 TEST_F(VideoQualityAnalysisTest,
        PrintMaxRepeatedAndSkippedFramesSkippedFrames) {
+  ResultsContainer result;
   std::ofstream stats_file;

   std::string log_filename =

@@ -171,21 +172,22 @@ TEST_F(VideoQualityAnalysisTest,
   stats_file << "frame_0006 0112\n";
   stats_file.close();

-  PrintMaxRepeatedAndSkippedFrames(logfile, "NormalStatsFile",
-                                   stats_filename_ref_, stats_filename_);
+  GetMaxRepeatedAndSkippedFrames(stats_filename_ref_, stats_filename_, &result);
+  PrintAnalysisResults(logfile, "NormalStatsFile", &result);
   ASSERT_EQ(0, fclose(logfile));

   std::vector<std::string> expected_out = {
-      "RESULT Max_repeated: NormalStatsFile= 2",
-      "RESULT Max_skipped: NormalStatsFile= 2",
-      "RESULT Total_skipped: NormalStatsFile= 3",
-      "RESULT Decode_errors_reference: NormalStatsFile= 0",
-      "RESULT Decode_errors_test: NormalStatsFile= 0"};
+      "RESULT Max_repeated: NormalStatsFile= 2 ",
+      "RESULT Max_skipped: NormalStatsFile= 2 ",
+      "RESULT Total_skipped: NormalStatsFile= 3 ",
+      "RESULT Decode_errors_reference: NormalStatsFile= 0 ",
+      "RESULT Decode_errors_test: NormalStatsFile= 0 "};
   VerifyLogOutput(log_filename, expected_out);
 }

 TEST_F(VideoQualityAnalysisTest,
        PrintMaxRepeatedAndSkippedFramesDecodeErrorInTest) {
+  ResultsContainer result;
   std::ofstream stats_file;

   std::string log_filename =

@@ -214,16 +216,16 @@ TEST_F(VideoQualityAnalysisTest,
   stats_file << "frame_0006 0110\n";
   stats_file.close();

-  PrintMaxRepeatedAndSkippedFrames(logfile, "NormalStatsFile",
-                                   stats_filename_ref_, stats_filename_);
+  GetMaxRepeatedAndSkippedFrames(stats_filename_ref_, stats_filename_, &result);
+  PrintAnalysisResults(logfile, "NormalStatsFile", &result);
   ASSERT_EQ(0, fclose(logfile));

   std::vector<std::string> expected_out = {
-      "RESULT Max_repeated: NormalStatsFile= 1",
-      "RESULT Max_skipped: NormalStatsFile= 0",
-      "RESULT Total_skipped: NormalStatsFile= 0",
-      "RESULT Decode_errors_reference: NormalStatsFile= 0",
-      "RESULT Decode_errors_test: NormalStatsFile= 3"};
+      "RESULT Max_repeated: NormalStatsFile= 1 ",
+      "RESULT Max_skipped: NormalStatsFile= 0 ",
+      "RESULT Total_skipped: NormalStatsFile= 0 ",
+      "RESULT Decode_errors_reference: NormalStatsFile= 0 ",
+      "RESULT Decode_errors_test: NormalStatsFile= 3 "};
   VerifyLogOutput(log_filename, expected_out);
 }
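The trailing space in the updated expectations comes from PrintResult emitting an (empty) units field after the value. A small parser for these RESULT lines, which tolerates the empty-units case:

import re

# Matches lines like "RESULT Max_skipped: NormalStatsFile= 2 "; note the
# trailing space left by an empty units field in the new PrintResult output.
RESULT_RE = re.compile(r'^RESULT (\w+): (\w+)= (\S+) ?(\S*)$')

line = 'RESULT Max_skipped: NormalStatsFile= 2 '
m = RESULT_RE.match(line)
if m:
    measurement, label, value, units = m.groups()
    print(measurement, label, value, units or '(no units)')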

test/BUILD.gn

@@ -122,7 +122,6 @@ rtc_source_set("field_trial") {

 rtc_source_set("perf_test") {
   visibility = [ "*" ]
-  testonly = true
   sources = [
     "testsupport/perf_test.cc",
     "testsupport/perf_test.h",