Use cropping alignment in video quality analysis tool
TBR=phoglund

Bug: webrtc:9642
Change-Id: I32e54473ef6699b862b36c36c7d975b381db6ed2
Reviewed-on: https://webrtc-review.googlesource.com/c/99580
Commit-Queue: Magnus Jedvert <magjed@webrtc.org>
Reviewed-by: Patrik Höglund <phoglund@google.com>
Reviewed-by: Sami Kalliomäki <sakal@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#25757}
commit f259078009
parent ebb50c217d
committed by Commit Bot
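At a glance, the change moves alignment out of RunAnalysis() and into the analyzer tool itself: the reference video is reordered and cropped to match the test video, the test video is color-corrected against it, and only then are the per-frame metrics computed. The sketch below condenses the new flow from the hunks that follow; it is not a literal excerpt (flag parsing, frame clustering and output writing are omitted), and the file names frame_analyzer.cc / video_quality_analysis.cc are inferred from the hunk contexts rather than shown on this page.

const std::vector<size_t> matching_indices =
    webrtc::test::FindMatchingFrameIndices(reference_video, test_video);

// Temporal + geometric alignment: reorder the reference frames to match the
// test video, then crop the reference region that corresponds to the test.
const rtc::scoped_refptr<webrtc::test::Video> aligned_reference_video =
    AdjustCropping(ReorderVideo(reference_video, matching_indices),
                   test_video);

// Estimate any systematic color difference and undo it in the test video.
const webrtc::test::ColorTransformationMatrix color_transformation =
    CalculateColorTransformationMatrix(aligned_reference_video, test_video);
const rtc::scoped_refptr<webrtc::test::Video> color_adjusted_test_video =
    AdjustColors(color_transformation, test_video);

// Per-frame metrics are now computed on the pre-aligned pair.
results.frames = webrtc::test::RunAnalysis(
    aligned_reference_video, color_adjusted_test_video, matching_indices);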
@@ -15,7 +15,10 @@
 #include <string>
 #include <vector>
 
+#include "rtc_base/strings/string_builder.h"
 #include "rtc_base/stringutils.h"
+#include "rtc_tools/frame_analyzer/video_color_aligner.h"
+#include "rtc_tools/frame_analyzer/video_geometry_aligner.h"
 #include "rtc_tools/frame_analyzer/video_quality_analysis.h"
 #include "rtc_tools/frame_analyzer/video_temporal_aligner.h"
 #include "rtc_tools/simple_command_line_parser.h"
@@ -133,8 +136,35 @@ int main(int argc, char* argv[]) {
   const std::vector<size_t> matching_indices =
       webrtc::test::FindMatchingFrameIndices(reference_video, test_video);
 
-  results.frames =
-      webrtc::test::RunAnalysis(reference_video, test_video, matching_indices);
+  // Align the reference video both temporally and geometrically. I.e. align the
+  // frames to match up in order to the test video, and align a crop region of
+  // the reference video to match up to the test video.
+  const rtc::scoped_refptr<webrtc::test::Video> aligned_reference_video =
+      AdjustCropping(ReorderVideo(reference_video, matching_indices),
+                     test_video);
+
+  // Calculate if there is any systematic color difference between the reference
+  // and test video.
+  const webrtc::test::ColorTransformationMatrix color_transformation =
+      CalculateColorTransformationMatrix(aligned_reference_video, test_video);
+
+  char buf[256];
+  rtc::SimpleStringBuilder string_builder(buf);
+  for (int i = 0; i < 3; ++i) {
+    string_builder << "\n";
+    for (int j = 0; j < 4; ++j)
+      string_builder.AppendFormat("%6.2f ", color_transformation[i][j]);
+  }
+  printf("Adjusting test video with color transformation: %s\n",
+         string_builder.str());
+
+  // Adjust all frames in the test video with the calculated color
+  // transformation.
+  const rtc::scoped_refptr<webrtc::test::Video> color_adjusted_test_video =
+      AdjustColors(color_transformation, test_video);
+
+  results.frames = webrtc::test::RunAnalysis(
+      aligned_reference_video, color_adjusted_test_video, matching_indices);
 
   const std::vector<webrtc::test::Cluster> clusters =
       webrtc::test::CalculateFrameClusters(matching_indices);
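The color_transformation printed above has 3 rows and 4 columns (the loops run i < 3, j < 4), which suggests an affine transform applied to each (Y, U, V, 1) sample. The actual application lives in video_color_aligner's AdjustColors(); the snippet below is only a sketch of the assumed per-pixel math, and the PixelYuv struct, the local ColorTransformationMatrix alias and the ApplyColorTransform helper are hypothetical names invented for illustration.

#include <array>
#include <cstdint>

// Hypothetical types/helpers, for illustration only: how a 3x4 color
// transformation matrix would map a single YUV sample if it is an affine
// transform on (Y, U, V, 1). The real logic lives in video_color_aligner.
struct PixelYuv {
  uint8_t y;
  uint8_t u;
  uint8_t v;
};

using ColorTransformationMatrix = std::array<std::array<float, 4>, 3>;

PixelYuv ApplyColorTransform(const ColorTransformationMatrix& m, PixelYuv p) {
  const float in[4] = {static_cast<float>(p.y), static_cast<float>(p.u),
                       static_cast<float>(p.v), 1.0f};
  float out[3] = {0.0f, 0.0f, 0.0f};
  for (int i = 0; i < 3; ++i) {
    for (int j = 0; j < 4; ++j)
      out[i] += m[i][j] * in[j];  // Row i of the matrix produces channel i.
  }
  // Clamp back to the valid 8-bit range before repacking the pixel.
  const auto clamp = [](float x) {
    return static_cast<uint8_t>(x < 0.0f ? 0.0f : (x > 255.0f ? 255.0f : x));
  };
  return {clamp(out[0]), clamp(out[1]), clamp(out[2])};
}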
@@ -151,20 +181,17 @@ int main(int argc, char* argv[]) {
   if (!chartjson_result_file.empty()) {
     webrtc::test::WritePerfResults(chartjson_result_file);
   }
-  rtc::scoped_refptr<webrtc::test::Video> reordered_video =
-      webrtc::test::GenerateAlignedReferenceVideo(reference_video,
-                                                  matching_indices);
   std::string aligned_output_file = parser.GetFlag("aligned_output_file");
   if (!aligned_output_file.empty()) {
-    webrtc::test::WriteVideoToFile(reordered_video, aligned_output_file,
+    webrtc::test::WriteVideoToFile(aligned_reference_video, aligned_output_file,
                                    /*fps=*/30);
   }
   std::string yuv_directory = parser.GetFlag("yuv_directory");
   if (!yuv_directory.empty()) {
-    webrtc::test::WriteVideoToFile(reordered_video,
+    webrtc::test::WriteVideoToFile(aligned_reference_video,
                                    JoinFilename(yuv_directory, "ref.yuv"),
                                    /*fps=*/30);
-    webrtc::test::WriteVideoToFile(test_video,
+    webrtc::test::WriteVideoToFile(color_adjusted_test_video,
                                    JoinFilename(yuv_directory, "test.yuv"),
                                    /*fps=*/30);
   }
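The remaining hunks are from the RunAnalysis() implementation (presumably video_quality_analysis.cc, inferred from the function name and includes; the file header is not visible on this page). With alignment hoisted into the tool, RunAnalysis() no longer reorders or color-corrects anything itself; it assumes its two inputs are already frame-for-frame aligned. A sketch of the assumed calling contract, using the variables from the hunks above:

// Both videos must already be temporally, geometrically and color aligned,
// i.e. frame i of one corresponds to frame i of the other.
std::vector<webrtc::test::AnalysisResult> frame_results =
    webrtc::test::RunAnalysis(aligned_reference_video,
                              color_adjusted_test_video, matching_indices);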
@@ -14,9 +14,6 @@
 #include <numeric>
 
 #include "rtc_base/logging.h"
-#include "rtc_base/strings/string_builder.h"
-#include "rtc_tools/frame_analyzer/video_color_aligner.h"
-#include "rtc_tools/frame_analyzer/video_temporal_aligner.h"
 #include "test/testsupport/perf_test.h"
 #include "third_party/libyuv/include/libyuv/compare.h"
 #include "third_party/libyuv/include/libyuv/convert.h"
@@ -59,32 +56,12 @@ std::vector<AnalysisResult> RunAnalysis(
     const rtc::scoped_refptr<webrtc::test::Video>& reference_video,
     const rtc::scoped_refptr<webrtc::test::Video>& test_video,
     const std::vector<size_t>& test_frame_indices) {
-  const rtc::scoped_refptr<Video> temporally_aligned_reference_video =
-      ReorderVideo(reference_video, test_frame_indices);
-
-  const ColorTransformationMatrix color_transformation =
-      CalculateColorTransformationMatrix(temporally_aligned_reference_video,
-                                         test_video);
-
-  char buf[256];
-  rtc::SimpleStringBuilder string_builder(buf);
-  for (int i = 0; i < 3; ++i) {
-    string_builder << "\n";
-    for (int j = 0; j < 4; ++j)
-      string_builder.AppendFormat("%6.2f ", color_transformation[i][j]);
-  }
-  RTC_LOG(LS_INFO) << "Adjusting test video with color transformation: "
-                   << string_builder.str();
-
-  const rtc::scoped_refptr<Video> color_adjusted_test_video =
-      AdjustColors(color_transformation, test_video);
-
   std::vector<AnalysisResult> results;
-  for (size_t i = 0; i < color_adjusted_test_video->number_of_frames(); ++i) {
+  for (size_t i = 0; i < test_video->number_of_frames(); ++i) {
     const rtc::scoped_refptr<I420BufferInterface>& test_frame =
-        color_adjusted_test_video->GetFrame(i);
+        test_video->GetFrame(i);
     const rtc::scoped_refptr<I420BufferInterface>& reference_frame =
-        temporally_aligned_reference_video->GetFrame(i);
+        reference_video->GetFrame(i);
 
     // Fill in the result struct.
     AnalysisResult result;
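The hunk ends just as the per-frame result struct is declared; the metric computation itself is outside the visible diff. Since the file keeps its include of third_party/libyuv/include/libyuv/compare.h, the per-frame scores are presumably computed with libyuv's comparison helpers. The sketch below shows how one already-aligned reference/test frame pair could be scored that way; the AnalysisResult field names (psnr_value, ssim_value) and the ScoreFramePair helper are assumptions for illustration, not taken from the diff.

#include "api/video/video_frame_buffer.h"
#include "rtc_tools/frame_analyzer/video_quality_analysis.h"
#include "third_party/libyuv/include/libyuv/compare.h"

// Hedged sketch: score one aligned frame pair with libyuv's PSNR/SSIM helpers.
// The psnr_value/ssim_value field names are assumptions.
void ScoreFramePair(
    const rtc::scoped_refptr<webrtc::I420BufferInterface>& ref_frame,
    const rtc::scoped_refptr<webrtc::I420BufferInterface>& test_frame,
    webrtc::test::AnalysisResult* result) {
  result->psnr_value = libyuv::I420Psnr(
      ref_frame->DataY(), ref_frame->StrideY(), ref_frame->DataU(),
      ref_frame->StrideU(), ref_frame->DataV(), ref_frame->StrideV(),
      test_frame->DataY(), test_frame->StrideY(), test_frame->DataU(),
      test_frame->StrideU(), test_frame->DataV(), test_frame->StrideV(),
      test_frame->width(), test_frame->height());
  result->ssim_value = libyuv::I420Ssim(
      ref_frame->DataY(), ref_frame->StrideY(), ref_frame->DataU(),
      ref_frame->StrideU(), ref_frame->DataV(), ref_frame->StrideV(),
      test_frame->DataY(), test_frame->StrideY(), test_frame->DataU(),
      test_frame->StrideU(), test_frame->DataV(), test_frame->StrideV(),
      test_frame->width(), test_frame->height());
}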