Add cpu measurements to VideoProcessorIntegrationTest.
Remove unused method ExcludeFrameTypesToStr.

Bug: webrtc:6634
Change-Id: I2816466ed428b8ce13f3073ca496c2891d5d6368
Reviewed-on: https://webrtc-review.googlesource.com/9400
Commit-Queue: Åsa Persson <asapersson@webrtc.org>
Reviewed-by: Rasmus Brandt <brandtr@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#20350}
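Background on the measurement: the new CpuProcessTime helper (see the diff below) samples process CPU time and wall-clock time around the encode/decode run and reports CPU usage as cpu_time / wallclock_time * 100, divided by the number of cores. Below is a minimal sketch of that calculation, assuming the WebRTC tree; the function name MeasureCpuUsagePercent and the rtc_base/timeutils.h include are illustrative assumptions, while rtc::GetProcessCpuTimeNanos() and rtc::SystemTimeNanos() are the calls actually used in the diff.

#include <cstdint>

#include "rtc_base/cpu_time.h"   // rtc::GetProcessCpuTimeNanos()
#include "rtc_base/timeutils.h"  // rtc::SystemTimeNanos() (assumed header)

// Hypothetical helper: average per-core CPU usage of this process while
// |workload| runs, mirroring CpuProcessTime::Start/Stop/Print from the diff.
double MeasureCpuUsagePercent(int num_cores, void (*workload)()) {
  const int64_t cpu_start = rtc::GetProcessCpuTimeNanos();
  const int64_t wall_start = rtc::SystemTimeNanos();
  workload();  // E.g. process all frames of the test clip.
  const int64_t cpu_ns = rtc::GetProcessCpuTimeNanos() - cpu_start;
  const int64_t wall_ns = rtc::SystemTimeNanos() - wall_start;
  // Same formula as CpuProcessTime: cpu / wall * 100, normalized per core.
  return static_cast<double>(cpu_ns) / wall_ns * 100.0 / num_cores;
}

Note that measure_cpu also forces real-time encoding (RunEncodeInRealTime below), so frames are paced against wall-clock time while the ratio is sampled.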
@@ -446,6 +446,7 @@ if (rtc_include_tests) {
      "../../common_video",
      "../../media:rtc_audio_video",
      "../../rtc_base:rtc_base_approved",
      "../../rtc_base:rtc_base_tests_utils",
      "../../system_wrappers",
      "../../test:field_trial",
      "../../test:test_support",
@@ -36,6 +36,7 @@ DENOISING = ('Denoising', 'denoising')
RESILIENCE = ('Resilience', 'resilience')
ERROR_CONCEALMENT = ('Error concealment', 'error concealment')
QP = ('Average QP', 'avg QP')
CPU_USAGE = ('CPU usage %', 'CPU usage (%)')
PSNR = ('PSNR avg', 'PSNR (dB)')
SSIM = ('SSIM avg', 'SSIM')
ENC_BITRATE = ('Encoded bitrate', 'encoded bitrate (kbps)')
@@ -93,6 +94,7 @@ RESULTS = [
  ENCODE_TIME_AVG,
  DECODE_TIME_AVG,
  QP,
  CPU_USAGE,
  AVG_KEY_FRAME_SIZE,
  AVG_NON_KEY_FRAME_SIZE,
]
@@ -14,9 +14,7 @@

#include <algorithm>
#include <limits>
#include <memory>
#include <utility>
#include <vector>

#include "api/video/i420_buffer.h"
#include "common_types.h" // NOLINT(build/include)
@@ -137,16 +135,8 @@ int GetElapsedTimeMicroseconds(int64_t start_ns, int64_t stop_ns) {

} // namespace

const char* ExcludeFrameTypesToStr(ExcludeFrameTypes e) {
  switch (e) {
    case kExcludeOnlyFirstKeyFrame:
      return "ExcludeOnlyFirstKeyFrame";
    case kExcludeAllKeyFrames:
      return "ExcludeAllKeyFrames";
    default:
      RTC_NOTREACHED();
      return "Unknown";
  }
}

int TestConfig::NumberOfCores() const {
  return use_single_core ? 1 : CpuInfo::DetectNumberOfCores();
}

VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder,
@@ -201,8 +191,7 @@ void VideoProcessor::Init() {
      << "Failed to register decode complete callback";

  // Initialize the encoder and decoder.
  uint32_t num_cores =
      config_.use_single_core ? 1 : CpuInfo::DetectNumberOfCores();
  int num_cores = config_.NumberOfCores();
  RTC_CHECK_EQ(
      encoder_->InitEncode(&config_.codec_settings, num_cores,
                           config_.networking_config.max_payload_size_in_bytes),
@@ -49,11 +49,11 @@ enum ExcludeFrameTypes {
  kExcludeAllKeyFrames
};

// Returns a string representation of the enum value.
const char* ExcludeFrameTypesToStr(ExcludeFrameTypes e);

// Test configuration for a test run.
struct TestConfig {
  // Returns the number of cores to use.
  int NumberOfCores() const;

  // Plain name of YUV file to process without file extension.
  std::string filename;

@@ -82,6 +82,10 @@ struct TestConfig {
  // If set to false, the maximum number of available cores will be used.
  bool use_single_core = false;

  // Should cpu usage be measured?
  // If set to true, the encoding will run in real-time.
  bool measure_cpu = false;

  // If > 0: forces the encoder to create a keyframe every Nth frame.
  // Note that the encoder may create a keyframe in other locations in addition
  // to this setting. Forcing key frames may also affect encoder planning
@@ -89,9 +93,7 @@ struct TestConfig {
  // produce an expensive key frame.
  int keyframe_interval = 0;

  // The codec settings to use for the test (target bitrate, video size,
  // framerate and so on). This struct should be filled in using the
  // VideoCodingModule::Codec() method.
  // Codec settings to use.
  webrtc::VideoCodec codec_settings;

  // If printing of information to stdout shall be performed during processing.
@@ -126,8 +128,7 @@ struct TestConfig {
// Video Engine, where signaling would request a retransmit of the lost packets,
// since they're so important.
//
// Note this class is not thread safe in any way and is meant for simple testing
// purposes.
// Note this class is not thread safe and is meant for simple testing purposes.
class VideoProcessor {
 public:
  VideoProcessor(webrtc::VideoEncoder* encoder,
@@ -29,6 +29,7 @@
#include "modules/video_coding/include/video_codec_interface.h"
#include "modules/video_coding/include/video_coding.h"
#include "rtc_base/checks.h"
#include "rtc_base/cpu_time.h"
#include "rtc_base/event.h"
#include "rtc_base/file.h"
#include "rtc_base/logging.h"
@@ -77,8 +78,52 @@ int NumberOfTemporalLayers(const VideoCodec& codec_settings) {
  }
}

bool RunEncodeInRealTime(const TestConfig& config) {
  if (config.measure_cpu) {
    return true;
  }
#if defined(WEBRTC_ANDROID)
  // In order to not overwhelm the OpenMAX buffers in the Android MediaCodec.
  return (config.hw_encoder || config.hw_decoder);
#else
  return false;
#endif
}
} // namespace

class VideoProcessorIntegrationTest::CpuProcessTime final {
 public:
  explicit CpuProcessTime(const TestConfig& config) : config_(config) {}
  ~CpuProcessTime() {}

  void Start() {
    if (config_.measure_cpu) {
      cpu_time_ -= rtc::GetProcessCpuTimeNanos();
      wallclock_time_ -= rtc::SystemTimeNanos();
    }
  }
  void Stop() {
    if (config_.measure_cpu) {
      cpu_time_ += rtc::GetProcessCpuTimeNanos();
      wallclock_time_ += rtc::SystemTimeNanos();
    }
  }
  void Print() const {
    if (config_.measure_cpu) {
      printf("CPU usage %%: %f\n", GetUsagePercent() / config_.NumberOfCores());
    }
  }

 private:
  double GetUsagePercent() const {
    return static_cast<double>(cpu_time_) / wallclock_time_ * 100.0;
  }

  const TestConfig config_;
  int64_t cpu_time_ = 0;
  int64_t wallclock_time_ = 0;
};

VideoProcessorIntegrationTest::VideoProcessorIntegrationTest() {
#if defined(WEBRTC_ANDROID)
  InitializeAndroidObjects();
@@ -158,20 +203,18 @@ void VideoProcessorIntegrationTest::ProcessFramesAndMaybeVerify(
                         rate_profiles[rate_update_index].input_fps);
  });

  cpu_process_time_->Start();

  // Process all frames.
  int frame_number = 0;
  const int num_frames = config_.num_frames;
  RTC_DCHECK_GE(num_frames, 1);
  while (frame_number < num_frames) {
    // In order to not overwhelm the OpenMAX buffers in the Android
    // MediaCodec API, we roughly pace the frames here. The downside
    // of this is that the encode run will be done in real-time.
#if defined(WEBRTC_ANDROID)
    if (config_.hw_encoder || config_.hw_decoder) {
    if (RunEncodeInRealTime(config_)) {
      // Roughly pace the frames.
      SleepMs(rtc::kNumMillisecsPerSec /
              rate_profiles[rate_update_index].input_fps);
    }
#endif

    task_queue.PostTask([this] { processor_->ProcessFrame(); });
    ++frame_number;
@@ -193,6 +236,7 @@ void VideoProcessorIntegrationTest::ProcessFramesAndMaybeVerify(
  if (config_.hw_encoder || config_.hw_decoder) {
    SleepMs(1 * rtc::kNumMillisecsPerSec);
  }
  cpu_process_time_->Stop();
  ReleaseAndCloseObjects(&task_queue);

  // Calculate and print rate control statistics.
@@ -238,6 +282,7 @@ void VideoProcessorIntegrationTest::ProcessFramesAndMaybeVerify(
  // Calculate and print other statistics.
  EXPECT_EQ(num_frames, static_cast<int>(stats_.size()));
  stats_.PrintSummary();
  cpu_process_time_->Print();

  // Calculate and print image quality statistics.
  // TODO(marpan): Should compute these quality metrics per SetRates update.
@@ -382,6 +427,7 @@ void VideoProcessorIntegrationTest::SetUpAndInitObjects(
    }
  }

  cpu_process_time_.reset(new CpuProcessTime(config_));
  packet_manipulator_.reset(new PacketManipulatorImpl(
      &packet_reader_, config_.networking_config, config_.verbose));

@@ -115,6 +115,7 @@ class VideoProcessorIntegrationTest : public testing::Test {
  TestConfig config_;

 private:
  class CpuProcessTime;
  static const int kMaxNumTemporalLayers = 3;

  struct TestResults {
@@ -195,6 +196,7 @@ class VideoProcessorIntegrationTest : public testing::Test {
  std::unique_ptr<PacketManipulator> packet_manipulator_;
  Stats stats_;
  std::unique_ptr<VideoProcessor> processor_;
  std::unique_ptr<CpuProcessTime> cpu_process_time_;

  // Quantities updated for every encoded frame.
  TestResults actual_;
@@ -32,6 +32,7 @@ const bool kFrameDropperOn = false;

// Test settings.
const bool kUseSingleCore = false;
const bool kMeasureCpu = false;
const VisualizationParams kVisualizationParams = {
    false, // save_encoded_ivf
    false, // save_decoded_y4m
@@ -62,6 +63,7 @@ class VideoProcessorIntegrationTestParameterized
    config_.output_filename =
        TempFilename(OutputPath(), "plot_videoprocessor_integrationtest");
    config_.use_single_core = kUseSingleCore;
    config_.measure_cpu = kMeasureCpu;
    config_.verbose = true;
    config_.hw_encoder = hw_codec_;
    config_.hw_decoder = hw_codec_;