VideoProcessorIntegrationTest: make it runnable on a task queue.

* First do all frame processing, then do all rate control
  statistics calculations. This means that we only need to
  synchronize once.
* Run the VideoProcessor on a task queue, thus supporting Android
  HW codecs.
* Add some unit tests for the VideoProcessor.

BUG=webrtc:6634

Review-Url: https://codereview.webrtc.org/2997283002
Cr-Commit-Position: refs/heads/master@{#19604}
This commit is contained in:
brandtr
2017-08-30 06:29:51 -07:00
committed by Commit Bot
parent cd8b079afb
commit b57f42676e
5 changed files with 299 additions and 163 deletions

View File

@ -409,6 +409,8 @@ if (rtc_include_tests) {
"../..:webrtc_common",
"../../media:rtc_media",
"../../rtc_base:rtc_base_approved",
"../../rtc_base:rtc_task_queue",
"../../system_wrappers:system_wrappers",
"../../test:test_support",
"../../test:video_test_common",
"../../test:video_test_support",

View File

@ -155,8 +155,7 @@ VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder,
first_key_frame_has_been_excluded_(false),
last_decoded_frame_buffer_(analysis_frame_reader->FrameLength()),
stats_(stats),
num_dropped_frames_(0),
num_spatial_resizes_(0) {
rate_update_index_(-1) {
RTC_DCHECK(encoder);
RTC_DCHECK(decoder);
RTC_DCHECK(packet_manipulator);
@ -236,9 +235,15 @@ void VideoProcessor::ProcessFrame(int frame_number) {
rtc::scoped_refptr<I420BufferInterface> buffer(
analysis_frame_reader_->ReadFrame());
RTC_CHECK(buffer) << "Tried to read too many frames from the file.";
// Use the frame number as the basis for timestamp to identify frames. Let the
// first timestamp be non-zero, to not make the IvfFileWriter believe that we
// want to use capture timestamps in the IVF files.
const uint32_t rtp_timestamp = (frame_number + 1) * kRtpClockRateHz /
config_.codec_settings.maxFramerate;
rtp_timestamp_to_frame_num_[rtp_timestamp] = frame_number;
const int64_t kNoRenderTime = 0;
VideoFrame source_frame(buffer, FrameNumberToTimestamp(frame_number),
kNoRenderTime, webrtc::kVideoRotation_0);
VideoFrame source_frame(buffer, rtp_timestamp, kNoRenderTime,
webrtc::kVideoRotation_0);
// Decide if we are going to force a keyframe.
std::vector<FrameType> frame_types(1, kVideoFrameDelta);
@ -269,23 +274,23 @@ void VideoProcessor::ProcessFrame(int frame_number) {
void VideoProcessor::SetRates(int bitrate_kbps, int framerate_fps) {
RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
config_.codec_settings.maxFramerate = framerate_fps;
int set_rates_result = encoder_->SetRateAllocation(
bitrate_allocator_->GetAllocation(bitrate_kbps * 1000, framerate_fps),
framerate_fps);
RTC_DCHECK_GE(set_rates_result, 0)
<< "Failed to update encoder with new rate " << bitrate_kbps << ".";
num_dropped_frames_ = 0;
num_spatial_resizes_ = 0;
++rate_update_index_;
num_dropped_frames_.push_back(0);
num_spatial_resizes_.push_back(0);
}
int VideoProcessor::NumberDroppedFrames() {
std::vector<int> VideoProcessor::NumberDroppedFramesPerRateUpdate() const {
RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
return num_dropped_frames_;
}
int VideoProcessor::NumberSpatialResizes() {
std::vector<int> VideoProcessor::NumberSpatialResizesPerRateUpdate() const {
RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
return num_spatial_resizes_;
}
@ -302,16 +307,17 @@ void VideoProcessor::FrameEncoded(webrtc::VideoCodecType codec,
RTC_CHECK(encoded_frame_writer_->WriteFrame(encoded_image, codec));
}
// Timestamp is proportional to frame number, so this gives us number of
// dropped frames.
int frame_number = TimestampToFrameNumber(encoded_image._timeStamp);
// Check for dropped frames.
const int frame_number =
rtp_timestamp_to_frame_num_[encoded_image._timeStamp];
bool last_frame_missing = false;
if (frame_number > 0) {
RTC_DCHECK_GE(last_encoded_frame_num_, 0);
int num_dropped_from_last_encode =
frame_number - last_encoded_frame_num_ - 1;
RTC_DCHECK_GE(num_dropped_from_last_encode, 0);
num_dropped_frames_ += num_dropped_from_last_encode;
RTC_CHECK_GE(rate_update_index_, 0);
num_dropped_frames_[rate_update_index_] += num_dropped_from_last_encode;
if (num_dropped_from_last_encode > 0) {
// For dropped frames, we write out the last decoded frame to avoid
// getting out of sync for the computation of PSNR and SSIM.
@ -328,7 +334,6 @@ void VideoProcessor::FrameEncoded(webrtc::VideoCodecType codec,
}
}
}
last_frame_missing =
(frame_infos_[last_encoded_frame_num_].manipulated_length == 0);
}
@ -336,7 +341,7 @@ void VideoProcessor::FrameEncoded(webrtc::VideoCodecType codec,
RTC_CHECK_GT(frame_number, last_encoded_frame_num_);
last_encoded_frame_num_ = frame_number;
// Frame is not dropped, so update frame information and statistics.
// Update frame information and statistics.
VerifyQpParser(encoded_image, config_);
RTC_CHECK_LT(frame_number, frame_infos_.size());
FrameInfo* frame_info = &frame_infos_[frame_number];
@ -420,8 +425,8 @@ void VideoProcessor::FrameDecoded(const VideoFrame& image) {
int64_t decode_stop_ns = rtc::TimeNanos();
// Update frame information and statistics.
int frame_number = TimestampToFrameNumber(image.timestamp());
RTC_DCHECK_LT(frame_number, frame_infos_.size());
const int frame_number = rtp_timestamp_to_frame_num_[image.timestamp()];
RTC_CHECK_LT(frame_number, frame_infos_.size());
FrameInfo* frame_info = &frame_infos_[frame_number];
frame_info->decoded_width = image.width();
frame_info->decoded_height = image.height();
@ -432,14 +437,15 @@ void VideoProcessor::FrameDecoded(const VideoFrame& image) {
// Check if the codecs have resized the frame since previously decoded frame.
if (frame_number > 0) {
RTC_DCHECK_GE(last_decoded_frame_num_, 0);
RTC_CHECK_GE(last_decoded_frame_num_, 0);
const FrameInfo& last_decoded_frame_info =
frame_infos_[last_decoded_frame_num_];
if (static_cast<int>(image.width()) !=
last_decoded_frame_info.decoded_width ||
static_cast<int>(image.height()) !=
last_decoded_frame_info.decoded_height) {
++num_spatial_resizes_;
RTC_CHECK_GE(rate_update_index_, 0);
++num_spatial_resizes_[rate_update_index_];
}
}
// Ensure strict monotonicity.
@ -482,24 +488,5 @@ void VideoProcessor::FrameDecoded(const VideoFrame& image) {
last_decoded_frame_buffer_ = std::move(extracted_buffer);
}
// Maps a zero-based frame number to an RTP timestamp. The first frame gets a
// non-zero timestamp ((frame_number + 1) * ticks_per_frame) so that the
// IvfFileWriter does not believe we want to use capture timestamps in the
// IVF files.
uint32_t VideoProcessor::FrameNumberToTimestamp(int frame_number) const {
RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
RTC_DCHECK_GE(frame_number, 0);
// NOTE(review): integer division — assumes kRtpClockRateHz is evenly
// divisible by maxFramerate; otherwise the mapping is inexact. Confirm.
const int ticks_per_frame =
kRtpClockRateHz / config_.codec_settings.maxFramerate;
return (frame_number + 1) * ticks_per_frame;
}
// Inverse of FrameNumberToTimestamp(): maps an RTP timestamp back to its
// zero-based frame number. Valid only while the framerate is unchanged,
// since ticks_per_frame is derived from the current maxFramerate.
int VideoProcessor::TimestampToFrameNumber(uint32_t timestamp) const {
RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
// Timestamps start at one frame interval, never zero.
RTC_DCHECK_GT(timestamp, 0);
const int ticks_per_frame =
kRtpClockRateHz / config_.codec_settings.maxFramerate;
// Any remainder means the timestamp was produced under a different
// framerate than the current one.
RTC_DCHECK_EQ(timestamp % ticks_per_frame, 0);
return (timestamp / ticks_per_frame) - 1;
}
} // namespace test
} // namespace webrtc

View File

@ -11,6 +11,7 @@
#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_VIDEOPROCESSOR_H_
#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_VIDEOPROCESSOR_H_
#include <map>
#include <memory>
#include <string>
#include <vector>
@ -159,11 +160,11 @@ class VideoProcessor {
// Updates the encoder with target rates. Must be called at least once.
void SetRates(int bitrate_kbps, int framerate_fps);
// Return the number of dropped frames.
int NumberDroppedFrames();
// Returns the number of dropped frames.
std::vector<int> NumberDroppedFramesPerRateUpdate() const;
// Return the number of spatial resizes.
int NumberSpatialResizes();
// Returns the number of spatial resizes.
std::vector<int> NumberSpatialResizesPerRateUpdate() const;
private:
// Container that holds per-frame information that needs to be stored between
@ -275,14 +276,7 @@ class VideoProcessor {
// Invoked by the callback adapter when a frame has completed decoding.
void FrameDecoded(const webrtc::VideoFrame& image);
// Use the frame number as the basis for timestamp to identify frames. Let the
// first timestamp be non-zero, to not make the IvfFileWriter believe that we
// want to use capture timestamps in the IVF files.
uint32_t FrameNumberToTimestamp(int frame_number) const;
int TimestampToFrameNumber(uint32_t timestamp) const;
bool initialized_ GUARDED_BY(sequence_checker_);
TestConfig config_ GUARDED_BY(sequence_checker_);
webrtc::VideoEncoder* const encoder_;
@ -315,6 +309,11 @@ class VideoProcessor {
int last_encoded_frame_num_ GUARDED_BY(sequence_checker_);
int last_decoded_frame_num_ GUARDED_BY(sequence_checker_);
// Store an RTP timestamp -> frame number map, since the timestamps are
// based off of the frame rate, which can change mid-test.
std::map<uint32_t, int> rtp_timestamp_to_frame_num_
GUARDED_BY(sequence_checker_);
// Keep track of if we have excluded the first key frame from packet loss.
bool first_key_frame_has_been_excluded_ GUARDED_BY(sequence_checker_);
@ -324,8 +323,9 @@ class VideoProcessor {
// Statistics.
Stats* stats_;
int num_dropped_frames_ GUARDED_BY(sequence_checker_);
int num_spatial_resizes_ GUARDED_BY(sequence_checker_);
std::vector<int> num_dropped_frames_ GUARDED_BY(sequence_checker_);
std::vector<int> num_spatial_resizes_ GUARDED_BY(sequence_checker_);
int rate_update_index_ GUARDED_BY(sequence_checker_);
rtc::SequencedTaskChecker sequence_checker_;

View File

@ -38,9 +38,11 @@
#include "webrtc/modules/video_coding/include/video_coding.h"
#include "webrtc/modules/video_coding/utility/ivf_file_writer.h"
#include "webrtc/rtc_base/checks.h"
#include "webrtc/rtc_base/event.h"
#include "webrtc/rtc_base/file.h"
#include "webrtc/rtc_base/logging.h"
#include "webrtc/rtc_base/ptr_util.h"
#include "webrtc/system_wrappers/include/sleep.h"
#include "webrtc/test/gtest.h"
#include "webrtc/test/testsupport/fileutils.h"
#include "webrtc/test/testsupport/frame_reader.h"
@ -182,9 +184,10 @@ class VideoProcessorIntegrationTest : public testing::Test {
decoder_factory_->DestroyVideoDecoder(decoder_);
}
void SetUpObjects(const VisualizationParams* visualization_params,
const int initial_bitrate_kbps,
const int initial_framerate_fps) {
void SetUpAndInitObjects(rtc::TaskQueue* task_queue,
const int initial_bitrate_kbps,
const int initial_framerate_fps,
const VisualizationParams* visualization_params) {
CreateEncoderAndDecoder();
// Create file objects for quality analysis.
@ -223,54 +226,59 @@ class VideoProcessorIntegrationTest : public testing::Test {
packet_manipulator_.reset(new PacketManipulatorImpl(
&packet_reader_, config_.networking_config, config_.verbose));
processor_ = rtc::MakeUnique<VideoProcessor>(
encoder_, decoder_, analysis_frame_reader_.get(),
analysis_frame_writer_.get(), packet_manipulator_.get(), config_,
&stats_, encoded_frame_writer_.get(), decoded_frame_writer_.get());
processor_->Init();
config_.codec_settings.minBitrate = 0;
config_.codec_settings.startBitrate = initial_bitrate_kbps;
config_.codec_settings.maxFramerate = initial_framerate_fps;
rtc::Event sync_event(false, false);
task_queue->PostTask([this, &sync_event]() {
processor_ = rtc::MakeUnique<VideoProcessor>(
encoder_, decoder_, analysis_frame_reader_.get(),
analysis_frame_writer_.get(), packet_manipulator_.get(), config_,
&stats_, encoded_frame_writer_.get(), decoded_frame_writer_.get());
processor_->Init();
sync_event.Set();
});
sync_event.Wait(rtc::Event::kForever);
}
// Reset quantities after each encoder update, update the target per-frame
// bandwidth.
void ResetRateControlMetrics(int num_frames_to_hit_target) {
const int num_temporal_layers =
NumberOfTemporalLayers(config_.codec_settings);
for (int i = 0; i < num_temporal_layers; i++) {
num_frames_per_update_[i] = 0;
sum_frame_size_mismatch_[i] = 0.0f;
sum_encoded_frame_size_[i] = 0.0f;
encoding_bitrate_[i] = 0.0f;
// Update layer per-frame-bandwidth.
per_frame_bandwidth_[i] = static_cast<float>(bitrate_layer_[i]) /
static_cast<float>(framerate_layer_[i]);
void ReleaseAndCloseObjects(rtc::TaskQueue* task_queue) {
rtc::Event sync_event(false, false);
task_queue->PostTask([this, &sync_event]() {
processor_->Release();
sync_event.Set();
});
sync_event.Wait(rtc::Event::kForever);
// The VideoProcessor must be ::Release()'d before we destroy the codecs.
DestroyEncoderAndDecoder();
// Close the analysis files before we use them for SSIM/PSNR calculations.
analysis_frame_reader_->Close();
analysis_frame_writer_->Close();
// Close visualization files.
if (encoded_frame_writer_) {
EXPECT_TRUE(encoded_frame_writer_->Close());
}
if (decoded_frame_writer_) {
decoded_frame_writer_->Close();
}
// Set maximum size of key frames, following setting in the VP8 wrapper.
float max_key_size = kScaleKeyFrameSize * kOptimalBufferSize * framerate_;
// We don't know exact target size of the key frames (except for first one),
// but the minimum in libvpx is ~|3 * per_frame_bandwidth| and maximum is
// set by |max_key_size_ * per_frame_bandwidth|. Take middle point/average
// as reference for mismatch. Note key frames always correspond to base
// layer frame in this test.
target_size_key_frame_ = 0.5 * (3 + max_key_size) * per_frame_bandwidth_[0];
num_frames_total_ = 0;
sum_encoded_frame_size_total_ = 0.0f;
encoding_bitrate_total_ = 0.0f;
perc_encoding_rate_mismatch_ = 0.0f;
num_frames_to_hit_target_ = num_frames_to_hit_target;
encoding_rate_within_target_ = false;
sum_key_frame_size_mismatch_ = 0.0;
num_key_frames_ = 0;
}
// For every encoded frame, update the rate control metrics.
void UpdateRateControlMetrics(int frame_number) {
RTC_CHECK_GE(frame_number, 0);
const int tl_idx = TemporalLayerIndexForFrame(frame_number);
++num_frames_per_update_[tl_idx];
++num_frames_total_;
FrameType frame_type = stats_.stats_[frame_number].frame_type;
float encoded_size_kbits =
stats_.stats_[frame_number].encoded_frame_length_in_bytes * 8.0f /
1000.0f;
const int tl_idx = TemporalLayerIndexForFrame(frame_number);
// Update layer data.
// Update rate mismatch relative to per-frame bandwidth for delta frames.
@ -308,9 +316,9 @@ class VideoProcessorIntegrationTest : public testing::Test {
// Verify expected behavior of rate control and print out data.
void PrintAndMaybeVerifyRateControlMetrics(
int rate_update_index,
const std::vector<RateControlThresholds>* rc_thresholds) {
int num_dropped_frames = processor_->NumberDroppedFrames();
int num_resize_actions = processor_->NumberSpatialResizes();
const std::vector<RateControlThresholds>* rc_thresholds,
const std::vector<int>& num_dropped_frames,
const std::vector<int>& num_resize_actions) {
printf(
"Rate update #%d:\n"
" Target bitrate : %d\n"
@ -322,8 +330,9 @@ class VideoProcessorIntegrationTest : public testing::Test {
" # frames to convergence: %d\n"
" # dropped frames : %d\n"
" # spatial resizes : %d\n",
num_frames_total_, num_frames_to_hit_target_, num_dropped_frames,
num_resize_actions);
num_frames_total_, num_frames_to_hit_target_,
num_dropped_frames[rate_update_index],
num_resize_actions[rate_update_index]);
const RateControlThresholds* rc_threshold = nullptr;
if (rc_thresholds) {
@ -376,8 +385,10 @@ class VideoProcessorIntegrationTest : public testing::Test {
if (rc_threshold) {
EXPECT_LE(num_frames_to_hit_target_, rc_threshold->max_time_hit_target);
EXPECT_LE(num_dropped_frames, rc_threshold->max_num_dropped_frames);
EXPECT_EQ(rc_threshold->num_spatial_resizes, num_resize_actions);
EXPECT_LE(num_dropped_frames[rate_update_index],
rc_threshold->max_num_dropped_frames);
EXPECT_EQ(rc_threshold->num_spatial_resizes,
num_resize_actions[rate_update_index]);
EXPECT_EQ(rc_threshold->num_key_frames, num_key_frames_);
}
}
@ -434,8 +445,12 @@ class VideoProcessorIntegrationTest : public testing::Test {
return tl_idx;
}
// Set the bit rate and frame rate per temporal layer, for up to 3 layers.
void SetTemporalLayerRates() {
// Reset quantities before each encoder rate update.
void ResetRateControlMetrics(int rate_update_index,
const RateProfile& rate_profile) {
// Set new rates.
bitrate_kbps_ = rate_profile.target_bit_rate[rate_update_index];
framerate_ = rate_profile.input_frame_rate[rate_update_index];
const int num_temporal_layers =
NumberOfTemporalLayers(config_.codec_settings);
RTC_DCHECK_LE(num_temporal_layers, kMaxNumTemporalLayers);
@ -455,6 +470,38 @@ class VideoProcessorIntegrationTest : public testing::Test {
if (num_temporal_layers == 3) {
framerate_layer_[2] = framerate_ / 2.0f;
}
if (rate_update_index == 0) {
target_size_key_frame_initial_ =
0.5 * kInitialBufferSize * bitrate_layer_[0];
}
// Reset rate control metrics.
for (int i = 0; i < num_temporal_layers; i++) {
num_frames_per_update_[i] = 0;
sum_frame_size_mismatch_[i] = 0.0f;
sum_encoded_frame_size_[i] = 0.0f;
encoding_bitrate_[i] = 0.0f;
// Update layer per-frame-bandwidth.
per_frame_bandwidth_[i] = static_cast<float>(bitrate_layer_[i]) /
static_cast<float>(framerate_layer_[i]);
}
// Set maximum size of key frames, following setting in the VP8 wrapper.
float max_key_size = kScaleKeyFrameSize * kOptimalBufferSize * framerate_;
// We don't know exact target size of the key frames (except for first one),
// but the minimum in libvpx is ~|3 * per_frame_bandwidth| and maximum is
set by |max_key_size * per_frame_bandwidth|. Take middle point/average
// as reference for mismatch. Note key frames always correspond to base
// layer frame in this test.
target_size_key_frame_ = 0.5 * (3 + max_key_size) * per_frame_bandwidth_[0];
num_frames_total_ = 0;
sum_encoded_frame_size_total_ = 0.0f;
encoding_bitrate_total_ = 0.0f;
perc_encoding_rate_mismatch_ = 0.0f;
num_frames_to_hit_target_ =
rate_profile.frame_index_rate_update[rate_update_index + 1];
encoding_rate_within_target_ = false;
sum_key_frame_size_mismatch_ = 0.0;
num_key_frames_ = 0;
}
// Processes all frames in the clip and verifies the result.
@ -463,73 +510,95 @@ class VideoProcessorIntegrationTest : public testing::Test {
const std::vector<RateControlThresholds>* rc_thresholds,
const QualityThresholds* quality_thresholds,
const VisualizationParams* visualization_params) {
config_.codec_settings.startBitrate = rate_profile.target_bit_rate[0];
SetUpObjects(visualization_params, rate_profile.target_bit_rate[0],
rate_profile.input_frame_rate[0]);
// The Android HW codec needs to be run on a task queue, so we simply always
// run the test on a task queue.
rtc::TaskQueue task_queue("VidProc TQ");
rtc::Event sync_event(false, false);
SetUpAndInitObjects(&task_queue, rate_profile.target_bit_rate[0],
rate_profile.input_frame_rate[0], visualization_params);
// Set initial rates.
bitrate_kbps_ = rate_profile.target_bit_rate[0];
framerate_ = rate_profile.input_frame_rate[0];
SetTemporalLayerRates();
// Set the initial target size for key frame.
target_size_key_frame_initial_ =
0.5 * kInitialBufferSize * bitrate_layer_[0];
processor_->SetRates(bitrate_kbps_, framerate_);
int rate_update_index = 0;
task_queue.PostTask([this, &rate_profile, rate_update_index] {
processor_->SetRates(rate_profile.target_bit_rate[rate_update_index],
rate_profile.input_frame_rate[rate_update_index]);
});
// Process each frame, up to |num_frames|.
// Process all frames.
int frame_number = 0;
int update_index = 0;
int num_frames = rate_profile.num_frames;
ResetRateControlMetrics(
rate_profile.frame_index_rate_update[update_index + 1]);
const int num_frames = rate_profile.num_frames;
RTC_DCHECK_GE(num_frames, 1);
while (frame_number < num_frames) {
// In order to not overwhelm the OpenMAX buffers in the Android
// MediaCodec API, we roughly pace the frames here. The downside
// of this is that the encode run will be done in real-time.
// TODO(brandtr): Investigate if this is needed on iOS.
if (config_.hw_codec) {
SleepMs(rtc::kNumMillisecsPerSec /
rate_profile.input_frame_rate[rate_update_index]);
}
task_queue.PostTask(
[this, frame_number] { processor_->ProcessFrame(frame_number); });
++frame_number;
if (frame_number ==
rate_profile.frame_index_rate_update[rate_update_index + 1]) {
++rate_update_index;
task_queue.PostTask([this, &rate_profile, rate_update_index] {
processor_->SetRates(
rate_profile.target_bit_rate[rate_update_index],
rate_profile.input_frame_rate[rate_update_index]);
});
}
}
// Give the VideoProcessor pipeline some time to process the last frame,
// and then release the codecs.
if (config_.hw_codec) {
SleepMs(1 * rtc::kNumMillisecsPerSec);
}
ReleaseAndCloseObjects(&task_queue);
// Calculate and print rate control statistics.
rate_update_index = 0;
frame_number = 0;
ResetRateControlMetrics(rate_update_index, rate_profile);
std::vector<int> num_dropped_frames;
std::vector<int> num_resize_actions;
sync_event.Reset();
task_queue.PostTask(
[this, &num_dropped_frames, &num_resize_actions, &sync_event]() {
num_dropped_frames = processor_->NumberDroppedFramesPerRateUpdate();
num_resize_actions = processor_->NumberSpatialResizesPerRateUpdate();
sync_event.Set();
});
sync_event.Wait(rtc::Event::kForever);
while (frame_number < num_frames) {
processor_->ProcessFrame(frame_number);
const int tl_idx = TemporalLayerIndexForFrame(frame_number);
++num_frames_per_update_[tl_idx];
++num_frames_total_;
UpdateRateControlMetrics(frame_number);
++frame_number;
// If we hit another/next update, verify stats for current state and
// update layers and codec with new rates.
if (frame_number ==
rate_profile.frame_index_rate_update[update_index + 1]) {
PrintAndMaybeVerifyRateControlMetrics(update_index, rc_thresholds);
// Update layer rates and the codec with new rates.
++update_index;
bitrate_kbps_ = rate_profile.target_bit_rate[update_index];
framerate_ = rate_profile.input_frame_rate[update_index];
SetTemporalLayerRates();
ResetRateControlMetrics(
rate_profile.frame_index_rate_update[update_index + 1]);
processor_->SetRates(bitrate_kbps_, framerate_);
rate_profile.frame_index_rate_update[rate_update_index + 1]) {
PrintAndMaybeVerifyRateControlMetrics(rate_update_index, rc_thresholds,
num_dropped_frames,
num_resize_actions);
++rate_update_index;
ResetRateControlMetrics(rate_update_index, rate_profile);
}
}
PrintAndMaybeVerifyRateControlMetrics(rate_update_index, rc_thresholds,
num_dropped_frames,
num_resize_actions);
// Verify rate control metrics for all frames since the last rate update.
PrintAndMaybeVerifyRateControlMetrics(update_index, rc_thresholds);
EXPECT_EQ(num_frames, frame_number);
// Calculate and print other statistics.
EXPECT_EQ(num_frames, static_cast<int>(stats_.stats_.size()));
stats_.PrintSummary();
// Release encoder and decoder to make sure they have finished processing.
processor_->Release();
DestroyEncoderAndDecoder();
// Close the analysis files before we use them for SSIM/PSNR calculations.
analysis_frame_reader_->Close();
analysis_frame_writer_->Close();
// Close visualization files.
if (encoded_frame_writer_) {
EXPECT_TRUE(encoded_frame_writer_->Close());
}
if (decoded_frame_writer_) {
decoded_frame_writer_->Close();
}
// Calculate and print image quality statistics.
// TODO(marpan): Should compute these quality metrics per SetRates update.
QualityMetricsResult psnr_result, ssim_result;
EXPECT_EQ(0, I420MetricsFromFiles(config_.input_filename.c_str(),
@ -540,7 +609,6 @@ class VideoProcessorIntegrationTest : public testing::Test {
if (quality_thresholds) {
VerifyQuality(psnr_result, ssim_result, *quality_thresholds);
}
stats_.PrintSummary();
printf("PSNR avg: %f, min: %f\nSSIM avg: %f, min: %f\n",
psnr_result.average, psnr_result.min, ssim_result.average,
ssim_result.min);

View File

@ -11,6 +11,7 @@
#include <memory>
#include "webrtc/api/video/i420_buffer.h"
#include "webrtc/common_types.h"
#include "webrtc/modules/video_coding/codecs/test/mock/mock_packet_manipulator.h"
#include "webrtc/modules/video_coding/codecs/test/videoprocessor.h"
#include "webrtc/modules/video_coding/include/mock/mock_video_codec_interface.h"
@ -27,6 +28,8 @@
using ::testing::_;
using ::testing::AtLeast;
using ::testing::ElementsAre;
using ::testing::Property;
using ::testing::Return;
namespace webrtc {
@ -37,7 +40,6 @@ namespace {
const int kWidth = 352;
const int kHeight = 288;
const int kFrameSize = kWidth * kHeight * 3 / 2; // I420.
const int kFramerate = 30;
const int kNumFrames = 2;
} // namespace
@ -49,7 +51,6 @@ class VideoProcessorTest : public testing::Test {
webrtc::test::CodecSettings(kVideoCodecVP8, &config_.codec_settings);
config_.codec_settings.width = kWidth;
config_.codec_settings.height = kHeight;
config_.codec_settings.maxFramerate = kFramerate;
EXPECT_CALL(frame_reader_mock_, NumberOfFrames())
.WillRepeatedly(Return(kNumFrames));
@ -63,11 +64,16 @@ class VideoProcessorTest : public testing::Test {
void ExpectInit() {
EXPECT_CALL(encoder_mock_, InitEncode(_, _, _)).Times(1);
EXPECT_CALL(encoder_mock_, RegisterEncodeCompleteCallback(_))
.Times(AtLeast(1));
EXPECT_CALL(encoder_mock_, RegisterEncodeCompleteCallback(_)).Times(1);
EXPECT_CALL(decoder_mock_, InitDecode(_, _)).Times(1);
EXPECT_CALL(decoder_mock_, RegisterDecodeCompleteCallback(_))
.Times(AtLeast(1));
EXPECT_CALL(decoder_mock_, RegisterDecodeCompleteCallback(_)).Times(1);
}
// Sets gMock expectations for one VideoProcessor::Release() call: both the
// encoder and decoder mocks must have Release() and their respective
// Register*CompleteCallback(_) invoked exactly once.
void ExpectRelease() {
EXPECT_CALL(encoder_mock_, Release()).Times(1);
EXPECT_CALL(encoder_mock_, RegisterEncodeCompleteCallback(_)).Times(1);
EXPECT_CALL(decoder_mock_, Release()).Times(1);
EXPECT_CALL(decoder_mock_, RegisterDecodeCompleteCallback(_)).Times(1);
}
TestConfig config_;
@ -81,28 +87,101 @@ class VideoProcessorTest : public testing::Test {
std::unique_ptr<VideoProcessor> video_processor_;
};
TEST_F(VideoProcessorTest, Init) {
// Smoke test: Init() followed by Release() triggers exactly the codec calls
// declared in ExpectInit()/ExpectRelease(), with no frame processing.
TEST_F(VideoProcessorTest, InitRelease) {
ExpectInit();
video_processor_->Init();
ExpectRelease();
video_processor_->Release();
}
TEST_F(VideoProcessorTest, ProcessFrames) {
TEST_F(VideoProcessorTest, ProcessFrames_FixedFramerate) {
ExpectInit();
video_processor_->Init();
const int kBitrateKbps = 456;
const int kFramerateFps = 31;
video_processor_->SetRates(kBitrateKbps, kFramerateFps);
EXPECT_CALL(frame_reader_mock_, ReadFrame())
.WillRepeatedly(Return(I420Buffer::Create(kWidth, kHeight)));
EXPECT_CALL(encoder_mock_, Encode(testing::Property(&VideoFrame::timestamp,
1 * 90000 / kFramerate),
EXPECT_CALL(
encoder_mock_,
Encode(Property(&VideoFrame::timestamp, 1 * 90000 / kFramerateFps), _, _))
.Times(1);
video_processor_->ProcessFrame(0);
EXPECT_CALL(
encoder_mock_,
Encode(Property(&VideoFrame::timestamp, 2 * 90000 / kFramerateFps), _, _))
.Times(1);
video_processor_->ProcessFrame(1);
ExpectRelease();
video_processor_->Release();
}
TEST_F(VideoProcessorTest, ProcessFrames_VariableFramerate) {
ExpectInit();
video_processor_->Init();
const int kBitrateKbps = 456;
const int kStartFramerateFps = 27;
video_processor_->SetRates(kBitrateKbps, kStartFramerateFps);
EXPECT_CALL(frame_reader_mock_, ReadFrame())
.WillRepeatedly(Return(I420Buffer::Create(kWidth, kHeight)));
EXPECT_CALL(encoder_mock_, Encode(Property(&VideoFrame::timestamp,
1 * 90000 / kStartFramerateFps),
_, _))
.Times(1);
video_processor_->ProcessFrame(0);
EXPECT_CALL(encoder_mock_, Encode(testing::Property(&VideoFrame::timestamp,
2 * 90000 / kFramerate),
const int kNewFramerateFps = 13;
video_processor_->SetRates(kBitrateKbps, kNewFramerateFps);
EXPECT_CALL(encoder_mock_, Encode(Property(&VideoFrame::timestamp,
2 * 90000 / kNewFramerateFps),
_, _))
.Times(1);
video_processor_->ProcessFrame(1);
ExpectRelease();
video_processor_->Release();
}
// Verifies that each SetRates() call forwards the bitrate (as a
// BitrateAllocation summing to the requested kbps) and framerate to the
// encoder, and that every call appends a fresh zeroed entry to the
// per-rate-update dropped-frames and spatial-resizes counters.
TEST_F(VideoProcessorTest, SetRates) {
ExpectInit();
video_processor_->Init();
const int kBitrateKbps = 123;
const int kFramerateFps = 17;
EXPECT_CALL(encoder_mock_,
SetRateAllocation(
Property(&BitrateAllocation::get_sum_kbps, kBitrateKbps),
kFramerateFps))
.Times(1);
video_processor_->SetRates(kBitrateKbps, kFramerateFps);
// First rate update: one counter slot, initialized to zero.
EXPECT_THAT(video_processor_->NumberDroppedFramesPerRateUpdate(),
ElementsAre(0));
EXPECT_THAT(video_processor_->NumberSpatialResizesPerRateUpdate(),
ElementsAre(0));
const int kNewBitrateKbps = 456;
const int kNewFramerateFps = 34;
EXPECT_CALL(encoder_mock_,
SetRateAllocation(
Property(&BitrateAllocation::get_sum_kbps, kNewBitrateKbps),
kNewFramerateFps))
.Times(1);
video_processor_->SetRates(kNewBitrateKbps, kNewFramerateFps);
// Second rate update: a second zeroed slot is appended.
EXPECT_THAT(video_processor_->NumberDroppedFramesPerRateUpdate(),
ElementsAre(0, 0));
EXPECT_THAT(video_processor_->NumberSpatialResizesPerRateUpdate(),
ElementsAre(0, 0));
ExpectRelease();
video_processor_->Release();
}
} // namespace test