Minor improvements to TestConfig and VideoProcessor.

* Do not simulate a freeze in the decoded output file when frames have been dropped.
* Add more DCHECKs and consts.
* Remove unused members |num_encoded_frames_| and |num_decoded_frames_|.
* Move SdpVideoFormat conversion to TestConfig (usage sketch below).
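
A minimal usage sketch, not part of this CL's diff: the CreateEncoderForTest() helper is hypothetical and only mirrors the updated call site in videoprocessor_integrationtest.cc (config_.ToSdpVideoFormat() in the diff below); the api/video_codecs include paths for VideoEncoder/VideoEncoderFactory are assumed.

  #include <memory>

  #include "api/video_codecs/sdp_video_format.h"
  #include "api/video_codecs/video_encoder.h"          // Assumed path.
  #include "api/video_codecs/video_encoder_factory.h"  // Assumed path.
  #include "modules/video_coding/codecs/test/test_config.h"

  namespace webrtc {
  namespace test {

  // Hypothetical helper: after this change the SdpVideoFormat is derived from
  // the codec settings stored in TestConfig, rather than from a local
  // CreateSdpVideoFormat() function inside the integration test.
  std::unique_ptr<VideoEncoder> CreateEncoderForTest(
      VideoEncoderFactory* encoder_factory,
      const TestConfig& config) {
    const SdpVideoFormat format = config.ToSdpVideoFormat();
    return encoder_factory->CreateVideoEncoder(format);
  }

  }  // namespace test
  }  // namespace webrtc

Keeping the conversion on TestConfig avoids duplicating the codec-type switch in every test harness that needs an SdpVideoFormat.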

Bug: webrtc:8448
Change-Id: Ia879141f36dc23427cd1abcaa66716656fbaac2a
Reviewed-on: https://webrtc-review.googlesource.com/56802
Commit-Queue: Rasmus Brandt <brandtr@webrtc.org>
Reviewed-by: Sergey Silkin <ssilkin@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22239}
commit 5f7a891257 (parent 1f4cb9f22d)
Author: Rasmus Brandt, 2018-02-28 17:17:15 +01:00
Committed by: Commit Bot
8 changed files with 92 additions and 131 deletions

@@ -12,6 +12,8 @@
#include <sstream>
#include "media/base/h264_profile_level_id.h"
#include "media/base/mediaconstants.h"
#include "media/engine/simulcast.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "rtc_base/checks.h"
@@ -22,12 +24,32 @@ namespace webrtc {
namespace test {
namespace {
const int kBaseKeyFrameInterval = 3000;
const int kMaxBitrateBps = 5000 * 1000; // From kSimulcastFormats.
const int kMaxFramerateFps = 30;
const int kMaxQp = 56;
std::string CodecSpecificToString(const webrtc::VideoCodec& codec) {
void ConfigureSimulcast(VideoCodec* codec_settings) {
const std::vector<webrtc::VideoStream> streams = cricket::GetSimulcastConfig(
codec_settings->numberOfSimulcastStreams, codec_settings->width,
codec_settings->height, kMaxBitrateBps, kMaxQp, kMaxFramerateFps, false);
for (size_t i = 0; i < streams.size(); ++i) {
SimulcastStream* ss = &codec_settings->simulcastStream[i];
ss->width = static_cast<uint16_t>(streams[i].width);
ss->height = static_cast<uint16_t>(streams[i].height);
ss->numberOfTemporalLayers = static_cast<unsigned char>(
streams[i].temporal_layer_thresholds_bps.size() + 1);
ss->maxBitrate = streams[i].max_bitrate_bps / 1000;
ss->targetBitrate = streams[i].target_bitrate_bps / 1000;
ss->minBitrate = streams[i].min_bitrate_bps / 1000;
ss->qpMax = streams[i].max_qp;
ss->active = true;
}
}
std::string CodecSpecificToString(const VideoCodec& codec) {
std::stringstream ss;
switch (codec.codecType) {
case kVideoCodecVP8:
@@ -65,6 +87,7 @@ std::string CodecSpecificToString(const webrtc::VideoCodec& codec) {
ss << "\n";
return ss.str();
}
} // namespace
void TestConfig::SetCodecSettings(VideoCodecType codec_type,
@@ -87,6 +110,8 @@ void TestConfig::SetCodecSettings(VideoCodecType codec_type,
RTC_CHECK(num_simulcast_streams >= 1 &&
num_simulcast_streams <= kMaxSimulcastStreams);
RTC_CHECK(num_spatial_layers >= 1 && num_spatial_layers <= kMaxSpatialLayers);
RTC_CHECK(num_temporal_layers >= 1 &&
num_temporal_layers <= kMaxTemporalStreams);
// Simulcast is only available with VP8.
RTC_CHECK(num_simulcast_streams < 2 || codec_type == kVideoCodecVP8);
@@ -103,6 +128,9 @@ void TestConfig::SetCodecSettings(VideoCodecType codec_type,
codec_settings.numberOfSimulcastStreams =
num_simulcast_streams <= 1 ? 0
: static_cast<uint8_t>(num_simulcast_streams);
if (codec_settings.numberOfSimulcastStreams > 1) {
ConfigureSimulcast(&codec_settings);
}
switch (codec_settings.codecType) {
case kVideoCodecVP8:
@@ -134,29 +162,6 @@ void TestConfig::SetCodecSettings(VideoCodecType codec_type,
RTC_NOTREACHED();
break;
}
if (codec_settings.numberOfSimulcastStreams > 1) {
ConfigureSimulcast();
}
}
void TestConfig::ConfigureSimulcast() {
std::vector<webrtc::VideoStream> stream = cricket::GetSimulcastConfig(
codec_settings.numberOfSimulcastStreams, codec_settings.width,
codec_settings.height, kMaxBitrateBps, kMaxQp, kMaxFramerateFps, false);
for (size_t i = 0; i < stream.size(); ++i) {
SimulcastStream* ss = &codec_settings.simulcastStream[i];
ss->width = static_cast<uint16_t>(stream[i].width);
ss->height = static_cast<uint16_t>(stream[i].height);
ss->numberOfTemporalLayers = static_cast<unsigned char>(
stream[i].temporal_layer_thresholds_bps.size() + 1);
ss->maxBitrate = stream[i].max_bitrate_bps / 1000;
ss->targetBitrate = stream[i].target_bitrate_bps / 1000;
ss->minBitrate = stream[i].min_bitrate_bps / 1000;
ss->qpMax = stream[i].max_qp;
ss->active = true;
}
}
size_t TestConfig::NumberOfCores() const {
@@ -215,6 +220,33 @@ std::string TestConfig::ToString() const {
return ss.str();
}
SdpVideoFormat TestConfig::ToSdpVideoFormat() const {
switch (codec_settings.codecType) {
case kVideoCodecVP8:
return SdpVideoFormat(cricket::kVp8CodecName);
case kVideoCodecVP9:
return SdpVideoFormat(cricket::kVp9CodecName);
case kVideoCodecH264: {
const char* packetization_mode =
h264_codec_settings.packetization_mode ==
H264PacketizationMode::NonInterleaved
? "1"
: "0";
return SdpVideoFormat(
cricket::kH264CodecName,
{{cricket::kH264FmtpProfileLevelId,
*H264::ProfileLevelIdToString(H264::ProfileLevelId(
h264_codec_settings.profile, H264::kLevel3_1))},
{cricket::kH264FmtpPacketizationMode, packetization_mode}});
}
default:
RTC_NOTREACHED();
return SdpVideoFormat("");
}
}
std::string TestConfig::CodecName() const {
std::string codec_name = CodecTypeToPayloadString(codec_settings.codecType);
if (codec_settings.codecType == kVideoCodecH264) {

@@ -14,6 +14,7 @@
#include <string>
#include <vector>
#include "api/video_codecs/sdp_video_format.h"
#include "common_types.h" // NOLINT(build/include)
#include "modules/video_coding/codecs/h264/include/h264_globals.h"
#include "modules/video_coding/include/video_codec_interface.h"
@@ -21,7 +22,6 @@
namespace webrtc {
namespace test {
// Test configuration for a test run.
struct TestConfig {
class EncodedFrameChecker {
public:
@@ -42,8 +42,6 @@ struct TestConfig {
size_t width,
size_t height);
void ConfigureSimulcast();
size_t NumberOfCores() const;
size_t NumberOfTemporalLayers() const;
size_t NumberOfSpatialLayers() const;
@@ -51,6 +49,7 @@ struct TestConfig {
std::vector<FrameType> FrameTypeForFrame(size_t frame_idx) const;
std::string ToString() const;
SdpVideoFormat ToSdpVideoFormat() const;
std::string CodecName() const;
std::string FilenameWithParams() const;
bool IsAsyncCodec() const;
@@ -68,11 +67,6 @@ struct TestConfig {
size_t max_payload_size_bytes = 1440;
// Force the encoder and decoder to use a single core for processing.
// Using a single core is necessary to get a deterministic behavior for the
// encoded frames - using multiple cores will produce different encoded frames
// since multiple cores are competing to consume the byte budget for each
// frame in parallel.
// If set to false, the maximum number of available cores will be used.
bool use_single_core = false;
// Should cpu usage be measured?
@@ -80,10 +74,6 @@ struct TestConfig {
bool measure_cpu = false;
// If > 0: forces the encoder to create a keyframe every Nth frame.
// Note that the encoder may create a keyframe in other locations in addition
// to this setting. Forcing key frames may also affect encoder planning
// optimizations in a negative way, since it will suddenly be forced to
// produce an expensive key frame.
size_t keyframe_interval = 0;
// Codec settings to use.

@@ -17,6 +17,7 @@
#include "api/video/i420_buffer.h"
#include "common_types.h" // NOLINT(build/include)
#include "common_video/h264/h264_common.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h"
#include "modules/video_coding/include/video_codec_initializer.h"
@@ -115,12 +116,11 @@ VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder,
input_frame_reader_(input_frame_reader),
encoded_frame_writers_(encoded_frame_writers),
decoded_frame_writers_(decoded_frame_writers),
first_encoded_frame(true),
last_inputed_frame_num_(0),
last_encoded_frame_num_(0),
last_encoded_simulcast_svc_idx_(0),
last_decoded_frame_num_(0),
num_encoded_frames_(0),
num_decoded_frames_(0),
stats_(stats) {
RTC_CHECK(rtc::TaskQueue::Current())
<< "VideoProcessor must be run on a task queue.";
@@ -186,7 +186,7 @@ void VideoProcessor::ProcessFrame() {
std::vector<FrameType> frame_types = config_.FrameTypeForFrame(frame_number);
// Create frame statistics object for all simulcast /spatial layers.
// Create frame statistics object for all simulcast/spatial layers.
for (size_t simulcast_svc_idx = 0;
simulcast_svc_idx < num_simulcast_or_spatial_layers_;
++simulcast_svc_idx) {
@@ -270,7 +270,7 @@ void VideoProcessor::FrameEncoded(
// For the highest measurement accuracy of the encode time, the start/stop
// time recordings should wrap the Encode call as tightly as possible.
int64_t encode_stop_ns = rtc::TimeNanos();
const int64_t encode_stop_ns = rtc::TimeNanos();
const VideoCodecType codec = codec_specific.codecType;
if (config_.encoded_frame_checker) {
@@ -310,11 +310,10 @@ void VideoProcessor::FrameEncoded(
// Ensure SVC spatial layers are delivered in ascending order.
if (config_.NumberOfSpatialLayers() > 1) {
RTC_CHECK(simulcast_svc_idx > last_encoded_simulcast_svc_idx_ ||
frame_number != last_encoded_frame_num_ ||
num_encoded_frames_ == 0);
RTC_CHECK(first_encoded_frame || frame_number >= last_encoded_frame_num_ ||
simulcast_svc_idx > last_encoded_simulcast_svc_idx_);
}
first_encoded_frame = false;
last_encoded_frame_num_ = frame_number;
last_encoded_simulcast_svc_idx_ = simulcast_svc_idx;
@@ -356,8 +355,6 @@ void VideoProcessor::FrameEncoded(
frame_stat->decode_return_code =
decoders_->at(simulcast_idx)->Decode(encoded_image, false, nullptr);
}
++num_encoded_frames_;
}
void VideoProcessor::FrameDecoded(const VideoFrame& decoded_frame) {
@@ -365,7 +362,7 @@ void VideoProcessor::FrameDecoded(const VideoFrame& decoded_frame) {
// For the highest measurement accuracy of the decode time, the start/stop
// time recordings should wrap the Decode call as tightly as possible.
int64_t decode_stop_ns = rtc::TimeNanos();
const int64_t decode_stop_ns = rtc::TimeNanos();
RTC_CHECK(frame_wxh_to_simulcast_svc_idx_.find(decoded_frame.size()) !=
frame_wxh_to_simulcast_svc_idx_.end());
@@ -379,22 +376,6 @@ void VideoProcessor::FrameDecoded(const VideoFrame& decoded_frame) {
// Reordering is unexpected. Frames of different layers have the same value
// of frame_number.
RTC_CHECK_GE(frame_number, last_decoded_frame_num_);
if (decoded_frame_writers_ && num_decoded_frames_ > 0) {
// For dropped frames, write out the last decoded frame to make it look like
// a freeze at playback.
for (size_t num_dropped_frames = 0; num_dropped_frames < frame_number;
++num_dropped_frames) {
const FrameStatistics* prev_frame_stat = stats_->GetFrame(
frame_number - num_dropped_frames - 1, simulcast_svc_idx);
if (prev_frame_stat->decoding_successful) {
break;
}
WriteDecodedFrameToFile(&last_decoded_frame_buffers_[simulcast_svc_idx],
simulcast_svc_idx);
}
}
last_decoded_frame_num_ = frame_number;
// Update frame statistics.
@@ -420,12 +401,13 @@ void VideoProcessor::FrameDecoded(const VideoFrame& decoded_frame) {
if (decoded_frame_writers_) {
ExtractBufferWithSize(decoded_frame, config_.codec_settings.width,
config_.codec_settings.height,
&last_decoded_frame_buffers_[simulcast_svc_idx]);
WriteDecodedFrameToFile(&last_decoded_frame_buffers_[simulcast_svc_idx],
simulcast_svc_idx);
&tmp_planar_i420_buffer_);
RTC_CHECK(simulcast_svc_idx < decoded_frame_writers_->size());
RTC_CHECK_EQ(tmp_planar_i420_buffer_.size(),
decoded_frame_writers_->at(simulcast_svc_idx)->FrameLength());
RTC_CHECK(decoded_frame_writers_->at(simulcast_svc_idx)
->WriteFrame(tmp_planar_i420_buffer_.data()));
}
++num_decoded_frames_;
}
void VideoProcessor::CopyEncodedImage(const EncodedImage& encoded_image,
@@ -500,14 +482,5 @@ void VideoProcessor::CalculateFrameQuality(const VideoFrame& ref_frame,
}
}
void VideoProcessor::WriteDecodedFrameToFile(rtc::Buffer* buffer,
size_t simulcast_svc_idx) {
RTC_CHECK(simulcast_svc_idx < decoded_frame_writers_->size());
RTC_DCHECK_EQ(buffer->size(),
decoded_frame_writers_->at(simulcast_svc_idx)->FrameLength());
RTC_CHECK(decoded_frame_writers_->at(simulcast_svc_idx)
->WriteFrame(buffer->data()));
}
} // namespace test
} // namespace webrtc

@@ -17,16 +17,10 @@
#include <vector>
#include "api/video/video_frame.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_coding/codecs/h264/include/h264_globals.h"
#include "modules/video_coding/codecs/test/stats.h"
#include "modules/video_coding/codecs/test/test_config.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "modules/video_coding/utility/ivf_file_writer.h"
#include "modules/video_coding/utility/vp8_header_parser.h"
#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h"
#include "rtc_base/buffer.h"
#include "rtc_base/checks.h"
#include "rtc_base/constructormagic.h"
#include "rtc_base/sequenced_task_checker.h"
#include "rtc_base/task_queue.h"
@@ -78,6 +72,7 @@ class VideoProcessor {
VideoProcessor* video_processor)
: video_processor_(video_processor),
task_queue_(rtc::TaskQueue::Current()) {
RTC_DCHECK(video_processor_);
RTC_DCHECK(task_queue_);
}
@@ -135,6 +130,7 @@ class VideoProcessor {
VideoProcessor* video_processor)
: video_processor_(video_processor),
task_queue_(rtc::TaskQueue::Current()) {
RTC_DCHECK(video_processor_);
RTC_DCHECK(task_queue_);
}
@@ -181,8 +177,6 @@ class VideoProcessor {
const VideoFrame& dec_frame,
FrameStatistics* frame_stat);
void WriteDecodedFrameToFile(rtc::Buffer* buffer, size_t simulcast_svc_idx);
TestConfig config_ RTC_GUARDED_BY(sequence_checker_);
const size_t num_simulcast_or_spatial_layers_;
@@ -213,12 +207,11 @@ class VideoProcessor {
FrameWriterList* const decoded_frame_writers_;
// Keep track of inputed/encoded/decoded frames, so we can detect frame drops.
bool first_encoded_frame;
size_t last_inputed_frame_num_ RTC_GUARDED_BY(sequence_checker_);
size_t last_encoded_frame_num_ RTC_GUARDED_BY(sequence_checker_);
size_t last_encoded_simulcast_svc_idx_ RTC_GUARDED_BY(sequence_checker_);
size_t last_decoded_frame_num_ RTC_GUARDED_BY(sequence_checker_);
size_t num_encoded_frames_ RTC_GUARDED_BY(sequence_checker_);
size_t num_decoded_frames_ RTC_GUARDED_BY(sequence_checker_);
// Map of frame size (in pixels) to simulcast/spatial layer index.
std::map<size_t, size_t> frame_wxh_to_simulcast_svc_idx_
@@ -231,10 +224,7 @@ class VideoProcessor {
std::map<size_t, EncodedImage> last_encoded_frames_
RTC_GUARDED_BY(sequence_checker_);
// Keep track of the last successfully decoded frame, since we write that
// frame to disk when frame got dropped or decoding fails.
std::map<size_t, rtc::Buffer> last_decoded_frame_buffers_
RTC_GUARDED_BY(sequence_checker_);
rtc::Buffer tmp_planar_i420_buffer_;
// Statistics.
Stats* const stats_;

@@ -20,8 +20,8 @@
#include "modules/video_coding/codecs/test/objc_codec_factory_helper.h"
#endif
#include "api/video_codecs/sdp_video_format.h"
#include "common_types.h" // NOLINT(build/include)
#include "media/base/h264_profile_level_id.h"
#include "media/engine/internaldecoderfactory.h"
#include "media/engine/internalencoderfactory.h"
#include "media/engine/videodecodersoftwarefallbackwrapper.h"
@@ -55,33 +55,6 @@ bool RunEncodeInRealTime(const TestConfig& config) {
#endif
}
SdpVideoFormat CreateSdpVideoFormat(const TestConfig& config) {
switch (config.codec_settings.codecType) {
case kVideoCodecVP8:
return SdpVideoFormat(cricket::kVp8CodecName);
case kVideoCodecVP9:
return SdpVideoFormat(cricket::kVp9CodecName);
case kVideoCodecH264: {
const char* packetization_mode =
config.h264_codec_settings.packetization_mode ==
H264PacketizationMode::NonInterleaved
? "1"
: "0";
return SdpVideoFormat(
cricket::kH264CodecName,
{{cricket::kH264FmtpProfileLevelId,
*H264::ProfileLevelIdToString(H264::ProfileLevelId(
config.h264_codec_settings.profile, H264::kLevel3_1))},
{cricket::kH264FmtpPacketizationMode, packetization_mode}});
}
default:
RTC_NOTREACHED();
return SdpVideoFormat("");
}
}
} // namespace
void VideoProcessorIntegrationTest::H264KeyframeChecker::CheckEncodedFrame(
@@ -169,6 +142,7 @@ void VideoProcessorIntegrationTest::ProcessFramesAndMaybeVerify(
const BitstreamThresholds* bs_thresholds,
const VisualizationParams* visualization_params) {
RTC_DCHECK(!rate_profiles.empty());
// The Android HW codec needs to be run on a task queue, so we simply always
// run the test on a task queue.
rtc::TaskQueue task_queue("VidProc TQ");
@@ -177,9 +151,7 @@ void VideoProcessorIntegrationTest::ProcessFramesAndMaybeVerify(
&task_queue, static_cast<const int>(rate_profiles[0].target_kbps),
static_cast<const int>(rate_profiles[0].input_fps), visualization_params);
PrintSettings(&task_queue);
ProcessAllFrames(&task_queue, rate_profiles);
ReleaseAndCloseObjects(&task_queue);
AnalyzeAllFrames(rate_profiles, rc_thresholds, quality_thresholds,
@@ -217,7 +189,7 @@ void VideoProcessorIntegrationTest::ProcessAllFrames(
if (RunEncodeInRealTime(config_)) {
// Roughly pace the frames.
size_t frame_duration_ms =
const size_t frame_duration_ms =
rtc::kNumMillisecsPerSec / rate_profiles[rate_update_index].input_fps;
SleepMs(static_cast<int>(frame_duration_ms));
}
@@ -229,7 +201,7 @@ void VideoProcessorIntegrationTest::ProcessAllFrames(
// Give the VideoProcessor pipeline some time to process the last frame,
// and then release the codecs.
if (config_.hw_encoder || config_.hw_decoder) {
if (config_.IsAsyncCodec()) {
SleepMs(1 * rtc::kNumMillisecsPerSec);
}
@@ -353,7 +325,7 @@ void VideoProcessorIntegrationTest::CreateEncoderAndDecoder() {
decoder_factory = rtc::MakeUnique<InternalDecoderFactory>();
}
const SdpVideoFormat format = CreateSdpVideoFormat(config_);
const SdpVideoFormat format = config_.ToSdpVideoFormat();
encoder_ = encoder_factory->CreateVideoEncoder(format);
const size_t num_simulcast_or_spatial_layers = std::max(
@@ -391,8 +363,8 @@ void VideoProcessorIntegrationTest::DestroyEncoderAndDecoder() {
void VideoProcessorIntegrationTest::SetUpAndInitObjects(
rtc::TaskQueue* task_queue,
const int initial_bitrate_kbps,
const int initial_framerate_fps,
int initial_bitrate_kbps,
int initial_framerate_fps,
const VisualizationParams* visualization_params) {
CreateEncoderAndDecoder();

@@ -103,8 +103,8 @@ class VideoProcessorIntegrationTest : public testing::Test {
void CreateEncoderAndDecoder();
void DestroyEncoderAndDecoder();
void SetUpAndInitObjects(rtc::TaskQueue* task_queue,
const int initial_bitrate_kbps,
const int initial_framerate_fps,
int initial_bitrate_kbps,
int initial_framerate_fps,
const VisualizationParams* visualization_params);
void ReleaseAndCloseObjects(rtc::TaskQueue* task_queue);
@@ -127,12 +127,12 @@ class VideoProcessorIntegrationTest : public testing::Test {
// Codecs.
std::unique_ptr<VideoEncoder> encoder_;
std::vector<std::unique_ptr<VideoDecoder>> decoders_;
VideoProcessor::VideoDecoderList decoders_;
// Helper objects.
std::unique_ptr<FrameReader> source_frame_reader_;
std::vector<std::unique_ptr<IvfFileWriter>> encoded_frame_writers_;
std::vector<std::unique_ptr<FrameWriter>> decoded_frame_writers_;
VideoProcessor::IvfFileWriterList encoded_frame_writers_;
VideoProcessor::FrameWriterList decoded_frame_writers_;
std::unique_ptr<VideoProcessor> processor_;
std::unique_ptr<CpuProcessTime> cpu_process_time_;
};

@@ -13,6 +13,8 @@
#include <vector>
#include "modules/video_coding/codecs/test/test_config.h"
#include "modules/video_coding/utility/vp8_header_parser.h"
#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h"
#include "rtc_base/ptr_util.h"
#include "test/testsupport/fileutils.h"