Prepare VideoProcessor for async simulcast support.
* Add support for SimulcastEncoderAdapter wrapping of the encoder.
* Store input frame timestamps out-of-band, so we don't need to keep a raw
  VideoFrame around just for its timestamp.
* Store the current frame rate in |framerate_fps_|, instead of in the codec
  settings struct.
* Add some comments and reorder some data members.
* Explicitly include VideoBitrateAllocator.
* Change the type of |input_frames_|, to avoid one layer of indirection.
* Move VideoProcessor::CalculateFrameQuality to the anonymous namespace.

This change should have no functional implications.

Bug: webrtc:8448
Change-Id: I10c140eeda750d9bd37bfb6cb1e8acb401fb91d3
Reviewed-on: https://webrtc-review.googlesource.com/60520
Commit-Queue: Rasmus Brandt <brandtr@webrtc.org>
Reviewed-by: Sergey Silkin <ssilkin@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#22346}
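To make the "store input frames out-of-band" idea concrete, here is a minimal
standalone sketch of the bookkeeping pattern this CL moves to. FrameLike and
FrameStore are illustrative stand-ins, not the actual WebRTC types:

    #include <cstddef>
    #include <cstdint>
    #include <map>

    // Hypothetical stand-in for webrtc::VideoFrame; only what the
    // pattern needs.
    struct FrameLike {
      uint32_t rtp_timestamp;
    };

    class FrameStore {
     public:
      // Store the frame by value, keyed on frame number, so async encoders
      // may queue frames without an extra layer of indirection.
      void OnInput(size_t frame_number, FrameLike frame) {
        input_frames_.emplace(frame_number, frame);
      }
      // Look up the reference frame when its decoded counterpart arrives.
      const FrameLike& Get(size_t frame_number) const {
        return input_frames_.at(frame_number);
      }
      // Optionally release the frame once quality measurement is done
      // (the CL keeps frames around; a follow-up could erase them).
      void Release(size_t frame_number) { input_frames_.erase(frame_number); }

     private:
      std::map<size_t, FrameLike> input_frames_;  // frame_number -> frame.
    };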
@@ -90,6 +90,9 @@ struct TestConfig {
   bool hw_encoder = false;
   bool hw_decoder = false;
 
+  // Should the encoder be wrapped in a SimulcastEncoderAdapter?
+  bool simulcast_adapted_encoder = false;
+
   // Should the hardware codecs be wrapped in software fallbacks?
   bool sw_fallback_encoder = false;
   bool sw_fallback_decoder = false;
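For orientation, a rough sketch of how a harness might act on this flag,
mirroring the CreateEncoderAndDecoder() change later in this CL. The free
function is a hypothetical helper (includes omitted), not code from the CL:

    std::unique_ptr<VideoEncoder> MaybeWrapInSimulcastAdapter(
        const TestConfig& config,
        VideoEncoderFactory* encoder_factory,  // must outlive the encoder
        const SdpVideoFormat& format) {
      if (config.simulcast_adapted_encoder) {
        // The adapter presents a single VideoEncoder interface and fans
        // encodes out to per-stream encoders created via the factory.
        return rtc::MakeUnique<SimulcastEncoderAdapter>(encoder_factory);
      }
      return encoder_factory->CreateVideoEncoder(format);
    }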
@@ -63,6 +63,24 @@ size_t GetMaxNaluSizeBytes(const EncodedImage& encoded_frame,
   return max_size;
 }
 
+void GetLayerIndices(const CodecSpecificInfo& codec_specific,
+                     size_t* simulcast_svc_idx,
+                     size_t* temporal_idx) {
+  if (codec_specific.codecType == kVideoCodecVP8) {
+    *simulcast_svc_idx = codec_specific.codecSpecific.VP8.simulcastIdx;
+    *temporal_idx = codec_specific.codecSpecific.VP8.temporalIdx;
+  } else if (codec_specific.codecType == kVideoCodecVP9) {
+    *simulcast_svc_idx = codec_specific.codecSpecific.VP9.spatial_idx;
+    *temporal_idx = codec_specific.codecSpecific.VP9.temporal_idx;
+  }
+  if (*simulcast_svc_idx == kNoSpatialIdx) {
+    *simulcast_svc_idx = 0;
+  }
+  if (*temporal_idx == kNoTemporalIdx) {
+    *temporal_idx = 0;
+  }
+}
+
 int GetElapsedTimeMicroseconds(int64_t start_ns, int64_t stop_ns) {
   int64_t diff_us = (stop_ns - start_ns) / rtc::kNumNanosecsPerMicrosec;
   RTC_DCHECK_GE(diff_us, std::numeric_limits<int>::min());
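An illustrative call-site wrapper, mirroring FrameEncoded() further down (a
hypothetical helper, not code from the CL). Note the out-parameters must be
zero-initialized by the caller, since the helper only assigns them for
VP8/VP9:

    std::pair<size_t, size_t> NormalizedLayerIndices(
        const CodecSpecificInfo& codec_specific) {
      size_t simulcast_svc_idx = 0;  // VP8: simulcast stream; VP9: spatial.
      size_t temporal_idx = 0;
      GetLayerIndices(codec_specific, &simulcast_svc_idx, &temporal_idx);
      // kNoSpatialIdx/kNoTemporalIdx sentinels are now folded to layer 0.
      return {simulcast_svc_idx, temporal_idx};
    }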
@@ -95,6 +113,38 @@ void ExtractBufferWithSize(const VideoFrame& image,
   RTC_CHECK_NE(ExtractBuffer(image, length, buffer->data()), -1);
 }
 
+void CalculateFrameQuality(const VideoFrame& ref_frame,
+                           const VideoFrame& dec_frame,
+                           FrameStatistics* frame_stat) {
+  if (ref_frame.width() == dec_frame.width() ||
+      ref_frame.height() == dec_frame.height()) {
+    frame_stat->psnr = I420PSNR(&ref_frame, &dec_frame);
+    frame_stat->ssim = I420SSIM(&ref_frame, &dec_frame);
+  } else {
+    RTC_CHECK_GE(ref_frame.width(), dec_frame.width());
+    RTC_CHECK_GE(ref_frame.height(), dec_frame.height());
+    // Downscale reference frame. Use bilinear interpolation since it is used
+    // to get lowres inputs for encoder at simulcasting.
+    // TODO(ssilkin): Sync with VP9 SVC which uses 8-taps polyphase.
+    rtc::scoped_refptr<I420Buffer> scaled_buffer =
+        I420Buffer::Create(dec_frame.width(), dec_frame.height());
+    const I420BufferInterface& ref_buffer =
+        *ref_frame.video_frame_buffer()->ToI420();
+    I420Scale(ref_buffer.DataY(), ref_buffer.StrideY(), ref_buffer.DataU(),
+              ref_buffer.StrideU(), ref_buffer.DataV(), ref_buffer.StrideV(),
+              ref_buffer.width(), ref_buffer.height(),
+              scaled_buffer->MutableDataY(), scaled_buffer->StrideY(),
+              scaled_buffer->MutableDataU(), scaled_buffer->StrideU(),
+              scaled_buffer->MutableDataV(), scaled_buffer->StrideV(),
+              scaled_buffer->width(), scaled_buffer->height(),
+              libyuv::kFilterBox);
+    frame_stat->psnr =
+        I420PSNR(*scaled_buffer, *dec_frame.video_frame_buffer()->ToI420());
+    frame_stat->ssim =
+        I420SSIM(*scaled_buffer, *dec_frame.video_frame_buffer()->ToI420());
+  }
+}
+
 }  // namespace
 
 VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder,
@@ -108,24 +158,28 @@ VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder,
       num_simulcast_or_spatial_layers_(
           std::max(config_.NumberOfSimulcastStreams(),
                    config_.NumberOfSpatialLayers())),
+      stats_(stats),
       encoder_(encoder),
       decoders_(decoders),
       bitrate_allocator_(CreateBitrateAllocator(&config_)),
+      framerate_fps_(0),
      encode_callback_(this),
      decode_callback_(this),
      input_frame_reader_(input_frame_reader),
      encoded_frame_writers_(encoded_frame_writers),
      decoded_frame_writers_(decoded_frame_writers),
-      first_encoded_frame(true),
      last_inputed_frame_num_(0),
+      last_inputed_timestamp_(0),
+      first_encoded_frame(true),
      last_encoded_frame_num_(0),
      last_encoded_simulcast_svc_idx_(0),
-      last_decoded_frame_num_(0),
-      stats_(stats) {
+      last_decoded_frame_num_(0) {
   // Sanity checks.
   RTC_CHECK(rtc::TaskQueue::Current())
       << "VideoProcessor must be run on a task queue.";
   RTC_CHECK(encoder);
-  RTC_CHECK(decoders && decoders->size() == num_simulcast_or_spatial_layers_);
+  RTC_CHECK(decoders);
+  RTC_CHECK_EQ(decoders->size(), num_simulcast_or_spatial_layers_);
   RTC_CHECK(input_frame_reader);
   RTC_CHECK(stats);
   RTC_CHECK(!encoded_frame_writers ||
@@ -137,6 +191,7 @@ VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder,
   RTC_CHECK_EQ(encoder_->RegisterEncodeCompleteCallback(&encode_callback_),
                WEBRTC_VIDEO_CODEC_OK);
 
+  // Initialize codecs so that they are ready to receive frames.
   RTC_CHECK_EQ(encoder_->InitEncode(&config_.codec_settings,
                                     static_cast<int>(config_.NumberOfCores()),
                                     config_.max_payload_size_bytes),
@@ -154,9 +209,10 @@ VideoProcessor::VideoProcessor(webrtc::VideoEncoder* encoder,
 VideoProcessor::~VideoProcessor() {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
 
+  // Explicitly reset codecs, in case they don't do that themselves when they
+  // go out of scope.
   RTC_CHECK_EQ(encoder_->Release(), WEBRTC_VIDEO_CODEC_OK);
   encoder_->RegisterEncodeCompleteCallback(nullptr);
 
   for (auto& decoder : *decoders_) {
     RTC_CHECK_EQ(decoder->Release(), WEBRTC_VIDEO_CODEC_OK);
     decoder->RegisterDecodeCompleteCallback(nullptr);
@@ -169,28 +225,23 @@ void VideoProcessor::ProcessFrame() {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
   const size_t frame_number = last_inputed_frame_num_++;
 
-  // Get frame from file.
-  rtc::scoped_refptr<I420BufferInterface> buffer(
-      input_frame_reader_->ReadFrame());
+  // Get input frame and store for future quality calculation.
+  rtc::scoped_refptr<I420BufferInterface> buffer =
+      input_frame_reader_->ReadFrame();
   RTC_CHECK(buffer) << "Tried to read too many frames from the file.";
-
-  size_t rtp_timestamp =
-      (frame_number > 0) ? input_frames_[frame_number - 1]->timestamp() : 0;
-  rtp_timestamp +=
-      kVideoPayloadTypeFrequency / config_.codec_settings.maxFramerate;
-
-  input_frames_[frame_number] = rtc::MakeUnique<VideoFrame>(
-      buffer, static_cast<uint32_t>(rtp_timestamp),
-      static_cast<int64_t>(rtp_timestamp / kMsToRtpTimestamp),
-      webrtc::kVideoRotation_0);
-
-  std::vector<FrameType> frame_types = config_.FrameTypeForFrame(frame_number);
+  const size_t timestamp =
+      last_inputed_timestamp_ + kVideoPayloadTypeFrequency / framerate_fps_;
+  VideoFrame input_frame(buffer, static_cast<uint32_t>(timestamp),
+                         static_cast<int64_t>(timestamp / kMsToRtpTimestamp),
+                         webrtc::kVideoRotation_0);
+  input_frames_.emplace(frame_number, input_frame);
+  last_inputed_timestamp_ = timestamp;
 
   // Create frame statistics object for all simulcast/spatial layers.
   for (size_t simulcast_svc_idx = 0;
        simulcast_svc_idx < num_simulcast_or_spatial_layers_;
        ++simulcast_svc_idx) {
-    stats_->AddFrame(rtp_timestamp, simulcast_svc_idx);
+    stats_->AddFrame(timestamp, simulcast_svc_idx);
   }
 
   // For the highest measurement accuracy of the encode time, the start/stop
@@ -204,8 +255,11 @@ void VideoProcessor::ProcessFrame() {
     frame_stat->encode_start_ns = encode_start_ns;
   }
 
+  // Encode.
+  const std::vector<FrameType> frame_types =
+      config_.FrameTypeForFrame(frame_number);
   const int encode_return_code =
-      encoder_->Encode(*input_frames_[frame_number], nullptr, &frame_types);
+      encoder_->Encode(input_frame, nullptr, &frame_types);
 
   for (size_t simulcast_svc_idx = 0;
        simulcast_svc_idx < num_simulcast_or_spatial_layers_;
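The RTP timestamps above advance on the 90 kHz video clock, so at 30 fps each
frame adds 90000 / 30 = 3000 ticks. A minimal self-contained sketch of the
same arithmetic; the constant values are assumed to match WebRTC's
kVideoPayloadTypeFrequency and kMsToRtpTimestamp:

    #include <cstddef>
    #include <cstdint>

    // 90 kHz RTP clock for video; 90 RTP ticks per millisecond.
    constexpr size_t kVideoPayloadTypeFrequency = 90000;
    constexpr size_t kMsToRtpTimestamp = 90;

    // Next RTP timestamp, one frame interval after the previous one.
    size_t NextTimestamp(size_t last_timestamp, uint32_t framerate_fps) {
      return last_timestamp + kVideoPayloadTypeFrequency / framerate_fps;
    }

    // Example: at 30 fps, NextTimestamp(0, 30) == 3000 RTP ticks, and
    // 3000 / kMsToRtpTimestamp gives the ~33 ms render time per frame.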
@@ -253,12 +307,11 @@ void VideoProcessor::ProcessFrame() {
 
 void VideoProcessor::SetRates(size_t bitrate_kbps, size_t framerate_fps) {
   RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
-  config_.codec_settings.maxFramerate = static_cast<uint32_t>(framerate_fps);
+  framerate_fps_ = static_cast<uint32_t>(framerate_fps);
   bitrate_allocation_ = bitrate_allocator_->GetAllocation(
-      static_cast<uint32_t>(bitrate_kbps * 1000),
-      static_cast<uint32_t>(framerate_fps));
-  const int set_rates_result = encoder_->SetRateAllocation(
-      bitrate_allocation_, static_cast<uint32_t>(framerate_fps));
+      static_cast<uint32_t>(bitrate_kbps * 1000), framerate_fps_);
+  const int set_rates_result =
+      encoder_->SetRateAllocation(bitrate_allocation_, framerate_fps_);
   RTC_DCHECK_GE(set_rates_result, 0)
       << "Failed to update encoder with new rate " << bitrate_kbps << ".";
 }
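On units: SetRates() takes kbps while the allocator takes bps, hence the
bitrate_kbps * 1000 conversion. An illustrative call (the processor pointer
is an assumption for the sketch):

    // Mid-run rate update to 500 kbps at 30 fps. Internally this becomes
    // bitrate_allocator_->GetAllocation(500 * 1000, 30) followed by
    // encoder_->SetRateAllocation(allocation, 30).
    processor->SetRates(/*bitrate_kbps=*/500, /*framerate_fps=*/30);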
@@ -272,30 +325,15 @@ void VideoProcessor::FrameEncoded(
   // time recordings should wrap the Encode call as tightly as possible.
   const int64_t encode_stop_ns = rtc::TimeNanos();
 
-  const VideoCodecType codec = codec_specific.codecType;
+  const VideoCodecType codec_type = codec_specific.codecType;
   if (config_.encoded_frame_checker) {
-    config_.encoded_frame_checker->CheckEncodedFrame(codec, encoded_image);
+    config_.encoded_frame_checker->CheckEncodedFrame(codec_type, encoded_image);
   }
 
   // Layer metadata.
   size_t simulcast_svc_idx = 0;
   size_t temporal_idx = 0;
-
-  if (codec == kVideoCodecVP8) {
-    simulcast_svc_idx = codec_specific.codecSpecific.VP8.simulcastIdx;
-    temporal_idx = codec_specific.codecSpecific.VP8.temporalIdx;
-  } else if (codec == kVideoCodecVP9) {
-    simulcast_svc_idx = codec_specific.codecSpecific.VP9.spatial_idx;
-    temporal_idx = codec_specific.codecSpecific.VP9.temporal_idx;
-  }
-
-  if (simulcast_svc_idx == kNoSpatialIdx) {
-    simulcast_svc_idx = 0;
-  }
-
-  if (temporal_idx == kNoTemporalIdx) {
-    temporal_idx = 0;
-  }
-
+  GetLayerIndices(codec_specific, &simulcast_svc_idx, &temporal_idx);
   const size_t frame_wxh =
       encoded_image._encodedWidth * encoded_image._encodedHeight;
   frame_wxh_to_simulcast_svc_idx_[frame_wxh] = simulcast_svc_idx;
@@ -321,8 +359,7 @@ void VideoProcessor::FrameEncoded(
   frame_stat->encoding_successful = true;
   frame_stat->encode_time_us =
       GetElapsedTimeMicroseconds(frame_stat->encode_start_ns, encode_stop_ns);
 
-  if (codec == kVideoCodecVP9) {
+  if (codec_type == kVideoCodecVP9) {
     const CodecSpecificInfoVP9& vp9_info = codec_specific.codecSpecific.VP9;
     frame_stat->inter_layer_predicted = vp9_info.inter_layer_predicted;
 
@@ -346,11 +383,13 @@ void VideoProcessor::FrameEncoded(
 
   if (!config_.IsAsyncCodec()) {
     // Store encoded frame. It will be decoded after all layers are encoded.
-    CopyEncodedImage(encoded_image, codec, frame_number, simulcast_svc_idx);
+    CopyEncodedImage(encoded_image, codec_type, frame_number,
+                     simulcast_svc_idx);
   } else {
     const size_t simulcast_idx =
-        codec == kVideoCodecVP8 ? codec_specific.codecSpecific.VP8.simulcastIdx
-                                : 0;
+        codec_type == kVideoCodecVP8
+            ? codec_specific.codecSpecific.VP8.simulcastIdx
+            : 0;
     frame_stat->decode_start_ns = rtc::TimeNanos();
     frame_stat->decode_return_code =
         decoders_->at(simulcast_idx)->Decode(encoded_image, false, nullptr);
@@ -366,9 +405,9 @@ void VideoProcessor::FrameDecoded(const VideoFrame& decoded_frame) {
 
   RTC_CHECK(frame_wxh_to_simulcast_svc_idx_.find(decoded_frame.size()) !=
             frame_wxh_to_simulcast_svc_idx_.end());
+  // Layer metadata.
   const size_t simulcast_svc_idx =
-      frame_wxh_to_simulcast_svc_idx_[decoded_frame.size()];
-
+      frame_wxh_to_simulcast_svc_idx_.at(decoded_frame.size());
   FrameStatistics* frame_stat = stats_->GetFrameWithTimestamp(
       decoded_frame.timestamp(), simulcast_svc_idx);
   const size_t frame_number = frame_stat->frame_number;
@@ -387,7 +426,7 @@ void VideoProcessor::FrameDecoded(const VideoFrame& decoded_frame) {
 
   // Skip quality metrics calculation to not affect CPU usage.
   if (!config_.measure_cpu) {
-    CalculateFrameQuality(*input_frames_[frame_number], decoded_frame,
+    CalculateFrameQuality(input_frames_.at(frame_number), decoded_frame,
                           frame_stat);
   }
 
@@ -400,13 +439,12 @@ void VideoProcessor::FrameDecoded(const VideoFrame& decoded_frame) {
 
   if (decoded_frame_writers_) {
     ExtractBufferWithSize(decoded_frame, config_.codec_settings.width,
-                          config_.codec_settings.height,
-                          &tmp_planar_i420_buffer_);
+                          config_.codec_settings.height, &tmp_i420_buffer_);
     RTC_CHECK(simulcast_svc_idx < decoded_frame_writers_->size());
-    RTC_CHECK_EQ(tmp_planar_i420_buffer_.size(),
+    RTC_CHECK_EQ(tmp_i420_buffer_.size(),
                  decoded_frame_writers_->at(simulcast_svc_idx)->FrameLength());
     RTC_CHECK(decoded_frame_writers_->at(simulcast_svc_idx)
-                  ->WriteFrame(tmp_planar_i420_buffer_.data()));
+                  ->WriteFrame(tmp_i420_buffer_.data()));
   }
 }
 
@@ -450,37 +488,5 @@ void VideoProcessor::CopyEncodedImage(const EncodedImage& encoded_image,
   last_encoded_frames_[simulcast_svc_idx] = copied_image;
 }
 
-void VideoProcessor::CalculateFrameQuality(const VideoFrame& ref_frame,
-                                           const VideoFrame& dec_frame,
-                                           FrameStatistics* frame_stat) {
-  if (ref_frame.width() == dec_frame.width() ||
-      ref_frame.height() == dec_frame.height()) {
-    frame_stat->psnr = I420PSNR(&ref_frame, &dec_frame);
-    frame_stat->ssim = I420SSIM(&ref_frame, &dec_frame);
-  } else {
-    RTC_CHECK_GE(ref_frame.width(), dec_frame.width());
-    RTC_CHECK_GE(ref_frame.height(), dec_frame.height());
-    // Downscale reference frame. Use bilinear interpolation since it is used
-    // to get lowres inputs for encoder at simulcasting.
-    // TODO(ssilkin): Sync with VP9 SVC which uses 8-taps polyphase.
-    rtc::scoped_refptr<I420Buffer> scaled_buffer =
-        I420Buffer::Create(dec_frame.width(), dec_frame.height());
-    const I420BufferInterface& ref_buffer =
-        *ref_frame.video_frame_buffer()->ToI420();
-    I420Scale(ref_buffer.DataY(), ref_buffer.StrideY(), ref_buffer.DataU(),
-              ref_buffer.StrideU(), ref_buffer.DataV(), ref_buffer.StrideV(),
-              ref_buffer.width(), ref_buffer.height(),
-              scaled_buffer->MutableDataY(), scaled_buffer->StrideY(),
-              scaled_buffer->MutableDataU(), scaled_buffer->StrideU(),
-              scaled_buffer->MutableDataV(), scaled_buffer->StrideV(),
-              scaled_buffer->width(), scaled_buffer->height(),
-              libyuv::kFilterBox);
-    frame_stat->psnr =
-        I420PSNR(*scaled_buffer, *dec_frame.video_frame_buffer()->ToI420());
-    frame_stat->ssim =
-        I420SSIM(*scaled_buffer, *dec_frame.video_frame_buffer()->ToI420());
-  }
-}
-
 }  // namespace test
 }  // namespace webrtc
@@ -17,6 +17,7 @@
 #include <vector>
 
 #include "api/video/video_frame.h"
+#include "common_video/include/video_bitrate_allocator.h"
 #include "modules/video_coding/codecs/test/stats.h"
 #include "modules/video_coding/codecs/test/test_config.h"
 #include "modules/video_coding/utility/ivf_file_writer.h"
@@ -28,9 +29,6 @@
 #include "test/testsupport/frame_writer.h"
 
 namespace webrtc {
-
-class VideoBitrateAllocator;
-
 namespace test {
 
 // Handles encoding/decoding of video using the VideoEncoder/VideoDecoder
@@ -38,8 +36,6 @@ namespace test {
 // measure times properly.
-// The class processes a frame at the time for the configured input file.
-// It maintains state of where in the source input file the processing is at.
 //
 // Note this class is not thread safe and is meant for simple testing purposes.
 class VideoProcessor {
  public:
  using VideoDecoderList = std::vector<std::unique_ptr<VideoDecoder>>;
@@ -173,49 +169,31 @@ class VideoProcessor {
                         size_t frame_number,
                         size_t simulcast_svc_idx);
 
-  void CalculateFrameQuality(const VideoFrame& ref_frame,
-                             const VideoFrame& dec_frame,
-                             FrameStatistics* frame_stat);
-
   // Test input/output.
   TestConfig config_ RTC_GUARDED_BY(sequence_checker_);
 
   const size_t num_simulcast_or_spatial_layers_;
+  Stats* const stats_;
 
   // Codecs.
   webrtc::VideoEncoder* const encoder_;
   VideoDecoderList* const decoders_;
   const std::unique_ptr<VideoBitrateAllocator> bitrate_allocator_;
   BitrateAllocation bitrate_allocation_ RTC_GUARDED_BY(sequence_checker_);
+  uint32_t framerate_fps_ RTC_GUARDED_BY(sequence_checker_);
 
   // Adapters for the codec callbacks.
   VideoProcessorEncodeCompleteCallback encode_callback_;
   VideoProcessorDecodeCompleteCallback decode_callback_;
 
-  // Input frames are used as reference for frame quality evaluations.
-  // Async codecs might queue frames. To handle that we keep input frame
-  // and release it after corresponding coded frame is decoded and quality
-  // measurement is done.
-  std::map<size_t, std::unique_ptr<VideoFrame>> input_frames_
-      RTC_GUARDED_BY(sequence_checker_);
-
+  // Input frames. Used as reference at frame quality evaluation.
+  // Each call to ProcessFrame() will read one frame from |input_frame_reader_|.
   FrameReader* const input_frame_reader_;
-
-  // These (optional) file writers are used to persistently store the encoded
-  // and decoded bitstreams. The purpose is to give the experimenter an option
-  // to subjectively evaluate the quality of the processing. Each frame writer
-  // is enabled by being non-null.
-  IvfFileWriterList* const encoded_frame_writers_;
-  FrameWriterList* const decoded_frame_writers_;
-
-  // Keep track of inputed/encoded/decoded frames, so we can detect frame drops.
-  bool first_encoded_frame;
-  size_t last_inputed_frame_num_ RTC_GUARDED_BY(sequence_checker_);
-  size_t last_encoded_frame_num_ RTC_GUARDED_BY(sequence_checker_);
-  size_t last_encoded_simulcast_svc_idx_ RTC_GUARDED_BY(sequence_checker_);
-  size_t last_decoded_frame_num_ RTC_GUARDED_BY(sequence_checker_);
-
-  // Map of frame size (in pixels) to simulcast/spatial layer index.
-  std::map<size_t, size_t> frame_wxh_to_simulcast_svc_idx_
-      RTC_GUARDED_BY(sequence_checker_);
+  // frame_number -> frame.
+  std::map<size_t, VideoFrame> input_frames_ RTC_GUARDED_BY(sequence_checker_);
 
   // Encoder delivers coded frame layer-by-layer. We store coded frames and
   // then, after all layers are encoded, decode them. Such separation of
@@ -224,11 +202,26 @@ class VideoProcessor {
   std::map<size_t, EncodedImage> last_encoded_frames_
       RTC_GUARDED_BY(sequence_checker_);
 
-  rtc::Buffer tmp_planar_i420_buffer_;
+  // These (optional) file writers are used to persistently store the encoded
+  // and decoded bitstreams. Each frame writer is enabled by being non-null.
+  IvfFileWriterList* const encoded_frame_writers_;
+  FrameWriterList* const decoded_frame_writers_;
+  rtc::Buffer tmp_i420_buffer_;  // Temp storage for format conversion.
 
-  // Statistics.
-  Stats* const stats_;
+  // Metadata of inputed/encoded/decoded frames. Used for frame drop detection
+  // and other purposes.
+  size_t last_inputed_frame_num_ RTC_GUARDED_BY(sequence_checker_);
+  size_t last_inputed_timestamp_ RTC_GUARDED_BY(sequence_checker_);
+  bool first_encoded_frame RTC_GUARDED_BY(sequence_checker_);
+  size_t last_encoded_frame_num_ RTC_GUARDED_BY(sequence_checker_);
+  size_t last_encoded_simulcast_svc_idx_ RTC_GUARDED_BY(sequence_checker_);
+  size_t last_decoded_frame_num_ RTC_GUARDED_BY(sequence_checker_);
+
+  // Map of frame size (in pixels) to simulcast/spatial layer index.
+  std::map<size_t, size_t> frame_wxh_to_simulcast_svc_idx_
+      RTC_GUARDED_BY(sequence_checker_);
 
   // This class must be operated on a TaskQueue.
   rtc::SequencedTaskChecker sequence_checker_;
 
   RTC_DISALLOW_COPY_AND_ASSIGN(VideoProcessor);
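The RTC_GUARDED_BY(sequence_checker_) annotations above lean on Clang
thread-safety analysis; a minimal sketch of the intended access pattern
(simplified, using the same rtc primitives the CL uses; header paths assumed
to match this era of the tree):

    #include <cstddef>

    #include "rtc_base/sequenced_task_checker.h"
    #include "rtc_base/thread_annotations.h"

    class SequencedCounter {
     public:
      void Increment() {
        // Enforce (in debug builds) that all accesses happen on the task
        // queue the checker was bound to at construction.
        RTC_DCHECK_CALLED_SEQUENTIALLY(&sequence_checker_);
        ++value_;
      }

     private:
      rtc::SequencedTaskChecker sequence_checker_;
      size_t value_ RTC_GUARDED_BY(sequence_checker_) = 0;
    };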
@@ -24,6 +24,7 @@
 #include "common_types.h"  // NOLINT(build/include)
 #include "media/engine/internaldecoderfactory.h"
 #include "media/engine/internalencoderfactory.h"
+#include "media/engine/simulcast_encoder_adapter.h"
 #include "media/engine/videodecodersoftwarefallbackwrapper.h"
 #include "media/engine/videoencodersoftwarefallbackwrapper.h"
 #include "modules/video_coding/codecs/vp8/include/vp8_common_types.h"
@@ -294,19 +295,18 @@ void VideoProcessorIntegrationTest::VerifyVideoStatistic(
 }
 
 void VideoProcessorIntegrationTest::CreateEncoderAndDecoder() {
-  std::unique_ptr<VideoEncoderFactory> encoder_factory;
   if (config_.hw_encoder) {
 #if defined(WEBRTC_ANDROID)
-    encoder_factory = CreateAndroidEncoderFactory();
+    encoder_factory_ = CreateAndroidEncoderFactory();
 #elif defined(WEBRTC_IOS)
     EXPECT_EQ(kVideoCodecH264, config_.codec_settings.codecType)
         << "iOS HW codecs only support H264.";
-    encoder_factory = CreateObjCEncoderFactory();
+    encoder_factory_ = CreateObjCEncoderFactory();
 #else
     RTC_NOTREACHED() << "Only support HW encoder on Android and iOS.";
 #endif
   } else {
-    encoder_factory = rtc::MakeUnique<InternalEncoderFactory>();
+    encoder_factory_ = rtc::MakeUnique<InternalEncoderFactory>();
   }
 
   std::unique_ptr<VideoDecoderFactory> decoder_factory;
@@ -325,7 +325,12 @@ void VideoProcessorIntegrationTest::CreateEncoderAndDecoder() {
   }
 
   const SdpVideoFormat format = config_.ToSdpVideoFormat();
-  encoder_ = encoder_factory->CreateVideoEncoder(format);
+  if (config_.simulcast_adapted_encoder) {
+    EXPECT_EQ("VP8", format.name);
+    encoder_.reset(new SimulcastEncoderAdapter(encoder_factory_.get()));
+  } else {
+    encoder_ = encoder_factory_->CreateVideoEncoder(format);
+  }
 
   const size_t num_simulcast_or_spatial_layers = std::max(
       config_.NumberOfSimulcastStreams(), config_.NumberOfSpatialLayers());
@@ -336,6 +341,9 @@ void VideoProcessorIntegrationTest::CreateEncoderAndDecoder() {
   }
 
   if (config_.sw_fallback_encoder) {
+    EXPECT_FALSE(config_.simulcast_adapted_encoder)
+        << "SimulcastEncoderAdapter and VideoEncoderSoftwareFallbackWrapper "
+           "are not jointly supported.";
     encoder_ = rtc::MakeUnique<VideoEncoderSoftwareFallbackWrapper>(
         InternalEncoderFactory().CreateVideoEncoder(format),
         std::move(encoder_));
@@ -356,8 +364,9 @@ void VideoProcessorIntegrationTest::CreateEncoderAndDecoder() {
 }
 
 void VideoProcessorIntegrationTest::DestroyEncoderAndDecoder() {
-  encoder_.reset();
   decoders_.clear();
+  encoder_.reset();
+  encoder_factory_.reset();
 }
 
 void VideoProcessorIntegrationTest::SetUpAndInitObjects(
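The reordering in DestroyEncoderAndDecoder() matters once the encoder may be
a SimulcastEncoderAdapter: the adapter holds a raw pointer to the factory, so
the encoder must be destroyed before encoder_factory_. An annotated sketch of
the dependency (my reading of the CL, not a comment from it):

    // Teardown runs in reverse dependency order:
    decoders_.clear();         // decoders don't reference the encoder factory
    encoder_.reset();          // may be a SimulcastEncoderAdapter holding a
                               // raw VideoEncoderFactory*
    encoder_factory_.reset();  // now safe: nothing references the factory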
@@ -17,6 +17,7 @@
 #include <string>
 #include <vector>
 
+#include "api/video_codecs/video_encoder_factory.h"
 #include "common_types.h"  // NOLINT(build/include)
 #include "common_video/h264/h264_common.h"
 #include "modules/video_coding/codecs/test/stats.h"
|
||||
void PrintSettings(rtc::TaskQueue* task_queue) const;
|
||||
|
||||
// Codecs.
|
||||
std::unique_ptr<VideoEncoderFactory> encoder_factory_;
|
||||
std::unique_ptr<VideoEncoder> encoder_;
|
||||
VideoProcessor::VideoDecoderList decoders_;
|
||||
|
||||
|
||||