Revert "Implement H264 simulcast support and generalize SimulcastEncoderAdapter use for H264 & VP8."

This reverts commit 07efe436c9002e139845f62486e3ee4e29f0d85b.

Reason for revert: Breaks downstream project.

The cricket::GetSimulcastConfig method signature has been updated.
I think you can get away with giving temporal_layers_supported a default value (and then remove it after a few days, once downstream projects have been updated).
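
For illustration, a minimal sketch of what such a transitional signature could look like (return type and argument order taken from the ConfigureSimulcast call site in the diff below; the parameter names are assumptions, not the authoritative declaration):

    namespace cricket {
    // A defaulted trailing parameter keeps existing call sites compiling
    // while downstream projects migrate to passing it explicitly.
    std::vector<webrtc::VideoStream> GetSimulcastConfig(
        size_t max_streams,
        int width,
        int height,
        int max_bitrate_bps,
        double bitrate_priority,
        int max_qp,
        int max_framerate,
        bool is_screenshare,
        bool temporal_layers_supported = true);
    }  // namespace cricket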


Original change's description:
> Implement H264 simulcast support and generalize SimulcastEncoderAdapter use for H264 & VP8.
> 
> * Move SimulcastEncoderAdapter out under modules/video_coding
> * Move SimulcastRateAllocator back out to modules/video_coding/utility
> * Move TemporalLayers and ScreenshareLayers to modules/video_coding/utility
> * Move any VP8 specific code - such as temporal layer bitrate budgeting -
>   under codec type dependent conditionals.
> * Plumb the simulcast index for H264 in the codec specific and RTP format data structures.
> 
> Bug: webrtc:5840
> Change-Id: Ieced8a00e38f273c1a6cfd0f5431a87d07b8f44e
> Reviewed-on: https://webrtc-review.googlesource.com/64100
> Commit-Queue: Harald Alvestrand <hta@webrtc.org>
> Reviewed-by: Stefan Holmer <stefan@webrtc.org>
> Reviewed-by: Erik Språng <sprang@webrtc.org>
> Cr-Commit-Position: refs/heads/master@{#23705}
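
For context, the H264 simulcast index mentioned above was plumbed through the codec-specific structs; the shape of that addition is visible in the video_codec_interface.h hunk further down. A minimal sketch of the reverted struct:

    // Sketch of the reverted addition (see the video_codec_interface.h diff below).
    struct CodecSpecificInfoH264 {
      H264PacketizationMode packetization_mode;
      uint8_t simulcast_idx;  // Which simulcast stream this encoded image belongs to.
    };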

TBR=sprang@webrtc.org,stefan@webrtc.org,mflodman@webrtc.org,hta@webrtc.org,sergio.garcia.murillo@gmail.com,titovartem@webrtc.org,agouaillard@gmail.com

Change-Id: Ic9d3b1eeaf195bb5ec2063954421f5e77866d663
No-Presubmit: true
No-Tree-Checks: true
No-Try: true
Bug: webrtc:5840
Reviewed-on: https://webrtc-review.googlesource.com/84760
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23710}
Mirko Bonadei
2018-06-21 13:41:01 +00:00
committed by Commit Bot
parent f341f3feb5
commit 6f440ed5b5
51 changed files with 530 additions and 918 deletions

View File

@ -152,6 +152,7 @@ rtc_static_library("video_coding") {
":packet",
":video_codec_interface",
":video_coding_utility",
":webrtc_vp8_helpers",
":webrtc_vp9_helpers",
"..:module_api",
"..:module_api_public",
@ -236,10 +237,6 @@ rtc_source_set("video_coding_utility") {
"utility/moving_average.h",
"utility/quality_scaler.cc",
"utility/quality_scaler.h",
"utility/simulcast_rate_allocator.cc",
"utility/simulcast_rate_allocator.h",
"utility/simulcast_utility.cc",
"utility/simulcast_utility.h",
"utility/vp8_header_parser.cc",
"utility/vp8_header_parser.h",
"utility/vp9_uncompressed_header_parser.cc",
@ -266,8 +263,6 @@ rtc_source_set("video_coding_utility") {
"../../rtc_base:sequenced_task_checker",
"../../rtc_base/experiments:quality_scaling_experiment",
"../../system_wrappers",
"../../system_wrappers:field_trial_api",
"../../system_wrappers:metrics_api",
"../rtp_rtcp:rtp_rtcp_format",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -296,7 +291,6 @@ rtc_static_library("webrtc_h264") {
"../../rtc_base:checks",
"../../rtc_base:rtc_base",
"../../system_wrappers:metrics_api",
"//third_party/libyuv",
]
if (rtc_use_h264) {
@ -383,20 +377,18 @@ rtc_static_library("webrtc_multiplex") {
]
}
# This target includes the internal SW codec.
rtc_static_library("webrtc_vp8") {
# This target includes VP8 files that may be used for any VP8 codec, internal SW or external HW.
rtc_static_library("webrtc_vp8_helpers") {
visibility = [ "*" ]
poisonous = [ "software_video_codecs" ]
sources = [
"codecs/vp8/default_temporal_layers.cc",
"codecs/vp8/default_temporal_layers.h",
"codecs/vp8/include/vp8.h",
"codecs/vp8/libvpx_vp8_decoder.cc",
"codecs/vp8/libvpx_vp8_decoder.h",
"codecs/vp8/libvpx_vp8_encoder.cc",
"codecs/vp8/libvpx_vp8_encoder.h",
"codecs/vp8/include/vp8_common_types.h",
"codecs/vp8/screenshare_layers.cc",
"codecs/vp8/screenshare_layers.h",
"codecs/vp8/simulcast_rate_allocator.cc",
"codecs/vp8/simulcast_rate_allocator.h",
"codecs/vp8/temporal_layers.cc",
"codecs/vp8/temporal_layers.h",
]
@ -424,6 +416,45 @@ rtc_static_library("webrtc_vp8") {
"//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv",
]
}
# This target includes the internal SW codec.
rtc_static_library("webrtc_vp8") {
visibility = [ "*" ]
poisonous = [ "software_video_codecs" ]
sources = [
"codecs/vp8/include/vp8.h",
"codecs/vp8/include/vp8_common_types.h",
"codecs/vp8/libvpx_vp8_decoder.cc",
"codecs/vp8/libvpx_vp8_decoder.h",
"codecs/vp8/libvpx_vp8_encoder.cc",
"codecs/vp8/libvpx_vp8_encoder.h",
]
if (!build_with_chromium && is_clang) {
# Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
}
deps = [
":video_codec_interface",
":video_coding_utility",
":webrtc_vp8_helpers",
"..:module_api",
"../..:webrtc_common",
"../../:typedefs",
"../../api/video:video_frame",
"../../api/video_codecs:video_codecs_api",
"../../common_video",
"../../rtc_base:checks",
"../../rtc_base:rtc_base_approved",
"../../rtc_base:rtc_numerics",
"../../system_wrappers",
"../../system_wrappers:field_trial_api",
"../../system_wrappers:metrics_api",
"//third_party/abseil-cpp/absl/types:optional",
"//third_party/libyuv",
]
if (rtc_build_libvpx) {
deps += [ rtc_libvpx_dir ]
}
@ -543,8 +574,8 @@ if (rtc_include_tests) {
rtc_source_set("simulcast_test_fixture_impl") {
testonly = true
sources = [
"utility/simulcast_test_fixture_impl.cc",
"utility/simulcast_test_fixture_impl.h",
"codecs/vp8/simulcast_test_fixture_impl.cc",
"codecs/vp8/simulcast_test_fixture_impl.h",
]
if (!build_with_chromium && is_clang) {
@ -556,7 +587,7 @@ if (rtc_include_tests) {
":mock_headers",
":video_codec_interface",
":video_coding",
":video_coding_utility",
":webrtc_vp8_helpers",
"../../:webrtc_common",
"../../api:simulcast_test_fixture_api",
"../../api/video:video_frame",
@ -587,6 +618,7 @@ if (rtc_include_tests) {
":video_codec_interface",
":video_coding",
":video_coding_utility",
":webrtc_vp8_helpers",
":webrtc_vp9_helpers",
"../..:webrtc_common",
"../../:typedefs",
@ -706,7 +738,7 @@ if (rtc_include_tests) {
":videocodec_test_impl",
":webrtc_h264",
":webrtc_multiplex",
":webrtc_vp8",
":webrtc_vp8_helpers",
":webrtc_vp9",
":webrtc_vp9_helpers",
"../..:webrtc_common",
@ -796,10 +828,7 @@ if (rtc_include_tests) {
"video_sender_unittest.cc",
]
if (rtc_use_h264) {
sources += [
"codecs/h264/h264_encoder_impl_unittest.cc",
"codecs/h264/h264_simulcast_unittest.cc",
]
sources += [ "codecs/h264/h264_encoder_impl_unittest.cc" ]
}
deps = [
@ -808,7 +837,6 @@ if (rtc_include_tests) {
":mock_headers",
":nack_module",
":packet",
":simulcast_test_fixture_impl",
":video_codec_interface",
":video_codecs_test_framework",
":video_coding",
@ -816,6 +844,7 @@ if (rtc_include_tests) {
":videocodec_test_impl",
":webrtc_h264",
":webrtc_vp8",
":webrtc_vp8_helpers",
":webrtc_vp9",
":webrtc_vp9_helpers",
"..:module_api",

View File

@ -20,14 +20,10 @@
#include "third_party/openh264/src/codec/api/svc/codec_ver.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_coding/utility/simulcast_rate_allocator.h"
#include "modules/video_coding/utility/simulcast_utility.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/timeutils.h"
#include "system_wrappers/include/metrics.h"
#include "third_party/libyuv/include/libyuv/convert.h"
#include "third_party/libyuv/include/libyuv/scale.h"
namespace webrtc {
@ -161,7 +157,16 @@ static void RtpFragmentize(EncodedImage* encoded_image,
}
H264EncoderImpl::H264EncoderImpl(const cricket::VideoCodec& codec)
: packetization_mode_(H264PacketizationMode::SingleNalUnit),
: openh264_encoder_(nullptr),
width_(0),
height_(0),
max_frame_rate_(0.0f),
target_bps_(0),
max_bps_(0),
mode_(VideoCodecMode::kRealtimeVideo),
frame_dropping_on_(false),
key_frame_interval_(0),
packetization_mode_(H264PacketizationMode::SingleNalUnit),
max_payload_size_(0),
number_of_cores_(0),
encoded_image_callback_(nullptr),
@ -174,30 +179,25 @@ H264EncoderImpl::H264EncoderImpl(const cricket::VideoCodec& codec)
packetization_mode_string == "1") {
packetization_mode_ = H264PacketizationMode::NonInterleaved;
}
downscaled_buffers_.reserve(kMaxSimulcastStreams - 1);
encoded_images_.reserve(kMaxSimulcastStreams);
encoded_image_buffers_.reserve(kMaxSimulcastStreams);
encoders_.reserve(kMaxSimulcastStreams);
configurations_.reserve(kMaxSimulcastStreams);
}
H264EncoderImpl::~H264EncoderImpl() {
Release();
}
int32_t H264EncoderImpl::InitEncode(const VideoCodec* inst,
int32_t H264EncoderImpl::InitEncode(const VideoCodec* codec_settings,
int32_t number_of_cores,
size_t max_payload_size) {
ReportInit();
if (!inst || inst->codecType != kVideoCodecH264) {
if (!codec_settings || codec_settings->codecType != kVideoCodecH264) {
ReportError();
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
if (inst->maxFramerate == 0) {
if (codec_settings->maxFramerate == 0) {
ReportError();
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
if (inst->width < 1 || inst->height < 1) {
if (codec_settings->width < 1 || codec_settings->height < 1) {
ReportError();
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
}
@ -207,134 +207,73 @@ int32_t H264EncoderImpl::InitEncode(const VideoCodec* inst,
ReportError();
return release_ret;
}
RTC_DCHECK(!openh264_encoder_);
int number_of_streams = SimulcastUtility::NumberOfSimulcastStreams(*inst);
bool doing_simulcast = (number_of_streams > 1);
if (doing_simulcast && (!SimulcastUtility::ValidSimulcastResolutions(
*inst, number_of_streams) ||
!SimulcastUtility::ValidSimulcastTemporalLayers(
*inst, number_of_streams))) {
return WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED;
// Create encoder.
if (WelsCreateSVCEncoder(&openh264_encoder_) != 0) {
// Failed to create encoder.
RTC_LOG(LS_ERROR) << "Failed to create OpenH264 encoder";
RTC_DCHECK(!openh264_encoder_);
ReportError();
return WEBRTC_VIDEO_CODEC_ERROR;
}
downscaled_buffers_.resize(number_of_streams - 1);
encoded_images_.resize(number_of_streams);
encoded_image_buffers_.resize(number_of_streams);
encoders_.resize(number_of_streams);
pictures_.resize(number_of_streams);
configurations_.resize(number_of_streams);
RTC_DCHECK(openh264_encoder_);
if (kOpenH264EncoderDetailedLogging) {
int trace_level = WELS_LOG_DETAIL;
openh264_encoder_->SetOption(ENCODER_OPTION_TRACE_LEVEL, &trace_level);
}
// else WELS_LOG_DEFAULT is used by default.
number_of_cores_ = number_of_cores;
// Set internal settings from codec_settings
width_ = codec_settings->width;
height_ = codec_settings->height;
max_frame_rate_ = static_cast<float>(codec_settings->maxFramerate);
mode_ = codec_settings->mode;
frame_dropping_on_ = codec_settings->H264().frameDroppingOn;
key_frame_interval_ = codec_settings->H264().keyFrameInterval;
max_payload_size_ = max_payload_size;
codec_ = *inst;
// Code expects simulcastStream resolutions to be correct, make sure they are
// filled even when there are no simulcast layers.
if (codec_.numberOfSimulcastStreams == 0) {
codec_.simulcastStream[0].width = codec_.width;
codec_.simulcastStream[0].height = codec_.height;
// Codec_settings uses kbits/second; encoder uses bits/second.
max_bps_ = codec_settings->maxBitrate * 1000;
if (codec_settings->targetBitrate == 0)
target_bps_ = codec_settings->startBitrate * 1000;
else
target_bps_ = codec_settings->targetBitrate * 1000;
SEncParamExt encoder_params = CreateEncoderParams();
// Initialize.
if (openh264_encoder_->InitializeExt(&encoder_params) != 0) {
RTC_LOG(LS_ERROR) << "Failed to initialize OpenH264 encoder";
Release();
ReportError();
return WEBRTC_VIDEO_CODEC_ERROR;
}
// TODO(pbos): Base init params on these values before submitting.
int video_format = EVideoFormatType::videoFormatI420;
openh264_encoder_->SetOption(ENCODER_OPTION_DATAFORMAT, &video_format);
for (int i = 0, idx = number_of_streams - 1; i < number_of_streams;
++i, --idx) {
// Temporal layers still not supported.
if (inst->simulcastStream[i].numberOfTemporalLayers > 1) {
Release();
return WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED;
}
ISVCEncoder* openh264_encoder;
// Create encoder.
if (WelsCreateSVCEncoder(&openh264_encoder) != 0) {
// Failed to create encoder.
RTC_LOG(LS_ERROR) << "Failed to create OpenH264 encoder";
RTC_DCHECK(!openh264_encoder);
Release();
ReportError();
return WEBRTC_VIDEO_CODEC_ERROR;
}
RTC_DCHECK(openh264_encoder);
if (kOpenH264EncoderDetailedLogging) {
int trace_level = WELS_LOG_DETAIL;
openh264_encoder->SetOption(ENCODER_OPTION_TRACE_LEVEL, &trace_level);
}
// else WELS_LOG_DEFAULT is used by default.
// Store h264 encoder.
encoders_[i] = openh264_encoder;
// Set internal settings from codec_settings
configurations_[i].simulcast_idx = idx;
configurations_[i].sending = false;
configurations_[i].width = codec_.simulcastStream[idx].width;
configurations_[i].height = codec_.simulcastStream[idx].height;
configurations_[i].max_frame_rate = static_cast<float>(codec_.maxFramerate);
configurations_[i].frame_dropping_on = codec_.H264()->frameDroppingOn;
configurations_[i].key_frame_interval = codec_.H264()->keyFrameInterval;
// Create downscaled image buffers.
if (i > 0) {
downscaled_buffers_[i - 1] = I420Buffer::Create(
configurations_[i].width, configurations_[i].height,
configurations_[i].width, configurations_[i].width / 2,
configurations_[i].width / 2);
}
// Codec_settings uses kbits/second; encoder uses bits/second.
configurations_[i].max_bps = codec_.maxBitrate * 1000;
if (codec_.targetBitrate == 0) {
configurations_[i].target_bps = codec_.startBitrate * 1000;
} else {
configurations_[i].target_bps = codec_.targetBitrate * 1000;
}
// Create encoder parameters based on the layer configuration.
SEncParamExt encoder_params = CreateEncoderParams(i);
// Initialize.
if (openh264_encoder->InitializeExt(&encoder_params) != 0) {
RTC_LOG(LS_ERROR) << "Failed to initialize OpenH264 encoder";
Release();
ReportError();
return WEBRTC_VIDEO_CODEC_ERROR;
}
// TODO(pbos): Base init params on these values before submitting.
int video_format = EVideoFormatType::videoFormatI420;
openh264_encoder->SetOption(ENCODER_OPTION_DATAFORMAT, &video_format);
// Initialize encoded image. Default buffer size: size of unencoded data.
encoded_images_[i]._size =
CalcBufferSize(VideoType::kI420, codec_.simulcastStream[idx].width,
codec_.simulcastStream[idx].height);
encoded_images_[i]._buffer = new uint8_t[encoded_images_[i]._size];
encoded_image_buffers_[i].reset(encoded_images_[i]._buffer);
encoded_images_[i]._completeFrame = true;
encoded_images_[i]._encodedWidth = codec_.simulcastStream[idx].width;
encoded_images_[i]._encodedHeight = codec_.simulcastStream[idx].height;
encoded_images_[i]._length = 0;
}
SimulcastRateAllocator init_allocator(codec_);
BitrateAllocation allocation = init_allocator.GetAllocation(
codec_.targetBitrate ? codec_.targetBitrate * 1000
: codec_.startBitrate * 1000,
codec_.maxFramerate);
return SetRateAllocation(allocation, codec_.maxFramerate);
// Initialize encoded image. Default buffer size: size of unencoded data.
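// For I420 this is width * height * 3 / 2 bytes: a full-resolution Y plane
// plus quarter-resolution U and V planes.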
encoded_image_._size = CalcBufferSize(VideoType::kI420, codec_settings->width,
codec_settings->height);
encoded_image_._buffer = new uint8_t[encoded_image_._size];
encoded_image_buffer_.reset(encoded_image_._buffer);
encoded_image_._completeFrame = true;
encoded_image_._encodedWidth = 0;
encoded_image_._encodedHeight = 0;
encoded_image_._length = 0;
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t H264EncoderImpl::Release() {
while (!encoders_.empty()) {
ISVCEncoder* openh264_encoder = encoders_.back();
if (openh264_encoder) {
RTC_CHECK_EQ(0, openh264_encoder->Uninitialize());
WelsDestroySVCEncoder(openh264_encoder);
}
encoders_.pop_back();
if (openh264_encoder_) {
RTC_CHECK_EQ(0, openh264_encoder_->Uninitialize());
WelsDestroySVCEncoder(openh264_encoder_);
openh264_encoder_ = nullptr;
}
downscaled_buffers_.clear();
configurations_.clear();
encoded_images_.clear();
encoded_image_buffers_.clear();
pictures_.clear();
encoded_image_._buffer = nullptr;
encoded_image_buffer_.reset();
return WEBRTC_VIDEO_CODEC_OK;
}
@ -345,59 +284,27 @@ int32_t H264EncoderImpl::RegisterEncodeCompleteCallback(
}
int32_t H264EncoderImpl::SetRateAllocation(
const BitrateAllocation& bitrate,
uint32_t new_framerate) {
if (encoders_.empty())
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
if (new_framerate < 1)
const VideoBitrateAllocation& bitrate_allocation,
uint32_t framerate) {
if (bitrate_allocation.get_sum_bps() <= 0 || framerate <= 0)
return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
if (bitrate.get_sum_bps() == 0) {
// Encoder paused, turn off all encoding.
for (size_t i = 0; i < configurations_.size(); ++i)
configurations_[i].SetStreamState(false);
return WEBRTC_VIDEO_CODEC_OK;
}
// At this point, bitrate allocation should already match codec settings.
if (codec_.maxBitrate > 0)
RTC_DCHECK_LE(bitrate.get_sum_kbps(), codec_.maxBitrate);
RTC_DCHECK_GE(bitrate.get_sum_kbps(), codec_.minBitrate);
if (codec_.numberOfSimulcastStreams > 0)
RTC_DCHECK_GE(bitrate.get_sum_kbps(), codec_.simulcastStream[0].minBitrate);
codec_.maxFramerate = new_framerate;
size_t stream_idx = encoders_.size() - 1;
for (size_t i = 0; i < encoders_.size(); ++i, --stream_idx) {
// Update layer config.
configurations_[i].target_bps = bitrate.GetSpatialLayerSum(stream_idx);
configurations_[i].max_frame_rate = static_cast<float>(new_framerate);
if (configurations_[i].target_bps) {
configurations_[i].SetStreamState(true);
// Update h264 encoder.
SBitrateInfo target_bitrate;
memset(&target_bitrate, 0, sizeof(SBitrateInfo));
target_bitrate.iLayer = SPATIAL_LAYER_ALL,
target_bitrate.iBitrate = configurations_[i].target_bps;
encoders_[i]->SetOption(ENCODER_OPTION_BITRATE, &target_bitrate);
encoders_[i]->SetOption(ENCODER_OPTION_FRAME_RATE,
&configurations_[i].max_frame_rate);
} else {
configurations_[i].SetStreamState(false);
}
}
target_bps_ = bitrate_allocation.get_sum_bps();
max_frame_rate_ = static_cast<float>(framerate);
SBitrateInfo target_bitrate;
memset(&target_bitrate, 0, sizeof(SBitrateInfo));
target_bitrate.iLayer = SPATIAL_LAYER_ALL,
target_bitrate.iBitrate = target_bps_;
openh264_encoder_->SetOption(ENCODER_OPTION_BITRATE, &target_bitrate);
openh264_encoder_->SetOption(ENCODER_OPTION_FRAME_RATE, &max_frame_rate_);
return WEBRTC_VIDEO_CODEC_OK;
}
int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame,
const CodecSpecificInfo* codec_specific_info,
const std::vector<FrameType>* frame_types) {
if (encoders_.empty()) {
if (!IsInitialized()) {
ReportError();
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
@ -409,134 +316,83 @@ int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame,
return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
}
bool force_key_frame = false;
if (frame_types != nullptr) {
// We only support a single stream.
RTC_DCHECK_EQ(frame_types->size(), 1);
// Skip frame?
if ((*frame_types)[0] == kEmptyFrame) {
return WEBRTC_VIDEO_CODEC_OK;
}
// Force key frame?
force_key_frame = (*frame_types)[0] == kVideoFrameKey;
}
if (force_key_frame) {
// API doc says ForceIntraFrame(false) does nothing, but calling this
// function forces a key frame regardless of the |bIDR| argument's value.
// (If every frame is a key frame we get lag/delays.)
openh264_encoder_->ForceIntraFrame(true);
}
rtc::scoped_refptr<const I420BufferInterface> frame_buffer =
input_frame.video_frame_buffer()->ToI420();
// EncodeFrame input.
SSourcePicture picture;
memset(&picture, 0, sizeof(SSourcePicture));
picture.iPicWidth = frame_buffer->width();
picture.iPicHeight = frame_buffer->height();
picture.iColorFormat = EVideoFormatType::videoFormatI420;
picture.uiTimeStamp = input_frame.ntp_time_ms();
picture.iStride[0] = frame_buffer->StrideY();
picture.iStride[1] = frame_buffer->StrideU();
picture.iStride[2] = frame_buffer->StrideV();
picture.pData[0] = const_cast<uint8_t*>(frame_buffer->DataY());
picture.pData[1] = const_cast<uint8_t*>(frame_buffer->DataU());
picture.pData[2] = const_cast<uint8_t*>(frame_buffer->DataV());
bool send_key_frame = false;
for (size_t i = 0; i < configurations_.size(); ++i) {
if (configurations_[i].key_frame_request && configurations_[i].sending) {
send_key_frame = true;
break;
}
}
if (!send_key_frame && frame_types) {
for (size_t i = 0; i < frame_types->size() && i < configurations_.size();
++i) {
if ((*frame_types)[i] == kVideoFrameKey && configurations_[i].sending) {
send_key_frame = true;
break;
}
}
// EncodeFrame output.
SFrameBSInfo info;
memset(&info, 0, sizeof(SFrameBSInfo));
// Encode!
int enc_ret = openh264_encoder_->EncodeFrame(&picture, &info);
if (enc_ret != 0) {
RTC_LOG(LS_ERROR) << "OpenH264 frame encoding failed, EncodeFrame returned "
<< enc_ret << ".";
ReportError();
return WEBRTC_VIDEO_CODEC_ERROR;
}
RTC_DCHECK_EQ(configurations_[0].width, frame_buffer->width());
RTC_DCHECK_EQ(configurations_[0].height, frame_buffer->height());
encoded_image_._encodedWidth = frame_buffer->width();
encoded_image_._encodedHeight = frame_buffer->height();
encoded_image_._timeStamp = input_frame.timestamp();
encoded_image_.ntp_time_ms_ = input_frame.ntp_time_ms();
encoded_image_.capture_time_ms_ = input_frame.render_time_ms();
encoded_image_.rotation_ = input_frame.rotation();
encoded_image_.content_type_ = (mode_ == VideoCodecMode::kScreensharing)
? VideoContentType::SCREENSHARE
: VideoContentType::UNSPECIFIED;
encoded_image_.timing_.flags = VideoSendTiming::kInvalid;
encoded_image_._frameType = ConvertToVideoFrameType(info.eFrameType);
// Encode image for each layer.
for (size_t i = 0; i < encoders_.size(); ++i) {
// EncodeFrame input.
pictures_[i] = {0};
pictures_[i].iPicWidth = configurations_[i].width;
pictures_[i].iPicHeight = configurations_[i].height;
pictures_[i].iColorFormat = EVideoFormatType::videoFormatI420;
pictures_[i].uiTimeStamp = input_frame.ntp_time_ms();
// Downscale images on second and ongoing layers.
if (i == 0) {
pictures_[i].iStride[0] = frame_buffer->StrideY();
pictures_[i].iStride[1] = frame_buffer->StrideU();
pictures_[i].iStride[2] = frame_buffer->StrideV();
pictures_[i].pData[0] = const_cast<uint8_t*>(frame_buffer->DataY());
pictures_[i].pData[1] = const_cast<uint8_t*>(frame_buffer->DataU());
pictures_[i].pData[2] = const_cast<uint8_t*>(frame_buffer->DataV());
} else {
pictures_[i].iStride[0] = downscaled_buffers_[i - 1]->StrideY();
pictures_[i].iStride[1] = downscaled_buffers_[i - 1]->StrideU();
pictures_[i].iStride[2] = downscaled_buffers_[i - 1]->StrideV();
pictures_[i].pData[0] =
const_cast<uint8_t*>(downscaled_buffers_[i - 1]->DataY());
pictures_[i].pData[1] =
const_cast<uint8_t*>(downscaled_buffers_[i - 1]->DataU());
pictures_[i].pData[2] =
const_cast<uint8_t*>(downscaled_buffers_[i - 1]->DataV());
// Scale the image down a number of times by downsampling factor.
libyuv::I420Scale(pictures_[i - 1].pData[0], pictures_[i - 1].iStride[0],
pictures_[i - 1].pData[1], pictures_[i - 1].iStride[1],
pictures_[i - 1].pData[2], pictures_[i - 1].iStride[2],
configurations_[i - 1].width,
configurations_[i - 1].height, pictures_[i].pData[0],
pictures_[i].iStride[0], pictures_[i].pData[1],
pictures_[i].iStride[1], pictures_[i].pData[2],
pictures_[i].iStride[2], configurations_[i].width,
configurations_[i].height, libyuv::kFilterBilinear);
}
// Split encoded image up into fragments. This also updates |encoded_image_|.
RTPFragmentationHeader frag_header;
RtpFragmentize(&encoded_image_, &encoded_image_buffer_, *frame_buffer, &info,
&frag_header);
if (!configurations_[i].sending) {
continue;
}
if (frame_types != nullptr) {
// Skip frame?
if ((*frame_types)[i] == kEmptyFrame) {
continue;
}
}
if (send_key_frame) {
// API doc says ForceIntraFrame(false) does nothing, but calling this
// function forces a key frame regardless of the |bIDR| argument's value.
// (If every frame is a key frame we get lag/delays.)
encoders_[i]->ForceIntraFrame(true);
configurations_[i].key_frame_request = false;
}
// EncodeFrame output.
SFrameBSInfo info;
memset(&info, 0, sizeof(SFrameBSInfo));
// Encoder can skip frames to save bandwidth in which case
// |encoded_image_._length| == 0.
if (encoded_image_._length > 0) {
// Parse QP.
h264_bitstream_parser_.ParseBitstream(encoded_image_._buffer,
encoded_image_._length);
h264_bitstream_parser_.GetLastSliceQp(&encoded_image_.qp_);
// Encode!
int enc_ret = encoders_[i]->EncodeFrame(&pictures_[i], &info);
if (enc_ret != 0) {
RTC_LOG(LS_ERROR)
<< "OpenH264 frame encoding failed, EncodeFrame returned " << enc_ret
<< ".";
ReportError();
return WEBRTC_VIDEO_CODEC_ERROR;
}
encoded_images_[i]._encodedWidth = configurations_[i].width;
encoded_images_[i]._encodedHeight = configurations_[i].height;
encoded_images_[i]._timeStamp = input_frame.timestamp();
encoded_images_[i].ntp_time_ms_ = input_frame.ntp_time_ms();
encoded_images_[i].capture_time_ms_ = input_frame.render_time_ms();
encoded_images_[i].rotation_ = input_frame.rotation();
encoded_images_[i].content_type_ =
(codec_.mode == VideoCodecMode::kScreensharing)
? VideoContentType::SCREENSHARE
: VideoContentType::UNSPECIFIED;
encoded_images_[i].timing_.flags = VideoSendTiming::kInvalid;
encoded_images_[i]._frameType = ConvertToVideoFrameType(info.eFrameType);
// Split encoded image up into fragments. This also updates
// |encoded_image_|.
RTPFragmentationHeader frag_header;
RtpFragmentize(&encoded_images_[i], &encoded_image_buffers_[i],
*frame_buffer, &info, &frag_header);
// Encoder can skip frames to save bandwidth in which case
// |encoded_images_[i]._length| == 0.
if (encoded_images_[i]._length > 0) {
// Parse QP.
h264_bitstream_parser_.ParseBitstream(encoded_images_[i]._buffer,
encoded_images_[i]._length);
h264_bitstream_parser_.GetLastSliceQp(&encoded_images_[i].qp_);
// Deliver encoded image.
CodecSpecificInfo codec_specific;
codec_specific.codecType = kVideoCodecH264;
codec_specific.codecSpecific.H264.packetization_mode =
packetization_mode_;
codec_specific.codecSpecific.H264.simulcast_idx =
configurations_[i].simulcast_idx;
encoded_image_callback_->OnEncodedImage(encoded_images_[i],
&codec_specific, &frag_header);
}
// Deliver encoded image.
CodecSpecificInfo codec_specific;
codec_specific.codecType = kVideoCodecH264;
codec_specific.codecSpecific.H264.packetization_mode = packetization_mode_;
encoded_image_callback_->OnEncodedImage(encoded_image_, &codec_specific,
&frag_header);
}
return WEBRTC_VIDEO_CODEC_OK;
}
@ -545,35 +401,40 @@ const char* H264EncoderImpl::ImplementationName() const {
return "OpenH264";
}
bool H264EncoderImpl::IsInitialized() const {
return openh264_encoder_ != nullptr;
}
// Initialization parameters.
// There are two ways to initialize. There is SEncParamBase (cleared with
// memset(&p, 0, sizeof(SEncParamBase)) used in Initialize, and SEncParamExt
// which is a superset of SEncParamBase (cleared with GetDefaultParams) used
// in InitializeExt.
SEncParamExt H264EncoderImpl::CreateEncoderParams(size_t i) const {
SEncParamExt H264EncoderImpl::CreateEncoderParams() const {
RTC_DCHECK(openh264_encoder_);
SEncParamExt encoder_params;
encoders_[i]->GetDefaultParams(&encoder_params);
if (codec_.mode == VideoCodecMode::kRealtimeVideo) {
openh264_encoder_->GetDefaultParams(&encoder_params);
if (mode_ == VideoCodecMode::kRealtimeVideo) {
encoder_params.iUsageType = CAMERA_VIDEO_REAL_TIME;
} else if (codec_.mode == VideoCodecMode::kScreensharing) {
} else if (mode_ == VideoCodecMode::kScreensharing) {
encoder_params.iUsageType = SCREEN_CONTENT_REAL_TIME;
} else {
RTC_NOTREACHED();
}
encoder_params.iPicWidth = configurations_[i].width;
encoder_params.iPicHeight = configurations_[i].height;
encoder_params.iTargetBitrate = configurations_[i].target_bps;
encoder_params.iMaxBitrate = configurations_[i].max_bps;
encoder_params.iPicWidth = width_;
encoder_params.iPicHeight = height_;
encoder_params.iTargetBitrate = target_bps_;
encoder_params.iMaxBitrate = max_bps_;
// Rate Control mode
encoder_params.iRCMode = RC_BITRATE_MODE;
encoder_params.fMaxFrameRate = configurations_[i].max_frame_rate;
encoder_params.fMaxFrameRate = max_frame_rate_;
// The following parameters are extension parameters (they're in SEncParamExt,
// not in SEncParamBase).
encoder_params.bEnableFrameSkip = configurations_[i].frame_dropping_on;
encoder_params.bEnableFrameSkip = frame_dropping_on_;
// |uiIntraPeriod| - multiple of GOP size
// |keyFrameInterval| - number of frames
encoder_params.uiIntraPeriod = configurations_[i].key_frame_interval;
encoder_params.uiIntraPeriod = key_frame_interval_;
encoder_params.uiMaxNalSize = 0;
// Threading model: use auto.
// 0: auto (dynamic imp. internal encoder)
@ -641,12 +502,4 @@ VideoEncoder::ScalingSettings H264EncoderImpl::GetScalingSettings() const {
kHighH264QpThreshold);
}
void H264EncoderImpl::LayerConfig::SetStreamState(bool send_stream) {
if (send_stream && !sending) {
// Need a key frame if we have not sent this stream before.
key_frame_request = true;
}
sending = send_stream;
}
} // namespace webrtc

View File

@ -15,7 +15,6 @@
#include <memory>
#include <vector>
#include "api/video/i420_buffer.h"
#include "common_video/h264/h264_bitstream_parser.h"
#include "modules/video_coding/codecs/h264/include/h264.h"
#include "modules/video_coding/utility/quality_scaler.h"
@ -27,22 +26,6 @@ class ISVCEncoder;
namespace webrtc {
class H264EncoderImpl : public H264Encoder {
public:
struct LayerConfig {
int simulcast_idx = 0;
int width = -1;
int height = -1;
bool sending = true;
bool key_frame_request = false;
float max_frame_rate = 0;
uint32_t target_bps = 0;
uint32_t max_bps = 0;
bool frame_dropping_on = false;
int key_frame_interval = 0;
void SetStreamState(bool send_stream);
};
public:
explicit H264EncoderImpl(const cricket::VideoCodec& codec);
~H264EncoderImpl() override;
@ -83,24 +66,32 @@ class H264EncoderImpl : public H264Encoder {
}
private:
SEncParamExt CreateEncoderParams(size_t i) const;
bool IsInitialized() const;
SEncParamExt CreateEncoderParams() const;
webrtc::H264BitstreamParser h264_bitstream_parser_;
// Reports statistics with histograms.
void ReportInit();
void ReportError();
std::vector<ISVCEncoder*> encoders_;
std::vector<SSourcePicture> pictures_;
std::vector<rtc::scoped_refptr<I420Buffer>> downscaled_buffers_;
std::vector<LayerConfig> configurations_;
std::vector<EncodedImage> encoded_images_;
std::vector<std::unique_ptr<uint8_t[]>> encoded_image_buffers_;
VideoCodec codec_;
ISVCEncoder* openh264_encoder_;
// Settings that are used by this encoder.
int width_;
int height_;
float max_frame_rate_;
uint32_t target_bps_;
uint32_t max_bps_;
VideoCodecMode mode_;
// H.264 specific parameters
bool frame_dropping_on_;
int key_frame_interval_;
H264PacketizationMode packetization_mode_;
size_t max_payload_size_;
int32_t number_of_cores_;
EncodedImage encoded_image_;
std::unique_ptr<uint8_t[]> encoded_image_buffer_;
EncodedImageCallback* encoded_image_callback_;
bool has_reported_init_;

View File

@ -1,99 +0,0 @@
/*
* Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <memory>
#include "api/test/create_simulcast_test_fixture.h"
#include "api/test/simulcast_test_fixture.h"
#include "modules/video_coding/codecs/h264/include/h264.h"
#include "rtc_base/ptr_util.h"
#include "test/function_video_decoder_factory.h"
#include "test/function_video_encoder_factory.h"
#include "test/gtest.h"
namespace webrtc {
namespace test {
namespace {
std::unique_ptr<SimulcastTestFixture> CreateSpecificSimulcastTestFixture() {
std::unique_ptr<VideoEncoderFactory> encoder_factory =
rtc::MakeUnique<FunctionVideoEncoderFactory>(
[]() { return H264Encoder::Create(cricket::VideoCodec("H264")); });
std::unique_ptr<VideoDecoderFactory> decoder_factory =
rtc::MakeUnique<FunctionVideoDecoderFactory>(
[]() { return H264Decoder::Create(); });
return CreateSimulcastTestFixture(std::move(encoder_factory),
std::move(decoder_factory),
SdpVideoFormat("H264"));
}
} // namespace
TEST(TestH264Simulcast, TestKeyFrameRequestsOnAllStreams) {
auto fixture = CreateSpecificSimulcastTestFixture();
fixture->TestKeyFrameRequestsOnAllStreams();
}
TEST(TestH264Simulcast, TestPaddingAllStreams) {
auto fixture = CreateSpecificSimulcastTestFixture();
fixture->TestPaddingAllStreams();
}
TEST(TestH264Simulcast, TestPaddingTwoStreams) {
auto fixture = CreateSpecificSimulcastTestFixture();
fixture->TestPaddingTwoStreams();
}
TEST(TestH264Simulcast, TestPaddingTwoStreamsOneMaxedOut) {
auto fixture = CreateSpecificSimulcastTestFixture();
fixture->TestPaddingTwoStreamsOneMaxedOut();
}
TEST(TestH264Simulcast, TestPaddingOneStream) {
auto fixture = CreateSpecificSimulcastTestFixture();
fixture->TestPaddingOneStream();
}
TEST(TestH264Simulcast, TestPaddingOneStreamTwoMaxedOut) {
auto fixture = CreateSpecificSimulcastTestFixture();
fixture->TestPaddingOneStreamTwoMaxedOut();
}
TEST(TestH264Simulcast, TestSendAllStreams) {
auto fixture = CreateSpecificSimulcastTestFixture();
fixture->TestSendAllStreams();
}
TEST(TestH264Simulcast, TestDisablingStreams) {
auto fixture = CreateSpecificSimulcastTestFixture();
fixture->TestDisablingStreams();
}
TEST(TestH264Simulcast, TestActiveStreams) {
auto fixture = CreateSpecificSimulcastTestFixture();
fixture->TestActiveStreams();
}
TEST(TestH264Simulcast, TestSwitchingToOneStream) {
auto fixture = CreateSpecificSimulcastTestFixture();
fixture->TestSwitchingToOneStream();
}
TEST(TestH264Simulcast, TestSwitchingToOneOddStream) {
auto fixture = CreateSpecificSimulcastTestFixture();
fixture->TestSwitchingToOneOddStream();
}
TEST(TestH264Simulcast, TestStrideEncodeDecode) {
auto fixture = CreateSpecificSimulcastTestFixture();
fixture->TestStrideEncodeDecode();
}
} // namespace test
} // namespace webrtc

View File

@ -52,7 +52,7 @@ void ConfigureSimulcast(VideoCodec* codec_settings) {
const std::vector<webrtc::VideoStream> streams = cricket::GetSimulcastConfig(
codec_settings->numberOfSimulcastStreams, codec_settings->width,
codec_settings->height, kMaxBitrateBps, kBitratePriority, kMaxQp,
kMaxFramerateFps, /* is_screenshare = */ false, true);
kMaxFramerateFps, /* is_screenshare = */ false);
for (size_t i = 0; i < streams.size(); ++i) {
SimulcastStream* ss = &codec_settings->simulcastStream[i];

View File

@ -19,10 +19,10 @@
#include "common_video/h264/h264_common.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h"
#include "modules/video_coding/include/video_codec_initializer.h"
#include "modules/video_coding/include/video_error_codes.h"
#include "modules/video_coding/utility/default_video_bitrate_allocator.h"
#include "modules/video_coding/utility/simulcast_rate_allocator.h"
#include "rtc_base/checks.h"
#include "rtc_base/timeutils.h"
#include "test/gtest.h"

View File

@ -18,6 +18,7 @@
#include <vector>
#include "modules/include/module_common_types.h"
#include "modules/video_coding/codecs/vp8/include/vp8_common_types.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"

View File

@ -10,8 +10,8 @@
#include "modules/video_coding/codecs/vp8/default_temporal_layers.h"
#include "modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h"
#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "modules/video_coding/utility/simulcast_rate_allocator.h"
#include "test/field_trial.h"
#include "test/gtest.h"

View File

@ -0,0 +1,29 @@
/*
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_COMMON_TYPES_H_
#define MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_COMMON_TYPES_H_
#include "common_types.h" // NOLINT(build/include)
namespace webrtc {
// Ratio allocation between temporal streams:
// Values as required for the VP8 codec (accumulating).
static const float
kVp8LayerRateAlloction[kMaxSimulcastStreams][kMaxTemporalStreams] = {
{1.0f, 1.0f, 1.0f, 1.0f}, // 1 layer
{0.6f, 1.0f, 1.0f, 1.0f}, // 2 layers {60%, 40%}
{0.4f, 0.6f, 1.0f, 1.0f}, // 3 layers {40%, 20%, 40%}
{0.25f, 0.4f, 0.6f, 1.0f} // 4 layers {25%, 15%, 20%, 40%}
};
} // namespace webrtc
#endif // MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_COMMON_TYPES_H_
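
The table accumulates: a layer's own share is the difference between consecutive entries in its row. A minimal sketch of turning the table into per-layer rates (illustrative only, not the allocator's exact code; the hypothetical helper assumes the table above is in scope):

    // For three layers the row is {0.4, 0.6, 1.0, 1.0}, so the per-layer
    // shares are 0.4, 0.6 - 0.4 = 0.2 and 1.0 - 0.6 = 0.4, i.e. {40%, 20%, 40%}.
    uint32_t LayerBitrateKbps(uint32_t total_kbps, int num_layers, int layer) {
      const float acc = kVp8LayerRateAlloction[num_layers - 1][layer];
      const float prev =
          layer == 0 ? 0.0f : kVp8LayerRateAlloction[num_layers - 1][layer - 1];
      return static_cast<uint32_t>((acc - prev) * total_kbps + 0.5f);
    }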

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -14,8 +14,7 @@
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h"
#include "modules/video_coding/utility/simulcast_rate_allocator.h"
#include "modules/video_coding/utility/simulcast_utility.h"
#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h"
#include "rtc_base/checks.h"
#include "rtc_base/ptr_util.h"
#include "rtc_base/timeutils.h"
@ -48,7 +47,7 @@ enum denoiserState {
};
// Greatest common divisor
static int GCD(int a, int b) {
int GCD(int a, int b) {
int c = a % b;
while (c != 0) {
a = b;
@ -58,6 +57,53 @@ static int GCD(int a, int b) {
return b;
}
uint32_t SumStreamMaxBitrate(int streams, const VideoCodec& codec) {
uint32_t bitrate_sum = 0;
for (int i = 0; i < streams; ++i) {
bitrate_sum += codec.simulcastStream[i].maxBitrate;
}
return bitrate_sum;
}
int NumberOfStreams(const VideoCodec& codec) {
int streams =
codec.numberOfSimulcastStreams < 1 ? 1 : codec.numberOfSimulcastStreams;
uint32_t simulcast_max_bitrate = SumStreamMaxBitrate(streams, codec);
if (simulcast_max_bitrate == 0) {
streams = 1;
}
return streams;
}
bool ValidSimulcastResolutions(const VideoCodec& codec, int num_streams) {
if (codec.width != codec.simulcastStream[num_streams - 1].width ||
codec.height != codec.simulcastStream[num_streams - 1].height) {
return false;
}
for (int i = 0; i < num_streams; ++i) {
if (codec.width * codec.simulcastStream[i].height !=
codec.height * codec.simulcastStream[i].width) {
return false;
}
}
for (int i = 1; i < num_streams; ++i) {
if (codec.simulcastStream[i].width !=
codec.simulcastStream[i - 1].width * 2) {
return false;
}
}
return true;
}
bool ValidSimulcastTemporalLayers(const VideoCodec& codec, int num_streams) {
for (int i = 0; i < num_streams - 1; ++i) {
if (codec.simulcastStream[i].numberOfTemporalLayers !=
codec.simulcastStream[i + 1].numberOfTemporalLayers)
return false;
}
return true;
}
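// Example of a configuration these checks accept (illustrative, not part of
// the diff): a 1280x720 codec with simulcast streams of 320x180, 640x360 and
// 1280x720. The top stream matches the codec resolution, every stream keeps
// the 16:9 aspect ratio, each width doubles the previous one, and all streams
// use the same number of temporal layers.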
bool GetGfBoostPercentageFromFieldTrialGroup(int* boost_percentage) {
std::string group = webrtc::field_trial::FindFullName(kVp8GfBoostFieldTrial);
if (group.empty())
@ -323,13 +369,12 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst,
return retVal;
}
int number_of_streams = SimulcastUtility::NumberOfSimulcastStreams(*inst);
int number_of_streams = NumberOfStreams(*inst);
bool doing_simulcast = (number_of_streams > 1);
if (doing_simulcast && (!SimulcastUtility::ValidSimulcastResolutions(
*inst, number_of_streams) ||
!SimulcastUtility::ValidSimulcastTemporalLayers(
*inst, number_of_streams))) {
if (doing_simulcast &&
(!ValidSimulcastResolutions(*inst, number_of_streams) ||
!ValidSimulcastTemporalLayers(*inst, number_of_streams))) {
return WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED;
}

View File

@ -30,8 +30,7 @@ std::unique_ptr<SimulcastTestFixture> CreateSpecificSimulcastTestFixture() {
rtc::MakeUnique<FunctionVideoDecoderFactory>(
[]() { return VP8Decoder::Create(); });
return CreateSimulcastTestFixture(std::move(encoder_factory),
std::move(decoder_factory),
SdpVideoFormat("VP8"));
std::move(decoder_factory));
}
} // namespace

View File

@ -37,8 +37,7 @@ constexpr int ScreenshareLayers::kMaxNumTemporalLayers;
// been exceeded. This prevents needless keyframe requests.
const int ScreenshareLayers::kMaxFrameIntervalMs = 2750;
ScreenshareLayers::ScreenshareLayers(int num_temporal_layers,
Clock* clock)
ScreenshareLayers::ScreenshareLayers(int num_temporal_layers, Clock* clock)
: clock_(clock),
number_of_temporal_layers_(
std::min(kMaxNumTemporalLayers, num_temporal_layers)),

View File

@ -28,8 +28,7 @@ class ScreenshareLayers : public TemporalLayers {
static const double kAcceptableTargetOvershoot;
static const int kMaxFrameIntervalMs;
ScreenshareLayers(int num_temporal_layers,
Clock* clock);
ScreenshareLayers(int num_temporal_layers, Clock* clock);
virtual ~ScreenshareLayers();
// Returns the recommended VP8 encode flags needed. May refresh the decoder

View File

@ -8,14 +8,14 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/video_coding/utility/simulcast_rate_allocator.h"
#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h"
#include <algorithm>
#include <memory>
#include <utility>
#include <vector>
#include "common_types.h" // NOLINT(build/include)
#include "modules/video_coding/codecs/vp8/include/vp8_common_types.h"
#include "rtc_base/checks.h"
namespace webrtc {
@ -190,7 +190,7 @@ std::vector<uint32_t> SimulcastRateAllocator::DefaultTemporalLayerAllocation(
std::vector<uint32_t> bitrates;
for (size_t i = 0; i < num_temporal_layers; ++i) {
float layer_bitrate =
bitrate_kbps * kLayerRateAllocation[num_temporal_layers - 1][i];
bitrate_kbps * kVp8LayerRateAlloction[num_temporal_layers - 1][i];
bitrates.push_back(static_cast<uint32_t>(layer_bitrate + 0.5));
}
@ -235,10 +235,9 @@ const VideoCodec& webrtc::SimulcastRateAllocator::GetCodec() const {
int SimulcastRateAllocator::NumTemporalStreams(size_t simulcast_id) const {
return std::max<uint8_t>(
1,
codec_.codecType == kVideoCodecVP8 && codec_.numberOfSimulcastStreams == 0
? codec_.VP8().numberOfTemporalLayers
: codec_.simulcastStream[simulcast_id].numberOfTemporalLayers);
1, codec_.numberOfSimulcastStreams == 0
? codec_.VP8().numberOfTemporalLayers
: codec_.simulcastStream[simulcast_id].numberOfTemporalLayers);
}
} // namespace webrtc

View File

@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_VIDEO_CODING_UTILITY_SIMULCAST_RATE_ALLOCATOR_H_
#define MODULES_VIDEO_CODING_UTILITY_SIMULCAST_RATE_ALLOCATOR_H_
#ifndef MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_RATE_ALLOCATOR_H_
#define MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_RATE_ALLOCATOR_H_
#include <stdint.h>
@ -20,20 +20,11 @@
#include "api/video_codecs/video_encoder.h"
#include "common_types.h" // NOLINT(build/include)
#include "common_video/include/video_bitrate_allocator.h"
#include "modules/video_coding/codecs/vp8/temporal_layers.h"
#include "rtc_base/constructormagic.h"
namespace webrtc {
// Ratio allocation between temporal streams:
// Values as required for the VP8 codec (accumulating).
static const float
kLayerRateAllocation[kMaxSimulcastStreams][kMaxTemporalStreams] = {
{1.0f, 1.0f, 1.0f, 1.0f}, // 1 layer
{0.6f, 1.0f, 1.0f, 1.0f}, // 2 layers {60%, 40%}
{0.4f, 0.6f, 1.0f, 1.0f}, // 3 layers {40%, 20%, 40%}
{0.25f, 0.4f, 0.6f, 1.0f} // 4 layers {25%, 15%, 20%, 40%}
};
class SimulcastRateAllocator : public VideoBitrateAllocator {
public:
explicit SimulcastRateAllocator(const VideoCodec& codec);
@ -67,4 +58,4 @@ class SimulcastRateAllocator : public VideoBitrateAllocator {
} // namespace webrtc
#endif // MODULES_VIDEO_CODING_UTILITY_SIMULCAST_RATE_ALLOCATOR_H_
#endif // MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_RATE_ALLOCATOR_H_

View File

@ -8,7 +8,7 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/video_coding/utility/simulcast_test_fixture_impl.h"
#include "modules/video_coding/codecs/vp8/simulcast_test_fixture_impl.h"
#include <algorithm>
#include <map>
@ -18,6 +18,8 @@
#include "api/video_codecs/sdp_video_format.h"
#include "common_video/include/video_frame.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_coding/codecs/vp8/include/vp8.h"
#include "modules/video_coding/codecs/vp8/temporal_layers.h"
#include "modules/video_coding/include/video_coding_defines.h"
#include "rtc_base/checks.h"
#include "test/gtest.h"
@ -42,7 +44,6 @@ const int kMaxBitrates[kNumberOfSimulcastStreams] = {150, 600, 1200};
const int kMinBitrates[kNumberOfSimulcastStreams] = {50, 150, 600};
const int kTargetBitrates[kNumberOfSimulcastStreams] = {100, 450, 1000};
const int kDefaultTemporalLayerProfile[3] = {3, 3, 3};
const int kNoTemporalLayerProfile[3] = {0, 0, 0};
template <typename T>
void SetExpectedValues3(T value0, T value1, T value2, T* expected_values) {
@ -60,15 +61,15 @@ enum PlaneType {
} // namespace
class SimulcastTestFixtureImpl::TestEncodedImageCallback
class SimulcastTestFixtureImpl::Vp8TestEncodedImageCallback
: public EncodedImageCallback {
public:
TestEncodedImageCallback() {
Vp8TestEncodedImageCallback() : picture_id_(-1) {
memset(temporal_layer_, -1, sizeof(temporal_layer_));
memset(layer_sync_, false, sizeof(layer_sync_));
}
~TestEncodedImageCallback() {
~Vp8TestEncodedImageCallback() {
delete[] encoded_key_frame_._buffer;
delete[] encoded_frame_._buffer;
}
@ -76,15 +77,8 @@ class SimulcastTestFixtureImpl::TestEncodedImageCallback
virtual Result OnEncodedImage(const EncodedImage& encoded_image,
const CodecSpecificInfo* codec_specific_info,
const RTPFragmentationHeader* fragmentation) {
uint16_t simulcast_idx = 0;
bool is_vp8 = (codec_specific_info->codecType == kVideoCodecVP8);
if (is_vp8) {
simulcast_idx = codec_specific_info->codecSpecific.VP8.simulcastIdx;
} else {
simulcast_idx = codec_specific_info->codecSpecific.H264.simulcast_idx;
}
// Only store the base layer.
if (simulcast_idx) {
if (codec_specific_info->codecSpecific.VP8.simulcastIdx == 0) {
if (encoded_image._frameType == kVideoFrameKey) {
delete[] encoded_key_frame_._buffer;
encoded_key_frame_._buffer = new uint8_t[encoded_image._size];
@ -103,18 +97,17 @@ class SimulcastTestFixtureImpl::TestEncodedImageCallback
encoded_image._length);
}
}
if (is_vp8) {
layer_sync_[codec_specific_info->codecSpecific.VP8.simulcastIdx] =
codec_specific_info->codecSpecific.VP8.layerSync;
temporal_layer_[codec_specific_info->codecSpecific.VP8.simulcastIdx] =
codec_specific_info->codecSpecific.VP8.temporalIdx;
}
layer_sync_[codec_specific_info->codecSpecific.VP8.simulcastIdx] =
codec_specific_info->codecSpecific.VP8.layerSync;
temporal_layer_[codec_specific_info->codecSpecific.VP8.simulcastIdx] =
codec_specific_info->codecSpecific.VP8.temporalIdx;
return Result(Result::OK, encoded_image._timeStamp);
}
// This method only makes sense for VP8.
void GetLastEncodedFrameInfo(int* temporal_layer,
void GetLastEncodedFrameInfo(int* picture_id,
int* temporal_layer,
bool* layer_sync,
int stream) {
*picture_id = picture_id_;
*temporal_layer = temporal_layer_[stream];
*layer_sync = layer_sync_[stream];
}
@ -128,14 +121,15 @@ class SimulcastTestFixtureImpl::TestEncodedImageCallback
private:
EncodedImage encoded_key_frame_;
EncodedImage encoded_frame_;
int picture_id_;
int temporal_layer_[kNumberOfSimulcastStreams];
bool layer_sync_[kNumberOfSimulcastStreams];
};
class SimulcastTestFixtureImpl::TestDecodedImageCallback
class SimulcastTestFixtureImpl::Vp8TestDecodedImageCallback
: public DecodedImageCallback {
public:
TestDecodedImageCallback() : decoded_frames_(0) {}
Vp8TestDecodedImageCallback() : decoded_frames_(0) {}
int32_t Decoded(VideoFrame& decoded_image) override {
rtc::scoped_refptr<I420BufferInterface> i420_buffer =
decoded_image.video_frame_buffer()->ToI420();
@ -204,9 +198,7 @@ void ConfigureStream(int width,
stream->maxBitrate = max_bitrate;
stream->minBitrate = min_bitrate;
stream->targetBitrate = target_bitrate;
if (num_temporal_layers >= 0) {
stream->numberOfTemporalLayers = num_temporal_layers;
}
stream->numberOfTemporalLayers = num_temporal_layers;
stream->qpMax = 45;
stream->active = true;
}
@ -215,11 +207,10 @@ void ConfigureStream(int width,
void SimulcastTestFixtureImpl::DefaultSettings(
VideoCodec* settings,
const int* temporal_layer_profile,
VideoCodecType codec_type) {
const int* temporal_layer_profile) {
RTC_CHECK(settings);
memset(settings, 0, sizeof(VideoCodec));
settings->codecType = codec_type;
settings->codecType = kVideoCodecVP8;
// 96 to 127 dynamic payload types for video codecs
settings->plType = 120;
settings->startBitrate = 300;
@ -242,26 +233,18 @@ void SimulcastTestFixtureImpl::DefaultSettings(
ConfigureStream(kDefaultWidth, kDefaultHeight, kMaxBitrates[2],
kMinBitrates[2], kTargetBitrates[2],
&settings->simulcastStream[2], temporal_layer_profile[2]);
if (codec_type == kVideoCodecVP8) {
settings->VP8()->denoisingOn = true;
settings->VP8()->automaticResizeOn = false;
settings->VP8()->frameDroppingOn = true;
settings->VP8()->keyFrameInterval = 3000;
} else {
settings->H264()->frameDroppingOn = true;
settings->H264()->keyFrameInterval = 3000;
}
settings->VP8()->denoisingOn = true;
settings->VP8()->automaticResizeOn = false;
settings->VP8()->frameDroppingOn = true;
settings->VP8()->keyFrameInterval = 3000;
}
SimulcastTestFixtureImpl::SimulcastTestFixtureImpl(
std::unique_ptr<VideoEncoderFactory> encoder_factory,
std::unique_ptr<VideoDecoderFactory> decoder_factory,
SdpVideoFormat video_format)
: codec_type_(PayloadStringToCodecType(video_format.name)) {
encoder_ = encoder_factory->CreateVideoEncoder(video_format);
decoder_ = decoder_factory->CreateVideoDecoder(video_format);
SetUpCodec(codec_type_ == kVideoCodecVP8 ? kDefaultTemporalLayerProfile
: kNoTemporalLayerProfile);
std::unique_ptr<VideoDecoderFactory> decoder_factory) {
encoder_ = encoder_factory->CreateVideoEncoder(SdpVideoFormat("VP8"));
decoder_ = decoder_factory->CreateVideoDecoder(SdpVideoFormat("VP8"));
SetUpCodec(kDefaultTemporalLayerProfile);
}
SimulcastTestFixtureImpl::~SimulcastTestFixtureImpl() {
@ -272,7 +255,7 @@ SimulcastTestFixtureImpl::~SimulcastTestFixtureImpl() {
void SimulcastTestFixtureImpl::SetUpCodec(const int* temporal_layer_profile) {
encoder_->RegisterEncodeCompleteCallback(&encoder_callback_);
decoder_->RegisterDecodeCompleteCallback(&decoder_callback_);
DefaultSettings(&settings_, temporal_layer_profile, codec_type_);
DefaultSettings(&settings_, temporal_layer_profile);
SetUpRateAllocator();
EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200));
EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1));
@ -378,14 +361,16 @@ void SimulcastTestFixtureImpl::ExpectStreams(FrameType frame_type,
}
void SimulcastTestFixtureImpl::VerifyTemporalIdxAndSyncForAllSpatialLayers(
TestEncodedImageCallback* encoder_callback,
Vp8TestEncodedImageCallback* encoder_callback,
const int* expected_temporal_idx,
const bool* expected_layer_sync,
int num_spatial_layers) {
int picture_id = -1;
int temporal_layer = -1;
bool layer_sync = false;
for (int i = 0; i < num_spatial_layers; i++) {
encoder_callback->GetLastEncodedFrameInfo(&temporal_layer, &layer_sync, i);
encoder_callback->GetLastEncodedFrameInfo(&picture_id, &temporal_layer,
&layer_sync, i);
EXPECT_EQ(expected_temporal_idx[i], temporal_layer);
EXPECT_EQ(expected_layer_sync[i], layer_sync);
}
@ -573,15 +558,9 @@ void SimulcastTestFixtureImpl::TestActiveStreams() {
}
void SimulcastTestFixtureImpl::SwitchingToOneStream(int width, int height) {
const int* temporal_layer_profile = nullptr;
// Disable all streams except the last and set the bitrate of the last to
// 100 kbps. This verifies the way GTP switches to screenshare mode.
if (codec_type_ == kVideoCodecVP8) {
settings_.VP8()->numberOfTemporalLayers = 1;
temporal_layer_profile = kDefaultTemporalLayerProfile;
} else {
temporal_layer_profile = kNoTemporalLayerProfile;
}
settings_.VP8()->numberOfTemporalLayers = 1;
settings_.maxBitrate = 100;
settings_.startBitrate = 100;
settings_.width = width;
@ -626,7 +605,7 @@ void SimulcastTestFixtureImpl::SwitchingToOneStream(int width, int height) {
EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types));
// Switch back.
DefaultSettings(&settings_, temporal_layer_profile, codec_type_);
DefaultSettings(&settings_, kDefaultTemporalLayerProfile);
// Start at the lowest bitrate for enabling base stream.
settings_.startBitrate = kMinBitrates[0];
SetUpRateAllocator();
@ -657,8 +636,7 @@ void SimulcastTestFixtureImpl::TestSwitchingToOneSmallStream() {
// 3-3-3 pattern: 3 temporal layers for all spatial streams, so same
// temporal_layer id and layer_sync is expected for all streams.
void SimulcastTestFixtureImpl::TestSpatioTemporalLayers333PatternEncoder() {
EXPECT_EQ(codec_type_, kVideoCodecVP8);
TestEncodedImageCallback encoder_callback;
Vp8TestEncodedImageCallback encoder_callback;
encoder_->RegisterEncodeCompleteCallback(&encoder_callback);
SetRates(kMaxBitrates[2], 30); // To get all three streams.
@ -725,10 +703,9 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers333PatternEncoder() {
// Since CodecSpecificInfoVP8.temporalIdx is uint8_t, this will wrap to 255.
// TODO(marpan): Although this seems safe for now, we should fix this.
void SimulcastTestFixtureImpl::TestSpatioTemporalLayers321PatternEncoder() {
EXPECT_EQ(codec_type_, kVideoCodecVP8);
int temporal_layer_profile[3] = {3, 2, 1};
SetUpCodec(temporal_layer_profile);
TestEncodedImageCallback encoder_callback;
Vp8TestEncodedImageCallback encoder_callback;
encoder_->RegisterEncodeCompleteCallback(&encoder_callback);
SetRates(kMaxBitrates[2], 30); // To get all three streams.
@ -784,8 +761,8 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers321PatternEncoder() {
}
void SimulcastTestFixtureImpl::TestStrideEncodeDecode() {
TestEncodedImageCallback encoder_callback;
TestDecodedImageCallback decoder_callback;
Vp8TestEncodedImageCallback encoder_callback;
Vp8TestDecodedImageCallback decoder_callback;
encoder_->RegisterEncodeCompleteCallback(&encoder_callback);
decoder_->RegisterDecodeCompleteCallback(&decoder_callback);

View File

@ -8,8 +8,8 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_VIDEO_CODING_UTILITY_SIMULCAST_TEST_FIXTURE_IMPL_H_
#define MODULES_VIDEO_CODING_UTILITY_SIMULCAST_TEST_FIXTURE_IMPL_H_
#ifndef MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_TEST_FIXTURE_IMPL_H_
#define MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_TEST_FIXTURE_IMPL_H_
#include <memory>
#include <vector>
@ -20,7 +20,7 @@
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "common_types.h" // NOLINT(build/include)
#include "modules/video_coding/utility/simulcast_rate_allocator.h"
#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h"
#include "modules/video_coding/include/mock/mock_video_codec_interface.h"
namespace webrtc {
@ -30,8 +30,7 @@ class SimulcastTestFixtureImpl final : public SimulcastTestFixture {
public:
SimulcastTestFixtureImpl(
std::unique_ptr<VideoEncoderFactory> encoder_factory,
std::unique_ptr<VideoDecoderFactory> decoder_factory,
SdpVideoFormat video_format);
std::unique_ptr<VideoDecoderFactory> decoder_factory);
~SimulcastTestFixtureImpl() final;
// Implements SimulcastTestFixture.
@ -52,12 +51,11 @@ class SimulcastTestFixtureImpl final : public SimulcastTestFixture {
void TestStrideEncodeDecode() override;
static void DefaultSettings(VideoCodec* settings,
const int* temporal_layer_profile,
VideoCodecType codec_type);
const int* temporal_layer_profile);
private:
class TestEncodedImageCallback;
class TestDecodedImageCallback;
class Vp8TestEncodedImageCallback;
class Vp8TestDecodedImageCallback;
void SetUpCodec(const int* temporal_layer_profile);
void SetUpRateAllocator();
@ -68,7 +66,7 @@ class SimulcastTestFixtureImpl final : public SimulcastTestFixture {
const std::vector<bool> expected_streams_active);
void ExpectStreams(FrameType frame_type, int expected_video_streams);
void VerifyTemporalIdxAndSyncForAllSpatialLayers(
TestEncodedImageCallback* encoder_callback,
Vp8TestEncodedImageCallback* encoder_callback,
const int* expected_temporal_idx,
const bool* expected_layer_sync,
int num_spatial_layers);
@ -82,10 +80,9 @@ class SimulcastTestFixtureImpl final : public SimulcastTestFixture {
rtc::scoped_refptr<I420Buffer> input_buffer_;
std::unique_ptr<VideoFrame> input_frame_;
std::unique_ptr<SimulcastRateAllocator> rate_allocator_;
VideoCodecType codec_type_;
};
} // namespace test
} // namespace webrtc
#endif // MODULES_VIDEO_CODING_UTILITY_SIMULCAST_TEST_FIXTURE_IMPL_H_
#endif // MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_TEST_FIXTURE_IMPL_H_

View File

@ -16,6 +16,7 @@
#include "modules/include/module_common_types.h"
#include "modules/video_coding/codecs/vp8/default_temporal_layers.h"
#include "modules/video_coding/codecs/vp8/include/vp8_common_types.h"
#include "modules/video_coding/codecs/vp8/screenshare_layers.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "rtc_base/checks.h"

View File

@ -70,7 +70,6 @@ struct CodecSpecificInfoGeneric {
struct CodecSpecificInfoH264 {
H264PacketizationMode packetization_mode;
uint8_t simulcast_idx;
};
union CodecSpecificInfoUnion {
@ -84,9 +83,7 @@ union CodecSpecificInfoUnion {
// must be fitted with a copy-constructor. This is because it is copied
// in the copy-constructor of VCMEncodedFrame.
struct CodecSpecificInfo {
CodecSpecificInfo() : codecType(kVideoCodecUnknown), codec_name(nullptr) {
memset(&codecSpecific, 0, sizeof(codecSpecific));
}
CodecSpecificInfo() : codecType(kVideoCodecUnknown), codec_name(nullptr) {}
VideoCodecType codecType;
const char* codec_name;
CodecSpecificInfoUnion codecSpecific;
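
Dropping the memset restores the original behavior: a default-constructed CodecSpecificInfo leaves the union indeterminate. A minimal sketch of what that means for callers (field choice illustrative):

// Nothing zeroes codecSpecific for you; set every field before it is read.
webrtc::CodecSpecificInfo info;          // codecType == kVideoCodecUnknown
info.codecType = webrtc::kVideoCodecVP8;
info.codecSpecific.VP8.temporalIdx = 0;
info.codecSpecific.VP8.layerSync = false;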

View File

@ -18,10 +18,10 @@
#include <limits>
#include "modules/include/module_common_types.h"
#include "modules/video_coding/codecs/vp8/include/vp8_common_types.h"
#include "modules/video_coding/fec_rate_table.h"
#include "modules/video_coding/include/video_coding_defines.h"
#include "modules/video_coding/nack_fec_tables.h"
#include "modules/video_coding/utility/simulcast_rate_allocator.h"
namespace webrtc {
// Max value of loss rates in off-line model
@ -400,7 +400,8 @@ bool VCMFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters) {
int VCMFecMethod::BitsPerFrame(const VCMProtectionParameters* parameters) {
// When temporal layers are available FEC will only be applied on the base
// layer.
const float bitRateRatio = kLayerRateAllocation[parameters->numLayers - 1][0];
const float bitRateRatio =
kVp8LayerRateAlloction[parameters->numLayers - 1][0];
float frameRateRatio = powf(1 / 2.0, parameters->numLayers - 1);
float bitRate = parameters->bitRate * bitRateRatio;
float frameRate = parameters->frameRate * frameRateRatio;
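
Since FEC here covers only the temporal base layer, both the bitrate and the framerate are scaled down before the per-frame budget is computed. A worked sketch with assumed stream values (the three-layer row of the VP8 allocation table holds cumulative fractions {0.4, 0.6, 1.0}):

// Assumed input: 3 temporal layers, 1000 kbps total, 30 fps.
const float bitRateRatio = 0.4f;     // kVp8LayerRateAlloction[2][0]
const float frameRateRatio = 0.25f;  // powf(1 / 2.0, 3 - 1)
const float bitRate = 1000.0f * bitRateRatio;    // 400 kbps covered by FEC
const float frameRate = 30.0f * frameRateRatio;  // 7.5 fps on the base layer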

View File

@ -8,15 +8,13 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/video_coding/utility/simulcast_rate_allocator.h"
#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h"
#include <limits>
#include <memory>
#include <utility>
#include <vector>
#include "modules/video_coding/codecs/vp8/temporal_layers.h"
#include "test/gmock.h"
#include "test/gtest.h"

View File

@ -1,65 +0,0 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/video_coding/utility/simulcast_utility.h"
namespace webrtc {
uint32_t SimulcastUtility::SumStreamMaxBitrate(int streams,
const VideoCodec& codec) {
uint32_t bitrate_sum = 0;
for (int i = 0; i < streams; ++i) {
bitrate_sum += codec.simulcastStream[i].maxBitrate;
}
return bitrate_sum;
}
int SimulcastUtility::NumberOfSimulcastStreams(const VideoCodec& codec) {
int streams =
codec.numberOfSimulcastStreams < 1 ? 1 : codec.numberOfSimulcastStreams;
uint32_t simulcast_max_bitrate = SumStreamMaxBitrate(streams, codec);
if (simulcast_max_bitrate == 0) {
streams = 1;
}
return streams;
}
bool SimulcastUtility::ValidSimulcastResolutions(const VideoCodec& codec,
int num_streams) {
if (codec.width != codec.simulcastStream[num_streams - 1].width ||
codec.height != codec.simulcastStream[num_streams - 1].height) {
return false;
}
for (int i = 0; i < num_streams; ++i) {
if (codec.width * codec.simulcastStream[i].height !=
codec.height * codec.simulcastStream[i].width) {
return false;
}
}
for (int i = 1; i < num_streams; ++i) {
if (codec.simulcastStream[i].width !=
codec.simulcastStream[i - 1].width * 2) {
return false;
}
}
return true;
}
bool SimulcastUtility::ValidSimulcastTemporalLayers(const VideoCodec& codec,
int num_streams) {
for (int i = 0; i < num_streams - 1; ++i) {
if (codec.simulcastStream[i].numberOfTemporalLayers !=
codec.simulcastStream[i + 1].numberOfTemporalLayers)
return false;
}
return true;
}
} // namespace webrtc
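
The deleted helper encoded the standard simulcast constraints: a nonzero aggregate max bitrate, the top stream at the codec's full resolution, a constant aspect ratio across streams, widths doubling between consecutive streams, and a uniform temporal-layer count. A usage sketch of the pre-revert API, with assumed values:

#include "modules/video_coding/utility/simulcast_utility.h"

// A 1280x720 codec with streams at 320x180, 640x360 and 1280x720
// satisfies all three checks.
webrtc::VideoCodec codec = {};
codec.width = 1280;
codec.height = 720;
codec.numberOfSimulcastStreams = 3;
const int kWidths[] = {320, 640, 1280};
const int kHeights[] = {180, 360, 720};
for (int i = 0; i < 3; ++i) {
  codec.simulcastStream[i].width = kWidths[i];
  codec.simulcastStream[i].height = kHeights[i];
  codec.simulcastStream[i].maxBitrate = 300 * (i + 1);  // kbps, nonzero
  codec.simulcastStream[i].numberOfTemporalLayers = 3;
}
const int streams = webrtc::SimulcastUtility::NumberOfSimulcastStreams(codec);
const bool valid =
    webrtc::SimulcastUtility::ValidSimulcastResolutions(codec, streams) &&
    webrtc::SimulcastUtility::ValidSimulcastTemporalLayers(codec, streams);
// streams == 3, valid == true for this configuration.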

View File

@ -1,30 +0,0 @@
/*
* Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef MODULES_VIDEO_CODING_UTILITY_SIMULCAST_UTILITY_H_
#define MODULES_VIDEO_CODING_UTILITY_SIMULCAST_UTILITY_H_
#include "api/video_codecs/video_encoder.h"
namespace webrtc {
class SimulcastUtility {
public:
static uint32_t SumStreamMaxBitrate(int streams, const VideoCodec& codec);
static int NumberOfSimulcastStreams(const VideoCodec& codec);
static bool ValidSimulcastResolutions(const VideoCodec& codec,
int num_streams);
static bool ValidSimulcastTemporalLayers(const VideoCodec& codec,
int num_streams);
};
} // namespace webrtc
#endif // MODULES_VIDEO_CODING_UTILITY_SIMULCAST_UTILITY_H_

View File

@ -13,13 +13,14 @@
#include "api/video_codecs/video_encoder.h"
#include "common_types.h" // NOLINT(build/include)
#include "common_video/include/video_bitrate_allocator.h"
#include "modules/video_coding/codecs/vp8/screenshare_layers.h"
#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h"
#include "modules/video_coding/codecs/vp8/temporal_layers.h"
#include "modules/video_coding/codecs/vp9/svc_config.h"
#include "modules/video_coding/codecs/vp9/svc_rate_allocator.h"
#include "modules/video_coding/include/video_coding_defines.h"
#include "modules/video_coding/utility/default_video_bitrate_allocator.h"
#include "modules/video_coding/utility/simulcast_rate_allocator.h"
#include "rtc_base/logging.h"
#include "rtc_base/system/fallthrough.h"
#include "system_wrappers/include/clock.h"
namespace webrtc {
@ -52,8 +53,7 @@ VideoCodecInitializer::CreateBitrateAllocator(const VideoCodec& codec) {
switch (codec.codecType) {
case kVideoCodecVP8:
RTC_FALLTHROUGH();
case kVideoCodecH264:
// Set up default VP8 temporal layer factory, if not provided.
rate_allocator.reset(new SimulcastRateAllocator(codec));
break;
case kVideoCodecVP9:
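
After the revert, only VP8 reaches SimulcastRateAllocator; H264 falls back to the default allocator along with the other codec types. A caller-side sketch (codec setup assumed):

// The factory hands back a SimulcastRateAllocator for VP8 and a default
// allocator otherwise; callers interact with the base interface either way.
std::unique_ptr<webrtc::VideoBitrateAllocator> allocator =
    webrtc::VideoCodecInitializer::CreateBitrateAllocator(codec);
webrtc::BitrateAllocation allocation =
    allocator->GetAllocation(/*total_bitrate_bps=*/1000000, /*framerate=*/30);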

View File

@ -16,6 +16,7 @@
#include "common_types.h" // NOLINT(build/include)
#include "common_video/include/video_bitrate_allocator.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_coding/codecs/vp8/temporal_layers.h"
#include "modules/video_coding/encoded_frame.h"
#include "modules/video_coding/include/video_codec_initializer.h"
#include "modules/video_coding/include/video_codec_interface.h"
@ -104,16 +105,17 @@ class VideoCodingModuleImpl : public VideoCodingModule {
int32_t RegisterSendCodec(const VideoCodec* sendCodec,
uint32_t numberOfCores,
uint32_t maxPayloadSize) override {
if (sendCodec != nullptr && ((sendCodec->codecType == kVideoCodecVP8) ||
(sendCodec->codecType == kVideoCodecH264))) {
// Set up a rate allocator and temporal layers factory for this codec
if (sendCodec != nullptr && sendCodec->codecType == kVideoCodecVP8) {
// Set up a rate allocator and temporal layers factory for this vp8
// instance. The codec impl will have a raw pointer to the TL factory,
// and will call it when initializing. Since this can happen
// asynchronously keep the instance alive until destruction or until a
// new send codec is registered.
VideoCodec codec = *sendCodec;
rate_allocator_ = VideoCodecInitializer::CreateBitrateAllocator(codec);
return sender_.RegisterSendCodec(&codec, numberOfCores, maxPayloadSize);
VideoCodec vp8_codec = *sendCodec;
rate_allocator_ =
VideoCodecInitializer::CreateBitrateAllocator(vp8_codec);
return sender_.RegisterSendCodec(&vp8_codec, numberOfCores,
maxPayloadSize);
}
return sender_.RegisterSendCodec(sendCodec, numberOfCores, maxPayloadSize);
}
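
The copy exists because the rate allocator owns the temporal-layers factory that the encoder implementation will hold a raw pointer to; storing it in rate_allocator_ keeps it alive until the next registration. A caller-side sketch with assumed codec fields:

webrtc::VideoCodec vp8 = {};
vp8.codecType = webrtc::kVideoCodecVP8;
vp8.width = 640;
vp8.height = 360;
vp8.maxFramerate = 30;
vp8.startBitrate = 300;  // kbps
vcm->RegisterSendCodec(&vp8, /*numberOfCores=*/1, /*maxPayloadSize=*/1200);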

View File

@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include <algorithm> // std::max
#include "common_types.h" // NOLINT(build/include)
@ -137,8 +136,7 @@ void VideoSender::RegisterExternalEncoder(VideoEncoder* externalEncoder,
}
return;
}
_codecDataBase.RegisterExternalEncoder(externalEncoder,
internalSource);
_codecDataBase.RegisterExternalEncoder(externalEncoder, internalSource);
}
EncoderParameters VideoSender::UpdateEncoderParameters(
@ -291,8 +289,7 @@ int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame,
RTC_LOG(LS_ERROR) << "Frame conversion failed, dropping frame.";
return VCM_PARAMETER_ERROR;
}
converted_frame = VideoFrame(converted_buffer,
converted_frame.timestamp(),
converted_frame = VideoFrame(converted_buffer, converted_frame.timestamp(),
converted_frame.render_time_ms(),
converted_frame.rotation());
}

View File

@ -13,12 +13,13 @@
#include "api/video/i420_buffer.h"
#include "modules/video_coding/codecs/vp8/include/vp8.h"
#include "modules/video_coding/codecs/vp8/include/vp8_common_types.h"
#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h"
#include "modules/video_coding/codecs/vp8/temporal_layers.h"
#include "modules/video_coding/include/mock/mock_vcm_callbacks.h"
#include "modules/video_coding/include/mock/mock_video_codec_interface.h"
#include "modules/video_coding/include/video_coding.h"
#include "modules/video_coding/utility/default_video_bitrate_allocator.h"
#include "modules/video_coding/utility/simulcast_rate_allocator.h"
#include "modules/video_coding/video_coding_impl.h"
#include "system_wrappers/include/clock.h"
#include "test/frame_generator.h"
@ -471,9 +472,9 @@ class TestVideoSenderWithVp8 : public TestVideoSender {
#define MAYBE_FixedTemporalLayersStrategy FixedTemporalLayersStrategy
#endif
TEST_F(TestVideoSenderWithVp8, MAYBE_FixedTemporalLayersStrategy) {
const int low_b = codec_bitrate_kbps_ * kLayerRateAllocation[2][0];
const int mid_b = codec_bitrate_kbps_ * kLayerRateAllocation[2][1];
const int high_b = codec_bitrate_kbps_ * kLayerRateAllocation[2][2];
const int low_b = codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][0];
const int mid_b = codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][1];
const int high_b = codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][2];
{
Vp8StreamInfo expected = {{7.5, 15.0, 30.0}, {low_b, mid_b, high_b}};
EXPECT_THAT(SimulateWithFramerate(30.0), MatchesVp8StreamInfo(expected));
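// Worked numbers (assuming codec_bitrate_kbps_ == 600; the three-layer row
// of the VP8 allocation table holds cumulative fractions {0.4, 0.6, 1.0}):
//   low_b  = 600 * 0.4 = 240 kbps  -- base layer alone, at 7.5 fps
//   mid_b  = 600 * 0.6 = 360 kbps  -- base + middle layers, at 15 fps
//   high_b = 600 * 1.0 = 600 kbps  -- all three layers, at 30 fps
// matching the {7.5, 15.0, 30.0} framerate pattern above, where each added
// temporal layer doubles the framerate of the one below it.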