Original description:
Add proper lifetime management of encoder-specific settings.

Permits passing VideoEncoderConfig between threads without worrying about
the lifetime of an underlying void pointer. Also adds type safety to the
unpacking of codec-specific settings.

These settings are not yet propagated to the VideoEncoder interfaces, but
the aim is to get rid of webrtc::VideoCodec for VideoEncoder.
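
For illustration, a minimal sketch of the new usage pattern (not part of this
change; the helper function names and header paths are assumptions, while the
WebRTC types and calls are the ones introduced in the diff below):

  #include "webrtc/base/refcount.h"    // rtc::RefCountedObject
  #include "webrtc/config.h"           // webrtc::VideoEncoderConfig (assumed path)
  #include "webrtc/video_encoder.h"    // GetDefaultVp8Settings (assumed path)

  // Build codec-specific settings and hand them to the config through a
  // ref-counted, type-safe wrapper instead of a raw void pointer.
  void ConfigureVp8Specifics(webrtc::VideoEncoderConfig* config) {
    webrtc::VideoCodecVP8 vp8_settings =
        webrtc::VideoEncoder::GetDefaultVp8Settings();
    vp8_settings.denoisingOn = false;
    // The settings stay alive as long as anything holds a reference, so the
    // config can be handed between threads safely.
    config->encoder_specific_settings = new rtc::RefCountedObject<
        webrtc::VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
  }

  // While webrtc::VideoCodec is still in use, consumers unpack the settings
  // through the type-safe fill methods instead of a reinterpret_cast:
  void ApplySpecifics(const webrtc::VideoEncoderConfig& config,
                      webrtc::VideoCodec* video_codec) {
    if (config.encoder_specific_settings) {
      config.encoder_specific_settings->FillEncoderSpecificSettings(
          video_codec);
    }
  }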

BUG=webrtc:3424
R=perkj@webrtc.org, pbos@webrtc.org
TBR=mflodman@webrtc.org

Review-Url: https://codereview.webrtc.org/2347843002
Cr-Commit-Position: refs/heads/master@{#14396}
kthelgason
2016-09-27 03:52:02 -07:00
committed by Commit bot
parent 5f8ebaeffd
commit 29a44e351e
10 changed files with 196 additions and 75 deletions

View File

@@ -12,6 +12,8 @@
#include <sstream>
#include <string>
#include "webrtc/base/checks.h"
namespace webrtc {
std::string NackConfig::ToString() const {
std::stringstream ss;
@@ -113,7 +115,7 @@ std::string VideoStream::ToString() const {
VideoEncoderConfig::VideoEncoderConfig()
: content_type(ContentType::kRealtimeVideo),
encoder_specific_settings(NULL),
encoder_specific_settings(nullptr),
min_transmit_bitrate_bps(0),
expect_encode_from_texture(false) {}
@@ -146,4 +148,59 @@ std::string VideoEncoderConfig::ToString() const {
return ss.str();
}
void VideoEncoderConfig::EncoderSpecificSettings::FillEncoderSpecificSettings(
VideoCodec* codec) const {
if (codec->codecType == kVideoCodecH264) {
FillVideoCodecH264(&codec->codecSpecific.H264);
} else if (codec->codecType == kVideoCodecVP8) {
FillVideoCodecVp8(&codec->codecSpecific.VP8);
} else if (codec->codecType == kVideoCodecVP9) {
FillVideoCodecVp9(&codec->codecSpecific.VP9);
} else {
RTC_NOTREACHED() << "Encoder specifics set/used for unknown codec type.";
}
}
void VideoEncoderConfig::EncoderSpecificSettings::FillVideoCodecH264(
VideoCodecH264* h264_settings) const {
RTC_NOTREACHED();
}
void VideoEncoderConfig::EncoderSpecificSettings::FillVideoCodecVp8(
VideoCodecVP8* vp8_settings) const {
RTC_NOTREACHED();
}
void VideoEncoderConfig::EncoderSpecificSettings::FillVideoCodecVp9(
VideoCodecVP9* vp9_settings) const {
RTC_NOTREACHED();
}
VideoEncoderConfig::H264EncoderSpecificSettings::H264EncoderSpecificSettings(
const VideoCodecH264& specifics)
: specifics_(specifics) {}
void VideoEncoderConfig::H264EncoderSpecificSettings::FillVideoCodecH264(
VideoCodecH264* h264_settings) const {
*h264_settings = specifics_;
}
VideoEncoderConfig::Vp8EncoderSpecificSettings::Vp8EncoderSpecificSettings(
const VideoCodecVP8& specifics)
: specifics_(specifics) {}
void VideoEncoderConfig::Vp8EncoderSpecificSettings::FillVideoCodecVp8(
VideoCodecVP8* vp8_settings) const {
*vp8_settings = specifics_;
}
VideoEncoderConfig::Vp9EncoderSpecificSettings::Vp9EncoderSpecificSettings(
const VideoCodecVP9& specifics)
: specifics_(specifics) {}
void VideoEncoderConfig::Vp9EncoderSpecificSettings::FillVideoCodecVp9(
VideoCodecVP9* vp9_settings) const {
*vp9_settings = specifics_;
}
} // namespace webrtc

View File

@@ -17,6 +17,8 @@
#include <vector>
#include "webrtc/base/optional.h"
#include "webrtc/base/refcount.h"
#include "webrtc/base/scoped_ref_ptr.h"
#include "webrtc/common_types.h"
#include "webrtc/typedefs.h"
@@ -118,13 +120,60 @@ struct VideoStream {
// bitrate threshold of 100k and an estimate of 105k does not imply that we
// get 100k in one temporal layer and 5k in the other, just that the bitrate
// in the first temporal layer should not exceed 100k.
// TODO(pbos): Apart from a special case for two-layer screencast these
// TODO(kthelgason): Apart from a special case for two-layer screencast these
// thresholds are not propagated to the VideoEncoder. To be implemented.
std::vector<int> temporal_layer_thresholds_bps;
};
struct VideoEncoderConfig {
public:
// These are reference counted to permit copying VideoEncoderConfig and be
// kept alive until all encoder_specific_settings go out of scope.
// TODO(kthelgason): Consider removing the need for copying VideoEncoderConfig
// and use rtc::Optional for encoder_specific_settings instead.
class EncoderSpecificSettings : public rtc::RefCountInterface {
public:
// TODO(pbos): Remove FillEncoderSpecificSettings as soon as VideoCodec is
// not in use and encoder implementations ask for codec-specific structs
// directly.
void FillEncoderSpecificSettings(VideoCodec* codec_struct) const;
virtual void FillVideoCodecVp8(VideoCodecVP8* vp8_settings) const;
virtual void FillVideoCodecVp9(VideoCodecVP9* vp9_settings) const;
virtual void FillVideoCodecH264(VideoCodecH264* h264_settings) const;
private:
virtual ~EncoderSpecificSettings() {}
friend struct VideoEncoderConfig;
};
class H264EncoderSpecificSettings : public EncoderSpecificSettings {
public:
explicit H264EncoderSpecificSettings(const VideoCodecH264& specifics);
virtual void FillVideoCodecH264(
VideoCodecH264* h264_settings) const override;
private:
VideoCodecH264 specifics_;
};
class Vp8EncoderSpecificSettings : public EncoderSpecificSettings {
public:
explicit Vp8EncoderSpecificSettings(const VideoCodecVP8& specifics);
virtual void FillVideoCodecVp8(VideoCodecVP8* vp8_settings) const override;
private:
VideoCodecVP8 specifics_;
};
class Vp9EncoderSpecificSettings : public EncoderSpecificSettings {
public:
explicit Vp9EncoderSpecificSettings(const VideoCodecVP9& specifics);
virtual void FillVideoCodecVp9(VideoCodecVP9* vp9_settings) const override;
private:
VideoCodecVP9 specifics_;
};
enum class ContentType {
kRealtimeVideo,
kScreen,
@@ -144,7 +193,7 @@ struct VideoEncoderConfig {
std::vector<VideoStream> streams;
std::vector<SpatialLayer> spatial_layers;
ContentType content_type;
void* encoder_specific_settings;
rtc::scoped_refptr<const EncoderSpecificSettings> encoder_specific_settings;
// Padding will be used up to this bitrate regardless of the bitrate produced
// by the encoder. Padding above what's actually produced by the encoder helps

View File

@@ -186,15 +186,13 @@ void FakeVideoSendStream::ReconfigureVideoEncoder(
webrtc::VideoEncoderConfig config) {
if (config.encoder_specific_settings != NULL) {
if (config_.encoder_settings.payload_name == "VP8") {
vpx_settings_.vp8 = *reinterpret_cast<const webrtc::VideoCodecVP8*>(
config.encoder_specific_settings);
config.encoder_specific_settings->FillVideoCodecVp8(&vpx_settings_.vp8);
if (!config.streams.empty()) {
vpx_settings_.vp8.numberOfTemporalLayers = static_cast<unsigned char>(
config.streams.back().temporal_layer_thresholds_bps.size() + 1);
}
} else if (config_.encoder_settings.payload_name == "VP9") {
vpx_settings_.vp9 = *reinterpret_cast<const webrtc::VideoCodecVP9*>(
config.encoder_specific_settings);
config.encoder_specific_settings->FillVideoCodecVp9(&vpx_settings_.vp9);
if (!config.streams.empty()) {
vpx_settings_.vp9.numberOfTemporalLayers = static_cast<unsigned char>(
config.streams.back().temporal_layer_thresholds_bps.size() + 1);
@@ -204,8 +202,8 @@ void FakeVideoSendStream::ReconfigureVideoEncoder(
<< config_.encoder_settings.payload_name;
}
}
encoder_config_ = std::move(config);
codec_settings_set_ = config.encoder_specific_settings != NULL;
encoder_config_ = std::move(config);
++num_encoder_reconfigurations_;
}

View File

@@ -450,7 +450,8 @@ WebRtcVideoChannel2::WebRtcVideoSendStream::CreateVideoStreams(
return streams;
}
void* WebRtcVideoChannel2::WebRtcVideoSendStream::ConfigureVideoEncoderSettings(
rtc::scoped_refptr<webrtc::VideoEncoderConfig::EncoderSpecificSettings>
WebRtcVideoChannel2::WebRtcVideoSendStream::ConfigureVideoEncoderSettings(
const VideoCodec& codec) {
bool is_screencast = parameters_.options.is_screencast.value_or(false);
// No automatic resizing when using simulcast or screencast.
@@ -468,36 +469,39 @@ void* WebRtcVideoChannel2::WebRtcVideoSendStream::ConfigureVideoEncoderSettings(
}
if (CodecNamesEq(codec.name, kH264CodecName)) {
encoder_settings_.h264 = webrtc::VideoEncoder::GetDefaultH264Settings();
encoder_settings_.h264.frameDroppingOn = frame_dropping;
return &encoder_settings_.h264;
webrtc::VideoCodecH264 h264_settings =
webrtc::VideoEncoder::GetDefaultH264Settings();
h264_settings.frameDroppingOn = frame_dropping;
return new rtc::RefCountedObject<
webrtc::VideoEncoderConfig::H264EncoderSpecificSettings>(h264_settings);
}
if (CodecNamesEq(codec.name, kVp8CodecName)) {
encoder_settings_.vp8 = webrtc::VideoEncoder::GetDefaultVp8Settings();
encoder_settings_.vp8.automaticResizeOn = automatic_resize;
webrtc::VideoCodecVP8 vp8_settings =
webrtc::VideoEncoder::GetDefaultVp8Settings();
vp8_settings.automaticResizeOn = automatic_resize;
// VP8 denoising is enabled by default.
encoder_settings_.vp8.denoisingOn =
codec_default_denoising ? true : denoising;
encoder_settings_.vp8.frameDroppingOn = frame_dropping;
return &encoder_settings_.vp8;
vp8_settings.denoisingOn = codec_default_denoising ? true : denoising;
vp8_settings.frameDroppingOn = frame_dropping;
return new rtc::RefCountedObject<
webrtc::VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
}
if (CodecNamesEq(codec.name, kVp9CodecName)) {
encoder_settings_.vp9 = webrtc::VideoEncoder::GetDefaultVp9Settings();
webrtc::VideoCodecVP9 vp9_settings =
webrtc::VideoEncoder::GetDefaultVp9Settings();
if (is_screencast) {
// TODO(asapersson): Set to 2 for now since there is a DCHECK in
// VideoSendStream::ReconfigureVideoEncoder.
encoder_settings_.vp9.numberOfSpatialLayers = 2;
vp9_settings.numberOfSpatialLayers = 2;
} else {
encoder_settings_.vp9.numberOfSpatialLayers =
GetDefaultVp9SpatialLayers();
vp9_settings.numberOfSpatialLayers = GetDefaultVp9SpatialLayers();
}
// VP9 denoising is disabled by default.
encoder_settings_.vp9.denoisingOn =
codec_default_denoising ? false : denoising;
encoder_settings_.vp9.frameDroppingOn = frame_dropping;
return &encoder_settings_.vp9;
vp9_settings.denoisingOn = codec_default_denoising ? false : denoising;
vp9_settings.frameDroppingOn = frame_dropping;
return new rtc::RefCountedObject<
webrtc::VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
}
return NULL;
return nullptr;
}
DefaultUnsignalledSsrcHandler::DefaultUnsignalledSsrcHandler()

View File

@@ -339,12 +339,6 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
bool is_texture;
};
union VideoEncoderSettings {
webrtc::VideoCodecH264 h264;
webrtc::VideoCodecVP8 vp8;
webrtc::VideoCodecVP9 vp9;
};
static std::vector<webrtc::VideoStream> CreateVideoStreams(
const VideoCodec& codec,
const VideoOptions& options,
@@ -356,7 +350,8 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
int max_bitrate_bps,
size_t num_streams);
void* ConfigureVideoEncoderSettings(const VideoCodec& codec)
rtc::scoped_refptr<webrtc::VideoEncoderConfig::EncoderSpecificSettings>
ConfigureVideoEncoderSettings(const VideoCodec& codec)
EXCLUSIVE_LOCKS_REQUIRED(lock_);
AllocatedEncoder CreateVideoEncoder(const VideoCodec& codec)
@@ -413,7 +408,6 @@ class WebRtcVideoChannel2 : public VideoMediaChannel, public webrtc::Transport {
// one stream per MediaChannel.
webrtc::RtpParameters rtp_parameters_ GUARDED_BY(lock_);
bool pending_encoder_reconfiguration_ GUARDED_BY(lock_);
VideoEncoderSettings encoder_settings_ GUARDED_BY(lock_);
AllocatedEncoder allocated_encoder_ GUARDED_BY(lock_);
VideoFrameInfo last_frame_info_ GUARDED_BY(lock_);

View File

@@ -1317,6 +1317,7 @@ if (rtc_include_tests) {
testonly = true
deps = [
"../..:webrtc_common",
"../../base:rtc_base_approved",
"//testing/gtest",
]
sources = [

View File

@@ -1032,21 +1032,23 @@ void VideoQualityTest::SetupScreenshare() {
// Fill out codec settings.
video_encoder_config_.content_type = VideoEncoderConfig::ContentType::kScreen;
if (params_.common.codec == "VP8") {
codec_settings_.VP8 = VideoEncoder::GetDefaultVp8Settings();
codec_settings_.VP8.denoisingOn = false;
codec_settings_.VP8.frameDroppingOn = false;
codec_settings_.VP8.numberOfTemporalLayers =
VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
vp8_settings.denoisingOn = false;
vp8_settings.frameDroppingOn = false;
vp8_settings.numberOfTemporalLayers =
static_cast<unsigned char>(params_.common.num_temporal_layers);
video_encoder_config_.encoder_specific_settings = &codec_settings_.VP8;
video_encoder_config_.encoder_specific_settings = new rtc::RefCountedObject<
VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
} else if (params_.common.codec == "VP9") {
codec_settings_.VP9 = VideoEncoder::GetDefaultVp9Settings();
codec_settings_.VP9.denoisingOn = false;
codec_settings_.VP9.frameDroppingOn = false;
codec_settings_.VP9.numberOfTemporalLayers =
VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
vp9_settings.denoisingOn = false;
vp9_settings.frameDroppingOn = false;
vp9_settings.numberOfTemporalLayers =
static_cast<unsigned char>(params_.common.num_temporal_layers);
video_encoder_config_.encoder_specific_settings = &codec_settings_.VP9;
codec_settings_.VP9.numberOfSpatialLayers =
vp9_settings.numberOfSpatialLayers =
static_cast<unsigned char>(params_.ss.num_spatial_layers);
video_encoder_config_.encoder_specific_settings = new rtc::RefCountedObject<
VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
}
// Setup frame generator.

View File

@@ -112,7 +112,6 @@ class VideoQualityTest : public test::CallTest {
std::unique_ptr<test::TraceToStderr> trace_to_stderr_;
std::unique_ptr<test::FrameGenerator> frame_generator_;
std::unique_ptr<VideoEncoder> encoder_;
VideoCodecUnion codec_settings_;
Clock* const clock_;
Params params_;

View File

@@ -1726,7 +1726,7 @@ class VideoCodecConfigObserver : public test::SendTest,
kVideoCodecConfigObserverNumberOfTemporalLayers - 1);
}
encoder_config->encoder_specific_settings = &encoder_settings_;
encoder_config->encoder_specific_settings = GetEncoderSpecificSettings();
encoder_config_ = encoder_config->Copy();
}
@@ -1747,6 +1747,8 @@ class VideoCodecConfigObserver : public test::SendTest,
}
void VerifyCodecSpecifics(const VideoCodec& config) const;
rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
GetEncoderSpecificSettings() const;
void PerformTest() override {
EXPECT_TRUE(
@@ -1754,6 +1756,7 @@ class VideoCodecConfigObserver : public test::SendTest,
ASSERT_EQ(1u, num_initializations_) << "VideoEncoder not initialized.";
encoder_settings_.frameDroppingOn = true;
encoder_config_.encoder_specific_settings = GetEncoderSpecificSettings();
stream_->ReconfigureVideoEncoder(std::move(encoder_config_));
ASSERT_TRUE(
init_encode_event_.Wait(VideoSendStreamTest::kDefaultTimeoutMs));
@@ -1784,6 +1787,14 @@ void VideoCodecConfigObserver<VideoCodecH264>::VerifyCodecSpecifics(
EXPECT_EQ(0, memcmp(&config.codecSpecific.H264, &encoder_settings_,
sizeof(encoder_settings_)));
}
template <>
rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
VideoCodecConfigObserver<VideoCodecH264>::GetEncoderSpecificSettings() const {
return new rtc::RefCountedObject<
VideoEncoderConfig::H264EncoderSpecificSettings>(encoder_settings_);
}
template <>
void VideoCodecConfigObserver<VideoCodecVP8>::VerifyCodecSpecifics(
const VideoCodec& config) const {
@@ -1805,6 +1816,14 @@ void VideoCodecConfigObserver<VideoCodecVP8>::VerifyCodecSpecifics(
EXPECT_EQ(0, memcmp(&config.codecSpecific.VP8, &encoder_settings,
sizeof(encoder_settings_)));
}
template <>
rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
VideoCodecConfigObserver<VideoCodecVP8>::GetEncoderSpecificSettings() const {
return new rtc::RefCountedObject<
VideoEncoderConfig::Vp8EncoderSpecificSettings>(encoder_settings_);
}
template <>
void VideoCodecConfigObserver<VideoCodecVP9>::VerifyCodecSpecifics(
const VideoCodec& config) const {
@@ -1827,6 +1846,13 @@ void VideoCodecConfigObserver<VideoCodecVP9>::VerifyCodecSpecifics(
sizeof(encoder_settings_)));
}
template <>
rtc::scoped_refptr<VideoEncoderConfig::EncoderSpecificSettings>
VideoCodecConfigObserver<VideoCodecVP9>::GetEncoderSpecificSettings() const {
return new rtc::RefCountedObject<
VideoEncoderConfig::Vp9EncoderSpecificSettings>(encoder_settings_);
}
TEST_F(VideoSendStreamTest, EncoderSetupPropagatesVp8Config) {
VideoCodecConfigObserver<VideoCodecVP8> test(kVideoCodecVP8, "VP8");
RunBaseTest(&test);
@@ -2190,11 +2216,12 @@ class Vp9HeaderObserver : public test::SendTest {
VideoSendStream::Config* send_config,
std::vector<VideoReceiveStream::Config>* receive_configs,
VideoEncoderConfig* encoder_config) override {
encoder_config->encoder_specific_settings = &vp9_settings_;
send_config->encoder_settings.encoder = vp9_encoder_.get();
send_config->encoder_settings.payload_name = "VP9";
send_config->encoder_settings.payload_type = kVp9PayloadType;
ModifyVideoConfigsHook(send_config, receive_configs, encoder_config);
encoder_config->encoder_specific_settings = new rtc::RefCountedObject<
VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings_);
EXPECT_EQ(1u, encoder_config->streams.size());
encoder_config->streams[0].temporal_layer_thresholds_bps.resize(
vp9_settings_.numberOfTemporalLayers - 1);

View File

@@ -67,32 +67,27 @@ VideoCodec VideoEncoderConfigToVideoCodec(const VideoEncoderConfig& config,
break;
}
if (config.encoder_specific_settings)
config.encoder_specific_settings->FillEncoderSpecificSettings(&video_codec);
switch (video_codec.codecType) {
case kVideoCodecVP8: {
if (config.encoder_specific_settings) {
video_codec.codecSpecific.VP8 = *reinterpret_cast<const VideoCodecVP8*>(
config.encoder_specific_settings);
} else {
if (!config.encoder_specific_settings)
video_codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings();
}
video_codec.codecSpecific.VP8.numberOfTemporalLayers =
static_cast<unsigned char>(
streams.back().temporal_layer_thresholds_bps.size() + 1);
break;
}
case kVideoCodecVP9: {
if (config.encoder_specific_settings) {
video_codec.codecSpecific.VP9 = *reinterpret_cast<const VideoCodecVP9*>(
config.encoder_specific_settings);
if (video_codec.mode == kScreensharing) {
video_codec.codecSpecific.VP9.flexibleMode = true;
// For now VP9 screensharing use 1 temporal and 2 spatial layers.
RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfTemporalLayers,
1);
RTC_DCHECK_EQ(video_codec.codecSpecific.VP9.numberOfSpatialLayers, 2);
}
} else {
if (!config.encoder_specific_settings)
video_codec.codecSpecific.VP9 = VideoEncoder::GetDefaultVp9Settings();
if (video_codec.mode == kScreensharing &&
config.encoder_specific_settings) {
video_codec.codecSpecific.VP9.flexibleMode = true;
// For now VP9 screensharing use 1 temporal and 2 spatial layers.
RTC_DCHECK_EQ(1, video_codec.codecSpecific.VP9.numberOfTemporalLayers);
RTC_DCHECK_EQ(2, video_codec.codecSpecific.VP9.numberOfSpatialLayers);
}
video_codec.codecSpecific.VP9.numberOfTemporalLayers =
static_cast<unsigned char>(
@@ -100,13 +95,8 @@ VideoCodec VideoEncoderConfigToVideoCodec(const VideoEncoderConfig& config,
break;
}
case kVideoCodecH264: {
if (config.encoder_specific_settings) {
video_codec.codecSpecific.H264 =
*reinterpret_cast<const VideoCodecH264*>(
config.encoder_specific_settings);
} else {
if (!config.encoder_specific_settings)
video_codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings();
}
break;
}
default:
@@ -156,10 +146,10 @@ VideoCodec VideoEncoderConfigToVideoCodec(const VideoEncoderConfig& config,
sim_stream->numberOfTemporalLayers = static_cast<unsigned char>(
streams[i].temporal_layer_thresholds_bps.size() + 1);
video_codec.width =
std::max(video_codec.width, static_cast<uint16_t>(streams[i].width));
video_codec.height =
std::max(video_codec.height, static_cast<uint16_t>(streams[i].height));
video_codec.width = std::max(video_codec.width,
static_cast<uint16_t>(streams[i].width));
video_codec.height = std::max(
video_codec.height, static_cast<uint16_t>(streams[i].height));
video_codec.minBitrate =
std::min(static_cast<uint16_t>(video_codec.minBitrate),
static_cast<uint16_t>(streams[i].min_bitrate_bps / 1000));