Use enum class for VideoCodecMode and VideoCodecComplexity.

Bug: webrtc:7660
Change-Id: I6a8ef01f8abcc25c8efaf0af387408343a7c8ba3
Reviewed-on: https://webrtc-review.googlesource.com/81240
Commit-Queue: Niels Moller <nisse@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23595}
Author:    Niels Möller
Date:      2018-06-13 11:52:16 +02:00
Committed: Commit Bot
Parent:    037b37a192
Commit:    e3cf3d0496

19 changed files with 49 additions and 42 deletions
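
Every hunk below follows the same mechanical pattern: VideoCodecMode and VideoCodecComplexity become C++11 scoped enums (enum class), so their enumerators must be qualified with the type name (VideoCodecMode::kScreensharing instead of a bare kScreensharing) and no longer convert implicitly to int, which is why the logging code gains static_cast<int>(...). The snippet below is a minimal illustrative sketch of that pattern, not the actual WebRTC declarations; the enumerator names are taken from this diff, but the real definitions may differ and the Describe() helper is hypothetical.

    #include <iostream>

    // Sketch of scoped enums as used by this commit; the real definitions
    // live in WebRTC's video codec headers and may carry more members.
    enum class VideoCodecMode { kRealtimeVideo, kScreensharing };
    enum class VideoCodecComplexity {
      kComplexityNormal,
      kComplexityHigh,
      kComplexityHigher,
      kComplexityMax,
    };

    // Hypothetical helper, for illustration only.
    void Describe(VideoCodecMode mode, VideoCodecComplexity complexity) {
      // Enumerators must be qualified with the enum's scope...
      const bool screenshare = (mode == VideoCodecMode::kScreensharing);
      // ...and there is no implicit conversion to int, so streaming the
      // value needs an explicit cast, exactly as the diff adds below.
      std::cout << "screenshare: " << screenshare << "\ncomplexity: "
                << static_cast<int>(complexity) << '\n';
    }

    int main() {
      Describe(VideoCodecMode::kScreensharing,
               VideoCodecComplexity::kComplexityHigh);
    }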

@@ -164,7 +164,7 @@ H264EncoderImpl::H264EncoderImpl(const cricket::VideoCodec& codec)
max_frame_rate_(0.0f),
target_bps_(0),
max_bps_(0),
-mode_(kRealtimeVideo),
+mode_(VideoCodecMode::kRealtimeVideo),
frame_dropping_on_(false),
key_frame_interval_(0),
packetization_mode_(H264PacketizationMode::SingleNalUnit),
@@ -373,7 +373,7 @@ int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame,
encoded_image_.ntp_time_ms_ = input_frame.ntp_time_ms();
encoded_image_.capture_time_ms_ = input_frame.render_time_ms();
encoded_image_.rotation_ = input_frame.rotation();
-encoded_image_.content_type_ = (mode_ == kScreensharing)
+encoded_image_.content_type_ = (mode_ == VideoCodecMode::kScreensharing)
? VideoContentType::SCREENSHARE
: VideoContentType::UNSPECIFIED;
encoded_image_.timing_.flags = VideoSendTiming::kInvalid;
@@ -419,9 +419,9 @@ SEncParamExt H264EncoderImpl::CreateEncoderParams() const {
RTC_DCHECK(openh264_encoder_);
SEncParamExt encoder_params;
openh264_encoder_->GetDefaultParams(&encoder_params);
-if (mode_ == kRealtimeVideo) {
+if (mode_ == VideoCodecMode::kRealtimeVideo) {
encoder_params.iUsageType = CAMERA_VIDEO_REAL_TIME;
-} else if (mode_ == kScreensharing) {
+} else if (mode_ == VideoCodecMode::kScreensharing) {
encoder_params.iUsageType = SCREEN_CONTENT_REAL_TIME;
} else {
RTC_NOTREACHED();

@@ -87,7 +87,7 @@ std::string CodecSpecificToString(const VideoCodec& codec) {
rtc::SimpleStringBuilder ss(buf);
switch (codec.codecType) {
case kVideoCodecVP8:
ss << "complexity: " << codec.VP8().complexity;
ss << "complexity: " << static_cast<int>(codec.VP8().complexity);
ss << "\nnum_temporal_layers: "
<< static_cast<int>(codec.VP8().numberOfTemporalLayers);
ss << "\ndenoising: " << codec.VP8().denoisingOn;
@@ -96,7 +96,7 @@ std::string CodecSpecificToString(const VideoCodec& codec) {
ss << "\nkey_frame_interval: " << codec.VP8().keyFrameInterval;
break;
case kVideoCodecVP9:
ss << "complexity: " << codec.VP9().complexity;
ss << "complexity: " << static_cast<int>(codec.VP9().complexity);
ss << "\nnum_temporal_layers: "
<< static_cast<int>(codec.VP9().numberOfTemporalLayers);
ss << "\nnum_spatial_layers: "

@@ -473,13 +473,13 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst,
// Allow the user to set the complexity for the base stream.
switch (inst->VP8().complexity) {
-case kComplexityHigh:
+case VideoCodecComplexity::kComplexityHigh:
cpu_speed_[0] = -5;
break;
-case kComplexityHigher:
+case VideoCodecComplexity::kComplexityHigher:
cpu_speed_[0] = -4;
break;
-case kComplexityMax:
+case VideoCodecComplexity::kComplexityMax:
cpu_speed_[0] = -3;
break;
default:
@@ -661,7 +661,7 @@ int LibvpxVp8Encoder::InitAndSetControlSettings() {
for (size_t i = 0; i < encoders_.size(); ++i) {
// Allow more screen content to be detected as static.
vpx_codec_control(&(encoders_[i]), VP8E_SET_STATIC_THRESHOLD,
-codec_.mode == kScreensharing ? 300 : 1);
+codec_.mode == VideoCodecMode::kScreensharing ? 300 : 1);
vpx_codec_control(&(encoders_[i]), VP8E_SET_CPUUSED, cpu_speed_[i]);
vpx_codec_control(&(encoders_[i]), VP8E_SET_TOKEN_PARTITIONS,
static_cast<vp8e_token_partitions>(kTokenPartitions));
@@ -670,7 +670,7 @@ int LibvpxVp8Encoder::InitAndSetControlSettings() {
// VP8E_SET_SCREEN_CONTENT_MODE 2 = screen content with more aggressive
// rate control (drop frames on large target bitrate overshoot)
vpx_codec_control(&(encoders_[i]), VP8E_SET_SCREEN_CONTENT_MODE,
-codec_.mode == kScreensharing ? 2 : 0);
+codec_.mode == VideoCodecMode::kScreensharing ? 2 : 0);
// Apply boost on golden frames (has only effect when resilience is off).
if (use_gf_boost_ && configurations_[0].g_error_resilient == 0) {
int gf_boost_percent;
@@ -778,7 +778,8 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame,
if (send_key_frame) {
// Adapt the size of the key frame when in screenshare with 1 temporal
// layer.
-if (encoders_.size() == 1 && codec_.mode == kScreensharing &&
+if (encoders_.size() == 1 &&
+codec_.mode == VideoCodecMode::kScreensharing &&
codec_.VP8()->numberOfTemporalLayers <= 1) {
const uint32_t forceKeyFrameIntraTh = 100;
vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT,
@@ -925,8 +926,9 @@ int LibvpxVp8Encoder::GetEncodedPartitions(
input_image.render_time_ms();
encoded_images_[encoder_idx].rotation_ = input_image.rotation();
encoded_images_[encoder_idx].content_type_ =
-(codec_.mode == kScreensharing) ? VideoContentType::SCREENSHARE
-: VideoContentType::UNSPECIFIED;
+(codec_.mode == VideoCodecMode::kScreensharing)
+? VideoContentType::SCREENSHARE
+: VideoContentType::UNSPECIFIED;
encoded_images_[encoder_idx].timing_.flags = VideoSendTiming::kInvalid;
int qp = -1;
@@ -947,7 +949,7 @@ int LibvpxVp8Encoder::GetEncodedPartitions(
encoded_images_[encoder_idx].qp_ = qp_128;
encoded_complete_callback_->OnEncodedImage(encoded_images_[encoder_idx],
&codec_specific, &frag_info);
-} else if (codec_.mode == kScreensharing) {
+} else if (codec_.mode == VideoCodecMode::kScreensharing) {
result = WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT;
}
}

@@ -134,7 +134,8 @@ void SimulcastRateAllocator::DistributeAllocationToTemporalLayers(
// Legacy temporal-layered only screenshare, or simulcast screenshare
// with legacy mode for simulcast stream 0.
const bool conference_screenshare_mode =
-codec_.mode == kScreensharing && codec_.targetBitrate > 0 &&
+codec_.mode == VideoCodecMode::kScreensharing &&
+codec_.targetBitrate > 0 &&
((num_spatial_streams == 1 && num_temporal_streams == 2) || // Legacy.
(num_spatial_streams > 1 && simulcast_id == 0)); // Simulcast.
if (conference_screenshare_mode) {

@@ -40,7 +40,7 @@ uint8_t NumTemporalLayers(const VideoCodec& codec, int spatial_id) {
}
bool IsConferenceModeScreenshare(const VideoCodec& codec) {
-if (codec.mode != kScreensharing) {
+if (codec.mode != VideoCodecMode::kScreensharing) {
return false;
}
return NumTemporalLayers(codec, 0) == 2;

@@ -50,7 +50,7 @@ class TestVp8Impl : public VideoCodecUnitTest {
codec_settings->VP8()->denoisingOn = true;
codec_settings->VP8()->frameDroppingOn = false;
codec_settings->VP8()->automaticResizeOn = false;
-codec_settings->VP8()->complexity = kComplexityNormal;
+codec_settings->VP8()->complexity = VideoCodecComplexity::kComplexityNormal;
}
void EncodeAndWaitForFrame(const VideoFrame& input_frame,

@@ -38,7 +38,7 @@ VideoBitrateAllocation SvcRateAllocator::GetAllocation(
VideoBitrateAllocation bitrate_allocation;
bitrate_allocation.SetBitrate(0, 0, total_bitrate_bps);
return bitrate_allocation;
-} else if (codec_.mode == kRealtimeVideo) {
+} else if (codec_.mode == VideoCodecMode::kRealtimeVideo) {
return GetAllocationNormalVideo(total_bitrate_bps);
} else {
return GetAllocationScreenSharing(total_bitrate_bps);

@@ -25,7 +25,8 @@ static VideoCodec Configure(size_t width,
codec.width = width;
codec.height = height;
codec.codecType = kVideoCodecVP9;
-codec.mode = is_screen_sharing ? kScreensharing : kRealtimeVideo;
+codec.mode = is_screen_sharing ? VideoCodecMode::kScreensharing
+: VideoCodecMode::kRealtimeVideo;
std::vector<SpatialLayer> spatial_layers =
GetSvcConfig(width, height, num_spatial_layers, num_temporal_layers,

@@ -434,7 +434,7 @@ class TestVp9ImplFrameDropping : public TestVp9Impl {
// to reduce execution time.
codec_settings->width = 64;
codec_settings->height = 64;
-codec_settings->mode = kScreensharing;
+codec_settings->mode = VideoCodecMode::kScreensharing;
}
};

@@ -287,7 +287,7 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
// Init framerate controller.
output_framerate_.Reset();
-if (codec_.mode == kScreensharing) {
+if (codec_.mode == VideoCodecMode::kScreensharing) {
target_framerate_fps_ = kMaxScreenSharingFramerateFps;
} else {
target_framerate_fps_.reset();
@@ -538,7 +538,7 @@ int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) {
inst->VP9().denoisingOn ? 1 : 0);
#endif
-if (codec_.mode == kScreensharing) {
+if (codec_.mode == VideoCodecMode::kScreensharing) {
// Adjust internal parameters to screen content.
vpx_codec_control(encoder_, VP9E_SET_TUNE_CONTENT, 1);
}
@@ -580,7 +580,7 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image,
}
}
-if (kScreensharing == codec_.mode && !force_key_frame_) {
+if (VideoCodecMode::kScreensharing == codec_.mode && !force_key_frame_) {
if (DropFrame(input_image.timestamp())) {
return WEBRTC_VIDEO_CODEC_OK;
}
@@ -881,7 +881,7 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) {
encoded_image_._timeStamp = input_image_->timestamp();
encoded_image_.capture_time_ms_ = input_image_->render_time_ms();
encoded_image_.rotation_ = input_image_->rotation();
-encoded_image_.content_type_ = (codec_.mode == kScreensharing)
+encoded_image_.content_type_ = (codec_.mode == VideoCodecMode::kScreensharing)
? VideoContentType::SCREENSHARE
: VideoContentType::UNSPECIFIED;
encoded_image_._encodedHeight =

@@ -82,10 +82,10 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec(
switch (config.content_type) {
case VideoEncoderConfig::ContentType::kRealtimeVideo:
-video_codec.mode = kRealtimeVideo;
+video_codec.mode = VideoCodecMode::kRealtimeVideo;
break;
case VideoEncoderConfig::ContentType::kScreen:
-video_codec.mode = kScreensharing;
+video_codec.mode = VideoCodecMode::kScreensharing;
if (!streams.empty() && streams[0].num_temporal_layers == 2u) {
video_codec.targetBitrate = streams[0].target_bitrate_bps / 1000;
}
@@ -203,10 +203,11 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec(
// Layering is set explicitly.
spatial_layers = config.spatial_layers;
} else {
-spatial_layers = GetSvcConfig(video_codec.width, video_codec.height,
-video_codec.VP9()->numberOfSpatialLayers,
-video_codec.VP9()->numberOfTemporalLayers,
-video_codec.mode == kScreensharing);
+spatial_layers =
+GetSvcConfig(video_codec.width, video_codec.height,
+video_codec.VP9()->numberOfSpatialLayers,
+video_codec.VP9()->numberOfTemporalLayers,
+video_codec.mode == VideoCodecMode::kScreensharing);
const bool no_spatial_layering = (spatial_layers.size() == 1);
if (no_spatial_layering) {

@@ -92,7 +92,7 @@ int32_t VideoSender::RegisterSendCodec(const VideoCodec* sendCodec,
// If we have screensharing and we have layers, we disable frame dropper.
bool disable_frame_dropper =
-numLayers > 1 && sendCodec->mode == kScreensharing;
+numLayers > 1 && sendCodec->mode == VideoCodecMode::kScreensharing;
if (disable_frame_dropper) {
_mediaOpt.EnableFrameDropper(false);
} else if (frame_dropper_enabled_) {