Use enum class for VideoCodecMode and VideoCodecComplexity.

Bug: webrtc:7660
Change-Id: I6a8ef01f8abcc25c8efaf0af387408343a7c8ba3
Reviewed-on: https://webrtc-review.googlesource.com/81240
Commit-Queue: Niels Moller <nisse@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#23595}
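For context on what the change implies for callers (an illustrative aside, not part of the original commit): a scoped enum (`enum class`) keeps its enumerators inside the type's scope and does not convert implicitly to int, which is why the hunks below qualify every enumerator (e.g. VideoCodecMode::kScreensharing) and add static_cast<int> where a value is streamed. A minimal stand-alone sketch, using hypothetical names rather than the actual WebRTC types:

// Illustrative only; OldMode/NewMode are hypothetical, not WebRTC types.
#include <iostream>

enum OldMode { kRealtimeVideo, kScreensharing };        // unscoped: leaks names, converts to int
enum class NewMode { kRealtimeVideo, kScreensharing };  // scoped: requires qualification

int main() {
  OldMode old_mode = kScreensharing;           // enumerator visible without qualification
  NewMode new_mode = NewMode::kScreensharing;  // must be qualified with the type name
  std::cout << old_mode << "\n";               // implicit conversion to int still works
  std::cout << static_cast<int>(new_mode) << "\n";  // scoped enum needs an explicit cast
  // std::cout << new_mode;  // would not compile: no implicit conversion to int
  return 0;
}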
@@ -79,7 +79,7 @@ VideoCodec::VideoCodec()
       numberOfSimulcastStreams(0),
       simulcastStream(),
       spatialLayers(),
-      mode(kRealtimeVideo),
+      mode(VideoCodecMode::kRealtimeVideo),
       expect_encode_from_texture(false),
       timing_frame_thresholds({0, 0}),
       codec_specific_() {}
@@ -21,7 +21,7 @@ namespace webrtc {
 // away from slowly.
 
 // Video codec
-enum VideoCodecComplexity {
+enum class VideoCodecComplexity {
   kComplexityNormal = 0,
   kComplexityHigh = 1,
   kComplexityHigher = 2,
@@ -95,7 +95,7 @@ union VideoCodecUnion {
   VideoCodecH264 H264;
 };
 
-enum VideoCodecMode { kRealtimeVideo, kScreensharing };
+enum class VideoCodecMode { kRealtimeVideo, kScreensharing };
 
 // Common video codec properties
 class VideoCodec {
@@ -471,7 +471,8 @@ void SimulcastEncoderAdapter::PopulateStreamCodec(
     // kComplexityHigher, which maps to cpu_used = -4.
     int pixels_per_frame = stream_codec->width * stream_codec->height;
     if (pixels_per_frame < 352 * 288) {
-      stream_codec->VP8()->complexity = webrtc::kComplexityHigher;
+      stream_codec->VP8()->complexity =
+          webrtc::VideoCodecComplexity::kComplexityHigher;
     }
     // Turn off denoising for all streams but the highest resolution.
     stream_codec->VP8()->denoisingOn = false;
@@ -407,7 +407,8 @@ class TestSimulcastEncoderAdapterFake : public ::testing::Test,
     // stream 0, the lowest resolution stream.
     InitRefCodec(0, &ref_codec);
     ref_codec.qpMax = 45;
-    ref_codec.VP8()->complexity = webrtc::kComplexityHigher;
+    ref_codec.VP8()->complexity =
+        webrtc::VideoCodecComplexity::kComplexityHigher;
     ref_codec.VP8()->denoisingOn = false;
     ref_codec.startBitrate = 100;  // Should equal to the target bitrate.
     VerifyCodec(ref_codec, 0);
@@ -1101,7 +1101,7 @@ TEST_F(WebRtcVideoEngineTest, DISABLED_RecreatesEncoderOnContentTypeChange) {
       capturer.Start(capturer.GetSupportedFormats()->front()));
   EXPECT_TRUE(capturer.CaptureFrame());
   ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(1));
-  EXPECT_EQ(webrtc::kRealtimeVideo,
+  EXPECT_EQ(webrtc::VideoCodecMode::kRealtimeVideo,
             encoder_factory_->encoders().back()->GetCodecSettings().mode);
 
   EXPECT_TRUE(channel->SetVideoSend(kSsrc, &options, &capturer));
@@ -1116,7 +1116,7 @@ TEST_F(WebRtcVideoEngineTest, DISABLED_RecreatesEncoderOnContentTypeChange) {
   // adapter case, this will result in two calls since InitEncode triggers a
   // a new instance.
   ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(2));
-  EXPECT_EQ(webrtc::kScreensharing,
+  EXPECT_EQ(webrtc::VideoCodecMode::kScreensharing,
             encoder_factory_->encoders().back()->GetCodecSettings().mode);
 
   EXPECT_TRUE(channel->SetVideoSend(kSsrc, &options, &capturer));
@@ -1131,7 +1131,7 @@ TEST_F(WebRtcVideoEngineTest, DISABLED_RecreatesEncoderOnContentTypeChange) {
   // a non |is_screencast| option just to verify it doesn't affect recreation.
   EXPECT_TRUE(capturer.CaptureFrame());
   ASSERT_TRUE(encoder_factory_->WaitForCreatedVideoEncoders(3));
-  EXPECT_EQ(webrtc::kRealtimeVideo,
+  EXPECT_EQ(webrtc::VideoCodecMode::kRealtimeVideo,
             encoder_factory_->encoders().back()->GetCodecSettings().mode);
 
   // Remove stream previously added to free the external encoder instance.
@@ -164,7 +164,7 @@ H264EncoderImpl::H264EncoderImpl(const cricket::VideoCodec& codec)
       max_frame_rate_(0.0f),
       target_bps_(0),
       max_bps_(0),
-      mode_(kRealtimeVideo),
+      mode_(VideoCodecMode::kRealtimeVideo),
       frame_dropping_on_(false),
       key_frame_interval_(0),
       packetization_mode_(H264PacketizationMode::SingleNalUnit),
@@ -373,7 +373,7 @@ int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame,
   encoded_image_.ntp_time_ms_ = input_frame.ntp_time_ms();
   encoded_image_.capture_time_ms_ = input_frame.render_time_ms();
   encoded_image_.rotation_ = input_frame.rotation();
-  encoded_image_.content_type_ = (mode_ == kScreensharing)
+  encoded_image_.content_type_ = (mode_ == VideoCodecMode::kScreensharing)
                                      ? VideoContentType::SCREENSHARE
                                      : VideoContentType::UNSPECIFIED;
   encoded_image_.timing_.flags = VideoSendTiming::kInvalid;
@@ -419,9 +419,9 @@ SEncParamExt H264EncoderImpl::CreateEncoderParams() const {
   RTC_DCHECK(openh264_encoder_);
   SEncParamExt encoder_params;
   openh264_encoder_->GetDefaultParams(&encoder_params);
-  if (mode_ == kRealtimeVideo) {
+  if (mode_ == VideoCodecMode::kRealtimeVideo) {
     encoder_params.iUsageType = CAMERA_VIDEO_REAL_TIME;
-  } else if (mode_ == kScreensharing) {
+  } else if (mode_ == VideoCodecMode::kScreensharing) {
     encoder_params.iUsageType = SCREEN_CONTENT_REAL_TIME;
   } else {
     RTC_NOTREACHED();
@@ -87,7 +87,7 @@ std::string CodecSpecificToString(const VideoCodec& codec) {
   rtc::SimpleStringBuilder ss(buf);
   switch (codec.codecType) {
     case kVideoCodecVP8:
-      ss << "complexity: " << codec.VP8().complexity;
+      ss << "complexity: " << static_cast<int>(codec.VP8().complexity);
       ss << "\nnum_temporal_layers: "
          << static_cast<int>(codec.VP8().numberOfTemporalLayers);
       ss << "\ndenoising: " << codec.VP8().denoisingOn;
@@ -96,7 +96,7 @@ std::string CodecSpecificToString(const VideoCodec& codec) {
       ss << "\nkey_frame_interval: " << codec.VP8().keyFrameInterval;
       break;
     case kVideoCodecVP9:
-      ss << "complexity: " << codec.VP9().complexity;
+      ss << "complexity: " << static_cast<int>(codec.VP9().complexity);
       ss << "\nnum_temporal_layers: "
         << static_cast<int>(codec.VP9().numberOfTemporalLayers);
       ss << "\nnum_spatial_layers: "
@@ -473,13 +473,13 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst,
 
   // Allow the user to set the complexity for the base stream.
   switch (inst->VP8().complexity) {
-    case kComplexityHigh:
+    case VideoCodecComplexity::kComplexityHigh:
       cpu_speed_[0] = -5;
       break;
-    case kComplexityHigher:
+    case VideoCodecComplexity::kComplexityHigher:
      cpu_speed_[0] = -4;
      break;
-    case kComplexityMax:
+    case VideoCodecComplexity::kComplexityMax:
      cpu_speed_[0] = -3;
      break;
    default:
@@ -661,7 +661,7 @@ int LibvpxVp8Encoder::InitAndSetControlSettings() {
   for (size_t i = 0; i < encoders_.size(); ++i) {
     // Allow more screen content to be detected as static.
     vpx_codec_control(&(encoders_[i]), VP8E_SET_STATIC_THRESHOLD,
-                      codec_.mode == kScreensharing ? 300 : 1);
+                      codec_.mode == VideoCodecMode::kScreensharing ? 300 : 1);
     vpx_codec_control(&(encoders_[i]), VP8E_SET_CPUUSED, cpu_speed_[i]);
     vpx_codec_control(&(encoders_[i]), VP8E_SET_TOKEN_PARTITIONS,
                       static_cast<vp8e_token_partitions>(kTokenPartitions));
@@ -670,7 +670,7 @@ int LibvpxVp8Encoder::InitAndSetControlSettings() {
     // VP8E_SET_SCREEN_CONTENT_MODE 2 = screen content with more aggressive
     // rate control (drop frames on large target bitrate overshoot)
     vpx_codec_control(&(encoders_[i]), VP8E_SET_SCREEN_CONTENT_MODE,
-                      codec_.mode == kScreensharing ? 2 : 0);
+                      codec_.mode == VideoCodecMode::kScreensharing ? 2 : 0);
     // Apply boost on golden frames (has only effect when resilience is off).
     if (use_gf_boost_ && configurations_[0].g_error_resilient == 0) {
       int gf_boost_percent;
@@ -778,7 +778,8 @@ int LibvpxVp8Encoder::Encode(const VideoFrame& frame,
   if (send_key_frame) {
     // Adapt the size of the key frame when in screenshare with 1 temporal
     // layer.
-    if (encoders_.size() == 1 && codec_.mode == kScreensharing &&
+    if (encoders_.size() == 1 &&
+        codec_.mode == VideoCodecMode::kScreensharing &&
         codec_.VP8()->numberOfTemporalLayers <= 1) {
       const uint32_t forceKeyFrameIntraTh = 100;
       vpx_codec_control(&(encoders_[0]), VP8E_SET_MAX_INTRA_BITRATE_PCT,
@@ -925,8 +926,9 @@ int LibvpxVp8Encoder::GetEncodedPartitions(
           input_image.render_time_ms();
       encoded_images_[encoder_idx].rotation_ = input_image.rotation();
       encoded_images_[encoder_idx].content_type_ =
-          (codec_.mode == kScreensharing) ? VideoContentType::SCREENSHARE
-                                          : VideoContentType::UNSPECIFIED;
+          (codec_.mode == VideoCodecMode::kScreensharing)
+              ? VideoContentType::SCREENSHARE
+              : VideoContentType::UNSPECIFIED;
       encoded_images_[encoder_idx].timing_.flags = VideoSendTiming::kInvalid;
 
       int qp = -1;
@@ -947,7 +949,7 @@ int LibvpxVp8Encoder::GetEncodedPartitions(
       encoded_images_[encoder_idx].qp_ = qp_128;
       encoded_complete_callback_->OnEncodedImage(encoded_images_[encoder_idx],
                                                  &codec_specific, &frag_info);
-    } else if (codec_.mode == kScreensharing) {
+    } else if (codec_.mode == VideoCodecMode::kScreensharing) {
       result = WEBRTC_VIDEO_CODEC_TARGET_BITRATE_OVERSHOOT;
     }
   }
@@ -134,7 +134,8 @@ void SimulcastRateAllocator::DistributeAllocationToTemporalLayers(
     // Legacy temporal-layered only screenshare, or simulcast screenshare
     // with legacy mode for simulcast stream 0.
     const bool conference_screenshare_mode =
-        codec_.mode == kScreensharing && codec_.targetBitrate > 0 &&
+        codec_.mode == VideoCodecMode::kScreensharing &&
+        codec_.targetBitrate > 0 &&
         ((num_spatial_streams == 1 && num_temporal_streams == 2) ||  // Legacy.
          (num_spatial_streams > 1 && simulcast_id == 0));  // Simulcast.
     if (conference_screenshare_mode) {
@@ -40,7 +40,7 @@ uint8_t NumTemporalLayers(const VideoCodec& codec, int spatial_id) {
 }
 
 bool IsConferenceModeScreenshare(const VideoCodec& codec) {
-  if (codec.mode != kScreensharing) {
+  if (codec.mode != VideoCodecMode::kScreensharing) {
     return false;
   }
   return NumTemporalLayers(codec, 0) == 2;
@@ -50,7 +50,7 @@ class TestVp8Impl : public VideoCodecUnitTest {
     codec_settings->VP8()->denoisingOn = true;
     codec_settings->VP8()->frameDroppingOn = false;
     codec_settings->VP8()->automaticResizeOn = false;
-    codec_settings->VP8()->complexity = kComplexityNormal;
+    codec_settings->VP8()->complexity = VideoCodecComplexity::kComplexityNormal;
   }
 
   void EncodeAndWaitForFrame(const VideoFrame& input_frame,
@@ -38,7 +38,7 @@ VideoBitrateAllocation SvcRateAllocator::GetAllocation(
     VideoBitrateAllocation bitrate_allocation;
    bitrate_allocation.SetBitrate(0, 0, total_bitrate_bps);
    return bitrate_allocation;
-  } else if (codec_.mode == kRealtimeVideo) {
+  } else if (codec_.mode == VideoCodecMode::kRealtimeVideo) {
    return GetAllocationNormalVideo(total_bitrate_bps);
  } else {
    return GetAllocationScreenSharing(total_bitrate_bps);
@@ -25,7 +25,8 @@ static VideoCodec Configure(size_t width,
   codec.width = width;
   codec.height = height;
   codec.codecType = kVideoCodecVP9;
-  codec.mode = is_screen_sharing ? kScreensharing : kRealtimeVideo;
+  codec.mode = is_screen_sharing ? VideoCodecMode::kScreensharing
+                                 : VideoCodecMode::kRealtimeVideo;
 
   std::vector<SpatialLayer> spatial_layers =
       GetSvcConfig(width, height, num_spatial_layers, num_temporal_layers,
@@ -434,7 +434,7 @@ class TestVp9ImplFrameDropping : public TestVp9Impl {
     // to reduce execution time.
     codec_settings->width = 64;
     codec_settings->height = 64;
-    codec_settings->mode = kScreensharing;
+    codec_settings->mode = VideoCodecMode::kScreensharing;
   }
 };
 
@@ -287,7 +287,7 @@ int VP9EncoderImpl::InitEncode(const VideoCodec* inst,
 
   // Init framerate controller.
   output_framerate_.Reset();
-  if (codec_.mode == kScreensharing) {
+  if (codec_.mode == VideoCodecMode::kScreensharing) {
     target_framerate_fps_ = kMaxScreenSharingFramerateFps;
   } else {
     target_framerate_fps_.reset();
@@ -538,7 +538,7 @@ int VP9EncoderImpl::InitAndSetControlSettings(const VideoCodec* inst) {
                     inst->VP9().denoisingOn ? 1 : 0);
 #endif
 
-  if (codec_.mode == kScreensharing) {
+  if (codec_.mode == VideoCodecMode::kScreensharing) {
     // Adjust internal parameters to screen content.
     vpx_codec_control(encoder_, VP9E_SET_TUNE_CONTENT, 1);
   }
@@ -580,7 +580,7 @@ int VP9EncoderImpl::Encode(const VideoFrame& input_image,
     }
   }
 
-  if (kScreensharing == codec_.mode && !force_key_frame_) {
+  if (VideoCodecMode::kScreensharing == codec_.mode && !force_key_frame_) {
     if (DropFrame(input_image.timestamp())) {
       return WEBRTC_VIDEO_CODEC_OK;
     }
@@ -881,7 +881,7 @@ int VP9EncoderImpl::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) {
   encoded_image_._timeStamp = input_image_->timestamp();
   encoded_image_.capture_time_ms_ = input_image_->render_time_ms();
   encoded_image_.rotation_ = input_image_->rotation();
-  encoded_image_.content_type_ = (codec_.mode == kScreensharing)
+  encoded_image_.content_type_ = (codec_.mode == VideoCodecMode::kScreensharing)
                                      ? VideoContentType::SCREENSHARE
                                      : VideoContentType::UNSPECIFIED;
   encoded_image_._encodedHeight =
@@ -82,10 +82,10 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec(
 
   switch (config.content_type) {
     case VideoEncoderConfig::ContentType::kRealtimeVideo:
-      video_codec.mode = kRealtimeVideo;
+      video_codec.mode = VideoCodecMode::kRealtimeVideo;
       break;
     case VideoEncoderConfig::ContentType::kScreen:
-      video_codec.mode = kScreensharing;
+      video_codec.mode = VideoCodecMode::kScreensharing;
       if (!streams.empty() && streams[0].num_temporal_layers == 2u) {
         video_codec.targetBitrate = streams[0].target_bitrate_bps / 1000;
       }
@@ -203,10 +203,11 @@ VideoCodec VideoCodecInitializer::VideoEncoderConfigToVideoCodec(
     // Layering is set explicitly.
     spatial_layers = config.spatial_layers;
   } else {
-    spatial_layers = GetSvcConfig(video_codec.width, video_codec.height,
-                                  video_codec.VP9()->numberOfSpatialLayers,
-                                  video_codec.VP9()->numberOfTemporalLayers,
-                                  video_codec.mode == kScreensharing);
+    spatial_layers =
+        GetSvcConfig(video_codec.width, video_codec.height,
+                     video_codec.VP9()->numberOfSpatialLayers,
+                     video_codec.VP9()->numberOfTemporalLayers,
+                     video_codec.mode == VideoCodecMode::kScreensharing);
 
     const bool no_spatial_layering = (spatial_layers.size() == 1);
     if (no_spatial_layering) {
@@ -92,7 +92,7 @@ int32_t VideoSender::RegisterSendCodec(const VideoCodec* sendCodec,
 
   // If we have screensharing and we have layers, we disable frame dropper.
   bool disable_frame_dropper =
-      numLayers > 1 && sendCodec->mode == kScreensharing;
+      numLayers > 1 && sendCodec->mode == VideoCodecMode::kScreensharing;
   if (disable_frame_dropper) {
     _mediaOpt.EnableFrameDropper(false);
   } else if (frame_dropper_enabled_) {
@@ -614,7 +614,7 @@ int32_t MediaCodecVideoEncoder::Encode(
   }
 
   bool send_key_frame = false;
-  if (codec_mode_ == kRealtimeVideo) {
+  if (codec_mode_ == VideoCodecMode::kRealtimeVideo) {
     ++frames_received_since_last_key_;
     int64_t now_ms = rtc::TimeMillis();
     if (last_frame_received_ms_ != -1 &&
@@ -150,7 +150,7 @@ int32_t FakeEncoder::Encode(const VideoFrame& input_image,
       encoded._encodedWidth = simulcast_streams[i].width;
       encoded._encodedHeight = simulcast_streams[i].height;
       encoded.rotation_ = input_image.rotation();
-      encoded.content_type_ = (mode == kScreensharing)
+      encoded.content_type_ = (mode == VideoCodecMode::kScreensharing)
                                   ? VideoContentType::SCREENSHARE
                                   : VideoContentType::UNSPECIFIED;
       specifics.codec_name = ImplementationName();