diff --git a/AUTHORS b/AUTHORS
index 2b09878b08..694e98e5f2 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -61,7 +61,6 @@ Yura Yaroshevich
 Hans Knoechel
 Korniltsev Anatoly
 Todd Wong
-Sergio Garcia Murillo
 Maxim Pavlov
 Yusuke Suzuki
 Piasy Xu
@@ -90,5 +89,3 @@ Vonage Holdings Corp. <*@vonage.com>
 Wire Swiss GmbH <*@wire.com>
 Miguel Paris
 Vewd Software AS <*@vewd.com>
-Highfive, Inc. <*@highfive.com>
-CoSMo Software Consulting, Pte Ltd <*@cosmosoftware.io>
\ No newline at end of file
diff --git a/api/test/create_simulcast_test_fixture.cc b/api/test/create_simulcast_test_fixture.cc
index 14bd5ab7c5..49cd0f4004 100644
--- a/api/test/create_simulcast_test_fixture.cc
+++ b/api/test/create_simulcast_test_fixture.cc
@@ -14,7 +14,7 @@
 #include <utility>

 #include "api/test/simulcast_test_fixture.h"
-#include "modules/video_coding/utility/simulcast_test_fixture_impl.h"
+#include "modules/video_coding/codecs/vp8/simulcast_test_fixture_impl.h"
 #include "rtc_base/ptr_util.h"

 namespace webrtc {
@@ -22,11 +22,9 @@ namespace test {

 std::unique_ptr<SimulcastTestFixture> CreateSimulcastTestFixture(
     std::unique_ptr<VideoEncoderFactory> encoder_factory,
-    std::unique_ptr<VideoDecoderFactory> decoder_factory,
-    SdpVideoFormat video_format) {
+    std::unique_ptr<VideoDecoderFactory> decoder_factory) {
   return rtc::MakeUnique<SimulcastTestFixtureImpl>(std::move(encoder_factory),
-                                                   std::move(decoder_factory),
-                                                   video_format);
+                                                   std::move(decoder_factory));
 }

 }  // namespace test
diff --git a/api/test/create_simulcast_test_fixture.h b/api/test/create_simulcast_test_fixture.h
index 87f229c009..787e72d695 100644
--- a/api/test/create_simulcast_test_fixture.h
+++ b/api/test/create_simulcast_test_fixture.h
@@ -14,7 +14,6 @@
 #include <memory>

 #include "api/test/simulcast_test_fixture.h"
-#include "api/video_codecs/sdp_video_format.h"
 #include "api/video_codecs/video_decoder_factory.h"
 #include "api/video_codecs/video_encoder_factory.h"

@@ -23,8 +22,7 @@ namespace test {

 std::unique_ptr<SimulcastTestFixture> CreateSimulcastTestFixture(
     std::unique_ptr<VideoEncoderFactory> encoder_factory,
-    std::unique_ptr<VideoDecoderFactory> decoder_factory,
-    SdpVideoFormat video_format);
+    std::unique_ptr<VideoDecoderFactory> decoder_factory);

 }  // namespace test
 }  // namespace webrtc
diff --git a/api/video_codecs/test/BUILD.gn b/api/video_codecs/test/BUILD.gn
index 18943095f5..8dc2fcc6d3 100644
--- a/api/video_codecs/test/BUILD.gn
+++ b/api/video_codecs/test/BUILD.gn
@@ -22,8 +22,8 @@ if (rtc_include_tests) {
       "..:rtc_software_fallback_wrappers",
       "..:video_codecs_api",
       "../../../modules/video_coding:video_codec_interface",
-      "../../../modules/video_coding:video_coding_utility",
       "../../../modules/video_coding:webrtc_vp8",
+      "../../../modules/video_coding:webrtc_vp8_helpers",
       "../../../rtc_base:checks",
       "../../../rtc_base:rtc_base_tests_utils",
       "../../../system_wrappers:metrics_default",
diff --git a/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc b/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc
index a1a43b4202..ad40f90f01 100644
--- a/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc
+++ b/api/video_codecs/test/video_encoder_software_fallback_wrapper_unittest.cc
@@ -15,10 +15,10 @@
 #include "api/video/i420_buffer.h"
 #include "api/video/video_bitrate_allocation.h"
 #include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h"
 #include "modules/video_coding/codecs/vp8/temporal_layers.h"
 #include "modules/video_coding/include/video_codec_interface.h"
 #include "modules/video_coding/include/video_error_codes.h"
-#include "modules/video_coding/utility/simulcast_rate_allocator.h"
 #include "rtc_base/checks.h"
 #include
"rtc_base/fakeclock.h" #include "test/field_trial.h" diff --git a/media/BUILD.gn b/media/BUILD.gn index f932a89549..4b92d140e2 100644 --- a/media/BUILD.gn +++ b/media/BUILD.gn @@ -191,10 +191,10 @@ rtc_static_library("rtc_internal_video_codecs") { "../api/video_codecs:video_codecs_api", "../call:call_interfaces", "../call:video_stream_api", - "../modules/video_coding:video_coding_utility", "../modules/video_coding:webrtc_h264", "../modules/video_coding:webrtc_multiplex", "../modules/video_coding:webrtc_vp8", + "../modules/video_coding:webrtc_vp8_helpers", "../modules/video_coding:webrtc_vp9", "../rtc_base:checks", "../rtc_base:rtc_base_approved", @@ -218,7 +218,6 @@ rtc_static_library("rtc_audio_video") { "../modules/audio_processing/aec_dump:aec_dump", "../modules/video_coding:video_codec_interface", "../modules/video_coding:video_coding", - "../modules/video_coding:video_coding_utility", "../rtc_base:audio_format_to_string", "../rtc_base:base64", "../rtc_base:checks", @@ -305,6 +304,7 @@ rtc_static_library("rtc_audio_video") { "../modules/audio_mixer:audio_mixer_impl", "../modules/audio_processing:audio_processing", "../modules/video_capture:video_capture_module", + "../modules/video_coding:webrtc_vp8_helpers", "../pc:rtc_pc_base", "../rtc_base:rtc_base", "../rtc_base:rtc_task_queue", @@ -474,7 +474,6 @@ if (rtc_include_tests) { "../api/video:video_frame_i420", "../modules/audio_processing:mocks", "../modules/video_coding:video_codec_interface", - "../modules/video_coding:webrtc_vp8", "../pc:rtc_pc", "../pc:rtc_pc_base", "../rtc_base:checks", @@ -570,6 +569,7 @@ if (rtc_include_tests) { "../modules/audio_device:mock_audio_device", "../modules/audio_processing:audio_processing", "../modules/video_coding:simulcast_test_fixture_impl", + "../modules/video_coding:webrtc_vp8_helpers", "../p2p:p2p_test_utils", "../rtc_base:rtc_base", "../rtc_base:rtc_base_approved", diff --git a/media/DEPS b/media/DEPS index ab54b44036..99e62aab08 100644 --- a/media/DEPS +++ b/media/DEPS @@ -10,7 +10,6 @@ include_rules = [ "+modules/rtp_rtcp", "+modules/video_capture", "+modules/video_coding", - "+modules/video_coding/utility", "+p2p", "+pc", "+sound", diff --git a/media/engine/fakewebrtccall.cc b/media/engine/fakewebrtccall.cc index 70353fe45e..78a1b63d93 100644 --- a/media/engine/fakewebrtccall.cc +++ b/media/engine/fakewebrtccall.cc @@ -21,17 +21,16 @@ namespace cricket { FakeAudioSendStream::FakeAudioSendStream( - int id, const webrtc::AudioSendStream::Config& config) - : id_(id), config_(config) { -} + int id, + const webrtc::AudioSendStream::Config& config) + : id_(id), config_(config) {} void FakeAudioSendStream::Reconfigure( const webrtc::AudioSendStream::Config& config) { config_ = config; } -const webrtc::AudioSendStream::Config& - FakeAudioSendStream::GetConfig() const { +const webrtc::AudioSendStream::Config& FakeAudioSendStream::GetConfig() const { return config_; } @@ -41,12 +40,13 @@ void FakeAudioSendStream::SetStats( } FakeAudioSendStream::TelephoneEvent - FakeAudioSendStream::GetLatestTelephoneEvent() const { +FakeAudioSendStream::GetLatestTelephoneEvent() const { return latest_telephone_event_; } bool FakeAudioSendStream::SendTelephoneEvent(int payload_type, - int payload_frequency, int event, + int payload_frequency, + int event, int duration_ms) { latest_telephone_event_.payload_type = payload_type; latest_telephone_event_.payload_frequency = payload_frequency; @@ -69,12 +69,12 @@ webrtc::AudioSendStream::Stats FakeAudioSendStream::GetStats( } FakeAudioReceiveStream::FakeAudioReceiveStream( 
- int id, const webrtc::AudioReceiveStream::Config& config) - : id_(id), config_(config) { -} + int id, + const webrtc::AudioReceiveStream::Config& config) + : id_(id), config_(config) {} -const webrtc::AudioReceiveStream::Config& - FakeAudioReceiveStream::GetConfig() const { +const webrtc::AudioReceiveStream::Config& FakeAudioReceiveStream::GetConfig() + const { return config_; } @@ -156,7 +156,7 @@ bool FakeVideoSendStream::GetVp8Settings( return false; } - *settings = codec_specific_settings_.vp8; + *settings = vpx_settings_.vp8; return true; } @@ -166,17 +166,7 @@ bool FakeVideoSendStream::GetVp9Settings( return false; } - *settings = codec_specific_settings_.vp9; - return true; -} - -bool FakeVideoSendStream::GetH264Settings( - webrtc::VideoCodecH264* settings) const { - if (!codec_settings_set_) { - return false; - } - - *settings = codec_specific_settings_.h264; + *settings = vpx_settings_.vp9; return true; } @@ -199,8 +189,7 @@ int64_t FakeVideoSendStream::GetLastTimestamp() const { void FakeVideoSendStream::OnFrame(const webrtc::VideoFrame& frame) { ++num_swapped_frames_; - if (!last_frame_ || - frame.width() != last_frame_->width() || + if (!last_frame_ || frame.width() != last_frame_->width() || frame.height() != last_frame_->height() || frame.rotation() != last_frame_->rotation()) { video_streams_ = encoder_config_.video_stream_factory->CreateEncoderStreams( @@ -240,22 +229,15 @@ void FakeVideoSendStream::ReconfigureVideoEncoder( const unsigned char num_temporal_layers = static_cast( video_streams_.back().num_temporal_layers.value_or(1)); if (config_.rtp.payload_name == "VP8") { - config.encoder_specific_settings->FillVideoCodecVp8( - &codec_specific_settings_.vp8); + config.encoder_specific_settings->FillVideoCodecVp8(&vpx_settings_.vp8); if (!video_streams_.empty()) { - codec_specific_settings_.vp8.numberOfTemporalLayers = - num_temporal_layers; + vpx_settings_.vp8.numberOfTemporalLayers = num_temporal_layers; } } else if (config_.rtp.payload_name == "VP9") { - config.encoder_specific_settings->FillVideoCodecVp9( - &codec_specific_settings_.vp9); + config.encoder_specific_settings->FillVideoCodecVp9(&vpx_settings_.vp9); if (!video_streams_.empty()) { - codec_specific_settings_.vp9.numberOfTemporalLayers = - num_temporal_layers; + vpx_settings_.vp9.numberOfTemporalLayers = num_temporal_layers; } - } else if (config_.rtp.payload_name == "H264") { - config.encoder_specific_settings->FillVideoCodecH264( - &codec_specific_settings_.h264); } else { ADD_FAILURE() << "Unsupported encoder payload: " << config_.rtp.payload_name; @@ -474,16 +456,15 @@ webrtc::NetworkState FakeCall::GetNetworkState(webrtc::MediaType media) const { webrtc::AudioSendStream* FakeCall::CreateAudioSendStream( const webrtc::AudioSendStream::Config& config) { - FakeAudioSendStream* fake_stream = new FakeAudioSendStream(next_stream_id_++, - config); + FakeAudioSendStream* fake_stream = + new FakeAudioSendStream(next_stream_id_++, config); audio_send_streams_.push_back(fake_stream); ++num_created_send_streams_; return fake_stream; } void FakeCall::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) { - auto it = std::find(audio_send_streams_.begin(), - audio_send_streams_.end(), + auto it = std::find(audio_send_streams_.begin(), audio_send_streams_.end(), static_cast(send_stream)); if (it == audio_send_streams_.end()) { ADD_FAILURE() << "DestroyAudioSendStream called with unknown parameter."; @@ -495,17 +476,17 @@ void FakeCall::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) { 
webrtc::AudioReceiveStream* FakeCall::CreateAudioReceiveStream( const webrtc::AudioReceiveStream::Config& config) { - audio_receive_streams_.push_back(new FakeAudioReceiveStream(next_stream_id_++, - config)); + audio_receive_streams_.push_back( + new FakeAudioReceiveStream(next_stream_id_++, config)); ++num_created_receive_streams_; return audio_receive_streams_.back(); } void FakeCall::DestroyAudioReceiveStream( webrtc::AudioReceiveStream* receive_stream) { - auto it = std::find(audio_receive_streams_.begin(), - audio_receive_streams_.end(), - static_cast(receive_stream)); + auto it = + std::find(audio_receive_streams_.begin(), audio_receive_streams_.end(), + static_cast(receive_stream)); if (it == audio_receive_streams_.end()) { ADD_FAILURE() << "DestroyAudioReceiveStream called with unknown parameter."; } else { @@ -525,8 +506,7 @@ webrtc::VideoSendStream* FakeCall::CreateVideoSendStream( } void FakeCall::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) { - auto it = std::find(video_send_streams_.begin(), - video_send_streams_.end(), + auto it = std::find(video_send_streams_.begin(), video_send_streams_.end(), static_cast(send_stream)); if (it == video_send_streams_.end()) { ADD_FAILURE() << "DestroyVideoSendStream called with unknown parameter."; @@ -546,9 +526,9 @@ webrtc::VideoReceiveStream* FakeCall::CreateVideoReceiveStream( void FakeCall::DestroyVideoReceiveStream( webrtc::VideoReceiveStream* receive_stream) { - auto it = std::find(video_receive_streams_.begin(), - video_receive_streams_.end(), - static_cast(receive_stream)); + auto it = + std::find(video_receive_streams_.begin(), video_receive_streams_.end(), + static_cast(receive_stream)); if (it == video_receive_streams_.end()) { ADD_FAILURE() << "DestroyVideoReceiveStream called with unknown parameter."; } else { diff --git a/media/engine/fakewebrtccall.h b/media/engine/fakewebrtccall.h index 3231ccf65c..4ee6a8018c 100644 --- a/media/engine/fakewebrtccall.h +++ b/media/engine/fakewebrtccall.h @@ -134,7 +134,6 @@ class FakeVideoSendStream final bool IsSending() const; bool GetVp8Settings(webrtc::VideoCodecVP8* settings) const; bool GetVp9Settings(webrtc::VideoCodecVP9* settings) const; - bool GetH264Settings(webrtc::VideoCodecH264* settings) const; int GetNumberOfSwappedFrames() const; int GetLastWidth() const; @@ -180,11 +179,10 @@ class FakeVideoSendStream final rtc::VideoSinkWants sink_wants_; bool codec_settings_set_; - union CodecSpecificSettings { + union VpxSettings { webrtc::VideoCodecVP8 vp8; webrtc::VideoCodecVP9 vp9; - webrtc::VideoCodecH264 h264; - } codec_specific_settings_; + } vpx_settings_; bool resolution_scaling_enabled_; bool framerate_scaling_enabled_; rtc::VideoSourceInterface* source_; diff --git a/media/engine/simulcast.cc b/media/engine/simulcast.cc index 77c7dbdf53..607d1412f9 100644 --- a/media/engine/simulcast.cc +++ b/media/engine/simulcast.cc @@ -15,7 +15,7 @@ #include "media/base/streamparams.h" #include "media/engine/constants.h" #include "media/engine/simulcast.h" -#include "modules/video_coding/utility/simulcast_rate_allocator.h" +#include "modules/video_coding/codecs/vp8/include/vp8_common_types.h" #include "rtc_base/arraysize.h" #include "rtc_base/logging.h" #include "system_wrappers/include/field_trial.h" @@ -52,17 +52,11 @@ struct SimulcastFormat { // These tables describe from which resolution we can use how many // simulcast layers at what bitrates (maximum, target, and minimum). // Important!! Keep this table from high resolution to low resolution. 
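Note: a minimal usage sketch of the reverted helper, whose post-revert signature (without the temporal_layers_supported argument) appears in the media/engine/simulcast.h hunk further down. The resolution, QP, framerate and priority literals are illustrative, not taken from this patch.

```cpp
#include <vector>

#include "media/engine/simulcast.h"  // declares cricket::GetSimulcastConfig

// Returns one webrtc::VideoStream per simulcast layer, sized from the
// kSimulcastFormats table below. All numeric values here are examples.
std::vector<webrtc::VideoStream> ExampleSimulcastLayers() {
  return cricket::GetSimulcastConfig(
      /*max_layers=*/3, /*width=*/1280, /*height=*/720,
      /*max_bitrate_bps=*/0,  // unused; see the TODO in simulcast.h
      /*bitrate_priority=*/1.0, /*max_qp=*/56, /*max_framerate=*/30,
      /*is_screenshare=*/false);
}
```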
-// clang-format off const SimulcastFormat kSimulcastFormats[] = { - {1920, 1080, 3, 5000, 4000, 800}, - {1280, 720, 3, 2500, 2500, 600}, - {960, 540, 3, 900, 900, 450}, - {640, 360, 2, 700, 500, 150}, - {480, 270, 2, 450, 350, 150}, - {320, 180, 1, 200, 150, 30}, - {0, 0, 1, 200, 150, 30} -}; -// clang-format on + {1920, 1080, 3, 5000, 4000, 800}, {1280, 720, 3, 2500, 2500, 600}, + {960, 540, 3, 900, 900, 450}, {640, 360, 2, 700, 500, 150}, + {480, 270, 2, 450, 350, 150}, {320, 180, 1, 200, 150, 30}, + {0, 0, 1, 200, 150, 30}}; const int kMaxScreenshareSimulcastLayers = 2; @@ -179,25 +173,21 @@ int GetTotalMaxBitrateBps(const std::vector& layers) { return total_max_bitrate_bps; } -std::vector GetSimulcastConfig( - size_t max_layers, - int width, - int height, - int /*max_bitrate_bps*/, - double bitrate_priority, - int max_qp, - int max_framerate, - bool is_screenshare, - bool temporal_layers_supported) { +std::vector GetSimulcastConfig(size_t max_layers, + int width, + int height, + int /*max_bitrate_bps*/, + double bitrate_priority, + int max_qp, + int max_framerate, + bool is_screenshare) { if (is_screenshare) { return GetScreenshareLayers(max_layers, width, height, bitrate_priority, max_qp, max_framerate, - ScreenshareSimulcastFieldTrialEnabled(), - temporal_layers_supported); + ScreenshareSimulcastFieldTrialEnabled()); } else { return GetNormalSimulcastLayers(max_layers, width, height, bitrate_priority, - max_qp, max_framerate, - temporal_layers_supported); + max_qp, max_framerate); } } @@ -207,8 +197,7 @@ std::vector GetNormalSimulcastLayers( int height, double bitrate_priority, int max_qp, - int max_framerate, - bool temporal_layers_supported) { + int max_framerate) { // TODO(bugs.webrtc.org/8785): Currently if the resolution isn't large enough // (defined in kSimulcastFormats) we scale down the number of simulcast // layers. Consider changing this so that the application can have more @@ -237,9 +226,7 @@ std::vector GetNormalSimulcastLayers( layers[s].height = height; // TODO(pbos): Fill actual temporal-layer bitrate thresholds. layers[s].max_qp = max_qp; - layers[s].num_temporal_layers = - temporal_layers_supported ? DefaultNumberOfTemporalLayers(s) - : 0; + layers[s].num_temporal_layers = DefaultNumberOfTemporalLayers(s); layers[s].max_bitrate_bps = FindSimulcastMaxBitrateBps(width, height); layers[s].target_bitrate_bps = FindSimulcastTargetBitrateBps(width, height); int num_temporal_layers = DefaultNumberOfTemporalLayers(s); @@ -250,8 +237,8 @@ std::vector GetNormalSimulcastLayers( // with the default 3 simulcast streams. Otherwise we risk a higher // threshold for receiving a feed at all. const float rate_factor = - webrtc::kLayerRateAllocation[3][0] / - webrtc::kLayerRateAllocation[num_temporal_layers][0]; + webrtc::kVp8LayerRateAlloction[3][0] / + webrtc::kVp8LayerRateAlloction[num_temporal_layers][0]; layers[s].max_bitrate_bps = static_cast(layers[s].max_bitrate_bps * rate_factor); layers[s].target_bitrate_bps = @@ -283,8 +270,7 @@ std::vector GetScreenshareLayers( double bitrate_priority, int max_qp, int max_framerate, - bool screenshare_simulcast_enabled, - bool temporal_layers_supported) { + bool screenshare_simulcast_enabled) { auto max_screenshare_layers = screenshare_simulcast_enabled ? 
kMaxScreenshareSimulcastLayers : 1; size_t num_simulcast_layers = @@ -301,7 +287,7 @@ std::vector GetScreenshareLayers( layers[0].min_bitrate_bps = kMinVideoBitrateBps; layers[0].target_bitrate_bps = kScreenshareDefaultTl0BitrateKbps * 1000; layers[0].max_bitrate_bps = kScreenshareDefaultTl1BitrateKbps * 1000; - layers[0].num_temporal_layers = temporal_layers_supported ? 2 : 0; + layers[0].num_temporal_layers = 2; // With simulcast enabled, add another spatial layer. This one will have a // more normal layout, with the regular 3 temporal layer pattern and no fps diff --git a/media/engine/simulcast.h b/media/engine/simulcast.h index f217ac5eca..3172f5e01f 100644 --- a/media/engine/simulcast.h +++ b/media/engine/simulcast.h @@ -31,16 +31,14 @@ void BoostMaxSimulcastLayer(int max_bitrate_bps, // Gets simulcast settings. // TODO(asapersson): Remove max_bitrate_bps. -std::vector GetSimulcastConfig( - size_t max_layers, - int width, - int height, - int /*max_bitrate_bps*/, - double bitrate_priority, - int max_qp, - int max_framerate, - bool is_screenshare, - bool temporal_layers_supported); +std::vector GetSimulcastConfig(size_t max_layers, + int width, + int height, + int /*max_bitrate_bps*/, + double bitrate_priority, + int max_qp, + int max_framerate, + bool is_screenshare); // Gets the simulcast config layers for a non-screensharing case. std::vector GetNormalSimulcastLayers( @@ -49,8 +47,7 @@ std::vector GetNormalSimulcastLayers( int height, double bitrate_priority, int max_qp, - int max_framerate, - bool temporal_layers_supported); + int max_framerate); // Gets simulcast config layers for screenshare settings. std::vector GetScreenshareLayers( @@ -60,8 +57,7 @@ std::vector GetScreenshareLayers( double bitrate_priority, int max_qp, int max_framerate, - bool screenshare_simulcast_enabled, - bool temporal_layers_supported); + bool screenshare_simulcast_enabled); bool ScreenshareSimulcastFieldTrialEnabled(); diff --git a/media/engine/simulcast_encoder_adapter.cc b/media/engine/simulcast_encoder_adapter.cc index dbf55af6ca..7a277950dc 100644 --- a/media/engine/simulcast_encoder_adapter.cc +++ b/media/engine/simulcast_encoder_adapter.cc @@ -16,7 +16,8 @@ #include "api/video/video_bitrate_allocation.h" #include "api/video_codecs/video_encoder_factory.h" #include "media/engine/scopedvideoencoder.h" -#include "modules/video_coding/utility/simulcast_rate_allocator.h" +#include "modules/video_coding/codecs/vp8/screenshare_layers.h" +#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h" #include "rtc_base/checks.h" #include "system_wrappers/include/clock.h" #include "third_party/libyuv/include/libyuv/scale.h" @@ -75,8 +76,7 @@ int VerifyCodec(const webrtc::VideoCodec* inst) { if (inst->width <= 1 || inst->height <= 1) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - if (inst->codecType == webrtc::kVideoCodecVP8 && - inst->VP8().automaticResizeOn && inst->numberOfSimulcastStreams > 1) { + if (inst->VP8().automaticResizeOn && inst->numberOfSimulcastStreams > 1) { return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } return WEBRTC_VIDEO_CODEC_OK; @@ -219,8 +219,7 @@ int SimulcastEncoderAdapter::InitEncode(const VideoCodec* inst, encoder = std::move(stored_encoders_.top()); stored_encoders_.pop(); } else { - encoder = factory_->CreateVideoEncoder(SdpVideoFormat( - codec_.codecType == webrtc::kVideoCodecVP8 ? 
"VP8" : "H264")); + encoder = factory_->CreateVideoEncoder(video_format_); } ret = encoder->InitEncode(&stream_codec, number_of_cores, max_payload_size); @@ -436,11 +435,8 @@ EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage( const RTPFragmentationHeader* fragmentation) { CodecSpecificInfo stream_codec_specific = *codecSpecificInfo; stream_codec_specific.codec_name = implementation_name_.c_str(); - if (stream_codec_specific.codecType == webrtc::kVideoCodecVP8) { - stream_codec_specific.codecSpecific.VP8.simulcastIdx = stream_idx; - } else if (stream_codec_specific.codecType == webrtc::kVideoCodecH264) { - stream_codec_specific.codecSpecific.H264.simulcast_idx = stream_idx; - } + CodecSpecificInfoVP8* vp8Info = &(stream_codec_specific.codecSpecific.VP8); + vp8Info->simulcastIdx = stream_idx; return encoded_complete_callback_->OnEncodedImage( encodedImage, &stream_codec_specific, fragmentation); @@ -455,6 +451,8 @@ void SimulcastEncoderAdapter::PopulateStreamCodec( *stream_codec = inst; // Stream specific settings. + stream_codec->VP8()->numberOfTemporalLayers = + inst.simulcastStream[stream_index].numberOfTemporalLayers; stream_codec->numberOfSimulcastStreams = 0; stream_codec->width = inst.simulcastStream[stream_index].width; stream_codec->height = inst.simulcastStream[stream_index].height; @@ -467,20 +465,16 @@ void SimulcastEncoderAdapter::PopulateStreamCodec( // Settings for lowest spatial resolutions. stream_codec->qpMax = kLowestResMaxQp; } - if (inst.codecType == webrtc::kVideoCodecVP8) { - stream_codec->VP8()->numberOfTemporalLayers = - inst.simulcastStream[stream_index].numberOfTemporalLayers; - if (!highest_resolution_stream) { - // For resolutions below CIF, set the codec |complexity| parameter to - // kComplexityHigher, which maps to cpu_used = -4. - int pixels_per_frame = stream_codec->width * stream_codec->height; - if (pixels_per_frame < 352 * 288) { - stream_codec->VP8()->complexity = - webrtc::VideoCodecComplexity::kComplexityHigher; - } - // Turn off denoising for all streams but the highest resolution. - stream_codec->VP8()->denoisingOn = false; + if (!highest_resolution_stream) { + // For resolutions below CIF, set the codec |complexity| parameter to + // kComplexityHigher, which maps to cpu_used = -4. + int pixels_per_frame = stream_codec->width * stream_codec->height; + if (pixels_per_frame < 352 * 288) { + stream_codec->VP8()->complexity = + webrtc::VideoCodecComplexity::kComplexityHigher; } + // Turn off denoising for all streams but the highest resolution. + stream_codec->VP8()->denoisingOn = false; } // TODO(ronghuawu): what to do with targetBitrate. diff --git a/media/engine/simulcast_encoder_adapter.h b/media/engine/simulcast_encoder_adapter.h index af830d7ab8..2b7a9b031c 100644 --- a/media/engine/simulcast_encoder_adapter.h +++ b/media/engine/simulcast_encoder_adapter.h @@ -19,7 +19,7 @@ #include #include "media/engine/webrtcvideoencoderfactory.h" -#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/codecs/vp8/include/vp8.h" #include "rtc_base/atomicops.h" #include "rtc_base/sequenced_task_checker.h" @@ -32,7 +32,7 @@ class VideoEncoderFactory; // webrtc::VideoEncoder instances with the given VideoEncoderFactory. // The object is created and destroyed on the worker thread, but all public // interfaces should be called from the encoder task queue. 
-class SimulcastEncoderAdapter : public VideoEncoder { +class SimulcastEncoderAdapter : public VP8Encoder { public: explicit SimulcastEncoderAdapter(VideoEncoderFactory* factory, const SdpVideoFormat& format); diff --git a/media/engine/simulcast_encoder_adapter_unittest.cc b/media/engine/simulcast_encoder_adapter_unittest.cc index 53f12a3732..9625b31d23 100644 --- a/media/engine/simulcast_encoder_adapter_unittest.cc +++ b/media/engine/simulcast_encoder_adapter_unittest.cc @@ -19,9 +19,8 @@ #include "common_video/include/video_frame_buffer.h" #include "media/engine/internalencoderfactory.h" #include "media/engine/simulcast_encoder_adapter.h" -#include "modules/video_coding/codecs/vp8/include/vp8.h" +#include "modules/video_coding/codecs/vp8/simulcast_test_fixture_impl.h" #include "modules/video_coding/include/video_codec_interface.h" -#include "modules/video_coding/utility/simulcast_test_fixture_impl.h" #include "rtc_base/ptr_util.h" #include "test/function_video_decoder_factory.h" #include "test/function_video_encoder_factory.h" @@ -52,8 +51,7 @@ std::unique_ptr CreateSpecificSimulcastTestFixture( rtc::MakeUnique( []() { return VP8Decoder::Create(); }); return CreateSimulcastTestFixture(std::move(encoder_factory), - std::move(decoder_factory), - SdpVideoFormat(cricket::kVp8CodecName)); + std::move(decoder_factory)); } } // namespace @@ -220,7 +218,6 @@ class MockVideoEncoder : public VideoEncoder { image._encodedHeight = height; CodecSpecificInfo codec_specific_info; memset(&codec_specific_info, 0, sizeof(codec_specific_info)); - codec_specific_info.codecType = webrtc::kVideoCodecVP8; callback_->OnEncodedImage(image, &codec_specific_info, nullptr); } @@ -298,7 +295,7 @@ class TestSimulcastEncoderAdapterFakeHelper { // Can only be called once as the SimulcastEncoderAdapter will take the // ownership of |factory_|. - VideoEncoder* CreateMockEncoderAdapter() { + VP8Encoder* CreateMockEncoderAdapter() { return new SimulcastEncoderAdapter(factory_.get(), SdpVideoFormat("VP8")); } @@ -360,8 +357,7 @@ class TestSimulcastEncoderAdapterFake : public ::testing::Test, void SetupCodec() { SimulcastTestFixtureImpl::DefaultSettings( - &codec_, static_cast(kTestTemporalLayerProfile), - kVideoCodecVP8); + &codec_, static_cast(kTestTemporalLayerProfile)); rate_allocator_.reset(new SimulcastRateAllocator(codec_)); EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200)); adapter_->RegisterEncodeCompleteCallback(this); @@ -436,7 +432,7 @@ class TestSimulcastEncoderAdapterFake : public ::testing::Test, protected: std::unique_ptr helper_; - std::unique_ptr adapter_; + std::unique_ptr adapter_; VideoCodec codec_; int last_encoded_image_width_; int last_encoded_image_height_; @@ -510,8 +506,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, EncodedCallbackForDifferentEncoders) { TEST_F(TestSimulcastEncoderAdapterFake, ReusesEncodersInOrder) { // Set up common settings for three streams. 
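Note: the rate allocator referenced throughout this patch moves back under codecs/vp8. A hedged sketch of driving it directly, assuming |codec| already has its simulcastStream[] entries populated (e.g. via SimulcastTestFixtureImpl::DefaultSettings, as in the test below); the exact name of the allocation type may differ by branch.

```cpp
#include "api/video/video_bitrate_allocation.h"
#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h"

// Splits a total target bitrate across the configured simulcast and temporal
// layers. The 2.5 Mbps / 30 fps figures are example inputs only.
webrtc::VideoBitrateAllocation AllocateAcrossLayers(
    const webrtc::VideoCodec& codec) {
  webrtc::SimulcastRateAllocator allocator(codec);
  return allocator.GetAllocation(/*total_bitrate_bps=*/2500000,
                                 /*framerate=*/30);
}
```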
SimulcastTestFixtureImpl::DefaultSettings( - &codec_, static_cast(kTestTemporalLayerProfile), - kVideoCodecVP8); + &codec_, static_cast(kTestTemporalLayerProfile)); rate_allocator_.reset(new SimulcastRateAllocator(codec_)); adapter_->RegisterEncodeCompleteCallback(this); @@ -709,8 +704,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, ReinitDoesNotReorderFrameSimulcastIdx) { TEST_F(TestSimulcastEncoderAdapterFake, SupportsNativeHandleForSingleStreams) { SimulcastTestFixtureImpl::DefaultSettings( - &codec_, static_cast(kTestTemporalLayerProfile), - kVideoCodecVP8); + &codec_, static_cast(kTestTemporalLayerProfile)); codec_.numberOfSimulcastStreams = 1; EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200)); adapter_->RegisterEncodeCompleteCallback(this); @@ -723,8 +717,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, SupportsNativeHandleForSingleStreams) { TEST_F(TestSimulcastEncoderAdapterFake, SetRatesUnderMinBitrate) { SimulcastTestFixtureImpl::DefaultSettings( - &codec_, static_cast(kTestTemporalLayerProfile), - kVideoCodecVP8); + &codec_, static_cast(kTestTemporalLayerProfile)); codec_.minBitrate = 50; codec_.numberOfSimulcastStreams = 1; EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200)); @@ -753,8 +746,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, SetRatesUnderMinBitrate) { TEST_F(TestSimulcastEncoderAdapterFake, SupportsImplementationName) { EXPECT_STREQ("SimulcastEncoderAdapter", adapter_->ImplementationName()); SimulcastTestFixtureImpl::DefaultSettings( - &codec_, static_cast(kTestTemporalLayerProfile), - kVideoCodecVP8); + &codec_, static_cast(kTestTemporalLayerProfile)); std::vector encoder_names; encoder_names.push_back("codec1"); encoder_names.push_back("codec2"); @@ -776,8 +768,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, SupportsImplementationName) { TEST_F(TestSimulcastEncoderAdapterFake, SupportsNativeHandleForMultipleStreams) { SimulcastTestFixtureImpl::DefaultSettings( - &codec_, static_cast(kTestTemporalLayerProfile), - kVideoCodecVP8); + &codec_, static_cast(kTestTemporalLayerProfile)); codec_.numberOfSimulcastStreams = 3; EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200)); adapter_->RegisterEncodeCompleteCallback(this); @@ -814,8 +805,7 @@ class FakeNativeBuffer : public VideoFrameBuffer { TEST_F(TestSimulcastEncoderAdapterFake, NativeHandleForwardingForMultipleStreams) { SimulcastTestFixtureImpl::DefaultSettings( - &codec_, static_cast(kTestTemporalLayerProfile), - kVideoCodecVP8); + &codec_, static_cast(kTestTemporalLayerProfile)); codec_.numberOfSimulcastStreams = 3; // High start bitrate, so all streams are enabled. 
codec_.startBitrate = 3000; @@ -839,8 +829,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, TEST_F(TestSimulcastEncoderAdapterFake, TestFailureReturnCodesFromEncodeCalls) { SimulcastTestFixtureImpl::DefaultSettings( - &codec_, static_cast(kTestTemporalLayerProfile), - kVideoCodecVP8); + &codec_, static_cast(kTestTemporalLayerProfile)); codec_.numberOfSimulcastStreams = 3; EXPECT_EQ(0, adapter_->InitEncode(&codec_, 1, 1200)); adapter_->RegisterEncodeCompleteCallback(this); @@ -861,8 +850,7 @@ TEST_F(TestSimulcastEncoderAdapterFake, TestFailureReturnCodesFromEncodeCalls) { TEST_F(TestSimulcastEncoderAdapterFake, TestInitFailureCleansUpEncoders) { SimulcastTestFixtureImpl::DefaultSettings( - &codec_, static_cast(kTestTemporalLayerProfile), - kVideoCodecVP8); + &codec_, static_cast(kTestTemporalLayerProfile)); codec_.numberOfSimulcastStreams = 3; helper_->factory()->set_init_encode_return_value( WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE); diff --git a/media/engine/webrtcvideoengine.cc b/media/engine/webrtcvideoengine.cc index 08de675eda..26bb83d790 100644 --- a/media/engine/webrtcvideoengine.cc +++ b/media/engine/webrtcvideoengine.cc @@ -318,10 +318,8 @@ static bool ValidateStreamParams(const StreamParams& sp) { // Returns true if the given codec is disallowed from doing simulcast. bool IsCodecBlacklistedForSimulcast(const std::string& codec_name) { - return webrtc::field_trial::IsEnabled("WebRTC-H264Simulcast") - ? CodecNamesEq(codec_name, kVp9CodecName) - : CodecNamesEq(codec_name, kH264CodecName) || - CodecNamesEq(codec_name, kVp9CodecName); + return CodecNamesEq(codec_name, kH264CodecName) || + CodecNamesEq(codec_name, kVp9CodecName); } // The selected thresholds for QVGA and VGA corresponded to a QP around 10. @@ -2715,14 +2713,11 @@ std::vector EncoderStreamFactory::CreateEncoderStreams( std::vector layers; if (encoder_config.number_of_streams > 1 || - ((CodecNamesEq(codec_name_, kVp8CodecName) || - CodecNamesEq(codec_name_, kH264CodecName)) && - is_screenshare_ && screenshare_config_explicitly_enabled_)) { - bool temporal_layers_supported = CodecNamesEq(codec_name_, kVp8CodecName); + (CodecNamesEq(codec_name_, kVp8CodecName) && is_screenshare_ && + screenshare_config_explicitly_enabled_)) { layers = GetSimulcastConfig(encoder_config.number_of_streams, width, height, 0 /*not used*/, encoder_config.bitrate_priority, - max_qp_, max_framerate_, is_screenshare_, - temporal_layers_supported); + max_qp_, max_framerate_, is_screenshare_); // Update the active simulcast layers and configured bitrates. 
bool is_highest_layer_max_bitrate_configured = false; for (size_t i = 0; i < layers.size(); ++i) { diff --git a/media/engine/webrtcvideoengine_unittest.cc b/media/engine/webrtcvideoengine_unittest.cc index 039de76c65..2b048fc8f8 100644 --- a/media/engine/webrtcvideoengine_unittest.cc +++ b/media/engine/webrtcvideoengine_unittest.cc @@ -770,9 +770,7 @@ TEST_F(WebRtcVideoEngineTest, ASSERT_EQ(0u, encoder_factory_->encoders().size()); } -TEST_F(WebRtcVideoEngineTest, SimulcastEnabledForH264BehindFieldTrial) { - webrtc::test::ScopedFieldTrials override_field_trials_( - "WebRTC-H264Simulcast/Enabled/"); +TEST_F(WebRtcVideoEngineTest, SimulcastDisabledForH264) { encoder_factory_->AddSupportedVideoCodecType("H264"); std::unique_ptr channel( @@ -798,7 +796,7 @@ TEST_F(WebRtcVideoEngineTest, SimulcastEnabledForH264BehindFieldTrial) { FakeWebRtcVideoEncoder* encoder = encoder_factory_->encoders()[0]; ASSERT_TRUE(encoder_factory_->encoders()[0]->WaitForInitEncode()); EXPECT_EQ(webrtc::kVideoCodecH264, encoder->GetCodecSettings().codecType); - EXPECT_LT(1u, encoder->GetCodecSettings().numberOfSimulcastStreams); + EXPECT_EQ(1u, encoder->GetCodecSettings().numberOfSimulcastStreams); EXPECT_TRUE(channel->SetVideoSend(ssrcs[0], nullptr, nullptr)); } @@ -6186,7 +6184,7 @@ class WebRtcVideoChannelSimulcastTest : public testing::Test { expected_streams = GetSimulcastConfig( num_configured_streams, capture_width, capture_height, 0, webrtc::kDefaultBitratePriority, kDefaultQpMax, - kDefaultVideoMaxFramerate, screenshare, true); + kDefaultVideoMaxFramerate, screenshare); if (screenshare) { for (const webrtc::VideoStream& stream : expected_streams) { // Never scale screen content. diff --git a/modules/rtp_rtcp/source/rtp_format_h264.cc b/modules/rtp_rtcp/source/rtp_format_h264.cc index c8c6f5f989..9c91611dbd 100644 --- a/modules/rtp_rtcp/source/rtp_format_h264.cc +++ b/modules/rtp_rtcp/source/rtp_format_h264.cc @@ -456,7 +456,6 @@ bool RtpDepacketizerH264::ProcessStapAOrSingleNalu( parsed_payload->type.Video.width = 0; parsed_payload->type.Video.height = 0; parsed_payload->type.Video.codec = kVideoCodecH264; - parsed_payload->type.Video.simulcastIdx = 0; parsed_payload->type.Video.is_first_packet_in_frame = true; RTPVideoHeaderH264* h264_header = &parsed_payload->type.Video.codecHeader.H264; @@ -675,7 +674,6 @@ bool RtpDepacketizerH264::ParseFuaNalu( parsed_payload->type.Video.width = 0; parsed_payload->type.Video.height = 0; parsed_payload->type.Video.codec = kVideoCodecH264; - parsed_payload->type.Video.simulcastIdx = 0; parsed_payload->type.Video.is_first_packet_in_frame = first_fragment; RTPVideoHeaderH264* h264 = &parsed_payload->type.Video.codecHeader.H264; h264->packetization_type = kH264FuA; diff --git a/modules/video_coding/BUILD.gn b/modules/video_coding/BUILD.gn index 0ed9da22a9..f247ac9fe7 100644 --- a/modules/video_coding/BUILD.gn +++ b/modules/video_coding/BUILD.gn @@ -152,6 +152,7 @@ rtc_static_library("video_coding") { ":packet", ":video_codec_interface", ":video_coding_utility", + ":webrtc_vp8_helpers", ":webrtc_vp9_helpers", "..:module_api", "..:module_api_public", @@ -236,10 +237,6 @@ rtc_source_set("video_coding_utility") { "utility/moving_average.h", "utility/quality_scaler.cc", "utility/quality_scaler.h", - "utility/simulcast_rate_allocator.cc", - "utility/simulcast_rate_allocator.h", - "utility/simulcast_utility.cc", - "utility/simulcast_utility.h", "utility/vp8_header_parser.cc", "utility/vp8_header_parser.h", "utility/vp9_uncompressed_header_parser.cc", @@ -266,8 +263,6 @@ 
rtc_source_set("video_coding_utility") { "../../rtc_base:sequenced_task_checker", "../../rtc_base/experiments:quality_scaling_experiment", "../../system_wrappers", - "../../system_wrappers:field_trial_api", - "../../system_wrappers:metrics_api", "../rtp_rtcp:rtp_rtcp_format", "//third_party/abseil-cpp/absl/types:optional", ] @@ -296,7 +291,6 @@ rtc_static_library("webrtc_h264") { "../../rtc_base:checks", "../../rtc_base:rtc_base", "../../system_wrappers:metrics_api", - "//third_party/libyuv", ] if (rtc_use_h264) { @@ -383,20 +377,18 @@ rtc_static_library("webrtc_multiplex") { ] } -# This target includes the internal SW codec. -rtc_static_library("webrtc_vp8") { +# This target includes VP8 files that may be used for any VP8 codec, internal SW or external HW. +rtc_static_library("webrtc_vp8_helpers") { visibility = [ "*" ] - poisonous = [ "software_video_codecs" ] sources = [ "codecs/vp8/default_temporal_layers.cc", "codecs/vp8/default_temporal_layers.h", "codecs/vp8/include/vp8.h", - "codecs/vp8/libvpx_vp8_decoder.cc", - "codecs/vp8/libvpx_vp8_decoder.h", - "codecs/vp8/libvpx_vp8_encoder.cc", - "codecs/vp8/libvpx_vp8_encoder.h", + "codecs/vp8/include/vp8_common_types.h", "codecs/vp8/screenshare_layers.cc", "codecs/vp8/screenshare_layers.h", + "codecs/vp8/simulcast_rate_allocator.cc", + "codecs/vp8/simulcast_rate_allocator.h", "codecs/vp8/temporal_layers.cc", "codecs/vp8/temporal_layers.h", ] @@ -424,6 +416,45 @@ rtc_static_library("webrtc_vp8") { "//third_party/abseil-cpp/absl/types:optional", "//third_party/libyuv", ] +} + +# This target includes the internal SW codec. +rtc_static_library("webrtc_vp8") { + visibility = [ "*" ] + poisonous = [ "software_video_codecs" ] + sources = [ + "codecs/vp8/include/vp8.h", + "codecs/vp8/include/vp8_common_types.h", + "codecs/vp8/libvpx_vp8_decoder.cc", + "codecs/vp8/libvpx_vp8_decoder.h", + "codecs/vp8/libvpx_vp8_encoder.cc", + "codecs/vp8/libvpx_vp8_encoder.h", + ] + + if (!build_with_chromium && is_clang) { + # Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163). 
+ suppressed_configs += [ "//build/config/clang:find_bad_constructs" ] + } + + deps = [ + ":video_codec_interface", + ":video_coding_utility", + ":webrtc_vp8_helpers", + "..:module_api", + "../..:webrtc_common", + "../../:typedefs", + "../../api/video:video_frame", + "../../api/video_codecs:video_codecs_api", + "../../common_video", + "../../rtc_base:checks", + "../../rtc_base:rtc_base_approved", + "../../rtc_base:rtc_numerics", + "../../system_wrappers", + "../../system_wrappers:field_trial_api", + "../../system_wrappers:metrics_api", + "//third_party/abseil-cpp/absl/types:optional", + "//third_party/libyuv", + ] if (rtc_build_libvpx) { deps += [ rtc_libvpx_dir ] } @@ -543,8 +574,8 @@ if (rtc_include_tests) { rtc_source_set("simulcast_test_fixture_impl") { testonly = true sources = [ - "utility/simulcast_test_fixture_impl.cc", - "utility/simulcast_test_fixture_impl.h", + "codecs/vp8/simulcast_test_fixture_impl.cc", + "codecs/vp8/simulcast_test_fixture_impl.h", ] if (!build_with_chromium && is_clang) { @@ -556,7 +587,7 @@ if (rtc_include_tests) { ":mock_headers", ":video_codec_interface", ":video_coding", - ":video_coding_utility", + ":webrtc_vp8_helpers", "../../:webrtc_common", "../../api:simulcast_test_fixture_api", "../../api/video:video_frame", @@ -587,6 +618,7 @@ if (rtc_include_tests) { ":video_codec_interface", ":video_coding", ":video_coding_utility", + ":webrtc_vp8_helpers", ":webrtc_vp9_helpers", "../..:webrtc_common", "../../:typedefs", @@ -706,7 +738,7 @@ if (rtc_include_tests) { ":videocodec_test_impl", ":webrtc_h264", ":webrtc_multiplex", - ":webrtc_vp8", + ":webrtc_vp8_helpers", ":webrtc_vp9", ":webrtc_vp9_helpers", "../..:webrtc_common", @@ -796,10 +828,7 @@ if (rtc_include_tests) { "video_sender_unittest.cc", ] if (rtc_use_h264) { - sources += [ - "codecs/h264/h264_encoder_impl_unittest.cc", - "codecs/h264/h264_simulcast_unittest.cc", - ] + sources += [ "codecs/h264/h264_encoder_impl_unittest.cc" ] } deps = [ @@ -808,7 +837,6 @@ if (rtc_include_tests) { ":mock_headers", ":nack_module", ":packet", - ":simulcast_test_fixture_impl", ":video_codec_interface", ":video_codecs_test_framework", ":video_coding", @@ -816,6 +844,7 @@ if (rtc_include_tests) { ":videocodec_test_impl", ":webrtc_h264", ":webrtc_vp8", + ":webrtc_vp8_helpers", ":webrtc_vp9", ":webrtc_vp9_helpers", "..:module_api", diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl.cc b/modules/video_coding/codecs/h264/h264_encoder_impl.cc index cac198ef79..eee954d176 100644 --- a/modules/video_coding/codecs/h264/h264_encoder_impl.cc +++ b/modules/video_coding/codecs/h264/h264_encoder_impl.cc @@ -20,14 +20,10 @@ #include "third_party/openh264/src/codec/api/svc/codec_ver.h" #include "common_video/libyuv/include/webrtc_libyuv.h" -#include "modules/video_coding/utility/simulcast_rate_allocator.h" -#include "modules/video_coding/utility/simulcast_utility.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/timeutils.h" #include "system_wrappers/include/metrics.h" -#include "third_party/libyuv/include/libyuv/convert.h" -#include "third_party/libyuv/include/libyuv/scale.h" namespace webrtc { @@ -161,7 +157,16 @@ static void RtpFragmentize(EncodedImage* encoded_image, } H264EncoderImpl::H264EncoderImpl(const cricket::VideoCodec& codec) - : packetization_mode_(H264PacketizationMode::SingleNalUnit), + : openh264_encoder_(nullptr), + width_(0), + height_(0), + max_frame_rate_(0.0f), + target_bps_(0), + max_bps_(0), + mode_(VideoCodecMode::kRealtimeVideo), + frame_dropping_on_(false), + 
key_frame_interval_(0), + packetization_mode_(H264PacketizationMode::SingleNalUnit), max_payload_size_(0), number_of_cores_(0), encoded_image_callback_(nullptr), @@ -174,30 +179,25 @@ H264EncoderImpl::H264EncoderImpl(const cricket::VideoCodec& codec) packetization_mode_string == "1") { packetization_mode_ = H264PacketizationMode::NonInterleaved; } - downscaled_buffers_.reserve(kMaxSimulcastStreams - 1); - encoded_images_.reserve(kMaxSimulcastStreams); - encoded_image_buffers_.reserve(kMaxSimulcastStreams); - encoders_.reserve(kMaxSimulcastStreams); - configurations_.reserve(kMaxSimulcastStreams); } H264EncoderImpl::~H264EncoderImpl() { Release(); } -int32_t H264EncoderImpl::InitEncode(const VideoCodec* inst, +int32_t H264EncoderImpl::InitEncode(const VideoCodec* codec_settings, int32_t number_of_cores, size_t max_payload_size) { ReportInit(); - if (!inst || inst->codecType != kVideoCodecH264) { + if (!codec_settings || codec_settings->codecType != kVideoCodecH264) { ReportError(); return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - if (inst->maxFramerate == 0) { + if (codec_settings->maxFramerate == 0) { ReportError(); return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } - if (inst->width < 1 || inst->height < 1) { + if (codec_settings->width < 1 || codec_settings->height < 1) { ReportError(); return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; } @@ -207,134 +207,73 @@ int32_t H264EncoderImpl::InitEncode(const VideoCodec* inst, ReportError(); return release_ret; } + RTC_DCHECK(!openh264_encoder_); - int number_of_streams = SimulcastUtility::NumberOfSimulcastStreams(*inst); - bool doing_simulcast = (number_of_streams > 1); - - if (doing_simulcast && (!SimulcastUtility::ValidSimulcastResolutions( - *inst, number_of_streams) || - !SimulcastUtility::ValidSimulcastTemporalLayers( - *inst, number_of_streams))) { - return WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED; + // Create encoder. + if (WelsCreateSVCEncoder(&openh264_encoder_) != 0) { + // Failed to create encoder. + RTC_LOG(LS_ERROR) << "Failed to create OpenH264 encoder"; + RTC_DCHECK(!openh264_encoder_); + ReportError(); + return WEBRTC_VIDEO_CODEC_ERROR; } - downscaled_buffers_.resize(number_of_streams - 1); - encoded_images_.resize(number_of_streams); - encoded_image_buffers_.resize(number_of_streams); - encoders_.resize(number_of_streams); - pictures_.resize(number_of_streams); - configurations_.resize(number_of_streams); + RTC_DCHECK(openh264_encoder_); + if (kOpenH264EncoderDetailedLogging) { + int trace_level = WELS_LOG_DETAIL; + openh264_encoder_->SetOption(ENCODER_OPTION_TRACE_LEVEL, &trace_level); + } + // else WELS_LOG_DEFAULT is used by default. number_of_cores_ = number_of_cores; + // Set internal settings from codec_settings + width_ = codec_settings->width; + height_ = codec_settings->height; + max_frame_rate_ = static_cast(codec_settings->maxFramerate); + mode_ = codec_settings->mode; + frame_dropping_on_ = codec_settings->H264().frameDroppingOn; + key_frame_interval_ = codec_settings->H264().keyFrameInterval; max_payload_size_ = max_payload_size; - codec_ = *inst; - // Code expects simulcastStream resolutions to be correct, make sure they are - // filled even when there are no simulcast layers. - if (codec_.numberOfSimulcastStreams == 0) { - codec_.simulcastStream[0].width = codec_.width; - codec_.simulcastStream[0].height = codec_.height; + // Codec_settings uses kbits/second; encoder uses bits/second. 
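Note: a standalone restatement of the unit handling in the restored single-stream InitEncode just below; the function and its name are illustrative, not members of the encoder.

```cpp
#include "common_types.h"  // webrtc::VideoCodec (assumed location in this revision)

// VideoCodec carries bitrates in kbps while OpenH264 expects bps; when no
// explicit target is configured, the start bitrate is used instead.
uint32_t ChooseTargetBps(const webrtc::VideoCodec& settings) {
  const uint32_t kbps = settings.targetBitrate != 0 ? settings.targetBitrate
                                                    : settings.startBitrate;
  return kbps * 1000;  // e.g. 500 kbps -> 500000 bps
}
```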
+ max_bps_ = codec_settings->maxBitrate * 1000; + if (codec_settings->targetBitrate == 0) + target_bps_ = codec_settings->startBitrate * 1000; + else + target_bps_ = codec_settings->targetBitrate * 1000; + + SEncParamExt encoder_params = CreateEncoderParams(); + + // Initialize. + if (openh264_encoder_->InitializeExt(&encoder_params) != 0) { + RTC_LOG(LS_ERROR) << "Failed to initialize OpenH264 encoder"; + Release(); + ReportError(); + return WEBRTC_VIDEO_CODEC_ERROR; } + // TODO(pbos): Base init params on these values before submitting. + int video_format = EVideoFormatType::videoFormatI420; + openh264_encoder_->SetOption(ENCODER_OPTION_DATAFORMAT, &video_format); - for (int i = 0, idx = number_of_streams - 1; i < number_of_streams; - ++i, --idx) { - // Temporal layers still not supported. - if (inst->simulcastStream[i].numberOfTemporalLayers > 1) { - Release(); - return WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED; - } - ISVCEncoder* openh264_encoder; - // Create encoder. - if (WelsCreateSVCEncoder(&openh264_encoder) != 0) { - // Failed to create encoder. - RTC_LOG(LS_ERROR) << "Failed to create OpenH264 encoder"; - RTC_DCHECK(!openh264_encoder); - Release(); - ReportError(); - return WEBRTC_VIDEO_CODEC_ERROR; - } - RTC_DCHECK(openh264_encoder); - if (kOpenH264EncoderDetailedLogging) { - int trace_level = WELS_LOG_DETAIL; - openh264_encoder->SetOption(ENCODER_OPTION_TRACE_LEVEL, &trace_level); - } - // else WELS_LOG_DEFAULT is used by default. - - // Store h264 encoder. - encoders_[i] = openh264_encoder; - - // Set internal settings from codec_settings - configurations_[i].simulcast_idx = idx; - configurations_[i].sending = false; - configurations_[i].width = codec_.simulcastStream[idx].width; - configurations_[i].height = codec_.simulcastStream[idx].height; - configurations_[i].max_frame_rate = static_cast(codec_.maxFramerate); - configurations_[i].frame_dropping_on = codec_.H264()->frameDroppingOn; - configurations_[i].key_frame_interval = codec_.H264()->keyFrameInterval; - - // Create downscaled image buffers. - if (i > 0) { - downscaled_buffers_[i - 1] = I420Buffer::Create( - configurations_[i].width, configurations_[i].height, - configurations_[i].width, configurations_[i].width / 2, - configurations_[i].width / 2); - } - - // Codec_settings uses kbits/second; encoder uses bits/second. - configurations_[i].max_bps = codec_.maxBitrate * 1000; - if (codec_.targetBitrate == 0) { - configurations_[i].target_bps = codec_.startBitrate * 1000; - } else { - configurations_[i].target_bps = codec_.targetBitrate * 1000; - } - - // Create encoder parameters based on the layer configuration. - SEncParamExt encoder_params = CreateEncoderParams(i); - - // Initialize. - if (openh264_encoder->InitializeExt(&encoder_params) != 0) { - RTC_LOG(LS_ERROR) << "Failed to initialize OpenH264 encoder"; - Release(); - ReportError(); - return WEBRTC_VIDEO_CODEC_ERROR; - } - // TODO(pbos): Base init params on these values before submitting. - int video_format = EVideoFormatType::videoFormatI420; - openh264_encoder->SetOption(ENCODER_OPTION_DATAFORMAT, &video_format); - - // Initialize encoded image. Default buffer size: size of unencoded data. 
- encoded_images_[i]._size = - CalcBufferSize(VideoType::kI420, codec_.simulcastStream[idx].width, - codec_.simulcastStream[idx].height); - encoded_images_[i]._buffer = new uint8_t[encoded_images_[i]._size]; - encoded_image_buffers_[i].reset(encoded_images_[i]._buffer); - encoded_images_[i]._completeFrame = true; - encoded_images_[i]._encodedWidth = codec_.simulcastStream[idx].width; - encoded_images_[i]._encodedHeight = codec_.simulcastStream[idx].height; - encoded_images_[i]._length = 0; - } - - SimulcastRateAllocator init_allocator(codec_); - BitrateAllocation allocation = init_allocator.GetAllocation( - codec_.targetBitrate ? codec_.targetBitrate * 1000 - : codec_.startBitrate * 1000, - codec_.maxFramerate); - return SetRateAllocation(allocation, codec_.maxFramerate); + // Initialize encoded image. Default buffer size: size of unencoded data. + encoded_image_._size = CalcBufferSize(VideoType::kI420, codec_settings->width, + codec_settings->height); + encoded_image_._buffer = new uint8_t[encoded_image_._size]; + encoded_image_buffer_.reset(encoded_image_._buffer); + encoded_image_._completeFrame = true; + encoded_image_._encodedWidth = 0; + encoded_image_._encodedHeight = 0; + encoded_image_._length = 0; + return WEBRTC_VIDEO_CODEC_OK; } int32_t H264EncoderImpl::Release() { - while (!encoders_.empty()) { - ISVCEncoder* openh264_encoder = encoders_.back(); - if (openh264_encoder) { - RTC_CHECK_EQ(0, openh264_encoder->Uninitialize()); - WelsDestroySVCEncoder(openh264_encoder); - } - encoders_.pop_back(); + if (openh264_encoder_) { + RTC_CHECK_EQ(0, openh264_encoder_->Uninitialize()); + WelsDestroySVCEncoder(openh264_encoder_); + openh264_encoder_ = nullptr; } - downscaled_buffers_.clear(); - configurations_.clear(); - encoded_images_.clear(); - encoded_image_buffers_.clear(); - pictures_.clear(); + encoded_image_._buffer = nullptr; + encoded_image_buffer_.reset(); return WEBRTC_VIDEO_CODEC_OK; } @@ -345,59 +284,27 @@ int32_t H264EncoderImpl::RegisterEncodeCompleteCallback( } int32_t H264EncoderImpl::SetRateAllocation( - const BitrateAllocation& bitrate, - uint32_t new_framerate) { - if (encoders_.empty()) - return WEBRTC_VIDEO_CODEC_UNINITIALIZED; - - if (new_framerate < 1) + const VideoBitrateAllocation& bitrate_allocation, + uint32_t framerate) { + if (bitrate_allocation.get_sum_bps() <= 0 || framerate <= 0) return WEBRTC_VIDEO_CODEC_ERR_PARAMETER; - if (bitrate.get_sum_bps() == 0) { - // Encoder paused, turn off all encoding. - for (size_t i = 0; i < configurations_.size(); ++i) - configurations_[i].SetStreamState(false); - return WEBRTC_VIDEO_CODEC_OK; - } - - // At this point, bitrate allocation should already match codec settings. - if (codec_.maxBitrate > 0) - RTC_DCHECK_LE(bitrate.get_sum_kbps(), codec_.maxBitrate); - RTC_DCHECK_GE(bitrate.get_sum_kbps(), codec_.minBitrate); - if (codec_.numberOfSimulcastStreams > 0) - RTC_DCHECK_GE(bitrate.get_sum_kbps(), codec_.simulcastStream[0].minBitrate); - - codec_.maxFramerate = new_framerate; - - size_t stream_idx = encoders_.size() - 1; - for (size_t i = 0; i < encoders_.size(); ++i, --stream_idx) { - // Update layer config. - configurations_[i].target_bps = bitrate.GetSpatialLayerSum(stream_idx); - configurations_[i].max_frame_rate = static_cast(new_framerate); - - if (configurations_[i].target_bps) { - configurations_[i].SetStreamState(true); - - // Update h264 encoder. 
- SBitrateInfo target_bitrate; - memset(&target_bitrate, 0, sizeof(SBitrateInfo)); - target_bitrate.iLayer = SPATIAL_LAYER_ALL, - target_bitrate.iBitrate = configurations_[i].target_bps; - encoders_[i]->SetOption(ENCODER_OPTION_BITRATE, &target_bitrate); - encoders_[i]->SetOption(ENCODER_OPTION_FRAME_RATE, - &configurations_[i].max_frame_rate); - } else { - configurations_[i].SetStreamState(false); - } - } + target_bps_ = bitrate_allocation.get_sum_bps(); + max_frame_rate_ = static_cast(framerate); + SBitrateInfo target_bitrate; + memset(&target_bitrate, 0, sizeof(SBitrateInfo)); + target_bitrate.iLayer = SPATIAL_LAYER_ALL, + target_bitrate.iBitrate = target_bps_; + openh264_encoder_->SetOption(ENCODER_OPTION_BITRATE, &target_bitrate); + openh264_encoder_->SetOption(ENCODER_OPTION_FRAME_RATE, &max_frame_rate_); return WEBRTC_VIDEO_CODEC_OK; } int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame, const CodecSpecificInfo* codec_specific_info, const std::vector* frame_types) { - if (encoders_.empty()) { + if (!IsInitialized()) { ReportError(); return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } @@ -409,134 +316,83 @@ int32_t H264EncoderImpl::Encode(const VideoFrame& input_frame, return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } + bool force_key_frame = false; + if (frame_types != nullptr) { + // We only support a single stream. + RTC_DCHECK_EQ(frame_types->size(), 1); + // Skip frame? + if ((*frame_types)[0] == kEmptyFrame) { + return WEBRTC_VIDEO_CODEC_OK; + } + // Force key frame? + force_key_frame = (*frame_types)[0] == kVideoFrameKey; + } + if (force_key_frame) { + // API doc says ForceIntraFrame(false) does nothing, but calling this + // function forces a key frame regardless of the |bIDR| argument's value. + // (If every frame is a key frame we get lag/delays.) + openh264_encoder_->ForceIntraFrame(true); + } rtc::scoped_refptr frame_buffer = input_frame.video_frame_buffer()->ToI420(); + // EncodeFrame input. + SSourcePicture picture; + memset(&picture, 0, sizeof(SSourcePicture)); + picture.iPicWidth = frame_buffer->width(); + picture.iPicHeight = frame_buffer->height(); + picture.iColorFormat = EVideoFormatType::videoFormatI420; + picture.uiTimeStamp = input_frame.ntp_time_ms(); + picture.iStride[0] = frame_buffer->StrideY(); + picture.iStride[1] = frame_buffer->StrideU(); + picture.iStride[2] = frame_buffer->StrideV(); + picture.pData[0] = const_cast(frame_buffer->DataY()); + picture.pData[1] = const_cast(frame_buffer->DataU()); + picture.pData[2] = const_cast(frame_buffer->DataV()); - bool send_key_frame = false; - for (size_t i = 0; i < configurations_.size(); ++i) { - if (configurations_[i].key_frame_request && configurations_[i].sending) { - send_key_frame = true; - break; - } - } - if (!send_key_frame && frame_types) { - for (size_t i = 0; i < frame_types->size() && i < configurations_.size(); - ++i) { - if ((*frame_types)[i] == kVideoFrameKey && configurations_[i].sending) { - send_key_frame = true; - break; - } - } + // EncodeFrame output. + SFrameBSInfo info; + memset(&info, 0, sizeof(SFrameBSInfo)); + + // Encode! 
+ int enc_ret = openh264_encoder_->EncodeFrame(&picture, &info); + if (enc_ret != 0) { + RTC_LOG(LS_ERROR) << "OpenH264 frame encoding failed, EncodeFrame returned " + << enc_ret << "."; + ReportError(); + return WEBRTC_VIDEO_CODEC_ERROR; } - RTC_DCHECK_EQ(configurations_[0].width, frame_buffer->width()); - RTC_DCHECK_EQ(configurations_[0].height, frame_buffer->height()); + encoded_image_._encodedWidth = frame_buffer->width(); + encoded_image_._encodedHeight = frame_buffer->height(); + encoded_image_._timeStamp = input_frame.timestamp(); + encoded_image_.ntp_time_ms_ = input_frame.ntp_time_ms(); + encoded_image_.capture_time_ms_ = input_frame.render_time_ms(); + encoded_image_.rotation_ = input_frame.rotation(); + encoded_image_.content_type_ = (mode_ == VideoCodecMode::kScreensharing) + ? VideoContentType::SCREENSHARE + : VideoContentType::UNSPECIFIED; + encoded_image_.timing_.flags = VideoSendTiming::kInvalid; + encoded_image_._frameType = ConvertToVideoFrameType(info.eFrameType); - // Encode image for each layer. - for (size_t i = 0; i < encoders_.size(); ++i) { - // EncodeFrame input. - pictures_[i] = {0}; - pictures_[i].iPicWidth = configurations_[i].width; - pictures_[i].iPicHeight = configurations_[i].height; - pictures_[i].iColorFormat = EVideoFormatType::videoFormatI420; - pictures_[i].uiTimeStamp = input_frame.ntp_time_ms(); - // Downscale images on second and ongoing layers. - if (i == 0) { - pictures_[i].iStride[0] = frame_buffer->StrideY(); - pictures_[i].iStride[1] = frame_buffer->StrideU(); - pictures_[i].iStride[2] = frame_buffer->StrideV(); - pictures_[i].pData[0] = const_cast(frame_buffer->DataY()); - pictures_[i].pData[1] = const_cast(frame_buffer->DataU()); - pictures_[i].pData[2] = const_cast(frame_buffer->DataV()); - } else { - pictures_[i].iStride[0] = downscaled_buffers_[i - 1]->StrideY(); - pictures_[i].iStride[1] = downscaled_buffers_[i - 1]->StrideU(); - pictures_[i].iStride[2] = downscaled_buffers_[i - 1]->StrideV(); - pictures_[i].pData[0] = - const_cast(downscaled_buffers_[i - 1]->DataY()); - pictures_[i].pData[1] = - const_cast(downscaled_buffers_[i - 1]->DataU()); - pictures_[i].pData[2] = - const_cast(downscaled_buffers_[i - 1]->DataV()); - // Scale the image down a number of times by downsampling factor. - libyuv::I420Scale(pictures_[i - 1].pData[0], pictures_[i - 1].iStride[0], - pictures_[i - 1].pData[1], pictures_[i - 1].iStride[1], - pictures_[i - 1].pData[2], pictures_[i - 1].iStride[2], - configurations_[i - 1].width, - configurations_[i - 1].height, pictures_[i].pData[0], - pictures_[i].iStride[0], pictures_[i].pData[1], - pictures_[i].iStride[1], pictures_[i].pData[2], - pictures_[i].iStride[2], configurations_[i].width, - configurations_[i].height, libyuv::kFilterBilinear); - } + // Split encoded image up into fragments. This also updates |encoded_image_|. + RTPFragmentationHeader frag_header; + RtpFragmentize(&encoded_image_, &encoded_image_buffer_, *frame_buffer, &info, + &frag_header); - if (!configurations_[i].sending) { - continue; - } - if (frame_types != nullptr) { - // Skip frame? - if ((*frame_types)[i] == kEmptyFrame) { - continue; - } - } - if (send_key_frame) { - // API doc says ForceIntraFrame(false) does nothing, but calling this - // function forces a key frame regardless of the |bIDR| argument's value. - // (If every frame is a key frame we get lag/delays.) - encoders_[i]->ForceIntraFrame(true); - configurations_[i].key_frame_request = false; - } - // EncodeFrame output. 
- SFrameBSInfo info; - memset(&info, 0, sizeof(SFrameBSInfo)); + // Encoder can skip frames to save bandwidth in which case + // |encoded_image_._length| == 0. + if (encoded_image_._length > 0) { + // Parse QP. + h264_bitstream_parser_.ParseBitstream(encoded_image_._buffer, + encoded_image_._length); + h264_bitstream_parser_.GetLastSliceQp(&encoded_image_.qp_); - // Encode! - int enc_ret = encoders_[i]->EncodeFrame(&pictures_[i], &info); - if (enc_ret != 0) { - RTC_LOG(LS_ERROR) - << "OpenH264 frame encoding failed, EncodeFrame returned " << enc_ret - << "."; - ReportError(); - return WEBRTC_VIDEO_CODEC_ERROR; - } - - encoded_images_[i]._encodedWidth = configurations_[i].width; - encoded_images_[i]._encodedHeight = configurations_[i].height; - encoded_images_[i]._timeStamp = input_frame.timestamp(); - encoded_images_[i].ntp_time_ms_ = input_frame.ntp_time_ms(); - encoded_images_[i].capture_time_ms_ = input_frame.render_time_ms(); - encoded_images_[i].rotation_ = input_frame.rotation(); - encoded_images_[i].content_type_ = - (codec_.mode == VideoCodecMode::kScreensharing) - ? VideoContentType::SCREENSHARE - : VideoContentType::UNSPECIFIED; - encoded_images_[i].timing_.flags = VideoSendTiming::kInvalid; - encoded_images_[i]._frameType = ConvertToVideoFrameType(info.eFrameType); - - // Split encoded image up into fragments. This also updates - // |encoded_image_|. - RTPFragmentationHeader frag_header; - RtpFragmentize(&encoded_images_[i], &encoded_image_buffers_[i], - *frame_buffer, &info, &frag_header); - - // Encoder can skip frames to save bandwidth in which case - // |encoded_images_[i]._length| == 0. - if (encoded_images_[i]._length > 0) { - // Parse QP. - h264_bitstream_parser_.ParseBitstream(encoded_images_[i]._buffer, - encoded_images_[i]._length); - h264_bitstream_parser_.GetLastSliceQp(&encoded_images_[i].qp_); - - // Deliver encoded image. - CodecSpecificInfo codec_specific; - codec_specific.codecType = kVideoCodecH264; - codec_specific.codecSpecific.H264.packetization_mode = - packetization_mode_; - codec_specific.codecSpecific.H264.simulcast_idx = - configurations_[i].simulcast_idx; - encoded_image_callback_->OnEncodedImage(encoded_images_[i], - &codec_specific, &frag_header); - } + // Deliver encoded image. + CodecSpecificInfo codec_specific; + codec_specific.codecType = kVideoCodecH264; + codec_specific.codecSpecific.H264.packetization_mode = packetization_mode_; + encoded_image_callback_->OnEncodedImage(encoded_image_, &codec_specific, + &frag_header); } return WEBRTC_VIDEO_CODEC_OK; } @@ -545,35 +401,40 @@ const char* H264EncoderImpl::ImplementationName() const { return "OpenH264"; } +bool H264EncoderImpl::IsInitialized() const { + return openh264_encoder_ != nullptr; +} + // Initialization parameters. // There are two ways to initialize. There is SEncParamBase (cleared with // memset(&p, 0, sizeof(SEncParamBase)) used in Initialize, and SEncParamExt // which is a superset of SEncParamBase (cleared with GetDefaultParams) used // in InitializeExt. 
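// [Editorial sketch - not part of the patch. The comment block above mentions
//  the two OpenH264 initialization paths; the snippet below contrasts them,
//  using the OpenH264 ISVCEncoder API (Initialize, InitializeExt,
//  GetDefaultParams). The numeric values are placeholders.]
//
//   ISVCEncoder* enc = ...;  // obtained via WelsCreateSVCEncoder()
//
//   // Path 1: Initialize() takes the small SEncParamBase, which the caller
//   // clears manually before filling in.
//   SEncParamBase base;
//   memset(&base, 0, sizeof(SEncParamBase));
//   base.iUsageType = CAMERA_VIDEO_REAL_TIME;
//   base.iPicWidth = 640;
//   base.iPicHeight = 480;
//   base.iTargetBitrate = 500000;  // bps
//   base.fMaxFrameRate = 30.0f;
//   enc->Initialize(&base);
//
//   // Path 2: InitializeExt() takes the SEncParamExt superset, which is
//   // pre-populated by GetDefaultParams() and then selectively overridden,
//   // as CreateEncoderParams() below does.
//   SEncParamExt ext;
//   enc->GetDefaultParams(&ext);
//   ext.iPicWidth = 640;
//   ext.iPicHeight = 480;
//   enc->InitializeExt(&ext);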
-SEncParamExt H264EncoderImpl::CreateEncoderParams(size_t i) const { +SEncParamExt H264EncoderImpl::CreateEncoderParams() const { + RTC_DCHECK(openh264_encoder_); SEncParamExt encoder_params; - encoders_[i]->GetDefaultParams(&encoder_params); - if (codec_.mode == VideoCodecMode::kRealtimeVideo) { + openh264_encoder_->GetDefaultParams(&encoder_params); + if (mode_ == VideoCodecMode::kRealtimeVideo) { encoder_params.iUsageType = CAMERA_VIDEO_REAL_TIME; - } else if (codec_.mode == VideoCodecMode::kScreensharing) { + } else if (mode_ == VideoCodecMode::kScreensharing) { encoder_params.iUsageType = SCREEN_CONTENT_REAL_TIME; } else { RTC_NOTREACHED(); } - encoder_params.iPicWidth = configurations_[i].width; - encoder_params.iPicHeight = configurations_[i].height; - encoder_params.iTargetBitrate = configurations_[i].target_bps; - encoder_params.iMaxBitrate = configurations_[i].max_bps; + encoder_params.iPicWidth = width_; + encoder_params.iPicHeight = height_; + encoder_params.iTargetBitrate = target_bps_; + encoder_params.iMaxBitrate = max_bps_; // Rate Control mode encoder_params.iRCMode = RC_BITRATE_MODE; - encoder_params.fMaxFrameRate = configurations_[i].max_frame_rate; + encoder_params.fMaxFrameRate = max_frame_rate_; // The following parameters are extension parameters (they're in SEncParamExt, // not in SEncParamBase). - encoder_params.bEnableFrameSkip = configurations_[i].frame_dropping_on; + encoder_params.bEnableFrameSkip = frame_dropping_on_; // |uiIntraPeriod| - multiple of GOP size // |keyFrameInterval| - number of frames - encoder_params.uiIntraPeriod = configurations_[i].key_frame_interval; + encoder_params.uiIntraPeriod = key_frame_interval_; encoder_params.uiMaxNalSize = 0; // Threading model: use auto. // 0: auto (dynamic imp. internal encoder) @@ -641,12 +502,4 @@ VideoEncoder::ScalingSettings H264EncoderImpl::GetScalingSettings() const { kHighH264QpThreshold); } -void H264EncoderImpl::LayerConfig::SetStreamState(bool send_stream) { - if (send_stream && !sending) { - // Need a key frame if we have not sent this stream before. - key_frame_request = true; - } - sending = send_stream; -} - } // namespace webrtc diff --git a/modules/video_coding/codecs/h264/h264_encoder_impl.h b/modules/video_coding/codecs/h264/h264_encoder_impl.h index 0d259660f6..c48439b444 100644 --- a/modules/video_coding/codecs/h264/h264_encoder_impl.h +++ b/modules/video_coding/codecs/h264/h264_encoder_impl.h @@ -15,7 +15,6 @@ #include #include -#include "api/video/i420_buffer.h" #include "common_video/h264/h264_bitstream_parser.h" #include "modules/video_coding/codecs/h264/include/h264.h" #include "modules/video_coding/utility/quality_scaler.h" @@ -27,22 +26,6 @@ class ISVCEncoder; namespace webrtc { class H264EncoderImpl : public H264Encoder { - public: - struct LayerConfig { - int simulcast_idx = 0; - int width = -1; - int height = -1; - bool sending = true; - bool key_frame_request = false; - float max_frame_rate = 0; - uint32_t target_bps = 0; - uint32_t max_bps = 0; - bool frame_dropping_on = false; - int key_frame_interval = 0; - - void SetStreamState(bool send_stream); - }; - public: explicit H264EncoderImpl(const cricket::VideoCodec& codec); ~H264EncoderImpl() override; @@ -83,24 +66,32 @@ class H264EncoderImpl : public H264Encoder { } private: - SEncParamExt CreateEncoderParams(size_t i) const; + bool IsInitialized() const; + SEncParamExt CreateEncoderParams() const; webrtc::H264BitstreamParser h264_bitstream_parser_; // Reports statistics with histograms. 
void ReportInit(); void ReportError(); - std::vector encoders_; - std::vector pictures_; - std::vector> downscaled_buffers_; - std::vector configurations_; - std::vector encoded_images_; - std::vector> encoded_image_buffers_; - - VideoCodec codec_; + ISVCEncoder* openh264_encoder_; + // Settings that are used by this encoder. + int width_; + int height_; + float max_frame_rate_; + uint32_t target_bps_; + uint32_t max_bps_; + VideoCodecMode mode_; + // H.264 specifc parameters + bool frame_dropping_on_; + int key_frame_interval_; H264PacketizationMode packetization_mode_; + size_t max_payload_size_; int32_t number_of_cores_; + + EncodedImage encoded_image_; + std::unique_ptr encoded_image_buffer_; EncodedImageCallback* encoded_image_callback_; bool has_reported_init_; diff --git a/modules/video_coding/codecs/h264/h264_simulcast_unittest.cc b/modules/video_coding/codecs/h264/h264_simulcast_unittest.cc deleted file mode 100644 index 237728508d..0000000000 --- a/modules/video_coding/codecs/h264/h264_simulcast_unittest.cc +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include "api/test/create_simulcast_test_fixture.h" -#include "api/test/simulcast_test_fixture.h" -#include "modules/video_coding/codecs/h264/include/h264.h" -#include "rtc_base/ptr_util.h" -#include "test/function_video_decoder_factory.h" -#include "test/function_video_encoder_factory.h" -#include "test/gtest.h" - -namespace webrtc { -namespace test { - -namespace { -std::unique_ptr CreateSpecificSimulcastTestFixture() { - std::unique_ptr encoder_factory = - rtc::MakeUnique( - []() { return H264Encoder::Create(cricket::VideoCodec("H264")); }); - std::unique_ptr decoder_factory = - rtc::MakeUnique( - []() { return H264Decoder::Create(); }); - return CreateSimulcastTestFixture(std::move(encoder_factory), - std::move(decoder_factory), - SdpVideoFormat("H264")); -} -} // namespace - -TEST(TestH264Simulcast, TestKeyFrameRequestsOnAllStreams) { - auto fixture = CreateSpecificSimulcastTestFixture(); - fixture->TestKeyFrameRequestsOnAllStreams(); -} - -TEST(TestH264Simulcast, TestPaddingAllStreams) { - auto fixture = CreateSpecificSimulcastTestFixture(); - fixture->TestPaddingAllStreams(); -} - -TEST(TestH264Simulcast, TestPaddingTwoStreams) { - auto fixture = CreateSpecificSimulcastTestFixture(); - fixture->TestPaddingTwoStreams(); -} - -TEST(TestH264Simulcast, TestPaddingTwoStreamsOneMaxedOut) { - auto fixture = CreateSpecificSimulcastTestFixture(); - fixture->TestPaddingTwoStreamsOneMaxedOut(); -} - -TEST(TestH264Simulcast, TestPaddingOneStream) { - auto fixture = CreateSpecificSimulcastTestFixture(); - fixture->TestPaddingOneStream(); -} - -TEST(TestH264Simulcast, TestPaddingOneStreamTwoMaxedOut) { - auto fixture = CreateSpecificSimulcastTestFixture(); - fixture->TestPaddingOneStreamTwoMaxedOut(); -} - -TEST(TestH264Simulcast, TestSendAllStreams) { - auto fixture = CreateSpecificSimulcastTestFixture(); - fixture->TestSendAllStreams(); -} - -TEST(TestH264Simulcast, TestDisablingStreams) { - auto fixture = CreateSpecificSimulcastTestFixture(); - fixture->TestDisablingStreams(); -} - -TEST(TestH264Simulcast, TestActiveStreams) { - 
auto fixture = CreateSpecificSimulcastTestFixture(); - fixture->TestActiveStreams(); -} - -TEST(TestH264Simulcast, TestSwitchingToOneStream) { - auto fixture = CreateSpecificSimulcastTestFixture(); - fixture->TestSwitchingToOneStream(); -} - -TEST(TestH264Simulcast, TestSwitchingToOneOddStream) { - auto fixture = CreateSpecificSimulcastTestFixture(); - fixture->TestSwitchingToOneOddStream(); -} - -TEST(TestH264Simulcast, TestStrideEncodeDecode) { - auto fixture = CreateSpecificSimulcastTestFixture(); - fixture->TestStrideEncodeDecode(); -} - -} // namespace test -} // namespace webrtc diff --git a/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc b/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc index abeef3100b..394ee14c4e 100644 --- a/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc +++ b/modules/video_coding/codecs/test/videocodec_test_fixture_impl.cc @@ -52,7 +52,7 @@ void ConfigureSimulcast(VideoCodec* codec_settings) { const std::vector streams = cricket::GetSimulcastConfig( codec_settings->numberOfSimulcastStreams, codec_settings->width, codec_settings->height, kMaxBitrateBps, kBitratePriority, kMaxQp, - kMaxFramerateFps, /* is_screenshare = */ false, true); + kMaxFramerateFps, /* is_screenshare = */ false); for (size_t i = 0; i < streams.size(); ++i) { SimulcastStream* ss = &codec_settings->simulcastStream[i]; diff --git a/modules/video_coding/codecs/test/videoprocessor.cc b/modules/video_coding/codecs/test/videoprocessor.cc index 98ba07f764..a4b8edbe6b 100644 --- a/modules/video_coding/codecs/test/videoprocessor.cc +++ b/modules/video_coding/codecs/test/videoprocessor.cc @@ -19,10 +19,10 @@ #include "common_video/h264/h264_common.h" #include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h" #include "modules/video_coding/include/video_codec_initializer.h" #include "modules/video_coding/include/video_error_codes.h" #include "modules/video_coding/utility/default_video_bitrate_allocator.h" -#include "modules/video_coding/utility/simulcast_rate_allocator.h" #include "rtc_base/checks.h" #include "rtc_base/timeutils.h" #include "test/gtest.h" diff --git a/modules/video_coding/codecs/vp8/default_temporal_layers.cc b/modules/video_coding/codecs/vp8/default_temporal_layers.cc index 986c5ad9f0..eea693370e 100644 --- a/modules/video_coding/codecs/vp8/default_temporal_layers.cc +++ b/modules/video_coding/codecs/vp8/default_temporal_layers.cc @@ -18,6 +18,7 @@ #include #include "modules/include/module_common_types.h" +#include "modules/video_coding/codecs/vp8/include/vp8_common_types.h" #include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" diff --git a/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc b/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc index 2b69745e3e..f61c302f17 100644 --- a/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc +++ b/modules/video_coding/codecs/vp8/default_temporal_layers_unittest.cc @@ -10,8 +10,8 @@ #include "modules/video_coding/codecs/vp8/default_temporal_layers.h" #include "modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h" +#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h" #include "modules/video_coding/include/video_codec_interface.h" -#include "modules/video_coding/utility/simulcast_rate_allocator.h" #include "test/field_trial.h" #include 
"test/gtest.h" diff --git a/modules/video_coding/codecs/vp8/include/vp8_common_types.h b/modules/video_coding/codecs/vp8/include/vp8_common_types.h new file mode 100644 index 0000000000..dff70ac332 --- /dev/null +++ b/modules/video_coding/codecs/vp8/include/vp8_common_types.h @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_COMMON_TYPES_H_ +#define MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_COMMON_TYPES_H_ + +#include "common_types.h" // NOLINT(build/include) + +namespace webrtc { + +// Ratio allocation between temporal streams: +// Values as required for the VP8 codec (accumulating). +static const float + kVp8LayerRateAlloction[kMaxSimulcastStreams][kMaxTemporalStreams] = { + {1.0f, 1.0f, 1.0f, 1.0f}, // 1 layer + {0.6f, 1.0f, 1.0f, 1.0f}, // 2 layers {60%, 40%} + {0.4f, 0.6f, 1.0f, 1.0f}, // 3 layers {40%, 20%, 40%} + {0.25f, 0.4f, 0.6f, 1.0f} // 4 layers {25%, 15%, 20%, 40%} +}; + +} // namespace webrtc +#endif // MODULES_VIDEO_CODING_CODECS_VP8_INCLUDE_VP8_COMMON_TYPES_H_ diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc index c34502714d..522c98971f 100644 --- a/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc +++ b/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc @@ -1,5 +1,5 @@ /* - * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -14,8 +14,7 @@ #include "common_video/libyuv/include/webrtc_libyuv.h" #include "modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h" -#include "modules/video_coding/utility/simulcast_rate_allocator.h" -#include "modules/video_coding/utility/simulcast_utility.h" +#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h" #include "rtc_base/checks.h" #include "rtc_base/ptr_util.h" #include "rtc_base/timeutils.h" @@ -48,7 +47,7 @@ enum denoiserState { }; // Greatest common divisior -static int GCD(int a, int b) { +int GCD(int a, int b) { int c = a % b; while (c != 0) { a = b; @@ -58,6 +57,53 @@ static int GCD(int a, int b) { return b; } +uint32_t SumStreamMaxBitrate(int streams, const VideoCodec& codec) { + uint32_t bitrate_sum = 0; + for (int i = 0; i < streams; ++i) { + bitrate_sum += codec.simulcastStream[i].maxBitrate; + } + return bitrate_sum; +} + +int NumberOfStreams(const VideoCodec& codec) { + int streams = + codec.numberOfSimulcastStreams < 1 ? 
1 : codec.numberOfSimulcastStreams; + uint32_t simulcast_max_bitrate = SumStreamMaxBitrate(streams, codec); + if (simulcast_max_bitrate == 0) { + streams = 1; + } + return streams; +} + +bool ValidSimulcastResolutions(const VideoCodec& codec, int num_streams) { + if (codec.width != codec.simulcastStream[num_streams - 1].width || + codec.height != codec.simulcastStream[num_streams - 1].height) { + return false; + } + for (int i = 0; i < num_streams; ++i) { + if (codec.width * codec.simulcastStream[i].height != + codec.height * codec.simulcastStream[i].width) { + return false; + } + } + for (int i = 1; i < num_streams; ++i) { + if (codec.simulcastStream[i].width != + codec.simulcastStream[i - 1].width * 2) { + return false; + } + } + return true; +} + +bool ValidSimulcastTemporalLayers(const VideoCodec& codec, int num_streams) { + for (int i = 0; i < num_streams - 1; ++i) { + if (codec.simulcastStream[i].numberOfTemporalLayers != + codec.simulcastStream[i + 1].numberOfTemporalLayers) + return false; + } + return true; +} + bool GetGfBoostPercentageFromFieldTrialGroup(int* boost_percentage) { std::string group = webrtc::field_trial::FindFullName(kVp8GfBoostFieldTrial); if (group.empty()) @@ -323,13 +369,12 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst, return retVal; } - int number_of_streams = SimulcastUtility::NumberOfSimulcastStreams(*inst); + int number_of_streams = NumberOfStreams(*inst); bool doing_simulcast = (number_of_streams > 1); - if (doing_simulcast && (!SimulcastUtility::ValidSimulcastResolutions( - *inst, number_of_streams) || - !SimulcastUtility::ValidSimulcastTemporalLayers( - *inst, number_of_streams))) { + if (doing_simulcast && + (!ValidSimulcastResolutions(*inst, number_of_streams) || + !ValidSimulcastTemporalLayers(*inst, number_of_streams))) { return WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED; } diff --git a/modules/video_coding/codecs/vp8/libvpx_vp8_simulcast_test.cc b/modules/video_coding/codecs/vp8/libvpx_vp8_simulcast_test.cc index d8c0dbbb81..9ecb9cf3ba 100644 --- a/modules/video_coding/codecs/vp8/libvpx_vp8_simulcast_test.cc +++ b/modules/video_coding/codecs/vp8/libvpx_vp8_simulcast_test.cc @@ -30,8 +30,7 @@ std::unique_ptr CreateSpecificSimulcastTestFixture() { rtc::MakeUnique( []() { return VP8Decoder::Create(); }); return CreateSimulcastTestFixture(std::move(encoder_factory), - std::move(decoder_factory), - SdpVideoFormat("VP8")); + std::move(decoder_factory)); } } // namespace diff --git a/modules/video_coding/codecs/vp8/screenshare_layers.cc b/modules/video_coding/codecs/vp8/screenshare_layers.cc index cd2449052b..f7f1019820 100644 --- a/modules/video_coding/codecs/vp8/screenshare_layers.cc +++ b/modules/video_coding/codecs/vp8/screenshare_layers.cc @@ -37,8 +37,7 @@ constexpr int ScreenshareLayers::kMaxNumTemporalLayers; // been exceeded. This prevents needless keyframe requests. 
const int ScreenshareLayers::kMaxFrameIntervalMs = 2750; -ScreenshareLayers::ScreenshareLayers(int num_temporal_layers, - Clock* clock) +ScreenshareLayers::ScreenshareLayers(int num_temporal_layers, Clock* clock) : clock_(clock), number_of_temporal_layers_( std::min(kMaxNumTemporalLayers, num_temporal_layers)), diff --git a/modules/video_coding/codecs/vp8/screenshare_layers.h b/modules/video_coding/codecs/vp8/screenshare_layers.h index c1b5fa79a7..5185b45d8f 100644 --- a/modules/video_coding/codecs/vp8/screenshare_layers.h +++ b/modules/video_coding/codecs/vp8/screenshare_layers.h @@ -28,8 +28,7 @@ class ScreenshareLayers : public TemporalLayers { static const double kAcceptableTargetOvershoot; static const int kMaxFrameIntervalMs; - ScreenshareLayers(int num_temporal_layers, - Clock* clock); + ScreenshareLayers(int num_temporal_layers, Clock* clock); virtual ~ScreenshareLayers(); // Returns the recommended VP8 encode flags needed. May refresh the decoder diff --git a/modules/video_coding/utility/simulcast_rate_allocator.cc b/modules/video_coding/codecs/vp8/simulcast_rate_allocator.cc similarity index 95% rename from modules/video_coding/utility/simulcast_rate_allocator.cc rename to modules/video_coding/codecs/vp8/simulcast_rate_allocator.cc index b33b1e8568..f8cfe88b07 100644 --- a/modules/video_coding/utility/simulcast_rate_allocator.cc +++ b/modules/video_coding/codecs/vp8/simulcast_rate_allocator.cc @@ -8,14 +8,14 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/video_coding/utility/simulcast_rate_allocator.h" +#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h" #include #include #include #include -#include "common_types.h" // NOLINT(build/include) +#include "modules/video_coding/codecs/vp8/include/vp8_common_types.h" #include "rtc_base/checks.h" namespace webrtc { @@ -190,7 +190,7 @@ std::vector SimulcastRateAllocator::DefaultTemporalLayerAllocation( std::vector bitrates; for (size_t i = 0; i < num_temporal_layers; ++i) { float layer_bitrate = - bitrate_kbps * kLayerRateAllocation[num_temporal_layers - 1][i]; + bitrate_kbps * kVp8LayerRateAlloction[num_temporal_layers - 1][i]; bitrates.push_back(static_cast(layer_bitrate + 0.5)); } @@ -235,10 +235,9 @@ const VideoCodec& webrtc::SimulcastRateAllocator::GetCodec() const { int SimulcastRateAllocator::NumTemporalStreams(size_t simulcast_id) const { return std::max( - 1, - codec_.codecType == kVideoCodecVP8 && codec_.numberOfSimulcastStreams == 0 - ? codec_.VP8().numberOfTemporalLayers - : codec_.simulcastStream[simulcast_id].numberOfTemporalLayers); + 1, codec_.numberOfSimulcastStreams == 0 + ? codec_.VP8().numberOfTemporalLayers + : codec_.simulcastStream[simulcast_id].numberOfTemporalLayers); } } // namespace webrtc diff --git a/modules/video_coding/utility/simulcast_rate_allocator.h b/modules/video_coding/codecs/vp8/simulcast_rate_allocator.h similarity index 75% rename from modules/video_coding/utility/simulcast_rate_allocator.h rename to modules/video_coding/codecs/vp8/simulcast_rate_allocator.h index daa1523e00..b958781289 100644 --- a/modules/video_coding/utility/simulcast_rate_allocator.h +++ b/modules/video_coding/codecs/vp8/simulcast_rate_allocator.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#ifndef MODULES_VIDEO_CODING_UTILITY_SIMULCAST_RATE_ALLOCATOR_H_ -#define MODULES_VIDEO_CODING_UTILITY_SIMULCAST_RATE_ALLOCATOR_H_ +#ifndef MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_RATE_ALLOCATOR_H_ +#define MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_RATE_ALLOCATOR_H_ #include @@ -20,20 +20,11 @@ #include "api/video_codecs/video_encoder.h" #include "common_types.h" // NOLINT(build/include) #include "common_video/include/video_bitrate_allocator.h" +#include "modules/video_coding/codecs/vp8/temporal_layers.h" #include "rtc_base/constructormagic.h" namespace webrtc { -// Ratio allocation between temporal streams: -// Values as required for the VP8 codec (accumulating). -static const float - kLayerRateAllocation[kMaxSimulcastStreams][kMaxTemporalStreams] = { - {1.0f, 1.0f, 1.0f, 1.0f}, // 1 layer - {0.6f, 1.0f, 1.0f, 1.0f}, // 2 layers {60%, 40%} - {0.4f, 0.6f, 1.0f, 1.0f}, // 3 layers {40%, 20%, 40%} - {0.25f, 0.4f, 0.6f, 1.0f} // 4 layers {25%, 15%, 20%, 40%} -}; - class SimulcastRateAllocator : public VideoBitrateAllocator { public: explicit SimulcastRateAllocator(const VideoCodec& codec); @@ -67,4 +58,4 @@ class SimulcastRateAllocator : public VideoBitrateAllocator { } // namespace webrtc -#endif // MODULES_VIDEO_CODING_UTILITY_SIMULCAST_RATE_ALLOCATOR_H_ +#endif // MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_RATE_ALLOCATOR_H_ diff --git a/modules/video_coding/utility/simulcast_test_fixture_impl.cc b/modules/video_coding/codecs/vp8/simulcast_test_fixture_impl.cc similarity index 91% rename from modules/video_coding/utility/simulcast_test_fixture_impl.cc rename to modules/video_coding/codecs/vp8/simulcast_test_fixture_impl.cc index 2656a584fb..5a4712f1d1 100644 --- a/modules/video_coding/utility/simulcast_test_fixture_impl.cc +++ b/modules/video_coding/codecs/vp8/simulcast_test_fixture_impl.cc @@ -8,7 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "modules/video_coding/utility/simulcast_test_fixture_impl.h" +#include "modules/video_coding/codecs/vp8/simulcast_test_fixture_impl.h" #include #include @@ -18,6 +18,8 @@ #include "api/video_codecs/sdp_video_format.h" #include "common_video/include/video_frame.h" #include "common_video/libyuv/include/webrtc_libyuv.h" +#include "modules/video_coding/codecs/vp8/include/vp8.h" +#include "modules/video_coding/codecs/vp8/temporal_layers.h" #include "modules/video_coding/include/video_coding_defines.h" #include "rtc_base/checks.h" #include "test/gtest.h" @@ -42,7 +44,6 @@ const int kMaxBitrates[kNumberOfSimulcastStreams] = {150, 600, 1200}; const int kMinBitrates[kNumberOfSimulcastStreams] = {50, 150, 600}; const int kTargetBitrates[kNumberOfSimulcastStreams] = {100, 450, 1000}; const int kDefaultTemporalLayerProfile[3] = {3, 3, 3}; -const int kNoTemporalLayerProfile[3] = {0, 0, 0}; template void SetExpectedValues3(T value0, T value1, T value2, T* expected_values) { @@ -60,15 +61,15 @@ enum PlaneType { } // namespace -class SimulcastTestFixtureImpl::TestEncodedImageCallback +class SimulcastTestFixtureImpl::Vp8TestEncodedImageCallback : public EncodedImageCallback { public: - TestEncodedImageCallback() { + Vp8TestEncodedImageCallback() : picture_id_(-1) { memset(temporal_layer_, -1, sizeof(temporal_layer_)); memset(layer_sync_, false, sizeof(layer_sync_)); } - ~TestEncodedImageCallback() { + ~Vp8TestEncodedImageCallback() { delete[] encoded_key_frame_._buffer; delete[] encoded_frame_._buffer; } @@ -76,15 +77,8 @@ class SimulcastTestFixtureImpl::TestEncodedImageCallback virtual Result OnEncodedImage(const EncodedImage& encoded_image, const CodecSpecificInfo* codec_specific_info, const RTPFragmentationHeader* fragmentation) { - uint16_t simulcast_idx = 0; - bool is_vp8 = (codec_specific_info->codecType == kVideoCodecVP8); - if (is_vp8) { - simulcast_idx = codec_specific_info->codecSpecific.VP8.simulcastIdx; - } else { - simulcast_idx = codec_specific_info->codecSpecific.H264.simulcast_idx; - } // Only store the base layer. - if (simulcast_idx) { + if (codec_specific_info->codecSpecific.VP8.simulcastIdx == 0) { if (encoded_image._frameType == kVideoFrameKey) { delete[] encoded_key_frame_._buffer; encoded_key_frame_._buffer = new uint8_t[encoded_image._size]; @@ -103,18 +97,17 @@ class SimulcastTestFixtureImpl::TestEncodedImageCallback encoded_image._length); } } - if (is_vp8) { - layer_sync_[codec_specific_info->codecSpecific.VP8.simulcastIdx] = - codec_specific_info->codecSpecific.VP8.layerSync; - temporal_layer_[codec_specific_info->codecSpecific.VP8.simulcastIdx] = - codec_specific_info->codecSpecific.VP8.temporalIdx; - } + layer_sync_[codec_specific_info->codecSpecific.VP8.simulcastIdx] = + codec_specific_info->codecSpecific.VP8.layerSync; + temporal_layer_[codec_specific_info->codecSpecific.VP8.simulcastIdx] = + codec_specific_info->codecSpecific.VP8.temporalIdx; return Result(Result::OK, encoded_image._timeStamp); } - // This method only makes sense for VP8. 
- void GetLastEncodedFrameInfo(int* temporal_layer, + void GetLastEncodedFrameInfo(int* picture_id, + int* temporal_layer, bool* layer_sync, int stream) { + *picture_id = picture_id_; *temporal_layer = temporal_layer_[stream]; *layer_sync = layer_sync_[stream]; } @@ -128,14 +121,15 @@ class SimulcastTestFixtureImpl::TestEncodedImageCallback private: EncodedImage encoded_key_frame_; EncodedImage encoded_frame_; + int picture_id_; int temporal_layer_[kNumberOfSimulcastStreams]; bool layer_sync_[kNumberOfSimulcastStreams]; }; -class SimulcastTestFixtureImpl::TestDecodedImageCallback +class SimulcastTestFixtureImpl::Vp8TestDecodedImageCallback : public DecodedImageCallback { public: - TestDecodedImageCallback() : decoded_frames_(0) {} + Vp8TestDecodedImageCallback() : decoded_frames_(0) {} int32_t Decoded(VideoFrame& decoded_image) override { rtc::scoped_refptr i420_buffer = decoded_image.video_frame_buffer()->ToI420(); @@ -204,9 +198,7 @@ void ConfigureStream(int width, stream->maxBitrate = max_bitrate; stream->minBitrate = min_bitrate; stream->targetBitrate = target_bitrate; - if (num_temporal_layers >= 0) { - stream->numberOfTemporalLayers = num_temporal_layers; - } + stream->numberOfTemporalLayers = num_temporal_layers; stream->qpMax = 45; stream->active = true; } @@ -215,11 +207,10 @@ void ConfigureStream(int width, void SimulcastTestFixtureImpl::DefaultSettings( VideoCodec* settings, - const int* temporal_layer_profile, - VideoCodecType codec_type) { + const int* temporal_layer_profile) { RTC_CHECK(settings); memset(settings, 0, sizeof(VideoCodec)); - settings->codecType = codec_type; + settings->codecType = kVideoCodecVP8; // 96 to 127 dynamic payload types for video codecs settings->plType = 120; settings->startBitrate = 300; @@ -242,26 +233,18 @@ void SimulcastTestFixtureImpl::DefaultSettings( ConfigureStream(kDefaultWidth, kDefaultHeight, kMaxBitrates[2], kMinBitrates[2], kTargetBitrates[2], &settings->simulcastStream[2], temporal_layer_profile[2]); - if (codec_type == kVideoCodecVP8) { - settings->VP8()->denoisingOn = true; - settings->VP8()->automaticResizeOn = false; - settings->VP8()->frameDroppingOn = true; - settings->VP8()->keyFrameInterval = 3000; - } else { - settings->H264()->frameDroppingOn = true; - settings->H264()->keyFrameInterval = 3000; - } + settings->VP8()->denoisingOn = true; + settings->VP8()->automaticResizeOn = false; + settings->VP8()->frameDroppingOn = true; + settings->VP8()->keyFrameInterval = 3000; } SimulcastTestFixtureImpl::SimulcastTestFixtureImpl( std::unique_ptr encoder_factory, - std::unique_ptr decoder_factory, - SdpVideoFormat video_format) - : codec_type_(PayloadStringToCodecType(video_format.name)) { - encoder_ = encoder_factory->CreateVideoEncoder(video_format); - decoder_ = decoder_factory->CreateVideoDecoder(video_format); - SetUpCodec(codec_type_ == kVideoCodecVP8 ? 
kDefaultTemporalLayerProfile - : kNoTemporalLayerProfile); + std::unique_ptr decoder_factory) { + encoder_ = encoder_factory->CreateVideoEncoder(SdpVideoFormat("VP8")); + decoder_ = decoder_factory->CreateVideoDecoder(SdpVideoFormat("VP8")); + SetUpCodec(kDefaultTemporalLayerProfile); } SimulcastTestFixtureImpl::~SimulcastTestFixtureImpl() { @@ -272,7 +255,7 @@ SimulcastTestFixtureImpl::~SimulcastTestFixtureImpl() { void SimulcastTestFixtureImpl::SetUpCodec(const int* temporal_layer_profile) { encoder_->RegisterEncodeCompleteCallback(&encoder_callback_); decoder_->RegisterDecodeCompleteCallback(&decoder_callback_); - DefaultSettings(&settings_, temporal_layer_profile, codec_type_); + DefaultSettings(&settings_, temporal_layer_profile); SetUpRateAllocator(); EXPECT_EQ(0, encoder_->InitEncode(&settings_, 1, 1200)); EXPECT_EQ(0, decoder_->InitDecode(&settings_, 1)); @@ -378,14 +361,16 @@ void SimulcastTestFixtureImpl::ExpectStreams(FrameType frame_type, } void SimulcastTestFixtureImpl::VerifyTemporalIdxAndSyncForAllSpatialLayers( - TestEncodedImageCallback* encoder_callback, + Vp8TestEncodedImageCallback* encoder_callback, const int* expected_temporal_idx, const bool* expected_layer_sync, int num_spatial_layers) { + int picture_id = -1; int temporal_layer = -1; bool layer_sync = false; for (int i = 0; i < num_spatial_layers; i++) { - encoder_callback->GetLastEncodedFrameInfo(&temporal_layer, &layer_sync, i); + encoder_callback->GetLastEncodedFrameInfo(&picture_id, &temporal_layer, + &layer_sync, i); EXPECT_EQ(expected_temporal_idx[i], temporal_layer); EXPECT_EQ(expected_layer_sync[i], layer_sync); } @@ -573,15 +558,9 @@ void SimulcastTestFixtureImpl::TestActiveStreams() { } void SimulcastTestFixtureImpl::SwitchingToOneStream(int width, int height) { - const int* temporal_layer_profile = nullptr; // Disable all streams except the last and set the bitrate of the last to // 100 kbps. This verifies the way GTP switches to screenshare mode. - if (codec_type_ == kVideoCodecVP8) { - settings_.VP8()->numberOfTemporalLayers = 1; - temporal_layer_profile = kDefaultTemporalLayerProfile; - } else { - temporal_layer_profile = kNoTemporalLayerProfile; - } + settings_.VP8()->numberOfTemporalLayers = 1; settings_.maxBitrate = 100; settings_.startBitrate = 100; settings_.width = width; @@ -626,7 +605,7 @@ void SimulcastTestFixtureImpl::SwitchingToOneStream(int width, int height) { EXPECT_EQ(0, encoder_->Encode(*input_frame_, NULL, &frame_types)); // Switch back. - DefaultSettings(&settings_, temporal_layer_profile, codec_type_); + DefaultSettings(&settings_, kDefaultTemporalLayerProfile); // Start at the lowest bitrate for enabling base stream. settings_.startBitrate = kMinBitrates[0]; SetUpRateAllocator(); @@ -657,8 +636,7 @@ void SimulcastTestFixtureImpl::TestSwitchingToOneSmallStream() { // 3-3-3 pattern: 3 temporal layers for all spatial streams, so same // temporal_layer id and layer_sync is expected for all streams. void SimulcastTestFixtureImpl::TestSpatioTemporalLayers333PatternEncoder() { - EXPECT_EQ(codec_type_, kVideoCodecVP8); - TestEncodedImageCallback encoder_callback; + Vp8TestEncodedImageCallback encoder_callback; encoder_->RegisterEncodeCompleteCallback(&encoder_callback); SetRates(kMaxBitrates[2], 30); // To get all three streams. @@ -725,10 +703,9 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers333PatternEncoder() { // Since CodecSpecificInfoVP8.temporalIdx is uint8_t, this will wrap to 255. // TODO(marpan): Although this seems safe for now, we should fix this. 
void SimulcastTestFixtureImpl::TestSpatioTemporalLayers321PatternEncoder() { - EXPECT_EQ(codec_type_, kVideoCodecVP8); int temporal_layer_profile[3] = {3, 2, 1}; SetUpCodec(temporal_layer_profile); - TestEncodedImageCallback encoder_callback; + Vp8TestEncodedImageCallback encoder_callback; encoder_->RegisterEncodeCompleteCallback(&encoder_callback); SetRates(kMaxBitrates[2], 30); // To get all three streams. @@ -784,8 +761,8 @@ void SimulcastTestFixtureImpl::TestSpatioTemporalLayers321PatternEncoder() { } void SimulcastTestFixtureImpl::TestStrideEncodeDecode() { - TestEncodedImageCallback encoder_callback; - TestDecodedImageCallback decoder_callback; + Vp8TestEncodedImageCallback encoder_callback; + Vp8TestDecodedImageCallback decoder_callback; encoder_->RegisterEncodeCompleteCallback(&encoder_callback); decoder_->RegisterDecodeCompleteCallback(&decoder_callback); diff --git a/modules/video_coding/utility/simulcast_test_fixture_impl.h b/modules/video_coding/codecs/vp8/simulcast_test_fixture_impl.h similarity index 82% rename from modules/video_coding/utility/simulcast_test_fixture_impl.h rename to modules/video_coding/codecs/vp8/simulcast_test_fixture_impl.h index 6634a69ad1..1fcf48e82e 100644 --- a/modules/video_coding/utility/simulcast_test_fixture_impl.h +++ b/modules/video_coding/codecs/vp8/simulcast_test_fixture_impl.h @@ -8,8 +8,8 @@ * be found in the AUTHORS file in the root of the source tree. */ -#ifndef MODULES_VIDEO_CODING_UTILITY_SIMULCAST_TEST_FIXTURE_IMPL_H_ -#define MODULES_VIDEO_CODING_UTILITY_SIMULCAST_TEST_FIXTURE_IMPL_H_ +#ifndef MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_TEST_FIXTURE_IMPL_H_ +#define MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_TEST_FIXTURE_IMPL_H_ #include #include @@ -20,7 +20,7 @@ #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_factory.h" #include "common_types.h" // NOLINT(build/include) -#include "modules/video_coding/utility/simulcast_rate_allocator.h" +#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h" #include "modules/video_coding/include/mock/mock_video_codec_interface.h" namespace webrtc { @@ -30,8 +30,7 @@ class SimulcastTestFixtureImpl final : public SimulcastTestFixture { public: SimulcastTestFixtureImpl( std::unique_ptr encoder_factory, - std::unique_ptr decoder_factory, - SdpVideoFormat video_format); + std::unique_ptr decoder_factory); ~SimulcastTestFixtureImpl() final; // Implements SimulcastTestFixture. 
@@ -52,12 +51,11 @@ class SimulcastTestFixtureImpl final : public SimulcastTestFixture { void TestStrideEncodeDecode() override; static void DefaultSettings(VideoCodec* settings, - const int* temporal_layer_profile, - VideoCodecType codec_type); + const int* temporal_layer_profile); private: - class TestEncodedImageCallback; - class TestDecodedImageCallback; + class Vp8TestEncodedImageCallback; + class Vp8TestDecodedImageCallback; void SetUpCodec(const int* temporal_layer_profile); void SetUpRateAllocator(); @@ -68,7 +66,7 @@ class SimulcastTestFixtureImpl final : public SimulcastTestFixture { const std::vector expected_streams_active); void ExpectStreams(FrameType frame_type, int expected_video_streams); void VerifyTemporalIdxAndSyncForAllSpatialLayers( - TestEncodedImageCallback* encoder_callback, + Vp8TestEncodedImageCallback* encoder_callback, const int* expected_temporal_idx, const bool* expected_layer_sync, int num_spatial_layers); @@ -82,10 +80,9 @@ class SimulcastTestFixtureImpl final : public SimulcastTestFixture { rtc::scoped_refptr input_buffer_; std::unique_ptr input_frame_; std::unique_ptr rate_allocator_; - VideoCodecType codec_type_; }; } // namespace test } // namespace webrtc -#endif // MODULES_VIDEO_CODING_UTILITY_SIMULCAST_TEST_FIXTURE_IMPL_H_ +#endif // MODULES_VIDEO_CODING_CODECS_VP8_SIMULCAST_TEST_FIXTURE_IMPL_H_ diff --git a/modules/video_coding/codecs/vp8/temporal_layers.cc b/modules/video_coding/codecs/vp8/temporal_layers.cc index 9ee5ce38ee..67401cd360 100644 --- a/modules/video_coding/codecs/vp8/temporal_layers.cc +++ b/modules/video_coding/codecs/vp8/temporal_layers.cc @@ -16,6 +16,7 @@ #include "modules/include/module_common_types.h" #include "modules/video_coding/codecs/vp8/default_temporal_layers.h" +#include "modules/video_coding/codecs/vp8/include/vp8_common_types.h" #include "modules/video_coding/codecs/vp8/screenshare_layers.h" #include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/checks.h" diff --git a/modules/video_coding/include/video_codec_interface.h b/modules/video_coding/include/video_codec_interface.h index d5c4223fa6..1b5e155e47 100644 --- a/modules/video_coding/include/video_codec_interface.h +++ b/modules/video_coding/include/video_codec_interface.h @@ -70,7 +70,6 @@ struct CodecSpecificInfoGeneric { struct CodecSpecificInfoH264 { H264PacketizationMode packetization_mode; - uint8_t simulcast_idx; }; union CodecSpecificInfoUnion { @@ -84,9 +83,7 @@ union CodecSpecificInfoUnion { // must be fitted with a copy-constructor. This is because it is copied // in the copy-constructor of VCMEncodedFrame. 
struct CodecSpecificInfo { - CodecSpecificInfo() : codecType(kVideoCodecUnknown), codec_name(nullptr) { - memset(&codecSpecific, 0, sizeof(codecSpecific)); - } + CodecSpecificInfo() : codecType(kVideoCodecUnknown), codec_name(nullptr) {} VideoCodecType codecType; const char* codec_name; CodecSpecificInfoUnion codecSpecific; diff --git a/modules/video_coding/media_opt_util.cc b/modules/video_coding/media_opt_util.cc index 776dd9d29d..aea35b04dd 100644 --- a/modules/video_coding/media_opt_util.cc +++ b/modules/video_coding/media_opt_util.cc @@ -18,10 +18,10 @@ #include #include "modules/include/module_common_types.h" +#include "modules/video_coding/codecs/vp8/include/vp8_common_types.h" #include "modules/video_coding/fec_rate_table.h" #include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/nack_fec_tables.h" -#include "modules/video_coding/utility/simulcast_rate_allocator.h" namespace webrtc { // Max value of loss rates in off-line model @@ -400,7 +400,8 @@ bool VCMFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters) { int VCMFecMethod::BitsPerFrame(const VCMProtectionParameters* parameters) { // When temporal layers are available FEC will only be applied on the base // layer. - const float bitRateRatio = kLayerRateAllocation[parameters->numLayers - 1][0]; + const float bitRateRatio = + kVp8LayerRateAlloction[parameters->numLayers - 1][0]; float frameRateRatio = powf(1 / 2.0, parameters->numLayers - 1); float bitRate = parameters->bitRate * bitRateRatio; float frameRate = parameters->frameRate * frameRateRatio; diff --git a/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc b/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc index 1fec9932bd..345fdcb943 100644 --- a/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc +++ b/modules/video_coding/utility/simulcast_rate_allocator_unittest.cc @@ -8,15 +8,13 @@ * be found in the AUTHORS file in the root of the source tree. */ -#include "modules/video_coding/utility/simulcast_rate_allocator.h" +#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h" #include #include #include #include -#include "modules/video_coding/codecs/vp8/temporal_layers.h" - #include "test/gmock.h" #include "test/gtest.h" diff --git a/modules/video_coding/utility/simulcast_utility.cc b/modules/video_coding/utility/simulcast_utility.cc deleted file mode 100644 index 60cf0627da..0000000000 --- a/modules/video_coding/utility/simulcast_utility.cc +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/video_coding/utility/simulcast_utility.h" - -namespace webrtc { - -uint32_t SimulcastUtility::SumStreamMaxBitrate(int streams, - const VideoCodec& codec) { - uint32_t bitrate_sum = 0; - for (int i = 0; i < streams; ++i) { - bitrate_sum += codec.simulcastStream[i].maxBitrate; - } - return bitrate_sum; -} - -int SimulcastUtility::NumberOfSimulcastStreams(const VideoCodec& codec) { - int streams = - codec.numberOfSimulcastStreams < 1 ? 
1 : codec.numberOfSimulcastStreams; - uint32_t simulcast_max_bitrate = SumStreamMaxBitrate(streams, codec); - if (simulcast_max_bitrate == 0) { - streams = 1; - } - return streams; -} - -bool SimulcastUtility::ValidSimulcastResolutions(const VideoCodec& codec, - int num_streams) { - if (codec.width != codec.simulcastStream[num_streams - 1].width || - codec.height != codec.simulcastStream[num_streams - 1].height) { - return false; - } - for (int i = 0; i < num_streams; ++i) { - if (codec.width * codec.simulcastStream[i].height != - codec.height * codec.simulcastStream[i].width) { - return false; - } - } - for (int i = 1; i < num_streams; ++i) { - if (codec.simulcastStream[i].width != - codec.simulcastStream[i - 1].width * 2) { - return false; - } - } - return true; -} - -bool SimulcastUtility::ValidSimulcastTemporalLayers(const VideoCodec& codec, - int num_streams) { - for (int i = 0; i < num_streams - 1; ++i) { - if (codec.simulcastStream[i].numberOfTemporalLayers != - codec.simulcastStream[i + 1].numberOfTemporalLayers) - return false; - } - return true; -} - -} // namespace webrtc diff --git a/modules/video_coding/utility/simulcast_utility.h b/modules/video_coding/utility/simulcast_utility.h deleted file mode 100644 index cf690f29a8..0000000000 --- a/modules/video_coding/utility/simulcast_utility.h +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_VIDEO_CODING_UTILITY_SIMULCAST_UTILITY_H_ -#define MODULES_VIDEO_CODING_UTILITY_SIMULCAST_UTILITY_H_ - -#include "api/video_codecs/video_encoder.h" - -namespace webrtc { - -class SimulcastUtility { - public: - static uint32_t SumStreamMaxBitrate(int streams, const VideoCodec& codec); - static int NumberOfSimulcastStreams(const VideoCodec& codec); - static bool ValidSimulcastResolutions(const VideoCodec& codec, - int num_streams); - static bool ValidSimulcastTemporalLayers(const VideoCodec& codec, - int num_streams); -}; - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_UTILITY_SIMULCAST_UTILITY_H_ diff --git a/modules/video_coding/video_codec_initializer.cc b/modules/video_coding/video_codec_initializer.cc index 04554c146f..7ef6cfe24a 100644 --- a/modules/video_coding/video_codec_initializer.cc +++ b/modules/video_coding/video_codec_initializer.cc @@ -13,13 +13,14 @@ #include "api/video_codecs/video_encoder.h" #include "common_types.h" // NOLINT(build/include) #include "common_video/include/video_bitrate_allocator.h" +#include "modules/video_coding/codecs/vp8/screenshare_layers.h" +#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h" +#include "modules/video_coding/codecs/vp8/temporal_layers.h" #include "modules/video_coding/codecs/vp9/svc_config.h" #include "modules/video_coding/codecs/vp9/svc_rate_allocator.h" #include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/utility/default_video_bitrate_allocator.h" -#include "modules/video_coding/utility/simulcast_rate_allocator.h" #include "rtc_base/logging.h" -#include "rtc_base/system/fallthrough.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -52,8 +53,7 @@ 
VideoCodecInitializer::CreateBitrateAllocator(const VideoCodec& codec) { switch (codec.codecType) { case kVideoCodecVP8: - RTC_FALLTHROUGH(); - case kVideoCodecH264: + // Set up default VP8 temporal layer factory, if not provided. rate_allocator.reset(new SimulcastRateAllocator(codec)); break; case kVideoCodecVP9: diff --git a/modules/video_coding/video_coding_impl.cc b/modules/video_coding/video_coding_impl.cc index aa9a0d5bd5..1127b0f340 100644 --- a/modules/video_coding/video_coding_impl.cc +++ b/modules/video_coding/video_coding_impl.cc @@ -16,6 +16,7 @@ #include "common_types.h" // NOLINT(build/include) #include "common_video/include/video_bitrate_allocator.h" #include "common_video/libyuv/include/webrtc_libyuv.h" +#include "modules/video_coding/codecs/vp8/temporal_layers.h" #include "modules/video_coding/encoded_frame.h" #include "modules/video_coding/include/video_codec_initializer.h" #include "modules/video_coding/include/video_codec_interface.h" @@ -104,16 +105,17 @@ class VideoCodingModuleImpl : public VideoCodingModule { int32_t RegisterSendCodec(const VideoCodec* sendCodec, uint32_t numberOfCores, uint32_t maxPayloadSize) override { - if (sendCodec != nullptr && ((sendCodec->codecType == kVideoCodecVP8) || - (sendCodec->codecType == kVideoCodecH264))) { - // Set up a rate allocator and temporal layers factory for this codec + if (sendCodec != nullptr && sendCodec->codecType == kVideoCodecVP8) { + // Set up a rate allocator and temporal layers factory for this vp8 // instance. The codec impl will have a raw pointer to the TL factory, // and will call it when initializing. Since this can happen // asynchronously keep the instance alive until destruction or until a // new send codec is registered. - VideoCodec codec = *sendCodec; - rate_allocator_ = VideoCodecInitializer::CreateBitrateAllocator(codec); - return sender_.RegisterSendCodec(&codec, numberOfCores, maxPayloadSize); + VideoCodec vp8_codec = *sendCodec; + rate_allocator_ = + VideoCodecInitializer::CreateBitrateAllocator(vp8_codec); + return sender_.RegisterSendCodec(&vp8_codec, numberOfCores, + maxPayloadSize); } return sender_.RegisterSendCodec(sendCodec, numberOfCores, maxPayloadSize); } diff --git a/modules/video_coding/video_sender.cc b/modules/video_coding/video_sender.cc index ec24a97d1e..f10822d72b 100644 --- a/modules/video_coding/video_sender.cc +++ b/modules/video_coding/video_sender.cc @@ -8,7 +8,6 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ - #include // std::max #include "common_types.h" // NOLINT(build/include) @@ -137,8 +136,7 @@ void VideoSender::RegisterExternalEncoder(VideoEncoder* externalEncoder, } return; } - _codecDataBase.RegisterExternalEncoder(externalEncoder, - internalSource); + _codecDataBase.RegisterExternalEncoder(externalEncoder, internalSource); } EncoderParameters VideoSender::UpdateEncoderParameters( @@ -291,8 +289,7 @@ int32_t VideoSender::AddVideoFrame(const VideoFrame& videoFrame, RTC_LOG(LS_ERROR) << "Frame conversion failed, dropping frame."; return VCM_PARAMETER_ERROR; } - converted_frame = VideoFrame(converted_buffer, - converted_frame.timestamp(), + converted_frame = VideoFrame(converted_buffer, converted_frame.timestamp(), converted_frame.render_time_ms(), converted_frame.rotation()); } diff --git a/modules/video_coding/video_sender_unittest.cc b/modules/video_coding/video_sender_unittest.cc index 7d729664bc..7321a08464 100644 --- a/modules/video_coding/video_sender_unittest.cc +++ b/modules/video_coding/video_sender_unittest.cc @@ -13,12 +13,13 @@ #include "api/video/i420_buffer.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" +#include "modules/video_coding/codecs/vp8/include/vp8_common_types.h" +#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h" #include "modules/video_coding/codecs/vp8/temporal_layers.h" #include "modules/video_coding/include/mock/mock_vcm_callbacks.h" #include "modules/video_coding/include/mock/mock_video_codec_interface.h" #include "modules/video_coding/include/video_coding.h" #include "modules/video_coding/utility/default_video_bitrate_allocator.h" -#include "modules/video_coding/utility/simulcast_rate_allocator.h" #include "modules/video_coding/video_coding_impl.h" #include "system_wrappers/include/clock.h" #include "test/frame_generator.h" @@ -471,9 +472,9 @@ class TestVideoSenderWithVp8 : public TestVideoSender { #define MAYBE_FixedTemporalLayersStrategy FixedTemporalLayersStrategy #endif TEST_F(TestVideoSenderWithVp8, MAYBE_FixedTemporalLayersStrategy) { - const int low_b = codec_bitrate_kbps_ * kLayerRateAllocation[2][0]; - const int mid_b = codec_bitrate_kbps_ * kLayerRateAllocation[2][1]; - const int high_b = codec_bitrate_kbps_ * kLayerRateAllocation[2][2]; + const int low_b = codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][0]; + const int mid_b = codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][1]; + const int high_b = codec_bitrate_kbps_ * kVp8LayerRateAlloction[2][2]; { Vp8StreamInfo expected = {{7.5, 15.0, 30.0}, {low_b, mid_b, high_b}}; EXPECT_THAT(SimulateWithFramerate(30.0), MatchesVp8StreamInfo(expected)); diff --git a/test/BUILD.gn b/test/BUILD.gn index 24b373514d..00304a8467 100644 --- a/test/BUILD.gn +++ b/test/BUILD.gn @@ -577,7 +577,7 @@ rtc_source_set("test_common") { "../modules/video_coding:video_coding_utility", "../modules/video_coding:webrtc_h264", "../modules/video_coding:webrtc_multiplex", - "../modules/video_coding:webrtc_vp8", + "../modules/video_coding:webrtc_vp8_helpers", "../modules/video_coding:webrtc_vp9", "../rtc_base:checks", "../rtc_base:rtc_base_approved", diff --git a/video/BUILD.gn b/video/BUILD.gn index cb36ef236a..df50d90687 100644 --- a/video/BUILD.gn +++ b/video/BUILD.gn @@ -158,7 +158,6 @@ if (rtc_include_tests) { "../media:rtc_internal_video_codecs", "../modules/audio_mixer:audio_mixer_impl", "../modules/rtp_rtcp", - "../modules/video_coding:video_coding", "../modules/video_coding:webrtc_h264", "../modules/video_coding:webrtc_multiplex", "../modules/video_coding:webrtc_vp8", @@ -394,7 +393,7 
@@ if (rtc_include_tests) { "../modules/video_coding:video_coding_utility", "../modules/video_coding:webrtc_h264", "../modules/video_coding:webrtc_multiplex", - "../modules/video_coding:webrtc_vp8", + "../modules/video_coding:webrtc_vp8_helpers", "../modules/video_coding:webrtc_vp9", "../rtc_base:checks", "../rtc_base:rate_limiter", diff --git a/video/payload_router.cc b/video/payload_router.cc index 2e73b8c0e9..e907f2bf03 100644 --- a/video/payload_router.cc +++ b/video/payload_router.cc @@ -78,7 +78,6 @@ void CopyCodecSpecific(const CodecSpecificInfo* info, RTPVideoHeader* rtp) { case kVideoCodecH264: rtp->codecHeader.H264.packetization_mode = info->codecSpecific.H264.packetization_mode; - rtp->simulcastIdx = info->codecSpecific.H264.simulcast_idx; return; case kVideoCodecMultiplex: case kVideoCodecGeneric: diff --git a/video/picture_id_tests.cc b/video/picture_id_tests.cc index b30f324d77..dd746f9200 100644 --- a/video/picture_id_tests.cc +++ b/video/picture_id_tests.cc @@ -10,7 +10,6 @@ #include "media/engine/internalencoderfactory.h" #include "media/engine/simulcast_encoder_adapter.h" #include "modules/rtp_rtcp/source/rtp_format.h" -#include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/numerics/sequence_number_util.h" diff --git a/video/send_statistics_proxy.cc b/video/send_statistics_proxy.cc index 44cbe010b5..7be89b76cb 100644 --- a/video/send_statistics_proxy.cc +++ b/video/send_statistics_proxy.cc @@ -429,22 +429,9 @@ void SendStatisticsProxy::UmaSamplesContainer::UpdateHistograms( int qp_h264 = it.second.h264.Avg(kMinRequiredMetricsSamples); if (qp_h264 != -1) { int spatial_idx = it.first; - if (spatial_idx == -1) { - RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.H264", - qp_h264); - } else if (spatial_idx == 0) { - RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.H264.S0", - qp_h264); - } else if (spatial_idx == 1) { - RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.H264.S1", - qp_h264); - } else if (spatial_idx == 2) { - RTC_HISTOGRAMS_COUNTS_200(kIndex, uma_prefix_ + "Encoded.Qp.H264.S2", - qp_h264); - } else { - RTC_LOG(LS_WARNING) - << "QP stats not recorded for H264 spatial idx " << spatial_idx; - } + RTC_DCHECK_EQ(-1, spatial_idx); + RTC_HISTOGRAMS_COUNTS_100(kIndex, uma_prefix_ + "Encoded.Qp.H264", + qp_h264); } } @@ -871,8 +858,6 @@ void SendStatisticsProxy::OnSendEncodedImage( if (codec_info) { if (codec_info->codecType == kVideoCodecVP8) { simulcast_idx = codec_info->codecSpecific.VP8.simulcastIdx; - } else if (codec_info->codecType == kVideoCodecH264) { - simulcast_idx = codec_info->codecSpecific.H264.simulcast_idx; } else if (codec_info->codecType == kVideoCodecGeneric) { simulcast_idx = codec_info->codecSpecific.generic.simulcast_idx; } @@ -921,9 +906,7 @@ void SendStatisticsProxy::OnSendEncodedImage( : codec_info->codecSpecific.VP9.spatial_idx; uma_container_->qp_counters_[spatial_idx].vp9.Add(encoded_image.qp_); } else if (codec_info->codecType == kVideoCodecH264) { - int spatial_idx = (rtp_config_.ssrcs.size() == 1) - ? 
-1 - : static_cast(simulcast_idx); + int spatial_idx = -1; uma_container_->qp_counters_[spatial_idx].h264.Add(encoded_image.qp_); } } diff --git a/video/send_statistics_proxy_unittest.cc b/video/send_statistics_proxy_unittest.cc index ee4ef5d6f9..573031799e 100644 --- a/video/send_statistics_proxy_unittest.cc +++ b/video/send_statistics_proxy_unittest.cc @@ -1253,18 +1253,12 @@ TEST_F(SendStatisticsProxyTest, VerifyQpHistogramStats_H264) { codec_info.codecType = kVideoCodecH264; for (int i = 0; i < SendStatisticsProxy::kMinRequiredMetricsSamples; ++i) { - codec_info.codecSpecific.H264.simulcast_idx = 0; encoded_image.qp_ = kQpIdx0; statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info); - codec_info.codecSpecific.H264.simulcast_idx = 1; - encoded_image.qp_ = kQpIdx1; - statistics_proxy_->OnSendEncodedImage(encoded_image, &codec_info); } statistics_proxy_.reset(); - EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.Encoded.Qp.H264.S0")); - EXPECT_EQ(1, metrics::NumEvents("WebRTC.Video.Encoded.Qp.H264.S0", kQpIdx0)); - EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.Encoded.Qp.H264.S1")); - EXPECT_EQ(1, metrics::NumEvents("WebRTC.Video.Encoded.Qp.H264.S1", kQpIdx1)); + EXPECT_EQ(1, metrics::NumSamples("WebRTC.Video.Encoded.Qp.H264")); + EXPECT_EQ(1, metrics::NumEvents("WebRTC.Video.Encoded.Qp.H264", kQpIdx0)); } TEST_F(SendStatisticsProxyTest, diff --git a/video/video_quality_test.cc b/video/video_quality_test.cc index a7e0867fd6..c12212e35a 100644 --- a/video/video_quality_test.cc +++ b/video/video_quality_test.cc @@ -27,6 +27,7 @@ #include "modules/video_coding/codecs/h264/include/h264.h" #include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" +#include "modules/video_coding/codecs/vp8/include/vp8_common_types.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" #include "rtc_base/cpu_time.h" #include "rtc_base/flags.h"
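[Editorial note, appended after the patch: the kVp8LayerRateAlloction table introduced in vp8_common_types.h above stores cumulative per-temporal-layer bitrate fractions, and several call sites in this patch (SimulcastRateAllocator::DefaultTemporalLayerAllocation, VCMFecMethod::BitsPerFrame, video_sender_unittest.cc) index it directly. The standalone sketch below copies the table values and shows how the cumulative entries translate into per-layer rates for a hypothetical 1200 kbps target with three temporal layers; it is illustrative only and not part of the patch.]

#include <cstdio>

// Cumulative bitrate fractions per temporal layer, copied from the
// kVp8LayerRateAlloction table added in vp8_common_types.h above
// (row index = number of temporal layers - 1).
static const float kVp8LayerRateAlloction[4][4] = {
    {1.0f, 1.0f, 1.0f, 1.0f},   // 1 layer
    {0.6f, 1.0f, 1.0f, 1.0f},   // 2 layers
    {0.4f, 0.6f, 1.0f, 1.0f},   // 3 layers
    {0.25f, 0.4f, 0.6f, 1.0f},  // 4 layers
};

int main() {
  const float total_kbps = 1200.0f;  // hypothetical encoder target
  const int num_layers = 3;
  float previous_cumulative = 0.0f;
  for (int i = 0; i < num_layers; ++i) {
    const float cumulative = kVp8LayerRateAlloction[num_layers - 1][i];
    // Because the table accumulates, the rate added by layer i is the
    // difference between consecutive entries: 480, 240 and 480 kbps here,
    // i.e. the 40%/20%/40% split noted in the table's comments.
    std::printf("TL%d: cumulative %.0f kbps, incremental %.0f kbps\n", i,
                cumulative * total_kbps,
                (cumulative - previous_cumulative) * total_kbps);
    previous_cumulative = cumulative;
  }
  return 0;
}

This is also why VCMFecMethod::BitsPerFrame above multiplies the total bitrate by kVp8LayerRateAlloction[numLayers - 1][0]: with temporal layers enabled, FEC is applied only to the base layer, which for three layers carries 40% of the bits at a quarter of the frame rate (powf(1 / 2.0, 2)).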