diff --git a/AUTHORS b/AUTHORS index 97db345ad1..63d90c271d 100644 --- a/AUTHORS +++ b/AUTHORS @@ -20,6 +20,7 @@ Cyril Lashkevich David Porter Dax Booysen Danail Kirov +Dharmesh Chauhan Dirk-Jan C. Binnema Dmitry Lizin Eric Rescorla, RTFM Inc. @@ -90,8 +91,11 @@ CZ Theng Miguel Paris Raman Budny Stephan Hartmann +Lennart Grahl &yet LLC <*@andyet.com> +8x8 Inc. <*@sip-communicator.org> +8x8 Inc. <*@8x8.com> Agora IO <*@agora.io> ARM Holdings <*@arm.com> BroadSoft Inc. <*@broadsoft.com> @@ -108,6 +112,7 @@ Opera Software ASA <*@opera.com> Optical Tone Ltd <*@opticaltone.com> Pengutronix e.K. <*@pengutronix.de> RingCentral, Inc. <*@ringcentral.com> +Signal Messenger, LLC <*@signal.org> Sinch AB <*@sinch.com> struktur AG <*@struktur.de> Telenor Digital AS <*@telenor.com> @@ -124,3 +129,4 @@ Highfive, Inc. <*@highfive.com> CoSMo Software Consulting, Pte Ltd <*@cosmosoftware.io> Tuple, LLC <*@tuple.app> Videona Socialmedia <*@videona.com> +Threema GmbH <*@threema.ch> diff --git a/Android.bp b/Android.bp index 2113dac7dd..24e8e6af71 100644 --- a/Android.bp +++ b/Android.bp @@ -1,7 +1,7 @@ cc_defaults { name: "webrtc_defaults", local_include_dirs: [ - ".", + ".", ], cflags: [ "-Wno-unused-parameter", @@ -25,1539 +25,25 @@ cc_defaults { "-DWEBRTC_LINUX", ], header_libs: [ - "libabsl_headers", + "libabsl_headers", + ], + static_libs: [ + "libaom", + "libevent", + "libopus", + "libsrtp2", + "libvpx", + "libyuv", + "libpffft", + "rnnoise_rnn_vad", + "usrsctplib", + ], + shared_libs: [ + "libcrypto", + "libssl", ], host_supported: true, device_supported: false, -} - - -cc_library_static { - name: "webrtc_spl_sqrt_floor", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "common_audio/third_party/spl_sqrt_floor/spl_sqrt_floor.c", - ], -} - - -cc_library_static { - name: "webrtc_fft", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/third_party/fft/fft.c", - ], -} - - -cc_library_static { - name: "webrtc_ooura_fft_size_256", - defaults: [ - "webrtc_defaults", - ], - 
srcs: [ - "common_audio/third_party/ooura/fft_size_256/fft4g.cc", - ], -} - - -cc_library_static { - name: "webrtc_audio_network_adaptor_config", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_coding/audio_network_adaptor/audio_network_adaptor_config.cc", - ], -} - - -cc_library_static { - name: "webrtc_pcm16b_c", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_coding/codecs/pcm16b/pcm16b.c", - ], -} - - -cc_library_static { - name: "webrtc_sigslot", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/third_party/sigslot/sigslot.cc", - ], -} - - -cc_library_static { - name: "webrtc_sent_packet", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/network/sent_packet.cc", - ], -} - - -cc_library_static { - name: "webrtc_media_protocol_names", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "pc/media_protocol_names.cc", - ], -} - - -cc_library_static { - name: "webrtc_g722_3p", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/third_party/g722/g722_decode.c", - "modules/third_party/g722/g722_encode.c", - ], -} - - -cc_library_static { - name: "webrtc_rtc_constants", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "media/engine/constants.cc", - ], -} - - -cc_library_static { - name: "webrtc_transport_api", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/call/transport.cc", - ], -} - - -cc_library_static { - name: "webrtc_platform_thread_types", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/platform_thread_types.cc", - ], -} - - -cc_library_static { - name: "webrtc_g711_3p", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/third_party/g711/g711.c", - ], -} - - -cc_library_static { - name: "webrtc_audio_processing_statistics", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/include/audio_processing_statistics.cc", - ], -} - - -cc_library_static { - name: "webrtc_bitrate_settings", - defaults: [ - "webrtc_defaults", - ], - srcs: 
[ - "api/transport/bitrate_settings.cc", - ], -} - - -cc_library_static { - name: "webrtc_base64", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/third_party/base64/base64.cc", - ], -} - - -cc_library_static { - name: "webrtc_g711_c", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_coding/codecs/g711/g711_interface.c", - ], - static_libs: [ - "webrtc_g711_3p", - ], -} - - -cc_library_static { - name: "webrtc_rtc_base_checks", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/checks.cc", - ], -} - - -cc_library_static { - name: "webrtc_isac_vad", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_coding/codecs/isac/main/source/filter_functions.c", - "modules/audio_coding/codecs/isac/main/source/isac_vad.c", - "modules/audio_coding/codecs/isac/main/source/pitch_estimator.c", - "modules/audio_coding/codecs/isac/main/source/pitch_filter.c", - ], - static_libs: [ - "webrtc_fft", - ], -} - - -cc_library_static { - name: "webrtc_aligned_malloc", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/memory/aligned_malloc.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - ], -} - - -cc_library_static { - name: "webrtc_video_adaptation_counters", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/video/video_adaptation_counters.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - ], -} - - -cc_library_static { - name: "webrtc_g722_c", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_coding/codecs/g722/g722_interface.c", - ], - static_libs: [ - "webrtc_g722_3p", - ], -} - - -cc_library_static { - name: "webrtc_cpu_features_linux", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "system_wrappers/source/cpu_features_linux.c", - ], -} - - -cc_library_static { - name: "webrtc_module_api", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/include/module_common_types.cc", - ], -} - - -cc_library_static { - name: "webrtc_task_queue", - defaults: [ - 
"webrtc_defaults", - ], - srcs: [ - "api/task_queue/task_queue_base.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - ], -} - - -filegroup { - name: "webrtc_rms_level", - srcs: [ - "modules/audio_processing/rms_level.cc", - ], -} - - -cc_library_static { - name: "webrtc_cascaded_biquad_filter", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/utility/cascaded_biquad_filter.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - ], -} - - -cc_library_static { - name: "webrtc_yield_policy", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/synchronization/yield_policy.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - ], -} - - -cc_library_static { - name: "webrtc_stringutils", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/string_encode.cc", - "rtc_base/string_to_number.cc", - "rtc_base/string_utils.cc", - "rtc_base/strings/string_builder.cc", - "rtc_base/strings/string_format.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - ], -} - - -cc_library_static { - name: "webrtc_criticalsection", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/critical_section.cc", - ], - static_libs: [ - "webrtc_platform_thread_types", - "webrtc_rtc_base_checks", - ], -} - - -cc_library_static { - name: "webrtc_file_wrapper", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/system/file_wrapper.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_criticalsection", - ], -} - - -cc_library_static { - name: "webrtc_rw_lock_wrapper", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/synchronization/rw_lock_wrapper.cc", - "rtc_base/synchronization/rw_lock_posix.cc", - ], -} - - -cc_library_static { - name: "webrtc_timestamp_extrapolator", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/time/timestamp_extrapolator.cc", - ], - static_libs: [ - "webrtc_rw_lock_wrapper", - ], -} - - -cc_library_static { - name: "webrtc_legacy_delay_estimator", - defaults: [ - 
"webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/utility/delay_estimator.cc", - "modules/audio_processing/utility/delay_estimator_wrapper.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - ], -} - - -cc_library_static { - name: "webrtc_sequence_checker", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/synchronization/sequence_checker.cc", - ], - static_libs: [ - "webrtc_platform_thread_types", - "webrtc_rtc_base_checks", - "webrtc_task_queue", - "webrtc_criticalsection", - ], -} - - -cc_library_static { - name: "webrtc_video_bitrate_allocation", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/video/video_bitrate_allocation.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_stringutils", - ], -} - - -cc_library_static { - name: "webrtc_rtp_parameters", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/media_types.cc", - "api/rtp_parameters.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_stringutils", - ], -} - - -cc_library_static { - name: "webrtc_audio_processing_config", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/include/config.cc", - ], -} - - -cc_library_static { - name: "webrtc_generic_frame_descriptor", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "common_video/generic_frame_descriptor/generic_frame_info.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - ], -} - - -cc_library_static { - name: "webrtc_audio_options_api", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio_options.cc", - ], - static_libs: [ - "webrtc_stringutils", - ], -} - - -cc_library_static { - name: "webrtc_data_size", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/units/data_size.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_stringutils", - ], -} - - -cc_library_static { - name: "webrtc_audio_interfaces", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/transport/media/audio_transport.cc", - ], -} - - -filegroup { - 
name: "webrtc_rtc_operations_chain", - srcs: [ - "rtc_base/operations_chain.cc", - ], -} - - -cc_library_static { - name: "webrtc_timeutils", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/time_utils.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_stringutils", - ], -} - - -cc_library_static { - name: "webrtc_time_delta", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/units/time_delta.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_stringutils", - ], -} - - -cc_library_static { - name: "webrtc_rtc_event", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/event.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_yield_policy", - ], -} - - -cc_library_static { - name: "webrtc_timestamp", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/units/timestamp.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_stringutils", - "webrtc_time_delta", - ], -} - - -cc_library_static { - name: "webrtc_frequency", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/units/frequency.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_stringutils", - "webrtc_time_delta", - ], -} - - -cc_library_static { - name: "webrtc_weak_ptr", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/weak_ptr.cc", - ], - static_libs: [ - "webrtc_sequence_checker", - ], -} - - -cc_library_static { - name: "webrtc_platform_thread", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/platform_thread.cc", - ], - static_libs: [ - "webrtc_platform_thread_types", - "webrtc_rtc_base_checks", - "webrtc_timeutils", - "webrtc_rtc_event", - ], -} - - -cc_library_static { - name: "webrtc_pending_task_safety_flag", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/task_utils/pending_task_safety_flag.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_sequence_checker", - ], -} - - -cc_library_static { - name: "webrtc_rtc_event_log", - defaults: [ - 
"webrtc_defaults", - ], - srcs: [ - "api/rtc_event_log/rtc_event.cc", - "api/rtc_event_log/rtc_event_log.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_task_queue", - "webrtc_timeutils", - ], -} - - -cc_library_static { - name: "webrtc_logging", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/logging.cc", - ], - static_libs: [ - "webrtc_platform_thread_types", - "webrtc_rtc_base_checks", - "webrtc_stringutils", - "webrtc_criticalsection", - "webrtc_timeutils", - ], -} - - -cc_library_static { - name: "webrtc_data_rate", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/units/data_rate.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_stringutils", - "webrtc_data_size", - "webrtc_time_delta", - "webrtc_frequency", - ], -} - - -cc_library_static { - name: "webrtc_field_trial", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "system_wrappers/source/field_trial.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_stringutils", - "webrtc_logging", - ], -} - - -cc_library_static { - name: "webrtc_network_control", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/transport/network_types.cc", - ], - static_libs: [ - "webrtc_data_size", - "webrtc_time_delta", - "webrtc_timestamp", - "webrtc_rtc_event_log", - "webrtc_data_rate", - ], -} - - -cc_library_static { - name: "webrtc_field_trial_parser", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/experiments/field_trial_list.cc", - "rtc_base/experiments/field_trial_parser.cc", - "rtc_base/experiments/field_trial_units.cc", - "rtc_base/experiments/struct_parameters_parser.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_stringutils", - "webrtc_data_size", - "webrtc_time_delta", - "webrtc_logging", - "webrtc_data_rate", - ], -} - - -cc_library_static { - name: "webrtc_rtc_event_pacing", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "logging/rtc_event_log/events/rtc_event_alr_state.cc", - ], - static_libs: [ - 
"webrtc_rtc_event_log", - ], -} - - -cc_library_static { - name: "webrtc_field_trial_based_config", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/transport/field_trial_based_config.cc", - ], - static_libs: [ - "webrtc_field_trial", - ], -} - - -cc_library_static { - name: "webrtc_frame_dependencies_calculator", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/video_coding/frame_dependencies_calculator.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_generic_frame_descriptor", - "webrtc_logging", - ], -} - - -cc_library_static { - name: "webrtc_rtc_task_queue_libevent", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/task_queue_libevent.cc", - ], - static_libs: [ - "webrtc_platform_thread_types", - "webrtc_rtc_base_checks", - "webrtc_task_queue", - "webrtc_criticalsection", - "webrtc_timeutils", - "webrtc_platform_thread", - "webrtc_logging", - "libevent", - ], -} - - -cc_library_static { - name: "webrtc_rtc_error", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/rtc_error.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_logging", - ], -} - - -cc_library_static { - name: "webrtc_repeating_task", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/task_utils/repeating_task.cc", - ], - static_libs: [ - "webrtc_task_queue", - "webrtc_sequence_checker", - "webrtc_timeutils", - "webrtc_time_delta", - "webrtc_timestamp", - "webrtc_logging", - ], -} - - -cc_library_static { - name: "webrtc_rtc_event_log_factory", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/rtc_event_log/rtc_event_log_factory.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_task_queue", - "webrtc_rtc_event_log", - ], -} - - -cc_library_static { - name: "webrtc_link_capacity_estimator", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/congestion_controller/goog_cc/link_capacity_estimator.cc", - ], - static_libs: [ - "webrtc_data_rate", - ], -} - - -cc_library_static { - name: 
"webrtc_video_bitrate_allocator", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/video/video_bitrate_allocator.cc", - ], - static_libs: [ - "webrtc_video_bitrate_allocation", - "webrtc_data_rate", - ], -} - - -cc_library_static { - name: "webrtc_keyframe_interval_settings_experiment", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/experiments/keyframe_interval_settings.cc", - ], - static_libs: [ - "webrtc_field_trial_parser", - "webrtc_field_trial_based_config", - ], -} - - -cc_library_static { - name: "webrtc_rtc_task_queue", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/task_queue.cc", - ], - static_libs: [ - "webrtc_task_queue", - ], -} - - -cc_library_static { - name: "webrtc_default_task_queue_factory", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/task_queue/default_task_queue_factory_libevent.cc", - ], - static_libs: [ - "webrtc_task_queue", - "webrtc_rtc_task_queue_libevent", - ], -} - - -cc_library_static { - name: "webrtc_rtc_base_approved", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/bit_buffer.cc", - "rtc_base/buffer_queue.cc", - "rtc_base/byte_buffer.cc", - "rtc_base/copy_on_write_buffer.cc", - "rtc_base/event_tracer.cc", - "rtc_base/location.cc", - "rtc_base/numerics/histogram_percentile_counter.cc", - "rtc_base/numerics/sample_counter.cc", - "rtc_base/race_checker.cc", - "rtc_base/random.cc", - "rtc_base/rate_statistics.cc", - "rtc_base/rate_tracker.cc", - "rtc_base/timestamp_aligner.cc", - "rtc_base/zero_memory.cc", - ], - static_libs: [ - "webrtc_platform_thread_types", - "webrtc_base64", - "webrtc_rtc_base_checks", - "webrtc_stringutils", - "webrtc_criticalsection", - "webrtc_timeutils", - "webrtc_rtc_event", - "webrtc_platform_thread", - "webrtc_logging", - "webrtc_rtc_task_queue", - ], -} - - -cc_library_static { - name: "webrtc_rtc_event_log_output_file", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/rtc_event_log_output_file.cc", - ], - static_libs: [ - 
"webrtc_rtc_base_checks", - "webrtc_file_wrapper", - "webrtc_rtc_event_log", - "webrtc_rtc_base_approved", - ], -} - - -cc_library_static { - name: "webrtc_jitter_upper_bound_experiment", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/experiments/jitter_upper_bound_experiment.cc", - ], - static_libs: [ - "webrtc_field_trial", - "webrtc_rtc_base_approved", - ], -} - - -cc_library_static { - name: "webrtc_biquad_filter", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/agc2/biquad_filter.cc", - ], - static_libs: [ - "webrtc_rtc_base_approved", - ], -} - -cc_library_static { - name: "webrtc_rtc_numerics", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/numerics/event_based_exponential_moving_average.cc", - "rtc_base/numerics/event_rate_counter.cc", - "rtc_base/numerics/exp_filter.cc", - "rtc_base/numerics/moving_average.cc", - "rtc_base/numerics/sample_stats.cc", - "rtc_base/numerics/samples_stats_counter.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_time_delta", - "webrtc_timestamp", - "webrtc_data_rate", - "webrtc_rtc_base_approved", - ], -} - - -cc_library_static { - name: "webrtc_cpu_speed_experiment", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/experiments/cpu_speed_experiment.cc", - ], - static_libs: [ - "webrtc_field_trial", - "webrtc_rtc_base_approved", - ], -} - - -cc_library_static { - name: "webrtc_system_wrappers", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "system_wrappers/source/clock.cc", - "system_wrappers/source/cpu_features.cc", - "system_wrappers/source/cpu_info.cc", - "system_wrappers/source/rtp_to_ntp_estimator.cc", - "system_wrappers/source/sleep.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_cpu_features_linux", - "webrtc_rw_lock_wrapper", - "webrtc_timestamp", - "webrtc_rtc_base_approved", - "webrtc_rtc_numerics", - ], -} - - -cc_library_static { - name: "webrtc_video_rtp_headers", - defaults: [ - "webrtc_defaults", - ], - 
srcs: [ - "api/video/color_space.cc", - "api/video/hdr_metadata.cc", - "api/video/video_content_type.cc", - "api/video/video_timing.cc", - ], - static_libs: [ - "webrtc_rtc_base_approved", - ], -} - - -cc_library_static { - name: "webrtc_audio_encoder_opus_config", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.cc", - "api/audio_codecs/opus/audio_encoder_opus_config.cc", - ], - cflags: [ - "-DWEBRTC_OPUS_VARIABLE_COMPLEXITY=0", - ], - static_libs: [ - "webrtc_rtc_base_approved", - ], -} - - -cc_library_static { - name: "webrtc_aec3_config", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio/echo_canceller3_config.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - ], -} - - -cc_library_static { - name: "webrtc_opus_wrapper", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_coding/codecs/opus/opus_interface.cc", - ], - cflags: [ - "-DWEBRTC_CODEC_ILBC", - "-DWEBRTC_CODEC_OPUS", - "-DWEBRTC_OPUS_SUPPORT_120MS_PTIME=1", - "-DWEBRTC_CODEC_ISAC", - "-DWEBRTC_CODEC_RED", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_field_trial", - "webrtc_rtc_base_approved", - "libopus", - ], -} - - -cc_library_static { - name: "webrtc_agc2_common", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/agc2/agc2_common.cc", - ], - static_libs: [ - "webrtc_field_trial", - "webrtc_rtc_base_approved", - ], -} - - -cc_library_static { - name: "webrtc_alr_experiment", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/experiments/alr_experiment.cc", - ], - static_libs: [ - "webrtc_field_trial_based_config", - "webrtc_rtc_base_approved", - ], -} - - -cc_library_static { - name: "webrtc_quality_scaler_settings", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/experiments/quality_scaler_settings.cc", - ], - static_libs: [ - "webrtc_field_trial", - "webrtc_field_trial_parser", - 
"webrtc_field_trial_based_config", - "webrtc_rtc_base_approved", - ], -} - - -cc_library_static { - name: "webrtc_normalize_simulcast_size_experiment", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/experiments/normalize_simulcast_size_experiment.cc", - ], - static_libs: [ - "webrtc_field_trial", - "webrtc_rtc_base_approved", - ], -} - - -cc_library_static { - name: "webrtc_audio_codecs_api", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio_codecs/audio_codec_pair_id.cc", - "api/audio_codecs/audio_decoder.cc", - "api/audio_codecs/audio_encoder.cc", - "api/audio_codecs/audio_format.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_time_delta", - "webrtc_rtc_base_approved", - ], -} - - -cc_library_static { - name: "webrtc_rtt_mult_experiment", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/experiments/rtt_mult_experiment.cc", - ], - static_libs: [ - "webrtc_field_trial", - "webrtc_rtc_base_approved", - ], -} - - -cc_library_static { - name: "webrtc_quality_rampup_experiment", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/experiments/quality_rampup_experiment.cc", - ], - static_libs: [ - "webrtc_field_trial", - "webrtc_field_trial_parser", - "webrtc_field_trial_based_config", - "webrtc_rtc_base_approved", - ], -} - - -cc_library_static { - name: "webrtc_rtc_stats", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "stats/rtc_stats.cc", - "stats/rtc_stats_report.cc", - "stats/rtcstats_objects.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - ], -} - - -cc_library_static { - name: "webrtc_system_wrappers_metrics", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "system_wrappers/source/metrics.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - ], -} - - -filegroup { - name: "webrtc_tick_timer", - srcs: [ - "api/neteq/tick_timer.cc", - ], -} - - -cc_library_static { - name: "webrtc_rtc_base", - defaults: [ - 
"webrtc_defaults", - ], - srcs: [ - "rtc_base/async_invoker.cc", - "rtc_base/async_packet_socket.cc", - "rtc_base/async_resolver_interface.cc", - "rtc_base/async_socket.cc", - "rtc_base/async_tcp_socket.cc", - "rtc_base/async_udp_socket.cc", - "rtc_base/crc32.cc", - "rtc_base/crypt_string.cc", - "rtc_base/data_rate_limiter.cc", - "rtc_base/file_rotating_stream.cc", - "rtc_base/helpers.cc", - "rtc_base/http_common.cc", - "rtc_base/ip_address.cc", - "rtc_base/message_digest.cc", - "rtc_base/message_handler.cc", - "rtc_base/net_helper.cc", - "rtc_base/net_helpers.cc", - "rtc_base/network.cc", - "rtc_base/network_constants.cc", - "rtc_base/network_monitor.cc", - "rtc_base/network_route.cc", - "rtc_base/null_socket_server.cc", - "rtc_base/openssl_adapter.cc", - "rtc_base/openssl_certificate.cc", - "rtc_base/openssl_digest.cc", - "rtc_base/openssl_identity.cc", - "rtc_base/openssl_session_cache.cc", - "rtc_base/openssl_stream_adapter.cc", - "rtc_base/openssl_utility.cc", - "rtc_base/physical_socket_server.cc", - "rtc_base/proxy_info.cc", - "rtc_base/rtc_certificate.cc", - "rtc_base/rtc_certificate_generator.cc", - "rtc_base/signal_thread.cc", - "rtc_base/socket.cc", - "rtc_base/socket_adapters.cc", - "rtc_base/socket_address.cc", - "rtc_base/socket_address_pair.cc", - "rtc_base/ssl_adapter.cc", - "rtc_base/ssl_certificate.cc", - "rtc_base/ssl_fingerprint.cc", - "rtc_base/ssl_identity.cc", - "rtc_base/ssl_stream_adapter.cc", - "rtc_base/stream.cc", - "rtc_base/thread.cc", - "rtc_base/unique_id_generator.cc", - "rtc_base/log_sinks.cc", - "rtc_base/ifaddrs_converter.cc", - ], - static_libs: [ - "webrtc_sigslot", - "webrtc_sent_packet", - "webrtc_base64", - "webrtc_rtc_base_checks", - "webrtc_task_queue", - "webrtc_stringutils", - "webrtc_file_wrapper", - "webrtc_field_trial", - "webrtc_rtc_base_approved", - "webrtc_rtc_numerics", - ], - shared_libs: [ - "libcrypto", - "libssl", - ], -} - - -cc_library_static { - name: "webrtc_common_audio_cc", - defaults: [ - 
"webrtc_defaults", - ], - srcs: [ - "common_audio/signal_processing/dot_product_with_scale.cc", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - ], -} - - -cc_library_static { - name: "webrtc_interval_budget", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/pacing/interval_budget.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - ], -} - - -cc_library_static { - name: "webrtc_common_audio_c", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "common_audio/ring_buffer.c", - "common_audio/signal_processing/auto_corr_to_refl_coef.c", - "common_audio/signal_processing/auto_correlation.c", - "common_audio/signal_processing/copy_set_operations.c", - "common_audio/signal_processing/cross_correlation.c", - "common_audio/signal_processing/division_operations.c", - "common_audio/signal_processing/downsample_fast.c", - "common_audio/signal_processing/energy.c", - "common_audio/signal_processing/filter_ar.c", - "common_audio/signal_processing/filter_ma_fast_q12.c", - "common_audio/signal_processing/get_hanning_window.c", - "common_audio/signal_processing/get_scaling_square.c", - "common_audio/signal_processing/ilbc_specific_functions.c", - "common_audio/signal_processing/levinson_durbin.c", - "common_audio/signal_processing/lpc_to_refl_coef.c", - "common_audio/signal_processing/min_max_operations.c", - "common_audio/signal_processing/randomization_functions.c", - "common_audio/signal_processing/real_fft.c", - "common_audio/signal_processing/refl_coef_to_lpc.c", - "common_audio/signal_processing/resample.c", - "common_audio/signal_processing/resample_48khz.c", - "common_audio/signal_processing/resample_by_2.c", - "common_audio/signal_processing/resample_by_2_internal.c", - "common_audio/signal_processing/resample_fractional.c", - "common_audio/signal_processing/spl_init.c", - "common_audio/signal_processing/spl_inl.c", - "common_audio/signal_processing/spl_sqrt.c", - 
"common_audio/signal_processing/splitting_filter.c", - "common_audio/signal_processing/sqrt_of_one_minus_x_squared.c", - "common_audio/signal_processing/vector_scaling_operations.c", - "common_audio/vad/vad_core.c", - "common_audio/vad/vad_filterbank.c", - "common_audio/vad/vad_gmm.c", - "common_audio/vad/vad_sp.c", - "common_audio/vad/webrtc_vad.c", - "common_audio/signal_processing/complex_fft.c", - "common_audio/signal_processing/complex_bit_reverse.c", - "common_audio/signal_processing/filter_ar_fast_q12.c", - ], - static_libs: [ - "webrtc_spl_sqrt_floor", - "webrtc_ooura_fft_size_256", - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_common_audio_cc", - ], -} - - -cc_library_static { - name: "webrtc_aecm_core", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/aecm/aecm_core.cc", - "modules/audio_processing/aecm/echo_control_mobile.cc", - "modules/audio_processing/aecm/aecm_core_c.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_legacy_delay_estimator", - "webrtc_rtc_base_approved", - "webrtc_common_audio_c", - ], -} - - -cc_library_static { - name: "webrtc_video_processing_sse2", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/video_processing/util/denoiser_filter_sse2.cc", - ], - cflags: [ - "-msse2", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - ], arch: { arm: { enabled: false, @@ -1565,4827 +51,5442 @@ cc_library_static { }, } - cc_library_static { - name: "webrtc_gain_applier", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/agc2/gain_applier.cc", - ], - static_libs: [ - "webrtc_agc2_common", - ], + name: "webrtc_spl_sqrt_floor__spl_sqrt_floor", + defaults: ["webrtc_defaults"], + srcs: ["common_audio/third_party/spl_sqrt_floor/spl_sqrt_floor.c"], + host_supported: true, } - cc_library_static { - name: "webrtc_audio_format_to_string", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - 
"rtc_base/strings/audio_format_to_string.cc", - ], - static_libs: [ - "webrtc_stringutils", - "webrtc_audio_codecs_api", - ], + name: "webrtc_fft__fft", + defaults: ["webrtc_defaults"], + srcs: ["modules/third_party/fft/fft.c"], + host_supported: true, } - cc_library_static { - name: "webrtc_fifo_buffer", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/memory/fifo_buffer.cc", - ], - static_libs: [ - "webrtc_rtc_base", - ], + name: "webrtc_ooura__fft_size_256", + defaults: ["webrtc_defaults"], + srcs: ["common_audio/third_party/ooura/fft_size_256/fft4g.cc"], + host_supported: true, } - cc_library_static { - name: "webrtc_rtp_headers", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/rtp_headers.cc", - ], - static_libs: [ - "webrtc_timestamp", - "webrtc_video_rtp_headers", - ], + name: "webrtc_audio_coding__audio_network_adaptor_config", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_coding/audio_network_adaptor/audio_network_adaptor_config.cc"], + host_supported: true, } - cc_library_static { - name: "webrtc_rate_limiter", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/rate_limiter.cc", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - ], + name: "webrtc_audio_coding__pcm16b_c", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_coding/codecs/pcm16b/pcm16b.c"], + host_supported: true, } - cc_library_static { - name: "webrtc_audio_coding_opus_common", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_coding/codecs/opus/audio_coder_opus_common.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_stringutils", - "webrtc_audio_codecs_api", - ], + name: "webrtc_sigslot__sigslot", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/third_party/sigslot/sigslot.cc"], + host_supported: true, } - cc_library_static { - name: "webrtc_rtc_stream_config", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "logging/rtc_event_log/rtc_stream_config.cc", - ], - 
static_libs: [ - "webrtc_rtp_parameters", - "webrtc_rtp_headers", - ], + name: "webrtc_network__sent_packet", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/network/sent_packet.cc"], + host_supported: true, } - cc_library_static { - name: "webrtc_legacy_encoded_audio_frame", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_coding/codecs/legacy_encoded_audio_frame.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - ], + name: "webrtc_pc__media_protocol_names", + defaults: ["webrtc_defaults"], + srcs: ["pc/media_protocol_names.cc"], + host_supported: true, } - cc_library_static { - name: "webrtc_multiopus", - defaults: [ - "webrtc_defaults", - ], + name: "webrtc_g722__g722_3p", + defaults: ["webrtc_defaults"], srcs: [ - "modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.cc", - "modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.cc", - ], - cflags: [ - "-DWEBRTC_CODEC_ILBC", - "-DWEBRTC_CODEC_OPUS", - "-DWEBRTC_OPUS_SUPPORT_120MS_PTIME=1", - "-DWEBRTC_CODEC_ISAC", - "-DWEBRTC_CODEC_RED", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_stringutils", - "webrtc_time_delta", - "webrtc_logging", - "webrtc_rtc_base_approved", - "webrtc_audio_encoder_opus_config", - "webrtc_opus_wrapper", - "webrtc_audio_codecs_api", - "webrtc_audio_coding_opus_common", - "libopus", + "modules/third_party/g722/g722_decode.c", + "modules/third_party/g722/g722_encode.c", ], + host_supported: true, } - cc_library_static { - name: "webrtc_rtp_packet_info", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/rtp_packet_info.cc", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_rtp_headers", - ], + name: "webrtc_media__rtc_constants", + defaults: ["webrtc_defaults"], + srcs: ["media/engine/constants.cc"], + host_supported: true, } - cc_library_static { - name: "webrtc_api_crypto_options", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - 
"api/crypto/crypto_options.cc", - ], - static_libs: [ - "webrtc_rtc_base", - ], + name: "webrtc_api__transport_api", + defaults: ["webrtc_defaults"], + srcs: ["api/call/transport.cc"], + host_supported: true, } - cc_library_static { - name: "webrtc_rtc_h264_profile_id", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "media/base/h264_profile_level_id.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - "webrtc_rtc_base", - ], + name: "webrtc_synchronization__yield", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/synchronization/yield.cc"], + host_supported: true, } - cc_library_static { - name: "webrtc_cng", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_coding/codecs/cng/webrtc_cng.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - "webrtc_common_audio_c", - ], + name: "webrtc_g711__g711_3p", + defaults: ["webrtc_defaults"], + srcs: ["modules/third_party/g711/g711.c"], + host_supported: true, } - cc_library_static { - name: "webrtc_common_audio_sse2", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "common_audio/fir_filter_sse.cc", - "common_audio/resampler/sinc_resampler_sse.cc", - ], - cflags: [ - "-msse2", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_aligned_malloc", - "webrtc_rtc_base_approved", - ], - arch: { - arm: { - enabled: false, - }, - }, + name: "webrtc_audio_processing__audio_processing_statistics", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_processing/include/audio_processing_statistics.cc"], + host_supported: true, } - cc_library_static { - name: "webrtc_rtc_event_video", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "logging/rtc_event_log/events/rtc_event_video_receive_stream_config.cc", - "logging/rtc_event_log/events/rtc_event_video_send_stream_config.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_event_log", - "webrtc_rtc_stream_config", - ], + name: "webrtc_transport__bitrate_settings", 
+ defaults: ["webrtc_defaults"], + srcs: ["api/transport/bitrate_settings.cc"], + host_supported: true, } - cc_library_static { - name: "webrtc_common_audio", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "common_audio/audio_converter.cc", - "common_audio/audio_util.cc", - "common_audio/channel_buffer.cc", - "common_audio/real_fourier.cc", - "common_audio/real_fourier_ooura.cc", - "common_audio/resampler/push_resampler.cc", - "common_audio/resampler/push_sinc_resampler.cc", - "common_audio/resampler/resampler.cc", - "common_audio/resampler/sinc_resampler.cc", - "common_audio/smoothing_filter.cc", - "common_audio/vad/vad.cc", - "common_audio/wav_file.cc", - "common_audio/wav_header.cc", - "common_audio/window_generator.cc", - ], - static_libs: [ - "webrtc_ooura_fft_size_256", - "webrtc_rtc_base_checks", - "webrtc_aligned_malloc", - "webrtc_file_wrapper", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_common_audio_c", - "webrtc_common_audio_sse2", - ], + name: "webrtc_base64__base64", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/third_party/base64/base64.cc"], + host_supported: true, } - cc_library_static { - name: "webrtc_simulated_network", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "call/simulated_network.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_sequence_checker", - "webrtc_data_size", - "webrtc_time_delta", - "webrtc_timestamp", - "webrtc_data_rate", - "webrtc_rtc_base_approved", - ], + name: "webrtc_audio_coding__g711_c", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_coding/codecs/g711/g711_interface.c"], + host_supported: true, + static_libs: ["webrtc_g711__g711_3p"], } - cc_library_static { - name: "webrtc_bitrate_allocator", - defaults: [ - "webrtc_defaults", - ], + name: "webrtc_ooura__fft_size_128", + defaults: ["webrtc_defaults"], srcs: [ - "call/bitrate_allocator.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_sequence_checker", - "webrtc_time_delta", - 
"webrtc_data_rate", - "webrtc_field_trial", - "webrtc_network_control", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_system_wrappers_metrics", + "common_audio/third_party/ooura/fft_size_128/ooura_fft.cc", + "common_audio/third_party/ooura/fft_size_128/ooura_fft_sse2.cc", ], + host_supported: true, + cflags: ["-msse2"], } - cc_library_static { - name: "webrtc_g722", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_coding/codecs/g722/audio_decoder_g722.cc", - "modules/audio_coding/codecs/g722/audio_encoder_g722.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_g722_c", - "webrtc_time_delta", - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_legacy_encoded_audio_frame", - ], + name: "webrtc_rtc_base__checks", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/checks.cc"], + host_supported: true, } - cc_library_static { - name: "webrtc_audio_device_buffer", - defaults: [ - "webrtc_defaults", - ], + name: "webrtc_audio_coding__isac_vad", + defaults: ["webrtc_defaults"], srcs: [ - "modules/audio_device/audio_device_buffer.cc", - "modules/audio_device/fine_audio_buffer.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_task_queue", - "webrtc_rtc_task_queue", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_system_wrappers_metrics", - "webrtc_common_audio_c", + "modules/audio_coding/codecs/isac/main/source/filter_functions.c", + "modules/audio_coding/codecs/isac/main/source/isac_vad.c", + "modules/audio_coding/codecs/isac/main/source/pitch_estimator.c", + "modules/audio_coding/codecs/isac/main/source/pitch_filter.c", ], + host_supported: true, + static_libs: ["webrtc_fft__fft"], } - cc_library_static { - name: "webrtc_audio_frame_api", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio/audio_frame.cc", - "api/audio/channel_layout.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - "webrtc_rtp_packet_info", - ], + name: 
"webrtc_memory__aligned_malloc", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/memory/aligned_malloc.cc"], + host_supported: true, + static_libs: ["webrtc_rtc_base__checks"], } - -cc_library_static { - name: "webrtc_alr_detector", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/congestion_controller/goog_cc/alr_detector.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_timeutils", - "webrtc_rtc_event_log", - "webrtc_field_trial_parser", - "webrtc_rtc_event_pacing", - "webrtc_field_trial_based_config", - "webrtc_alr_experiment", - "webrtc_interval_budget", - ], -} - - filegroup { - name: "webrtc_stun_types", - srcs: [ - "api/transport/stun.cc", - ], + name: "webrtc_rtp__dependency_descriptor", + srcs: ["api/transport/rtp/dependency_descriptor.cc"], } - cc_library_static { - name: "webrtc_video_frame", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/video/video_frame.cc", - "api/video/video_frame_buffer.cc", - "api/video/video_source_interface.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - "webrtc_video_rtp_headers", - "webrtc_rtp_packet_info", - ], + name: "webrtc_audio_coding__g722_c", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_coding/codecs/g722/g722_interface.c"], + host_supported: true, + static_libs: ["webrtc_g722__g722_3p"], } - cc_library_static { - name: "webrtc_apm_logging", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/logging/apm_data_dumper.cc", - ], - cflags: [ - "-DWEBRTC_APM_DEBUG_DUMP=0", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - "webrtc_common_audio", - ], + name: "webrtc_system_wrappers__cpu_features_linux", + defaults: ["webrtc_defaults"], + srcs: ["system_wrappers/source/cpu_features_linux.c"], + host_supported: true, } - cc_library_static { - name: "webrtc_ilbc_c", - defaults: [ - "webrtc_defaults", - ], + name: "webrtc_generic_frame_descriptor__generic_frame_descriptor", + 
defaults: ["webrtc_defaults"], srcs: [ - "modules/audio_coding/codecs/ilbc/abs_quant.c", - "modules/audio_coding/codecs/ilbc/abs_quant_loop.c", - "modules/audio_coding/codecs/ilbc/augmented_cb_corr.c", - "modules/audio_coding/codecs/ilbc/bw_expand.c", - "modules/audio_coding/codecs/ilbc/cb_construct.c", - "modules/audio_coding/codecs/ilbc/cb_mem_energy.c", - "modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c", - "modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c", - "modules/audio_coding/codecs/ilbc/cb_search.c", - "modules/audio_coding/codecs/ilbc/cb_search_core.c", - "modules/audio_coding/codecs/ilbc/cb_update_best_index.c", - "modules/audio_coding/codecs/ilbc/chebyshev.c", - "modules/audio_coding/codecs/ilbc/comp_corr.c", - "modules/audio_coding/codecs/ilbc/constants.c", - "modules/audio_coding/codecs/ilbc/create_augmented_vec.c", - "modules/audio_coding/codecs/ilbc/decode.c", - "modules/audio_coding/codecs/ilbc/decode_residual.c", - "modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c", - "modules/audio_coding/codecs/ilbc/do_plc.c", - "modules/audio_coding/codecs/ilbc/encode.c", - "modules/audio_coding/codecs/ilbc/energy_inverse.c", - "modules/audio_coding/codecs/ilbc/enh_upsample.c", - "modules/audio_coding/codecs/ilbc/enhancer.c", - "modules/audio_coding/codecs/ilbc/enhancer_interface.c", - "modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c", - "modules/audio_coding/codecs/ilbc/frame_classify.c", - "modules/audio_coding/codecs/ilbc/gain_dequant.c", - "modules/audio_coding/codecs/ilbc/gain_quant.c", - "modules/audio_coding/codecs/ilbc/get_cd_vec.c", - "modules/audio_coding/codecs/ilbc/get_lsp_poly.c", - "modules/audio_coding/codecs/ilbc/get_sync_seq.c", - "modules/audio_coding/codecs/ilbc/hp_input.c", - "modules/audio_coding/codecs/ilbc/hp_output.c", - "modules/audio_coding/codecs/ilbc/ilbc.c", - "modules/audio_coding/codecs/ilbc/index_conv_dec.c", - "modules/audio_coding/codecs/ilbc/index_conv_enc.c", - 
"modules/audio_coding/codecs/ilbc/init_decode.c", - "modules/audio_coding/codecs/ilbc/init_encode.c", - "modules/audio_coding/codecs/ilbc/interpolate.c", - "modules/audio_coding/codecs/ilbc/interpolate_samples.c", - "modules/audio_coding/codecs/ilbc/lpc_encode.c", - "modules/audio_coding/codecs/ilbc/lsf_check.c", - "modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c", - "modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c", - "modules/audio_coding/codecs/ilbc/lsf_to_lsp.c", - "modules/audio_coding/codecs/ilbc/lsf_to_poly.c", - "modules/audio_coding/codecs/ilbc/lsp_to_lsf.c", - "modules/audio_coding/codecs/ilbc/my_corr.c", - "modules/audio_coding/codecs/ilbc/nearest_neighbor.c", - "modules/audio_coding/codecs/ilbc/pack_bits.c", - "modules/audio_coding/codecs/ilbc/poly_to_lsf.c", - "modules/audio_coding/codecs/ilbc/poly_to_lsp.c", - "modules/audio_coding/codecs/ilbc/refiner.c", - "modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c", - "modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c", - "modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c", - "modules/audio_coding/codecs/ilbc/simple_lsf_quant.c", - "modules/audio_coding/codecs/ilbc/smooth.c", - "modules/audio_coding/codecs/ilbc/smooth_out_data.c", - "modules/audio_coding/codecs/ilbc/sort_sq.c", - "modules/audio_coding/codecs/ilbc/split_vq.c", - "modules/audio_coding/codecs/ilbc/state_construct.c", - "modules/audio_coding/codecs/ilbc/state_search.c", - "modules/audio_coding/codecs/ilbc/swap_bytes.c", - "modules/audio_coding/codecs/ilbc/unpack_bits.c", - "modules/audio_coding/codecs/ilbc/vq3.c", - "modules/audio_coding/codecs/ilbc/vq4.c", - "modules/audio_coding/codecs/ilbc/window32_w32.c", - "modules/audio_coding/codecs/ilbc/xcorr_coef.c", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_common_audio_c", - "webrtc_common_audio", + "common_video/generic_frame_descriptor/generic_frame_info.cc", + 
":webrtc_rtp__dependency_descriptor", ], + host_supported: true, + static_libs: ["webrtc_rtc_base__checks"], } - cc_library_static { - name: "webrtc_audio_encoder_multiopus", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio_codecs/opus/audio_encoder_multi_channel_opus.cc", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_audio_encoder_opus_config", - "webrtc_audio_codecs_api", - "webrtc_multiopus", - "libopus", - ], + name: "webrtc_rtc_base__platform_thread_types", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/platform_thread_types.cc"], + host_supported: true, } - cc_library_static { - name: "webrtc_utility", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/utility/source/process_thread_impl.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_module_api", - "webrtc_task_queue", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_common_audio", - ], + name: "webrtc_modules__module_api", + defaults: ["webrtc_defaults"], + srcs: ["modules/include/module_common_types.cc"], + host_supported: true, } - cc_library_static { - name: "webrtc_nack_module", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/video_coding/histogram.cc", - "modules/video_coding/nack_module.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_module_api", - "webrtc_time_delta", - "webrtc_timestamp", - "webrtc_field_trial", - "webrtc_field_trial_parser", - "webrtc_rtc_base_approved", - "webrtc_rtc_numerics", - "webrtc_system_wrappers", - "webrtc_utility", - ], + name: "webrtc_task_queue__task_queue", + defaults: ["webrtc_defaults"], + srcs: ["api/task_queue/task_queue_base.cc"], + host_supported: true, + static_libs: ["webrtc_rtc_base__checks"], } - cc_library_static { - name: "webrtc_audio_encoder_g722", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio_codecs/g722/audio_encoder_g722.cc", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_g722", - ], 
+ name: "webrtc_utility__pffft_wrapper", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_processing/utility/pffft_wrapper.cc"], + host_supported: true, + static_libs: ["webrtc_rtc_base__checks"], } - -cc_library_static { - name: "webrtc_audio_coding_isac_c", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_coding/codecs/isac/main/source/arith_routines.c", - "modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c", - "modules/audio_coding/codecs/isac/main/source/arith_routines_logist.c", - "modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.c", - "modules/audio_coding/codecs/isac/main/source/crc.c", - "modules/audio_coding/codecs/isac/main/source/decode.c", - "modules/audio_coding/codecs/isac/main/source/decode_bwe.c", - "modules/audio_coding/codecs/isac/main/source/encode.c", - "modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.c", - "modules/audio_coding/codecs/isac/main/source/entropy_coding.c", - "modules/audio_coding/codecs/isac/main/source/filterbanks.c", - "modules/audio_coding/codecs/isac/main/source/intialize.c", - "modules/audio_coding/codecs/isac/main/source/isac.c", - "modules/audio_coding/codecs/isac/main/source/lattice.c", - "modules/audio_coding/codecs/isac/main/source/lpc_analysis.c", - "modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.c", - "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.c", - "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.c", - "modules/audio_coding/codecs/isac/main/source/lpc_tables.c", - "modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.c", - "modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.c", - "modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c", - "modules/audio_coding/codecs/isac/main/source/transform.c", - ], - static_libs: [ - "webrtc_fft", - "webrtc_rtc_base_checks", - "webrtc_isac_vad", - "webrtc_rtc_base_approved", - "webrtc_common_audio_c", - 
"webrtc_common_audio", - ], -} - - -cc_library_static { - name: "webrtc_g711", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_coding/codecs/g711/audio_decoder_pcm.cc", - "modules/audio_coding/codecs/g711/audio_encoder_pcm.cc", - ], - static_libs: [ - "webrtc_g711_c", - "webrtc_rtc_base_checks", - "webrtc_time_delta", - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_legacy_encoded_audio_frame", - ], -} - - -cc_library_static { - name: "webrtc_audio_decoder_multiopus", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio_codecs/opus/audio_decoder_multi_channel_opus.cc", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_multiopus", - "libopus", - ], -} - - -cc_library_static { - name: "webrtc_fir_filter_factory", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "common_audio/fir_filter_c.cc", - "common_audio/fir_filter_factory.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - "webrtc_common_audio_sse2", - ], -} - - -cc_library_static { - name: "webrtc_ilbc", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.cc", - "modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_time_delta", - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_legacy_encoded_audio_frame", - "webrtc_common_audio", - "webrtc_ilbc_c", - ], -} - - filegroup { - name: "webrtc_neteq_api", - srcs: [ - "api/neteq/neteq.cc", - ], + name: "webrtc_audio_processing__rms_level", + srcs: ["modules/audio_processing/rms_level.cc"], } +cc_library_static { + name: "webrtc_utility__cascaded_biquad_filter", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_processing/utility/cascaded_biquad_filter.cc"], + host_supported: true, + static_libs: ["webrtc_rtc_base__checks"], +} cc_library_static { - name: "webrtc_audio_encoder_cng", - defaults: [ - 
"webrtc_defaults", - ], + name: "webrtc_synchronization__yield_policy", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/synchronization/yield_policy.cc"], + host_supported: true, + static_libs: ["webrtc_rtc_base__checks"], +} + +cc_library_static { + name: "webrtc_rtc_base__stringutils", + defaults: ["webrtc_defaults"], srcs: [ - "modules/audio_coding/codecs/cng/audio_encoder_cng.cc", + "rtc_base/string_encode.cc", + "rtc_base/string_to_number.cc", + "rtc_base/string_utils.cc", + "rtc_base/strings/string_builder.cc", + "rtc_base/strings/string_format.cc", ], + host_supported: true, + static_libs: ["webrtc_rtc_base__checks"], +} + +cc_library_static { + name: "webrtc_rtc_base__criticalsection", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/deprecated/recursive_critical_section.cc"], + host_supported: true, static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_time_delta", - "webrtc_audio_codecs_api", - "webrtc_cng", - "webrtc_common_audio", + "webrtc_synchronization__yield", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__platform_thread_types", ], } - cc_library_static { - name: "webrtc_fixed_digital", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/agc2/fixed_digital_level_estimator.cc", - "modules/audio_processing/agc2/interpolated_gain_curve.cc", - "modules/audio_processing/agc2/limiter.cc", - ], - cflags: [ - "-DWEBRTC_APM_DEBUG_DUMP=0", - ], + name: "webrtc_system__file_wrapper", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/system/file_wrapper.cc"], + host_supported: true, static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - "webrtc_agc2_common", - "webrtc_system_wrappers_metrics", - "webrtc_common_audio", - "webrtc_apm_logging", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__criticalsection", ], } - cc_library_static { - name: "webrtc_rtc_event_audio", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.cc", - 
"logging/rtc_event_log/events/rtc_event_audio_playout.cc", - "logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.cc", - "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.cc", - ], + name: "webrtc_synchronization__mutex", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/synchronization/mutex.cc"], + host_supported: true, static_libs: [ - "webrtc_audio_network_adaptor_config", - "webrtc_rtc_base_checks", - "webrtc_rtc_event_log", - "webrtc_rtc_stream_config", + "webrtc_synchronization__yield", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__platform_thread_types", ], } +cc_library_static { + name: "webrtc_synchronization__rw_lock_wrapper", + defaults: ["webrtc_defaults"], + srcs: [ + "rtc_base/synchronization/rw_lock_wrapper.cc", + "rtc_base/synchronization/rw_lock_posix.cc", + ], + host_supported: true, +} cc_library_static { - name: "webrtc_min_video_bitrate_experiment", - defaults: [ - "webrtc_defaults", - ], + name: "webrtc_time__timestamp_extrapolator", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/time/timestamp_extrapolator.cc"], + host_supported: true, + static_libs: ["webrtc_synchronization__rw_lock_wrapper"], +} + +cc_library_static { + name: "webrtc_utility__legacy_delay_estimator", + defaults: ["webrtc_defaults"], srcs: [ - "rtc_base/experiments/min_video_bitrate_experiment.cc", + "modules/audio_processing/utility/delay_estimator.cc", + "modules/audio_processing/utility/delay_estimator_wrapper.cc", ], + host_supported: true, + static_libs: ["webrtc_rtc_base__checks"], +} + +cc_library_static { + name: "webrtc_synchronization__sequence_checker", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/synchronization/sequence_checker.cc"], + host_supported: true, static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_logging", - "webrtc_data_rate", - "webrtc_field_trial", - "webrtc_field_trial_parser", - "webrtc_video_frame", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__platform_thread_types", + 
"webrtc_task_queue__task_queue", + "webrtc_rtc_base__stringutils", + "webrtc_rtc_base__criticalsection", + "webrtc_synchronization__mutex", ], } - cc_library_static { - name: "webrtc_encoded_image", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/video/encoded_image.cc", - ], + name: "webrtc_video__video_bitrate_allocation", + defaults: ["webrtc_defaults"], + srcs: ["api/video/video_bitrate_allocation.cc"], + host_supported: true, static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - "webrtc_video_rtp_headers", - "webrtc_rtp_packet_info", - "webrtc_video_frame", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__stringutils", ], } - cc_library_static { - name: "webrtc_legacy_agc", - defaults: [ - "webrtc_defaults", - ], + name: "webrtc_api__rtp_parameters", + defaults: ["webrtc_defaults"], srcs: [ - "modules/audio_processing/agc/legacy/analog_agc.cc", - "modules/audio_processing/agc/legacy/digital_agc.cc", + "api/media_types.cc", + "api/rtp_parameters.cc", ], + host_supported: true, static_libs: [ - "webrtc_ooura_fft_size_256", - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - "webrtc_common_audio_c", - "webrtc_common_audio", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__stringutils", ], } - cc_library_static { - name: "webrtc_audio_decoder_g722", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio_codecs/g722/audio_decoder_g722.cc", - ], + name: "webrtc_video__video_adaptation", + defaults: ["webrtc_defaults"], + srcs: ["api/video/video_adaptation_counters.cc"], + host_supported: true, static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_g722", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__stringutils", ], } +cc_library_static { + name: "webrtc_audio_processing__config", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_processing/include/config.cc"], + host_supported: true, +} cc_library_static { - name: "webrtc_pcm16b", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - 
"modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc", - "modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc", - "modules/audio_coding/codecs/pcm16b/pcm16b_common.cc", - ], + name: "webrtc_api__audio_options_api", + defaults: ["webrtc_defaults"], + srcs: ["api/audio_options.cc"], + host_supported: true, + static_libs: ["webrtc_rtc_base__stringutils"], +} + +cc_library_static { + name: "webrtc_units__data_size", + defaults: ["webrtc_defaults"], + srcs: ["api/units/data_size.cc"], + host_supported: true, static_libs: [ - "webrtc_pcm16b_c", - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_legacy_encoded_audio_frame", - "webrtc_g711", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__stringutils", ], } - -cc_library_static { - name: "webrtc_audio_coding_isac", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_coding/codecs/isac/main/source/audio_decoder_isac.cc", - "modules/audio_coding/codecs/isac/main/source/audio_encoder_isac.cc", - ], - static_libs: [ - "webrtc_audio_codecs_api", - "webrtc_audio_coding_isac_c", - ], -} - - -cc_library_static { - name: "webrtc_video_interfaces", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/transport/media/video_transport.cc", - ], - static_libs: [ - "webrtc_encoded_image", - ], -} - - -cc_library_static { - name: "webrtc_video_frame_i420", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/video/i420_buffer.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_aligned_malloc", - "webrtc_video_rtp_headers", - "webrtc_rtc_base", - "webrtc_video_frame", - "libyuv", - ], -} - - -cc_library_static { - name: "webrtc_video_frame_i010", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/video/i010_buffer.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_aligned_malloc", - "webrtc_video_rtp_headers", - "webrtc_rtc_base", - "webrtc_video_frame", - "libyuv", - ], -} - - -cc_library_static { - name: 
"webrtc_media_transport_interface", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/transport/media/media_transport_config.cc", - "api/transport/media/media_transport_interface.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_stringutils", - "webrtc_audio_interfaces", - "webrtc_data_rate", - "webrtc_network_control", - "webrtc_rtc_error", - "webrtc_rtc_base_approved", - "webrtc_rtc_base", - "webrtc_video_interfaces", - ], -} - - -cc_library_static { - name: "webrtc_audio_encoder_isac_float", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio_codecs/isac/audio_encoder_isac_float.cc", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_audio_coding_isac", - ], -} - - -cc_library_static { - name: "webrtc_audio_frame_operations", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "audio/utility/audio_frame_operations.cc", - "audio/utility/channel_mixer.cc", - "audio/utility/channel_mixing_matrix.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_field_trial", - "webrtc_rtc_base_approved", - "webrtc_common_audio", - "webrtc_audio_frame_api", - ], -} - - -cc_library_static { - name: "webrtc_audio_processing_api", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/include/audio_processing.cc", - ], - static_libs: [ - "webrtc_audio_processing_statistics", - "webrtc_file_wrapper", - "webrtc_audio_processing_config", - "webrtc_rtc_base_approved", - "webrtc_aec3_config", - "webrtc_audio_frame_api", - ], -} - - -cc_library_static { - name: "webrtc_transient_suppressor_impl", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/transient/moving_moments.cc", - "modules/audio_processing/transient/transient_detector.cc", - "modules/audio_processing/transient/transient_suppressor_impl.cc", - "modules/audio_processing/transient/wpd_node.cc", - "modules/audio_processing/transient/wpd_tree.cc", - ], - static_libs: [ - 
"webrtc_ooura_fft_size_256", - "webrtc_rtc_base_checks", - "webrtc_logging", - "webrtc_common_audio_c", - "webrtc_common_audio", - "webrtc_fir_filter_factory", - ], -} - - -cc_library_static { - name: "webrtc_audio_encoder_ilbc", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio_codecs/ilbc/audio_encoder_ilbc.cc", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_ilbc", - ], -} - - -cc_library_static { - name: "webrtc_rtp_video_header", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/rtp_rtcp/source/rtp_video_header.cc", - ], - static_libs: [ - "webrtc_video_rtp_headers", - "webrtc_video_frame", - ], -} - - -cc_library_static { - name: "webrtc_ooura_fft_size_128", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "common_audio/third_party/ooura/fft_size_128/ooura_fft.cc", - ], - arch: { - x86: { - srcs: [ - "common_audio/third_party/ooura/fft_size_128/ooura_fft_sse2.cc", - ], - cflags: [ - "-msse2", - ], - }, - x86_64: { - srcs: [ - "common_audio/third_party/ooura/fft_size_128/ooura_fft_sse2.cc", - ], - cflags: [ - "-msse2", - ], - }, - }, -} - - -cc_library_static { - name: "webrtc_noise_level_estimator", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/agc2/down_sampler.cc", - "modules/audio_processing/agc2/noise_level_estimator.cc", - "modules/audio_processing/agc2/noise_spectrum_estimator.cc", - "modules/audio_processing/agc2/signal_classifier.cc", - ], - cflags: [ - "-DWEBRTC_APM_DEBUG_DUMP=0", - ], - static_libs: [ - "webrtc_ooura_fft_size_128", - "webrtc_rtc_base_checks", - "webrtc_biquad_filter", - "webrtc_common_audio", - "webrtc_apm_logging", - ], -} - - -cc_library_static { - name: "webrtc_audio_buffer", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/audio_buffer.cc", - "modules/audio_processing/splitting_filter.cc", - "modules/audio_processing/three_band_filter_bank.cc", - ], - cflags: [ - "-DWEBRTC_APM_DEBUG_DUMP=0", - ], - 
static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_common_audio_c", - "webrtc_common_audio", - "webrtc_audio_processing_api", - ], -} - - -cc_library_static { - name: "webrtc_audio_decoder_isac_float", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio_codecs/isac/audio_decoder_isac_float.cc", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_audio_coding_isac", - ], -} - - -cc_library_static { - name: "webrtc_vad", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/vad/gmm.cc", - "modules/audio_processing/vad/pitch_based_vad.cc", - "modules/audio_processing/vad/pitch_internal.cc", - "modules/audio_processing/vad/pole_zero_filter.cc", - "modules/audio_processing/vad/standalone_vad.cc", - "modules/audio_processing/vad/vad_audio_proc.cc", - "modules/audio_processing/vad/vad_circular_buffer.cc", - "modules/audio_processing/vad/voice_activity_detector.cc", - ], - static_libs: [ - "webrtc_ooura_fft_size_256", - "webrtc_rtc_base_checks", - "webrtc_isac_vad", - "webrtc_common_audio_c", - "webrtc_common_audio", - "webrtc_audio_frame_operations", - ], -} - - -cc_library_static { - name: "webrtc_audio_device_generic", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_device/audio_device_generic.cc", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_audio_device_buffer", - ], -} - - -cc_library_static { - name: "webrtc_high_pass_filter", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/high_pass_filter.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_cascaded_biquad_filter", - "webrtc_audio_buffer", - ], -} - - -cc_library_static { - name: "webrtc_ns", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/ns/fast_math.cc", - "modules/audio_processing/ns/histograms.cc", - "modules/audio_processing/ns/noise_estimator.cc", - "modules/audio_processing/ns/noise_suppressor.cc", - 
"modules/audio_processing/ns/ns_fft.cc", - "modules/audio_processing/ns/prior_signal_model.cc", - "modules/audio_processing/ns/prior_signal_model_estimator.cc", - "modules/audio_processing/ns/quantile_noise_estimator.cc", - "modules/audio_processing/ns/signal_model.cc", - "modules/audio_processing/ns/signal_model_estimator.cc", - "modules/audio_processing/ns/speech_probability_estimator.cc", - "modules/audio_processing/ns/suppression_params.cc", - "modules/audio_processing/ns/wiener_filter.cc", - ], - cflags: [ - "-DWEBRTC_APM_DEBUG_DUMP=0", - ], - static_libs: [ - "webrtc_ooura_fft_size_256", - "webrtc_ooura_fft_size_128", - "webrtc_rtc_base_checks", - "webrtc_cascaded_biquad_filter", - "webrtc_field_trial", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers_metrics", - "webrtc_common_audio_c", - "webrtc_apm_logging", - "webrtc_audio_buffer", - "webrtc_high_pass_filter", - ], -} - - -cc_library_static { - name: "webrtc_common_video", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "common_video/bitrate_adjuster.cc", - "common_video/frame_rate_estimator.cc", - "common_video/h264/h264_bitstream_parser.cc", - "common_video/h264/h264_common.cc", - "common_video/h264/pps_parser.cc", - "common_video/h264/sps_parser.cc", - "common_video/h264/sps_vui_rewriter.cc", - "common_video/i420_buffer_pool.cc", - "common_video/incoming_video_stream.cc", - "common_video/libyuv/webrtc_libyuv.cc", - "common_video/video_frame_buffer.cc", - "common_video/video_render_frames.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_task_queue", - "webrtc_video_bitrate_allocation", - "webrtc_time_delta", - "webrtc_timestamp", - "webrtc_video_bitrate_allocator", - "webrtc_rtc_task_queue", - "webrtc_video_rtp_headers", - "webrtc_system_wrappers_metrics", - "webrtc_rtc_base", - "webrtc_rtc_h264_profile_id", - "webrtc_video_frame", - "webrtc_encoded_image", - "webrtc_video_frame_i420", - "webrtc_video_frame_i010", - "libyuv", - ], -} - - -cc_library_static { - name: 
"webrtc_audio_encoder_g711", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio_codecs/g711/audio_encoder_g711.cc", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_g711", - ], -} - - -cc_library_static { - name: "webrtc_audio_encoder_L16", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio_codecs/L16/audio_encoder_L16.cc", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_pcm16b", - ], -} - - -cc_library_static { - name: "webrtc_audio_frame_proxies", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/include/audio_frame_proxies.cc", - ], - static_libs: [ - "webrtc_audio_frame_api", - "webrtc_audio_processing_api", - ], -} - - -cc_library_static { - name: "webrtc_audio_decoder_ilbc", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio_codecs/ilbc/audio_decoder_ilbc.cc", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_ilbc", - ], -} - - -cc_library_static { - name: "webrtc_audio_decoder_g711", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio_codecs/g711/audio_decoder_g711.cc", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_g711", - ], -} - - -cc_library_static { - name: "webrtc_optionally_built_submodule_creators", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/optionally_built_submodule_creators.cc", - ], - static_libs: [ - "webrtc_transient_suppressor_impl", - ], -} - - -cc_library_static { - name: "webrtc_audio_decoder_L16", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio_codecs/L16/audio_decoder_L16.cc", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_pcm16b", - ], -} - - -cc_library_static { - name: "webrtc_video_codecs_api", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/video_codecs/sdp_video_format.cc", - 
"api/video_codecs/video_codec.cc", - "api/video_codecs/video_decoder.cc", - "api/video_codecs/video_decoder_factory.cc", - "api/video_codecs/video_encoder.cc", - "api/video_codecs/video_encoder_config.cc", - "api/video_codecs/vp8_frame_config.cc", - "api/video_codecs/vp8_temporal_layers.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_video_bitrate_allocation", - "webrtc_data_rate", - "webrtc_rtc_base_approved", - "webrtc_video_rtp_headers", - "webrtc_video_frame", - "webrtc_encoded_image", - ], -} - - -cc_library_static { - name: "webrtc_audio_network_adaptor", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.cc", - "modules/audio_coding/audio_network_adaptor/bitrate_controller.cc", - "modules/audio_coding/audio_network_adaptor/channel_controller.cc", - "modules/audio_coding/audio_network_adaptor/controller.cc", - "modules/audio_coding/audio_network_adaptor/controller_manager.cc", - "modules/audio_coding/audio_network_adaptor/debug_dump_writer.cc", - "modules/audio_coding/audio_network_adaptor/dtx_controller.cc", - "modules/audio_coding/audio_network_adaptor/event_log_writer.cc", - "modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.cc", - "modules/audio_coding/audio_network_adaptor/frame_length_controller.cc", - ], - static_libs: [ - "webrtc_audio_network_adaptor_config", - "webrtc_rtc_base_checks", - "webrtc_file_wrapper", - "webrtc_rtc_event_log", - "webrtc_field_trial", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_audio_codecs_api", - "webrtc_common_audio", - "webrtc_rtc_event_audio", - ], -} - - -cc_library_static { - name: "webrtc_level_estimation", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/agc/agc.cc", - "modules/audio_processing/agc/loudness_histogram.cc", - "modules/audio_processing/agc/utility.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_vad", - ], -} - - 
-cc_library_static { - name: "webrtc_media_stream_interface", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/media_stream_interface.cc", - ], - static_libs: [ - "webrtc_audio_processing_statistics", - "webrtc_rtc_base_checks", - "webrtc_rtp_parameters", - "webrtc_audio_options_api", - "webrtc_video_frame", - ], -} - - -cc_library_static { - name: "webrtc_audio_frame_manipulator", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_mixer/audio_frame_manipulator.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - "webrtc_audio_frame_api", - "webrtc_audio_frame_operations", - ], -} - - -cc_library_static { - name: "webrtc_quality_scaling_experiment", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/experiments/quality_scaling_experiment.cc", - ], - static_libs: [ - "webrtc_field_trial", - "webrtc_rtc_base_approved", - "webrtc_video_codecs_api", - ], -} - - -cc_library_static { - name: "webrtc_opus", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_coding/codecs/opus/audio_decoder_opus.cc", - "modules/audio_coding/codecs/opus/audio_encoder_opus.cc", - ], - cflags: [ - "-DWEBRTC_CODEC_ILBC", - "-DWEBRTC_CODEC_OPUS", - "-DWEBRTC_OPUS_SUPPORT_120MS_PTIME=1", - "-DWEBRTC_CODEC_ISAC", - "-DWEBRTC_CODEC_RED", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_field_trial", - "webrtc_rtc_base_approved", - "webrtc_rtc_numerics", - "webrtc_audio_encoder_opus_config", - "webrtc_opus_wrapper", - "webrtc_audio_codecs_api", - "webrtc_audio_coding_opus_common", - "webrtc_common_audio", - "webrtc_audio_network_adaptor", - "libopus", - ], -} - - filegroup { - name: "webrtc_aec_dump_interface", - srcs: [ - "modules/audio_processing/include/aec_dump.cc", - ], + name: "webrtc_rtc_base__rtc_operations_chain", + srcs: ["rtc_base/operations_chain.cc"], } - -cc_library_static { - name: "webrtc_voice_detection", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - 
"modules/audio_processing/voice_detection.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_common_audio_c", - "webrtc_audio_frame_api", - "webrtc_audio_processing_api", - "webrtc_audio_buffer", - ], -} - - -cc_library_static { - name: "webrtc_rtc_vp9_profile", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "media/base/vp9_profile.cc", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_video_codecs_api", - ], -} - - -cc_library_static { - name: "webrtc_aec3", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/aec3/adaptive_fir_filter.cc", - "modules/audio_processing/aec3/adaptive_fir_filter_erl.cc", - "modules/audio_processing/aec3/aec3_common.cc", - "modules/audio_processing/aec3/aec3_fft.cc", - "modules/audio_processing/aec3/aec_state.cc", - "modules/audio_processing/aec3/alignment_mixer.cc", - "modules/audio_processing/aec3/api_call_jitter_metrics.cc", - "modules/audio_processing/aec3/block_buffer.cc", - "modules/audio_processing/aec3/block_delay_buffer.cc", - "modules/audio_processing/aec3/block_framer.cc", - "modules/audio_processing/aec3/block_processor.cc", - "modules/audio_processing/aec3/block_processor_metrics.cc", - "modules/audio_processing/aec3/clockdrift_detector.cc", - "modules/audio_processing/aec3/coarse_filter_update_gain.cc", - "modules/audio_processing/aec3/comfort_noise_generator.cc", - "modules/audio_processing/aec3/decimator.cc", - "modules/audio_processing/aec3/dominant_nearend_detector.cc", - "modules/audio_processing/aec3/downsampled_render_buffer.cc", - "modules/audio_processing/aec3/echo_audibility.cc", - "modules/audio_processing/aec3/echo_canceller3.cc", - "modules/audio_processing/aec3/echo_path_delay_estimator.cc", - "modules/audio_processing/aec3/echo_path_variability.cc", - "modules/audio_processing/aec3/echo_remover.cc", - "modules/audio_processing/aec3/echo_remover_metrics.cc", - "modules/audio_processing/aec3/erl_estimator.cc", - 
"modules/audio_processing/aec3/erle_estimator.cc", - "modules/audio_processing/aec3/fft_buffer.cc", - "modules/audio_processing/aec3/filter_analyzer.cc", - "modules/audio_processing/aec3/frame_blocker.cc", - "modules/audio_processing/aec3/fullband_erle_estimator.cc", - "modules/audio_processing/aec3/matched_filter.cc", - "modules/audio_processing/aec3/matched_filter_lag_aggregator.cc", - "modules/audio_processing/aec3/moving_average.cc", - "modules/audio_processing/aec3/refined_filter_update_gain.cc", - "modules/audio_processing/aec3/render_buffer.cc", - "modules/audio_processing/aec3/render_delay_buffer.cc", - "modules/audio_processing/aec3/render_delay_controller.cc", - "modules/audio_processing/aec3/render_delay_controller_metrics.cc", - "modules/audio_processing/aec3/render_signal_analyzer.cc", - "modules/audio_processing/aec3/residual_echo_estimator.cc", - "modules/audio_processing/aec3/reverb_decay_estimator.cc", - "modules/audio_processing/aec3/reverb_frequency_response.cc", - "modules/audio_processing/aec3/reverb_model.cc", - "modules/audio_processing/aec3/reverb_model_estimator.cc", - "modules/audio_processing/aec3/signal_dependent_erle_estimator.cc", - "modules/audio_processing/aec3/spectrum_buffer.cc", - "modules/audio_processing/aec3/stationarity_estimator.cc", - "modules/audio_processing/aec3/subband_erle_estimator.cc", - "modules/audio_processing/aec3/subband_nearend_detector.cc", - "modules/audio_processing/aec3/subtractor.cc", - "modules/audio_processing/aec3/subtractor_output.cc", - "modules/audio_processing/aec3/subtractor_output_analyzer.cc", - "modules/audio_processing/aec3/suppression_filter.cc", - "modules/audio_processing/aec3/suppression_gain.cc", - ], - cflags: [ - "-DWEBRTC_APM_DEBUG_DUMP=0", - ], - static_libs: [ - "webrtc_ooura_fft_size_128", - "webrtc_rtc_base_checks", - "webrtc_cascaded_biquad_filter", - "webrtc_field_trial", - "webrtc_field_trial_parser", - "webrtc_rtc_base_approved", - "webrtc_aec3_config", - 
"webrtc_system_wrappers_metrics", - "webrtc_common_audio_c", - "webrtc_apm_logging", - "webrtc_audio_buffer", - "webrtc_high_pass_filter", - ], -} - - -cc_library_static { - name: "webrtc_audio_decoder_opus", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio_codecs/opus/audio_decoder_opus.cc", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_opus", - "libopus", - ], -} - - -cc_library_static { - name: "webrtc_rate_control_settings", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/experiments/rate_control_settings.cc", - ], - static_libs: [ - "webrtc_data_size", - "webrtc_field_trial", - "webrtc_field_trial_parser", - "webrtc_field_trial_based_config", - "webrtc_rtc_base_approved", - "webrtc_video_codecs_api", - ], -} - - -cc_library_static { - name: "webrtc_video_codec_interface", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/video_coding/include/video_codec_interface.cc", - "modules/video_coding/video_coding_defines.cc", - ], - static_libs: [ - "webrtc_module_api", - "webrtc_generic_frame_descriptor", - "webrtc_video_rtp_headers", - "webrtc_video_frame", - "webrtc_common_video", - "webrtc_video_codecs_api", - ], -} - - -cc_library_static { - name: "webrtc_libaom_av1_encoder", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/video_coding/codecs/av1/libaom_av1_encoder.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_logging", - "webrtc_video_frame", - "webrtc_encoded_image", - "webrtc_video_frame_i420", - "webrtc_video_frame_i010", - "webrtc_common_video", - "webrtc_video_codecs_api", - "webrtc_video_codec_interface", - "libaom", - ], -} - - -cc_library_static { - name: "webrtc_audio_device_impl", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_device/dummy/audio_device_dummy.cc", - "modules/audio_device/dummy/file_audio_device.cc", - "modules/audio_device/include/test_audio_device.cc", - 
"modules/audio_device/audio_device_data_observer.cc", - "modules/audio_device/audio_device_impl.cc", - "modules/audio_device/linux/alsasymboltable_linux.cc", - "modules/audio_device/linux/audio_device_alsa_linux.cc", - "modules/audio_device/linux/audio_mixer_manager_alsa_linux.cc", - "modules/audio_device/linux/latebindingsymboltable_linux.cc", - "modules/audio_device/linux/audio_device_pulse_linux.cc", - "modules/audio_device/linux/audio_mixer_manager_pulse_linux.cc", - "modules/audio_device/linux/pulseaudiosymboltable_linux.cc", - "modules/audio_device/dummy/file_audio_device_factory.cc", - ], - cflags: [ - "-DWEBRTC_ENABLE_LINUX_ALSA", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_task_queue", - "webrtc_file_wrapper", - "webrtc_field_trial", - "webrtc_repeating_task", - "webrtc_rtc_task_queue", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_system_wrappers_metrics", - "webrtc_rtc_base", - "webrtc_common_audio_c", - "webrtc_common_audio", - "webrtc_audio_device_buffer", - "webrtc_utility", - "webrtc_audio_device_generic", - ], -} - - -cc_library_static { - name: "webrtc_libaom_av1_decoder", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/video_coding/codecs/av1/libaom_av1_decoder.cc", - ], - static_libs: [ - "webrtc_logging", - "webrtc_encoded_image", - "webrtc_video_frame_i420", - "webrtc_video_frame_i010", - "webrtc_common_video", - "webrtc_video_codecs_api", - "webrtc_video_codec_interface", - "libaom", - "libyuv", - ], -} - - -cc_library_static { - name: "webrtc_neteq", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_coding/neteq/accelerate.cc", - "modules/audio_coding/neteq/audio_multi_vector.cc", - "modules/audio_coding/neteq/audio_vector.cc", - "modules/audio_coding/neteq/background_noise.cc", - "modules/audio_coding/neteq/buffer_level_filter.cc", - "modules/audio_coding/neteq/comfort_noise.cc", - "modules/audio_coding/neteq/cross_correlation.cc", - 
"modules/audio_coding/neteq/decision_logic.cc", - "modules/audio_coding/neteq/decoder_database.cc", - "modules/audio_coding/neteq/delay_manager.cc", - "modules/audio_coding/neteq/dsp_helper.cc", - "modules/audio_coding/neteq/dtmf_buffer.cc", - "modules/audio_coding/neteq/dtmf_tone_generator.cc", - "modules/audio_coding/neteq/expand.cc", - "modules/audio_coding/neteq/expand_uma_logger.cc", - "modules/audio_coding/neteq/histogram.cc", - "modules/audio_coding/neteq/merge.cc", - "modules/audio_coding/neteq/nack_tracker.cc", - "modules/audio_coding/neteq/neteq_impl.cc", - "modules/audio_coding/neteq/normal.cc", - "modules/audio_coding/neteq/packet.cc", - "modules/audio_coding/neteq/packet_buffer.cc", - "modules/audio_coding/neteq/post_decode_vad.cc", - "modules/audio_coding/neteq/preemptive_expand.cc", - "modules/audio_coding/neteq/random_vector.cc", - "modules/audio_coding/neteq/red_payload_splitter.cc", - "modules/audio_coding/neteq/statistics_calculator.cc", - "modules/audio_coding/neteq/sync_buffer.cc", - "modules/audio_coding/neteq/time_stretch.cc", - "modules/audio_coding/neteq/timestamp_scaler.cc", - ":webrtc_tick_timer", - ":webrtc_neteq_api", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_module_api", - "webrtc_field_trial", - "webrtc_field_trial_parser", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_audio_codecs_api", - "webrtc_system_wrappers_metrics", - "webrtc_common_audio_c", - "webrtc_audio_format_to_string", - "webrtc_rtp_headers", - "webrtc_rtp_packet_info", - "webrtc_cng", - "webrtc_common_audio", - "webrtc_audio_frame_api", - ], -} - - -cc_library_static { - name: "webrtc_pushback_controller", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/congestion_controller/goog_cc/congestion_window_pushback_controller.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_data_size", - "webrtc_network_control", - "webrtc_rate_control_settings", - ], -} - - -cc_library_static { - name: 
"webrtc_video_processing", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/video_processing/util/denoiser_filter.cc", - "modules/video_processing/util/denoiser_filter_c.cc", - "modules/video_processing/util/noise_estimation.cc", - "modules/video_processing/util/skin_detection.cc", - "modules/video_processing/video_denoiser.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_module_api", - "webrtc_rtc_base_approved", - "webrtc_video_rtp_headers", - "webrtc_video_processing_sse2", - "webrtc_common_audio", - "webrtc_video_frame", - "webrtc_utility", - "webrtc_video_frame_i420", - "webrtc_video_frame_i010", - "webrtc_common_video", - "libyuv", - ], -} - - -cc_library_static { - name: "webrtc_rtp_rtcp_format", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/rtp_rtcp/include/report_block_data.cc", - "modules/rtp_rtcp/include/rtp_rtcp_defines.cc", - "modules/rtp_rtcp/source/rtcp_packet.cc", - "modules/rtp_rtcp/source/rtcp_packet/app.cc", - "modules/rtp_rtcp/source/rtcp_packet/bye.cc", - "modules/rtp_rtcp/source/rtcp_packet/common_header.cc", - "modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc", - "modules/rtp_rtcp/source/rtcp_packet/dlrr.cc", - "modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.cc", - "modules/rtp_rtcp/source/rtcp_packet/extended_reports.cc", - "modules/rtp_rtcp/source/rtcp_packet/fir.cc", - "modules/rtp_rtcp/source/rtcp_packet/loss_notification.cc", - "modules/rtp_rtcp/source/rtcp_packet/nack.cc", - "modules/rtp_rtcp/source/rtcp_packet/pli.cc", - "modules/rtp_rtcp/source/rtcp_packet/psfb.cc", - "modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.cc", - "modules/rtp_rtcp/source/rtcp_packet/receiver_report.cc", - "modules/rtp_rtcp/source/rtcp_packet/remb.cc", - "modules/rtp_rtcp/source/rtcp_packet/remote_estimate.cc", - "modules/rtp_rtcp/source/rtcp_packet/report_block.cc", - "modules/rtp_rtcp/source/rtcp_packet/rrtr.cc", - "modules/rtp_rtcp/source/rtcp_packet/rtpfb.cc", - 
"modules/rtp_rtcp/source/rtcp_packet/sdes.cc", - "modules/rtp_rtcp/source/rtcp_packet/sender_report.cc", - "modules/rtp_rtcp/source/rtcp_packet/target_bitrate.cc", - "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.cc", - "modules/rtp_rtcp/source/rtcp_packet/tmmbn.cc", - "modules/rtp_rtcp/source/rtcp_packet/tmmbr.cc", - "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc", - "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc", - "modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc", - "modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc", - "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.cc", - "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.cc", - "modules/rtp_rtcp/source/rtp_header_extension_map.cc", - "modules/rtp_rtcp/source/rtp_header_extensions.cc", - "modules/rtp_rtcp/source/rtp_packet.cc", - "modules/rtp_rtcp/source/rtp_packet_received.cc", - "modules/rtp_rtcp/source/rtp_packet_to_send.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_module_api", - "webrtc_rtp_parameters", - "webrtc_time_delta", - "webrtc_network_control", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_video_rtp_headers", - "webrtc_audio_codecs_api", - "webrtc_rtp_headers", - "webrtc_video_frame", - "webrtc_common_video", - ], -} - - filegroup { - name: "webrtc_default_neteq_controller_factory", - srcs: [ - "api/neteq/default_neteq_controller_factory.cc", - ], + name: "webrtc_av1__scalable_video_controller", + srcs: ["modules/video_coding/codecs/av1/scalable_video_controller_no_layering.cc"], } - cc_library_static { - name: "webrtc_balanced_degradation_settings", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "rtc_base/experiments/balanced_degradation_settings.cc", - ], + name: "webrtc_rtc_base__timeutils", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/time_utils.cc"], + host_supported: true, static_libs: [ - "webrtc_field_trial", - "webrtc_field_trial_parser", - 
"webrtc_rtc_base_approved", - "webrtc_video_codecs_api", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__stringutils", ], } +cc_library_static { + name: "webrtc_units__time_delta", + defaults: ["webrtc_defaults"], + srcs: ["api/units/time_delta.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__stringutils", + ], +} + +cc_library_static { + name: "webrtc_rtc_base__rtc_event", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/event.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_synchronization__yield_policy", + ], +} + +cc_library_static { + name: "webrtc_units__timestamp", + defaults: ["webrtc_defaults"], + srcs: ["api/units/timestamp.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__stringutils", + "webrtc_units__time_delta", + ], +} + +cc_library_static { + name: "webrtc_units__frequency", + defaults: ["webrtc_defaults"], + srcs: ["api/units/frequency.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__stringutils", + "webrtc_units__time_delta", + ], +} + +cc_library_static { + name: "webrtc_rtc_base__weak_ptr", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/weak_ptr.cc"], + host_supported: true, + static_libs: ["webrtc_synchronization__sequence_checker"], +} + +cc_library_static { + name: "webrtc_rtc_base__platform_thread", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/platform_thread.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__platform_thread_types", + "webrtc_rtc_base__timeutils", + "webrtc_rtc_base__rtc_event", + ], +} + +cc_library_static { + name: "webrtc_task_utils__pending_task_safety_flag", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/task_utils/pending_task_safety_flag.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_synchronization__sequence_checker", + ], +} + 
+cc_library_static { + name: "webrtc_rtc_event_log__rtc_event_log", + defaults: ["webrtc_defaults"], + srcs: [ + "api/rtc_event_log/rtc_event.cc", + "api/rtc_event_log/rtc_event_log.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_task_queue__task_queue", + "webrtc_rtc_base__timeutils", + ], +} + +cc_library_static { + name: "webrtc_rtc_base__logging", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/logging.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__platform_thread_types", + "webrtc_rtc_base__stringutils", + "webrtc_rtc_base__criticalsection", + "webrtc_synchronization__mutex", + "webrtc_rtc_base__timeutils", + ], +} + +cc_library_static { + name: "webrtc_units__data_rate", + defaults: ["webrtc_defaults"], + srcs: ["api/units/data_rate.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__stringutils", + "webrtc_units__data_size", + "webrtc_units__time_delta", + "webrtc_units__frequency", + ], +} + +cc_library_static { + name: "webrtc_system_wrappers__field_trial", + defaults: ["webrtc_defaults"], + srcs: ["system_wrappers/source/field_trial.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__stringutils", + "webrtc_rtc_base__logging", + ], +} + +cc_library_static { + name: "webrtc_video_coding__chain_diff_calculator", + defaults: ["webrtc_defaults"], + srcs: ["modules/video_coding/chain_diff_calculator.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__logging", + ], +} + +cc_library_static { + name: "webrtc_transport__network_control", + defaults: ["webrtc_defaults"], + srcs: ["api/transport/network_types.cc"], + host_supported: true, + static_libs: [ + "webrtc_units__data_size", + "webrtc_units__time_delta", + "webrtc_units__timestamp", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_units__data_rate", + ], +} + +cc_library_static { + 
name: "webrtc_experiments__field_trial_parser", + defaults: ["webrtc_defaults"], + srcs: [ + "rtc_base/experiments/field_trial_list.cc", + "rtc_base/experiments/field_trial_parser.cc", + "rtc_base/experiments/field_trial_units.cc", + "rtc_base/experiments/struct_parameters_parser.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__stringutils", + "webrtc_units__data_size", + "webrtc_units__time_delta", + "webrtc_rtc_base__logging", + "webrtc_units__data_rate", + ], +} + +cc_library_static { + name: "webrtc_logging__rtc_event_pacing", + defaults: ["webrtc_defaults"], + srcs: ["logging/rtc_event_log/events/rtc_event_alr_state.cc"], + host_supported: true, + static_libs: ["webrtc_rtc_event_log__rtc_event_log"], +} + +cc_library_static { + name: "webrtc_transport__field_trial_based_config", + defaults: ["webrtc_defaults"], + srcs: ["api/transport/field_trial_based_config.cc"], + host_supported: true, + static_libs: ["webrtc_system_wrappers__field_trial"], +} + +cc_library_static { + name: "webrtc_video_coding__frame_dependencies_calculator", + defaults: ["webrtc_defaults"], + srcs: ["modules/video_coding/frame_dependencies_calculator.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_generic_frame_descriptor__generic_frame_descriptor", + "webrtc_rtc_base__logging", + ], +} + +cc_library_static { + name: "webrtc_rtc_base__rtc_task_queue_libevent", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/task_queue_libevent.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__platform_thread_types", + "webrtc_task_queue__task_queue", + "webrtc_rtc_base__criticalsection", + "webrtc_synchronization__mutex", + "webrtc_rtc_base__timeutils", + "webrtc_rtc_base__platform_thread", + "webrtc_rtc_base__logging", + ], +} + +cc_library_static { + name: "webrtc_api__rtc_error", + defaults: ["webrtc_defaults"], + srcs: ["api/rtc_error.cc"], + host_supported: true, 
+ static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__logging", + ], +} + +cc_library_static { + name: "webrtc_rtc_event_log__rtc_event_log_factory", + defaults: ["webrtc_defaults"], + srcs: ["api/rtc_event_log/rtc_event_log_factory.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_task_queue__task_queue", + "webrtc_rtc_event_log__rtc_event_log", + ], +} + +cc_library_static { + name: "webrtc_goog_cc__link_capacity_estimator", + defaults: ["webrtc_defaults"], + srcs: ["modules/congestion_controller/goog_cc/link_capacity_estimator.cc"], + host_supported: true, + static_libs: ["webrtc_units__data_rate"], +} + +cc_library_static { + name: "webrtc_video__video_bitrate_allocator", + defaults: ["webrtc_defaults"], + srcs: ["api/video/video_bitrate_allocator.cc"], + host_supported: true, + static_libs: [ + "webrtc_video__video_bitrate_allocation", + "webrtc_units__data_rate", + ], +} + +cc_library_static { + name: "webrtc_experiments__keyframe_interval_settings_experiment", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/experiments/keyframe_interval_settings.cc"], + host_supported: true, + static_libs: [ + "webrtc_experiments__field_trial_parser", + "webrtc_transport__field_trial_based_config", + ], +} + +cc_library_static { + name: "webrtc_rtc_base__rtc_task_queue", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/task_queue.cc"], + host_supported: true, + static_libs: ["webrtc_task_queue__task_queue"], +} + +cc_library_static { + name: "webrtc_task_queue__default_task_queue_factory", + defaults: ["webrtc_defaults"], + srcs: ["api/task_queue/default_task_queue_factory_libevent.cc"], + host_supported: true, + static_libs: [ + "webrtc_task_queue__task_queue", + "webrtc_rtc_base__rtc_task_queue_libevent", + ], +} + +cc_library_static { + name: "webrtc_rtc_base__rtc_base_approved", + defaults: ["webrtc_defaults"], + srcs: [ + "rtc_base/bit_buffer.cc", + "rtc_base/buffer_queue.cc", + "rtc_base/byte_buffer.cc", + 
"rtc_base/copy_on_write_buffer.cc", + "rtc_base/event_tracer.cc", + "rtc_base/location.cc", + "rtc_base/numerics/histogram_percentile_counter.cc", + "rtc_base/numerics/sample_counter.cc", + "rtc_base/race_checker.cc", + "rtc_base/random.cc", + "rtc_base/rate_statistics.cc", + "rtc_base/rate_tracker.cc", + "rtc_base/timestamp_aligner.cc", + "rtc_base/zero_memory.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_base64__base64", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__platform_thread_types", + "webrtc_rtc_base__stringutils", + "webrtc_rtc_base__criticalsection", + "webrtc_synchronization__mutex", + "webrtc_rtc_base__timeutils", + "webrtc_rtc_base__rtc_event", + "webrtc_rtc_base__platform_thread", + "webrtc_rtc_base__logging", + "webrtc_rtc_base__rtc_task_queue", + ], +} filegroup { - name: "webrtc_default_neteq_factory", - srcs: [ - "modules/audio_coding/neteq/default_neteq_factory.cc", - ], + name: "webrtc_adaptation__resource_adaptation_api", + srcs: ["api/adaptation/resource.cc"], } - cc_library_static { - name: "webrtc_encoded_frame", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/video_coding/encoded_frame.cc", - ], + name: "webrtc_api__rtc_event_log_output_file", + defaults: ["webrtc_defaults"], + srcs: ["api/rtc_event_log_output_file.cc"], + host_supported: true, static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_module_api", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_video_rtp_headers", - "webrtc_alr_experiment", - "webrtc_rtt_mult_experiment", - "webrtc_video_frame", - "webrtc_encoded_image", - "webrtc_video_frame_i420", - "webrtc_video_frame_i010", - "webrtc_rtp_video_header", - "webrtc_video_codec_interface", + "webrtc_rtc_base__checks", + "webrtc_system__file_wrapper", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_rtc_base__rtc_base_approved", ], } - cc_library_static { - name: "webrtc_stable_target_rate_experiment", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - 
"rtc_base/experiments/stable_target_rate_experiment.cc", - ], + name: "webrtc_experiments__jitter_upper_bound_experiment", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/experiments/jitter_upper_bound_experiment.cc"], + host_supported: true, static_libs: [ - "webrtc_field_trial_parser", - "webrtc_field_trial_based_config", - "webrtc_rate_control_settings", + "webrtc_system_wrappers__field_trial", + "webrtc_rtc_base__rtc_base_approved", ], } +cc_library_static { + name: "webrtc_agc2__biquad_filter", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_processing/agc2/biquad_filter.cc"], + host_supported: true, + static_libs: ["webrtc_rtc_base__rtc_base_approved"], +} cc_library_static { - name: "webrtc_audio_mixer_impl", - defaults: [ - "webrtc_defaults", - ], + name: "webrtc_rtc_base__rtc_numerics", + defaults: ["webrtc_defaults"], srcs: [ - "modules/audio_mixer/audio_mixer_impl.cc", - "modules/audio_mixer/default_output_rate_calculator.cc", - "modules/audio_mixer/frame_combiner.cc", + "rtc_base/numerics/event_based_exponential_moving_average.cc", + "rtc_base/numerics/event_rate_counter.cc", + "rtc_base/numerics/exp_filter.cc", + "rtc_base/numerics/moving_average.cc", + "rtc_base/numerics/sample_stats.cc", + "rtc_base/numerics/samples_stats_counter.cc", ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_units__time_delta", + "webrtc_units__timestamp", + "webrtc_units__data_rate", + "webrtc_rtc_base__rtc_base_approved", + ], +} + +cc_library_static { + name: "webrtc_experiments__cpu_speed_experiment", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/experiments/cpu_speed_experiment.cc"], + host_supported: true, + static_libs: [ + "webrtc_system_wrappers__field_trial", + "webrtc_rtc_base__rtc_base_approved", + ], +} + +cc_library_static { + name: "webrtc_system_wrappers__system_wrappers", + defaults: ["webrtc_defaults"], + srcs: [ + "system_wrappers/source/clock.cc", + "system_wrappers/source/cpu_features.cc", + 
"system_wrappers/source/cpu_info.cc", + "system_wrappers/source/rtp_to_ntp_estimator.cc", + "system_wrappers/source/sleep.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_system_wrappers__cpu_features_linux", + "webrtc_synchronization__mutex", + "webrtc_synchronization__rw_lock_wrapper", + "webrtc_units__timestamp", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_rtc_base__rtc_numerics", + ], +} + +cc_library_static { + name: "webrtc_video__video_rtp_headers", + defaults: ["webrtc_defaults"], + srcs: [ + "api/video/color_space.cc", + "api/video/hdr_metadata.cc", + "api/video/video_content_type.cc", + "api/video/video_timing.cc", + ], + host_supported: true, + static_libs: ["webrtc_rtc_base__rtc_base_approved"], +} + +cc_library_static { + name: "webrtc_opus__audio_encoder_opus_config", + defaults: ["webrtc_defaults"], + srcs: [ + "api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.cc", + "api/audio_codecs/opus/audio_encoder_opus_config.cc", + ], + host_supported: true, + cflags: ["-DWEBRTC_OPUS_VARIABLE_COMPLEXITY=0"], + static_libs: ["webrtc_rtc_base__rtc_base_approved"], +} + +cc_library_static { + name: "webrtc_audio__aec3_config", + defaults: ["webrtc_defaults"], + srcs: ["api/audio/echo_canceller3_config.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", + ], +} + +cc_library_static { + name: "webrtc_audio_coding__webrtc_opus_wrapper", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_coding/codecs/opus/opus_interface.cc"], + host_supported: true, cflags: [ - "-DWEBRTC_APM_DEBUG_DUMP=0", + "-DWEBRTC_CODEC_ILBC", + "-DWEBRTC_CODEC_OPUS", + "-DWEBRTC_OPUS_SUPPORT_120MS_PTIME=1", + "-DWEBRTC_CODEC_ISAC", ], static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_system_wrappers_metrics", - "webrtc_common_audio", - "webrtc_audio_frame_api", - "webrtc_apm_logging", - 
"webrtc_fixed_digital", - "webrtc_audio_frame_operations", - "webrtc_audio_processing_api", - "webrtc_audio_frame_manipulator", + "webrtc_rtc_base__checks", + "webrtc_system_wrappers__field_trial", + "webrtc_rtc_base__rtc_base_approved", ], } - cc_library_static { - name: "webrtc_pffft_wrapper", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/utility/pffft_wrapper.cc", - ], + name: "webrtc_agc2__common", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_processing/agc2/agc2_common.cc"], + host_supported: true, static_libs: [ - "webrtc_rtc_base_checks", - "libpffft", + "webrtc_system_wrappers__field_trial", + "webrtc_rtc_base__rtc_base_approved", ], } - cc_library_static { - name: "webrtc_agc2_rnn_vad", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/agc2/rnn_vad/auto_correlation.cc", - "modules/audio_processing/agc2/rnn_vad/common.cc", - "modules/audio_processing/agc2/rnn_vad/features_extraction.cc", - "modules/audio_processing/agc2/rnn_vad/lp_residual.cc", - "modules/audio_processing/agc2/rnn_vad/pitch_search.cc", - "modules/audio_processing/agc2/rnn_vad/pitch_search_internal.cc", - "modules/audio_processing/agc2/rnn_vad/rnn.cc", - "modules/audio_processing/agc2/rnn_vad/spectral_features.cc", - "modules/audio_processing/agc2/rnn_vad/spectral_features_internal.cc", - ], + name: "webrtc_experiments__alr_experiment", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/experiments/alr_experiment.cc"], + host_supported: true, static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_pffft_wrapper", - "webrtc_rtc_base_approved", - "webrtc_biquad_filter", - "rnnoise_rnn_vad", + "webrtc_transport__field_trial_based_config", + "webrtc_rtc_base__rtc_base_approved", ], } - cc_library_static { - name: "webrtc_agc2_rnn_vad_with_level", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/agc2/vad_with_level.cc", - ], + name: "webrtc_experiments__quality_scaler_settings", + defaults: 
["webrtc_defaults"], + srcs: ["rtc_base/experiments/quality_scaler_settings.cc"], + host_supported: true, static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_agc2_rnn_vad", - "webrtc_common_audio", + "webrtc_system_wrappers__field_trial", + "webrtc_experiments__field_trial_parser", + "webrtc_transport__field_trial_based_config", + "webrtc_rtc_base__rtc_base_approved", ], } - cc_library_static { - name: "webrtc_adaptive_digital", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/agc2/adaptive_agc.cc", - "modules/audio_processing/agc2/adaptive_digital_gain_applier.cc", - "modules/audio_processing/agc2/adaptive_mode_level_estimator.cc", - "modules/audio_processing/agc2/saturation_protector.cc", - ], - cflags: [ - "-DWEBRTC_APM_DEBUG_DUMP=0", - ], + name: "webrtc_experiments__normalize_simulcast_size_experiment", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/experiments/normalize_simulcast_size_experiment.cc"], + host_supported: true, static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - "webrtc_agc2_common", - "webrtc_system_wrappers_metrics", - "webrtc_gain_applier", - "webrtc_common_audio", - "webrtc_agc2_rnn_vad_with_level", - "webrtc_apm_logging", - "webrtc_audio_processing_api", - "webrtc_noise_level_estimator", + "webrtc_system_wrappers__field_trial", + "webrtc_rtc_base__rtc_base_approved", ], } - cc_library_static { - name: "webrtc_level_estimation_agc", - defaults: [ - "webrtc_defaults", - ], + name: "webrtc_audio_codecs__audio_codecs_api", + defaults: ["webrtc_defaults"], srcs: [ - "modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.cc", - ], - cflags: [ - "-DWEBRTC_APM_DEBUG_DUMP=0", + "api/audio_codecs/audio_codec_pair_id.cc", + "api/audio_codecs/audio_decoder.cc", + "api/audio_codecs/audio_encoder.cc", + "api/audio_codecs/audio_format.cc", ], + host_supported: true, static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - "webrtc_agc2_common", - "webrtc_gain_applier", - 
"webrtc_common_audio", - "webrtc_agc2_rnn_vad_with_level", - "webrtc_apm_logging", - "webrtc_audio_processing_api", - "webrtc_noise_level_estimator", - "webrtc_vad", - "webrtc_adaptive_digital", - "webrtc_level_estimation", + "webrtc_rtc_base__checks", + "webrtc_units__time_delta", + "webrtc_rtc_base__rtc_base_approved", ], } - cc_library_static { - name: "webrtc_remote_bitrate_estimator", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/remote_bitrate_estimator/aimd_rate_control.cc", - "modules/remote_bitrate_estimator/bwe_defines.cc", - "modules/remote_bitrate_estimator/inter_arrival.cc", - "modules/remote_bitrate_estimator/overuse_detector.cc", - "modules/remote_bitrate_estimator/overuse_estimator.cc", - "modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc", - "modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc", - "modules/remote_bitrate_estimator/remote_estimator_proxy.cc", - ], - cflags: [ - "-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0", - ], + name: "webrtc_experiments__quality_rampup_experiment", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/experiments/quality_rampup_experiment.cc"], + host_supported: true, static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_module_api", - "webrtc_timestamp", - "webrtc_data_rate", - "webrtc_field_trial", - "webrtc_network_control", - "webrtc_field_trial_parser", - "webrtc_field_trial_based_config", - "webrtc_link_capacity_estimator", - "webrtc_rtc_base_approved", - "webrtc_rtc_numerics", - "webrtc_system_wrappers", - "webrtc_system_wrappers_metrics", - "webrtc_rtp_headers", - "webrtc_rtp_rtcp_format", + "webrtc_system_wrappers__field_trial", + "webrtc_experiments__field_trial_parser", + "webrtc_transport__field_trial_based_config", + "webrtc_rtc_base__rtc_base_approved", ], } - cc_library_static { - name: "webrtc_agc", - defaults: [ - "webrtc_defaults", - ], + name: "webrtc_stats__rtc_stats", + defaults: ["webrtc_defaults"], srcs: [ - 
"modules/audio_processing/agc/agc_manager_direct.cc", - ], - cflags: [ - "-DWEBRTC_APM_DEBUG_DUMP=0", + "stats/rtc_stats.cc", + "stats/rtc_stats_report.cc", + "stats/rtcstats_objects.cc", ], + host_supported: true, static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_logging", - "webrtc_field_trial", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers_metrics", - "webrtc_common_audio_c", - "webrtc_common_audio", - "webrtc_apm_logging", - "webrtc_audio_buffer", - "webrtc_vad", - "webrtc_level_estimation", - "webrtc_level_estimation_agc", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", ], } - cc_library_static { - name: "webrtc_audio_encoder_opus", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio_codecs/opus/audio_encoder_opus.cc", - ], + name: "webrtc_system_wrappers__metrics", + defaults: ["webrtc_defaults"], + srcs: ["system_wrappers/source/metrics.cc"], + host_supported: true, static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_audio_encoder_opus_config", - "webrtc_audio_codecs_api", - "webrtc_opus", - "libopus", + "webrtc_rtc_base__checks", + "webrtc_synchronization__mutex", + "webrtc_rtc_base__rtc_base_approved", ], } - -cc_library_static { - name: "webrtc_transport_feedback", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/congestion_controller/rtp/transport_feedback_adapter.cc", - "modules/congestion_controller/rtp/transport_feedback_demuxer.cc", - ], - static_libs: [ - "webrtc_sent_packet", - "webrtc_rtc_base_checks", - "webrtc_data_size", - "webrtc_timestamp", - "webrtc_field_trial", - "webrtc_network_control", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_rtc_base", - "webrtc_rtp_rtcp_format", - ], -} - - -cc_library_static { - name: "webrtc_builtin_audio_decoder_factory", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio_codecs/builtin_audio_decoder_factory.cc", - ], - cflags: [ - "-DWEBRTC_USE_BUILTIN_ILBC=1", - "-DWEBRTC_USE_BUILTIN_OPUS=1", - 
"-DWEBRTC_USE_BUILTIN_ISAC_FIX=0", - "-DWEBRTC_USE_BUILTIN_ISAC_FLOAT=1", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_audio_decoder_multiopus", - "webrtc_audio_decoder_g722", - "webrtc_audio_decoder_ilbc", - "webrtc_audio_decoder_g711", - "webrtc_audio_decoder_L16", - "webrtc_audio_decoder_opus", - ], -} - - -cc_library_static { - name: "webrtc_audio_coding", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_coding/acm2/acm_receiver.cc", - "modules/audio_coding/acm2/acm_remixing.cc", - "modules/audio_coding/acm2/acm_resampler.cc", - "modules/audio_coding/acm2/audio_coding_module.cc", - "modules/audio_coding/acm2/call_statistics.cc", - ":webrtc_neteq_api", - ":webrtc_default_neteq_factory", - ":webrtc_default_neteq_controller_factory", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_module_api", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_audio_codecs_api", - "webrtc_system_wrappers_metrics", - "webrtc_common_audio_c", - "webrtc_audio_format_to_string", - "webrtc_common_audio", - "webrtc_audio_frame_api", - "webrtc_neteq", - ], -} - - -cc_library_static { - name: "webrtc_aec3_factory", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio/echo_canceller3_factory.cc", - ], - cflags: [ - "-DWEBRTC_APM_DEBUG_DUMP=0", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_aec3_config", - "webrtc_aec3", - ], -} - - -cc_library_static { - name: "webrtc_rtc_event_rtp_rtcp", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.cc", - "logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.cc", - "logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc", - "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_event_log", - "webrtc_rtc_base_approved", - "webrtc_rtp_rtcp_format", - ], -} - - -cc_library_static { - name: 
"webrtc_vp9_helpers", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/video_coding/codecs/vp9/svc_config.cc", - "modules/video_coding/codecs/vp9/svc_rate_allocator.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_video_bitrate_allocation", - "webrtc_video_bitrate_allocator", - "webrtc_common_video", - "webrtc_video_codecs_api", - "webrtc_video_codec_interface", - "webrtc_stable_target_rate_experiment", - ], -} - - -cc_library_static { - name: "webrtc_null_aec_dump_factory", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/aec_dump/null_aec_dump_factory.cc", - ":webrtc_aec_dump_interface", - ], -} - - -cc_library_static { - name: "webrtc_api_video_encoded_frame", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/video/encoded_frame.cc", - ], - static_libs: [ - "webrtc_encoded_frame", - ], -} - - -cc_library_static { - name: "webrtc_rtc_event_bwe", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc", - "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.cc", - "logging/rtc_event_log/events/rtc_event_probe_cluster_created.cc", - "logging/rtc_event_log/events/rtc_event_probe_result_failure.cc", - "logging/rtc_event_log/events/rtc_event_probe_result_success.cc", - "logging/rtc_event_log/events/rtc_event_route_change.cc", - ], - static_libs: [ - "webrtc_rtc_event_log", - "webrtc_data_rate", - "webrtc_remote_bitrate_estimator", - ], -} - - -cc_library_static { - name: "webrtc_builtin_audio_encoder_factory", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/audio_codecs/builtin_audio_encoder_factory.cc", - ], - cflags: [ - "-DWEBRTC_USE_BUILTIN_ILBC=1", - "-DWEBRTC_USE_BUILTIN_OPUS=1", - "-DWEBRTC_USE_BUILTIN_ISAC_FIX=0", - "-DWEBRTC_USE_BUILTIN_ISAC_FLOAT=1", - ], - static_libs: [ - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_audio_encoder_multiopus", - "webrtc_audio_encoder_g722", - 
"webrtc_audio_encoder_ilbc", - "webrtc_audio_encoder_g711", - "webrtc_audio_encoder_L16", - "webrtc_audio_encoder_opus", - "libopus", - ], -} - - -cc_library_static { - name: "webrtc_audio_processing", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/audio_processing/audio_processing_builder_impl.cc", - "modules/audio_processing/audio_processing_impl.cc", - "modules/audio_processing/echo_control_mobile_impl.cc", - "modules/audio_processing/echo_detector/circular_buffer.cc", - "modules/audio_processing/echo_detector/mean_variance_estimator.cc", - "modules/audio_processing/echo_detector/moving_max.cc", - "modules/audio_processing/echo_detector/normalized_covariance_estimator.cc", - "modules/audio_processing/gain_control_impl.cc", - "modules/audio_processing/gain_controller2.cc", - "modules/audio_processing/level_estimator.cc", - "modules/audio_processing/residual_echo_detector.cc", - "modules/audio_processing/typing_detection.cc", - ":webrtc_rms_level", - ":webrtc_aec_dump_interface", - ], - cflags: [ - "-DWEBRTC_APM_DEBUG_DUMP=0", - ], - static_libs: [ - "webrtc_ooura_fft_size_256", - "webrtc_audio_processing_statistics", - "webrtc_rtc_base_checks", - "webrtc_audio_processing_config", - "webrtc_field_trial", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_aec3_config", - "webrtc_system_wrappers_metrics", - "webrtc_common_audio_c", - "webrtc_aecm_core", - "webrtc_gain_applier", - "webrtc_common_audio", - "webrtc_audio_frame_api", - "webrtc_apm_logging", - "webrtc_fir_filter_factory", - "webrtc_fixed_digital", - "webrtc_legacy_agc", - "webrtc_audio_frame_operations", - "webrtc_audio_processing_api", - "webrtc_audio_buffer", - "webrtc_vad", - "webrtc_high_pass_filter", - "webrtc_ns", - "webrtc_adaptive_digital", - "webrtc_audio_frame_proxies", - "webrtc_optionally_built_submodule_creators", - "webrtc_voice_detection", - "webrtc_aec3", - "webrtc_agc", - "webrtc_null_aec_dump_factory", - ], -} - - -cc_library_static { - name: 
"webrtc_probe_controller", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/congestion_controller/goog_cc/probe_controller.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_time_delta", - "webrtc_timestamp", - "webrtc_rtc_event_log", - "webrtc_logging", - "webrtc_data_rate", - "webrtc_network_control", - "webrtc_field_trial_parser", - "webrtc_rtc_event_pacing", - "webrtc_system_wrappers_metrics", - "webrtc_rtc_event_bwe", - ], -} - - -cc_library_static { - name: "webrtc_goog_cc_estimators", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.cc", - "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.cc", - "modules/congestion_controller/goog_cc/bitrate_estimator.cc", - "modules/congestion_controller/goog_cc/probe_bitrate_estimator.cc", - "modules/congestion_controller/goog_cc/robust_throughput_estimator.cc", - "modules/congestion_controller/goog_cc/trendline_estimator.cc", - ], - cflags: [ - "-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_timestamp", - "webrtc_rtc_event_log", - "webrtc_logging", - "webrtc_data_rate", - "webrtc_network_control", - "webrtc_field_trial_parser", - "webrtc_rtc_numerics", - "webrtc_remote_bitrate_estimator", - "webrtc_rtc_event_bwe", - ], -} - - -cc_library_static { - name: "webrtc_loss_based_controller", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc", - "modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc", - ], - cflags: [ - "-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_time_delta", - "webrtc_timestamp", - "webrtc_rtc_event_log", - "webrtc_logging", - "webrtc_data_rate", - "webrtc_field_trial", - "webrtc_network_control", - "webrtc_field_trial_parser", - "webrtc_system_wrappers_metrics", - 
"webrtc_remote_bitrate_estimator", - "webrtc_rtc_event_bwe", - ], -} - - -cc_library_static { - name: "webrtc_rtp_interfaces", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "call/rtp_config.cc", - ], - static_libs: [ - "webrtc_bitrate_settings", - "webrtc_rtc_base_checks", - "webrtc_rtp_parameters", - "webrtc_timestamp", - "webrtc_rtc_event_log", - "webrtc_rtc_base_approved", - "webrtc_rtp_headers", - "webrtc_api_crypto_options", - "webrtc_rtp_rtcp_format", - ], -} - - -cc_library_static { - name: "webrtc_video_stream_api", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "call/video_receive_stream.cc", - "call/video_send_stream.cc", - ], - static_libs: [ - "webrtc_transport_api", - "webrtc_rtc_base_checks", - "webrtc_rtp_parameters", - "webrtc_rtc_base_approved", - "webrtc_video_rtp_headers", - "webrtc_rtp_headers", - "webrtc_api_crypto_options", - "webrtc_video_frame", - "webrtc_common_video", - "webrtc_video_codecs_api", - "webrtc_rtp_rtcp_format", - "webrtc_rtp_interfaces", - ], -} - - -cc_library_static { - name: "webrtc_call_interfaces", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "call/audio_receive_stream.cc", - "call/audio_state.cc", - "call/call_config.cc", - "call/flexfec_receive_stream.cc", - "call/syncable.cc", - "call/audio_send_stream.cc", - ":webrtc_neteq_api", - ], - static_libs: [ - "webrtc_sent_packet", - "webrtc_transport_api", - "webrtc_audio_processing_statistics", - "webrtc_bitrate_settings", - "webrtc_rtc_base_checks", - "webrtc_task_queue", - "webrtc_rtp_parameters", - "webrtc_network_control", - "webrtc_rtc_error", - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_rtc_base", - "webrtc_audio_format_to_string", - "webrtc_rtp_headers", - "webrtc_api_crypto_options", - "webrtc_utility", - "webrtc_audio_processing_api", - "webrtc_rtp_rtcp_format", - "webrtc_audio_processing", - "webrtc_rtp_interfaces", - "webrtc_video_stream_api", - ], -} - - -cc_library_static { - name: "webrtc_rtc_media_base", - defaults: [ 
- "webrtc_defaults", - ], - srcs: [ - "media/base/adapted_video_track_source.cc", - "media/base/codec.cc", - "media/base/media_channel.cc", - "media/base/media_constants.cc", - "media/base/media_engine.cc", - "media/base/rid_description.cc", - "media/base/rtp_data_engine.cc", - "media/base/rtp_utils.cc", - "media/base/stream_params.cc", - "media/base/turn_utils.cc", - "media/base/video_adapter.cc", - "media/base/video_broadcaster.cc", - "media/base/video_common.cc", - "media/base/video_source_base.cc", - ":webrtc_stun_types", - ], - static_libs: [ - "webrtc_sigslot", - "webrtc_audio_processing_statistics", - "webrtc_rtc_base_checks", - "webrtc_stringutils", - "webrtc_file_wrapper", - "webrtc_sequence_checker", - "webrtc_video_bitrate_allocation", - "webrtc_rtp_parameters", - "webrtc_audio_options_api", - "webrtc_field_trial", - "webrtc_rtc_error", - "webrtc_rtc_task_queue", - "webrtc_rtc_base_approved", - "webrtc_video_rtp_headers", - "webrtc_audio_codecs_api", - "webrtc_rtc_base", - "webrtc_api_crypto_options", - "webrtc_rtc_h264_profile_id", - "webrtc_video_frame", - "webrtc_video_frame_i420", - "webrtc_video_frame_i010", - "webrtc_media_transport_interface", - "webrtc_common_video", - "webrtc_video_codecs_api", - "webrtc_media_stream_interface", - "webrtc_rtc_vp9_profile", - "webrtc_rtp_rtcp_format", - "webrtc_video_stream_api", - "webrtc_call_interfaces", - ], -} - - -cc_library_static { - name: "webrtc_video_capture_module", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/video_capture/device_info_impl.cc", - "modules/video_capture/video_capture_factory.cc", - "modules/video_capture/video_capture_impl.cc", - ], - static_libs: [ - "webrtc_module_api", - "webrtc_stringutils", - "webrtc_rw_lock_wrapper", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_video_rtp_headers", - "webrtc_video_frame", - "webrtc_video_frame_i420", - "webrtc_video_frame_i010", - "webrtc_common_video", - "webrtc_rtc_media_base", - "libyuv", - ], -} - - 
-cc_library_static { - name: "webrtc_fake_network", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "call/fake_network_pipe.cc", - ], - static_libs: [ - "webrtc_transport_api", - "webrtc_rtc_base_checks", - "webrtc_sequence_checker", - "webrtc_rtp_parameters", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_simulated_network", - "webrtc_utility", - "webrtc_call_interfaces", - ], -} - - -cc_library_static { - name: "webrtc_libjingle_peerconnection_api", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/candidate.cc", - "api/data_channel_interface.cc", - "api/dtls_transport_interface.cc", - "api/jsep.cc", - "api/jsep_ice_candidate.cc", - "api/peer_connection_interface.cc", - "api/proxy.cc", - "api/rtp_receiver_interface.cc", - "api/rtp_sender_interface.cc", - "api/rtp_transceiver_interface.cc", - "api/sctp_transport_interface.cc", - "api/stats_types.cc", - ":webrtc_neteq_api", - ], - static_libs: [ - "webrtc_audio_processing_statistics", - "webrtc_bitrate_settings", - "webrtc_rtc_base_checks", - "webrtc_task_queue", - "webrtc_rtp_parameters", - "webrtc_audio_options_api", - "webrtc_audio_interfaces", - "webrtc_timestamp", - "webrtc_rtc_event_log", - "webrtc_data_rate", - "webrtc_network_control", - "webrtc_rtc_error", - "webrtc_rtc_base_approved", - "webrtc_video_rtp_headers", - "webrtc_audio_codecs_api", - "webrtc_rtc_base", - "webrtc_rtp_packet_info", - "webrtc_api_crypto_options", - "webrtc_video_frame", - "webrtc_encoded_image", - "webrtc_video_interfaces", - "webrtc_media_transport_interface", - "webrtc_media_stream_interface", - "webrtc_rtc_media_base", - ], -} - - filegroup { - name: "webrtc_video_capture_internal_impl", - srcs: [ - "modules/video_capture/linux/device_info_linux.cc", - "modules/video_capture/linux/video_capture_linux.cc", - ], + name: "webrtc_neteq__tick_timer", + srcs: ["api/neteq/tick_timer.cc"], } - cc_library_static { - name: "webrtc_ice_log", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - 
"logging/rtc_event_log/events/rtc_event_dtls_transport_state.cc", - "logging/rtc_event_log/events/rtc_event_dtls_writable_state.cc", - "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.cc", - "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.cc", - "logging/rtc_event_log/ice_logger.cc", - ], + name: "webrtc_experiments__rtt_mult_experiment", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/experiments/rtt_mult_experiment.cc"], + host_supported: true, static_libs: [ - "webrtc_rtc_event_log", - "webrtc_rtc_base_approved", - "webrtc_libjingle_peerconnection_api", + "webrtc_system_wrappers__field_trial", + "webrtc_rtc_base__rtc_base_approved", ], } - cc_library_static { - name: "webrtc_bitrate_configurator", - defaults: [ - "webrtc_defaults", - ], + name: "webrtc_rnn_vad__rnn_vad", + defaults: ["webrtc_defaults"], srcs: [ - "call/rtp_bitrate_configurator.cc", + "modules/audio_processing/agc2/rnn_vad/auto_correlation.cc", + "modules/audio_processing/agc2/rnn_vad/common.cc", + "modules/audio_processing/agc2/rnn_vad/features_extraction.cc", + "modules/audio_processing/agc2/rnn_vad/lp_residual.cc", + "modules/audio_processing/agc2/rnn_vad/pitch_search.cc", + "modules/audio_processing/agc2/rnn_vad/pitch_search_internal.cc", + "modules/audio_processing/agc2/rnn_vad/rnn.cc", + "modules/audio_processing/agc2/rnn_vad/spectral_features.cc", + "modules/audio_processing/agc2/rnn_vad/spectral_features_internal.cc", ], + host_supported: true, static_libs: [ - "webrtc_bitrate_settings", - "webrtc_rtc_base_checks", - "webrtc_data_rate", - "webrtc_rtc_base_approved", - "webrtc_rtp_interfaces", - "webrtc_libjingle_peerconnection_api", + "webrtc_rtc_base__checks", + "webrtc_utility__pffft_wrapper", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_agc2__biquad_filter", ], } +cc_library_static { + name: "webrtc_rtc_base__rtc_base", + defaults: ["webrtc_defaults"], + srcs: [ + "rtc_base/async_invoker.cc", + "rtc_base/async_packet_socket.cc", + 
"rtc_base/async_resolver_interface.cc", + "rtc_base/async_socket.cc", + "rtc_base/async_tcp_socket.cc", + "rtc_base/async_udp_socket.cc", + "rtc_base/crc32.cc", + "rtc_base/crypt_string.cc", + "rtc_base/data_rate_limiter.cc", + "rtc_base/deprecated/signal_thread.cc", + "rtc_base/file_rotating_stream.cc", + "rtc_base/helpers.cc", + "rtc_base/http_common.cc", + "rtc_base/ip_address.cc", + "rtc_base/message_digest.cc", + "rtc_base/message_handler.cc", + "rtc_base/net_helper.cc", + "rtc_base/net_helpers.cc", + "rtc_base/network.cc", + "rtc_base/network_constants.cc", + "rtc_base/network_monitor.cc", + "rtc_base/network_route.cc", + "rtc_base/null_socket_server.cc", + "rtc_base/openssl_adapter.cc", + "rtc_base/openssl_certificate.cc", + "rtc_base/openssl_digest.cc", + "rtc_base/openssl_identity.cc", + "rtc_base/openssl_session_cache.cc", + "rtc_base/openssl_stream_adapter.cc", + "rtc_base/openssl_utility.cc", + "rtc_base/physical_socket_server.cc", + "rtc_base/proxy_info.cc", + "rtc_base/rtc_certificate.cc", + "rtc_base/rtc_certificate_generator.cc", + "rtc_base/socket.cc", + "rtc_base/socket_adapters.cc", + "rtc_base/socket_address.cc", + "rtc_base/socket_address_pair.cc", + "rtc_base/ssl_adapter.cc", + "rtc_base/ssl_certificate.cc", + "rtc_base/ssl_fingerprint.cc", + "rtc_base/ssl_identity.cc", + "rtc_base/ssl_stream_adapter.cc", + "rtc_base/stream.cc", + "rtc_base/thread.cc", + "rtc_base/unique_id_generator.cc", + "rtc_base/log_sinks.cc", + "rtc_base/ifaddrs_converter.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_sigslot__sigslot", + "webrtc_network__sent_packet", + "webrtc_base64__base64", + "webrtc_rtc_base__checks", + "webrtc_task_queue__task_queue", + "webrtc_rtc_base__stringutils", + "webrtc_system__file_wrapper", + "webrtc_synchronization__mutex", + "webrtc_synchronization__sequence_checker", + "webrtc_task_utils__pending_task_safety_flag", + "webrtc_system_wrappers__field_trial", + "webrtc_rtc_base__rtc_task_queue", + 
"webrtc_rtc_base__rtc_base_approved", + "webrtc_rtc_base__rtc_numerics", + ], +} cc_library_static { - name: "webrtc_rtp_rtcp", - defaults: [ - "webrtc_defaults", + name: "webrtc_common_audio__common_audio_cc", + defaults: ["webrtc_defaults"], + srcs: ["common_audio/signal_processing/dot_product_with_scale.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", ], +} + +cc_library_static { + name: "webrtc_pacing__interval_budget", + defaults: ["webrtc_defaults"], + srcs: ["modules/pacing/interval_budget.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", + ], +} + +cc_library_static { + name: "webrtc_common_audio__common_audio_c", + defaults: ["webrtc_defaults"], srcs: [ - "modules/rtp_rtcp/source/absolute_capture_time_receiver.cc", - "modules/rtp_rtcp/source/absolute_capture_time_sender.cc", - "modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc", - "modules/rtp_rtcp/source/dtmf_queue.cc", - "modules/rtp_rtcp/source/fec_private_tables_bursty.cc", - "modules/rtp_rtcp/source/fec_private_tables_random.cc", - "modules/rtp_rtcp/source/flexfec_header_reader_writer.cc", - "modules/rtp_rtcp/source/flexfec_receiver.cc", - "modules/rtp_rtcp/source/flexfec_sender.cc", - "modules/rtp_rtcp/source/forward_error_correction.cc", - "modules/rtp_rtcp/source/forward_error_correction_internal.cc", - "modules/rtp_rtcp/source/packet_loss_stats.cc", - "modules/rtp_rtcp/source/receive_statistics_impl.cc", - "modules/rtp_rtcp/source/remote_ntp_time_estimator.cc", - "modules/rtp_rtcp/source/rtcp_nack_stats.cc", - "modules/rtp_rtcp/source/rtcp_receiver.cc", - "modules/rtp_rtcp/source/rtcp_sender.cc", - "modules/rtp_rtcp/source/rtp_descriptor_authentication.cc", - "modules/rtp_rtcp/source/rtp_format.cc", - "modules/rtp_rtcp/source/rtp_format_h264.cc", - "modules/rtp_rtcp/source/rtp_format_video_generic.cc", - 
"modules/rtp_rtcp/source/rtp_format_vp8.cc", - "modules/rtp_rtcp/source/rtp_format_vp9.cc", - "modules/rtp_rtcp/source/rtp_header_extension_size.cc", - "modules/rtp_rtcp/source/rtp_packet_history.cc", - "modules/rtp_rtcp/source/rtp_packetizer_av1.cc", - "modules/rtp_rtcp/source/rtp_rtcp_impl.cc", - "modules/rtp_rtcp/source/rtp_sender.cc", - "modules/rtp_rtcp/source/rtp_sender_audio.cc", - "modules/rtp_rtcp/source/rtp_sender_egress.cc", - "modules/rtp_rtcp/source/rtp_sender_video.cc", - "modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc", - "modules/rtp_rtcp/source/rtp_sequence_number_map.cc", - "modules/rtp_rtcp/source/rtp_utility.cc", - "modules/rtp_rtcp/source/source_tracker.cc", - "modules/rtp_rtcp/source/time_util.cc", - "modules/rtp_rtcp/source/tmmbr_help.cc", - "modules/rtp_rtcp/source/ulpfec_generator.cc", - "modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc", - "modules/rtp_rtcp/source/ulpfec_receiver_impl.cc", - "modules/rtp_rtcp/source/video_rtp_depacketizer.cc", - "modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc", - "modules/rtp_rtcp/source/video_rtp_depacketizer_generic.cc", - "modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc", - "modules/rtp_rtcp/source/video_rtp_depacketizer_raw.cc", - "modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.cc", - "modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc", + "common_audio/ring_buffer.c", + "common_audio/signal_processing/auto_corr_to_refl_coef.c", + "common_audio/signal_processing/auto_correlation.c", + "common_audio/signal_processing/copy_set_operations.c", + "common_audio/signal_processing/cross_correlation.c", + "common_audio/signal_processing/division_operations.c", + "common_audio/signal_processing/downsample_fast.c", + "common_audio/signal_processing/energy.c", + "common_audio/signal_processing/filter_ar.c", + "common_audio/signal_processing/filter_ma_fast_q12.c", + "common_audio/signal_processing/get_hanning_window.c", + 
"common_audio/signal_processing/get_scaling_square.c", + "common_audio/signal_processing/ilbc_specific_functions.c", + "common_audio/signal_processing/levinson_durbin.c", + "common_audio/signal_processing/lpc_to_refl_coef.c", + "common_audio/signal_processing/min_max_operations.c", + "common_audio/signal_processing/randomization_functions.c", + "common_audio/signal_processing/real_fft.c", + "common_audio/signal_processing/refl_coef_to_lpc.c", + "common_audio/signal_processing/resample.c", + "common_audio/signal_processing/resample_48khz.c", + "common_audio/signal_processing/resample_by_2.c", + "common_audio/signal_processing/resample_by_2_internal.c", + "common_audio/signal_processing/resample_fractional.c", + "common_audio/signal_processing/spl_init.c", + "common_audio/signal_processing/spl_inl.c", + "common_audio/signal_processing/spl_sqrt.c", + "common_audio/signal_processing/splitting_filter.c", + "common_audio/signal_processing/sqrt_of_one_minus_x_squared.c", + "common_audio/signal_processing/vector_scaling_operations.c", + "common_audio/vad/vad_core.c", + "common_audio/vad/vad_filterbank.c", + "common_audio/vad/vad_gmm.c", + "common_audio/vad/vad_sp.c", + "common_audio/vad/webrtc_vad.c", + "common_audio/signal_processing/complex_fft.c", + "common_audio/signal_processing/complex_bit_reverse.c", + "common_audio/signal_processing/filter_ar_fast_q12.c", ], + host_supported: true, + static_libs: [ + "webrtc_spl_sqrt_floor__spl_sqrt_floor", + "webrtc_ooura__fft_size_256", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_common_audio__common_audio_cc", + ], +} + +cc_library_static { + name: "webrtc_aecm__aecm_core", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_processing/aecm/aecm_core.cc", + "modules/audio_processing/aecm/echo_control_mobile.cc", + "modules/audio_processing/aecm/aecm_core_c.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + 
"webrtc_utility__legacy_delay_estimator", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_common_audio__common_audio_c", + ], +} + +cc_library_static { + name: "webrtc_video_processing__video_processing_sse2", + defaults: ["webrtc_defaults"], + srcs: ["modules/video_processing/util/denoiser_filter_sse2.cc"], + host_supported: true, + cflags: ["-msse2"], + static_libs: [ + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + ], +} + +cc_library_static { + name: "webrtc_agc2__gain_applier", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_processing/agc2/gain_applier.cc"], + host_supported: true, + static_libs: ["webrtc_agc2__common"], +} + +cc_library_static { + name: "webrtc_task_utils__repeating_task", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/task_utils/repeating_task.cc"], + host_supported: true, + static_libs: [ + "webrtc_task_queue__task_queue", + "webrtc_synchronization__sequence_checker", + "webrtc_rtc_base__timeutils", + "webrtc_units__time_delta", + "webrtc_units__timestamp", + "webrtc_rtc_base__logging", + "webrtc_system_wrappers__system_wrappers", + ], +} + +cc_library_static { + name: "webrtc_rtc_base__audio_format_to_string", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/strings/audio_format_to_string.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__stringutils", + "webrtc_audio_codecs__audio_codecs_api", + ], +} + +cc_library_static { + name: "webrtc_memory__fifo_buffer", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/memory/fifo_buffer.cc"], + host_supported: true, + static_libs: [ + "webrtc_synchronization__mutex", + "webrtc_rtc_base__rtc_base", + ], +} + +cc_library_static { + name: "webrtc_api__rtp_headers", + defaults: ["webrtc_defaults"], + srcs: ["api/rtp_headers.cc"], + host_supported: true, + static_libs: [ + "webrtc_units__timestamp", + "webrtc_video__video_rtp_headers", + ], +} + +cc_library_static { + name: "webrtc_rtc_base__rate_limiter", + defaults: 
["webrtc_defaults"], + srcs: ["rtc_base/rate_limiter.cc"], + host_supported: true, + static_libs: [ + "webrtc_synchronization__mutex", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + ], +} + +cc_library_static { + name: "webrtc_audio_coding__audio_coding_opus_common", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_coding/codecs/opus/audio_coder_opus_common.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__stringutils", + "webrtc_audio_codecs__audio_codecs_api", + ], +} + +cc_library_static { + name: "webrtc_logging__rtc_stream_config", + defaults: ["webrtc_defaults"], + srcs: ["logging/rtc_event_log/rtc_stream_config.cc"], + host_supported: true, + static_libs: [ + "webrtc_api__rtp_parameters", + "webrtc_api__rtp_headers", + ], +} + +cc_library_static { + name: "webrtc_audio_coding__legacy_encoded_audio_frame", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_coding/codecs/legacy_encoded_audio_frame.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + ], +} + +cc_library_static { + name: "webrtc_audio_coding__webrtc_multiopus", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.cc", + "modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.cc", + ], + host_supported: true, cflags: [ - "-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0", + "-DWEBRTC_CODEC_ILBC", + "-DWEBRTC_CODEC_OPUS", + "-DWEBRTC_OPUS_SUPPORT_120MS_PTIME=1", + "-DWEBRTC_CODEC_ISAC", ], static_libs: [ - "webrtc_transport_api", - "webrtc_rtc_base_checks", - "webrtc_module_api", - "webrtc_task_queue", - "webrtc_timestamp_extrapolator", - "webrtc_sequence_checker", - "webrtc_video_bitrate_allocation", - "webrtc_rtp_parameters", - "webrtc_time_delta", - "webrtc_timestamp", - "webrtc_rtc_event_log", - "webrtc_data_rate", - 
"webrtc_field_trial_parser", - "webrtc_field_trial_based_config", - "webrtc_video_bitrate_allocator", - "webrtc_rtc_base_approved", - "webrtc_rtc_numerics", - "webrtc_system_wrappers", - "webrtc_video_rtp_headers", - "webrtc_audio_codecs_api", - "webrtc_system_wrappers_metrics", - "webrtc_rtp_headers", - "webrtc_rate_limiter", - "webrtc_rtp_packet_info", - "webrtc_video_frame", - "webrtc_rtc_event_audio", - "webrtc_encoded_image", - "webrtc_rtp_video_header", - "webrtc_common_video", - "webrtc_video_codecs_api", - "webrtc_rtp_rtcp_format", - "webrtc_remote_bitrate_estimator", - "webrtc_rtc_event_rtp_rtcp", - "webrtc_api_video_encoded_frame", - "webrtc_rtp_interfaces", - "webrtc_libjingle_peerconnection_api", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__stringutils", + "webrtc_units__time_delta", + "webrtc_rtc_base__logging", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_opus__audio_encoder_opus_config", + "webrtc_audio_coding__webrtc_opus_wrapper", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__audio_coding_opus_common", ], } - cc_library_static { - name: "webrtc_rtp_receiver", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "call/rtcp_demuxer.cc", - "call/rtp_demuxer.cc", - "call/rtp_rtcp_demuxer_helper.cc", - "call/rtp_stream_receiver_controller.cc", - "call/rtx_receive_stream.cc", - ], + name: "webrtc_api__rtp_packet_info", + defaults: ["webrtc_defaults"], + srcs: ["api/rtp_packet_info.cc"], + host_supported: true, static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_base_approved", - "webrtc_rtp_headers", - "webrtc_rtp_rtcp_format", - "webrtc_rtp_interfaces", - "webrtc_rtp_rtcp", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_api__rtp_headers", ], } +cc_library_static { + name: "webrtc_crypto__options", + defaults: ["webrtc_defaults"], + srcs: ["api/crypto/crypto_options.cc"], + host_supported: true, + static_libs: ["webrtc_rtc_base__rtc_base"], +} cc_library_static { - name: "webrtc_rtc_p2p", - defaults: [ - 
"webrtc_defaults", - ], - srcs: [ - "p2p/base/async_stun_tcp_socket.cc", - "p2p/base/basic_async_resolver_factory.cc", - "p2p/base/basic_ice_controller.cc", - "p2p/base/basic_packet_socket_factory.cc", - "p2p/base/connection.cc", - "p2p/base/connection_info.cc", - "p2p/base/default_ice_transport_factory.cc", - "p2p/base/dtls_transport.cc", - "p2p/base/dtls_transport_internal.cc", - "p2p/base/ice_controller_interface.cc", - "p2p/base/ice_credentials_iterator.cc", - "p2p/base/ice_transport_internal.cc", - "p2p/base/mdns_message.cc", - "p2p/base/p2p_constants.cc", - "p2p/base/p2p_transport_channel.cc", - "p2p/base/packet_transport_internal.cc", - "p2p/base/port.cc", - "p2p/base/port_allocator.cc", - "p2p/base/port_interface.cc", - "p2p/base/pseudo_tcp.cc", - "p2p/base/regathering_controller.cc", - "p2p/base/stun_port.cc", - "p2p/base/stun_request.cc", - "p2p/base/tcp_port.cc", - "p2p/base/transport_description.cc", - "p2p/base/transport_description_factory.cc", - "p2p/base/turn_port.cc", - "p2p/client/basic_port_allocator.cc", - "p2p/client/turn_port_factory.cc", - ":webrtc_stun_types", - ], + name: "webrtc_media__rtc_h264_profile_id", + defaults: ["webrtc_defaults"], + srcs: ["media/base/h264_profile_level_id.cc"], + host_supported: true, static_libs: [ - "webrtc_sigslot", - "webrtc_sent_packet", - "webrtc_base64", - "webrtc_rtc_base_checks", - "webrtc_weak_ptr", - "webrtc_rtc_event_log", - "webrtc_field_trial", - "webrtc_field_trial_parser", - "webrtc_rtc_error", - "webrtc_rtc_numerics", - "webrtc_system_wrappers_metrics", - "webrtc_rtc_base", - "webrtc_fifo_buffer", - "webrtc_api_crypto_options", - "webrtc_libjingle_peerconnection_api", - "webrtc_ice_log", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_rtc_base__rtc_base", ], } - cc_library_static { - name: "webrtc_pacing", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/pacing/bitrate_prober.cc", - "modules/pacing/paced_sender.cc", - 
"modules/pacing/pacing_controller.cc", - "modules/pacing/packet_router.cc", - "modules/pacing/round_robin_packet_queue.cc", - "modules/pacing/task_queue_paced_sender.cc", - ], + name: "webrtc_audio_coding__webrtc_cng", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_coding/codecs/cng/webrtc_cng.cc"], + host_supported: true, static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_module_api", - "webrtc_task_queue", - "webrtc_sequence_checker", - "webrtc_data_size", - "webrtc_time_delta", - "webrtc_timestamp", - "webrtc_rtc_event_log", - "webrtc_data_rate", - "webrtc_network_control", - "webrtc_field_trial_parser", - "webrtc_rtc_event_pacing", - "webrtc_field_trial_based_config", - "webrtc_rtc_task_queue", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_system_wrappers_metrics", - "webrtc_interval_budget", - "webrtc_utility", - "webrtc_rtp_rtcp_format", - "webrtc_remote_bitrate_estimator", - "webrtc_rtc_event_bwe", - "webrtc_rtp_rtcp", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_common_audio__common_audio_c", ], } - cc_library_static { - name: "webrtc_audio", - defaults: [ - "webrtc_defaults", - ], + name: "webrtc_common_audio__common_audio_sse2", + defaults: ["webrtc_defaults"], srcs: [ - "audio/audio_level.cc", - "audio/audio_receive_stream.cc", - "audio/audio_send_stream.cc", - "audio/audio_state.cc", - "audio/audio_transport_impl.cc", - "audio/channel_receive.cc", - "audio/channel_receive_frame_transformer_delegate.cc", - "audio/channel_send.cc", - "audio/channel_send_frame_transformer_delegate.cc", - "audio/null_audio_poller.cc", - "audio/remix_resample.cc", - ":webrtc_rms_level", - ":webrtc_neteq_api", + "common_audio/fir_filter_sse.cc", + "common_audio/resampler/sinc_resampler_sse.cc", ], + host_supported: true, + cflags: ["-msse2"], static_libs: [ - "webrtc_audio_network_adaptor_config", - "webrtc_transport_api", - "webrtc_rtc_base_checks", - "webrtc_task_queue", - "webrtc_sequence_checker", - 
"webrtc_rtp_parameters", - "webrtc_rtc_event_log", - "webrtc_field_trial", - "webrtc_field_trial_parser", - "webrtc_rtc_task_queue", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_audio_codecs_api", - "webrtc_system_wrappers_metrics", - "webrtc_rtc_base", - "webrtc_common_audio_c", - "webrtc_audio_format_to_string", - "webrtc_rtp_headers", - "webrtc_rate_limiter", - "webrtc_rtc_stream_config", - "webrtc_api_crypto_options", - "webrtc_common_audio", - "webrtc_bitrate_allocator", - "webrtc_audio_frame_api", - "webrtc_utility", - "webrtc_audio_encoder_cng", - "webrtc_rtc_event_audio", - "webrtc_audio_frame_operations", - "webrtc_audio_processing_api", - "webrtc_audio_frame_proxies", - "webrtc_rtp_rtcp_format", - "webrtc_remote_bitrate_estimator", - "webrtc_audio_coding", - "webrtc_aec3_factory", - "webrtc_audio_processing", - "webrtc_rtp_interfaces", - "webrtc_call_interfaces", - "webrtc_rtp_rtcp", - "webrtc_pacing", + "webrtc_rtc_base__checks", + "webrtc_memory__aligned_malloc", + "webrtc_rtc_base__rtc_base_approved", ], } +cc_library_static { + name: "webrtc_logging__rtc_event_video", + defaults: ["webrtc_defaults"], + srcs: [ + "logging/rtc_event_log/events/rtc_event_video_receive_stream_config.cc", + "logging/rtc_event_log/events/rtc_event_video_send_stream_config.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_logging__rtc_stream_config", + ], +} cc_library_static { - name: "webrtc_delay_based_bwe", - defaults: [ - "webrtc_defaults", - ], + name: "webrtc_common_audio__common_audio", + defaults: ["webrtc_defaults"], srcs: [ - "modules/congestion_controller/goog_cc/delay_based_bwe.cc", + "common_audio/audio_converter.cc", + "common_audio/audio_util.cc", + "common_audio/channel_buffer.cc", + "common_audio/real_fourier.cc", + "common_audio/real_fourier_ooura.cc", + "common_audio/resampler/push_resampler.cc", + "common_audio/resampler/push_sinc_resampler.cc", + 
"common_audio/resampler/resampler.cc", + "common_audio/resampler/sinc_resampler.cc", + "common_audio/smoothing_filter.cc", + "common_audio/vad/vad.cc", + "common_audio/wav_file.cc", + "common_audio/wav_header.cc", + "common_audio/window_generator.cc", ], + host_supported: true, + static_libs: [ + "webrtc_ooura__fft_size_256", + "webrtc_rtc_base__checks", + "webrtc_memory__aligned_malloc", + "webrtc_system__file_wrapper", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_common_audio__common_audio_c", + "webrtc_common_audio__common_audio_sse2", + ], +} + +cc_library_static { + name: "webrtc_call__simulated_network", + defaults: ["webrtc_defaults"], + srcs: ["call/simulated_network.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_synchronization__mutex", + "webrtc_synchronization__sequence_checker", + "webrtc_units__data_size", + "webrtc_units__time_delta", + "webrtc_units__timestamp", + "webrtc_units__data_rate", + "webrtc_rtc_base__rtc_base_approved", + ], +} + +cc_library_static { + name: "webrtc_call__bitrate_allocator", + defaults: ["webrtc_defaults"], + srcs: ["call/bitrate_allocator.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_synchronization__sequence_checker", + "webrtc_units__time_delta", + "webrtc_units__data_rate", + "webrtc_system_wrappers__field_trial", + "webrtc_transport__network_control", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_system_wrappers__metrics", + ], +} + +cc_library_static { + name: "webrtc_agc2__rnn_vad_with_level", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_processing/agc2/vad_with_level.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rnn_vad__rnn_vad", + "webrtc_common_audio__common_audio", + ], +} + +cc_library_static { + name: "webrtc_audio_coding__g722", + defaults: ["webrtc_defaults"], + srcs: [ + 
"modules/audio_coding/codecs/g722/audio_decoder_g722.cc", + "modules/audio_coding/codecs/g722/audio_encoder_g722.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_audio_coding__g722_c", + "webrtc_units__time_delta", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__legacy_encoded_audio_frame", + ], +} + +cc_library_static { + name: "webrtc_audio_device__audio_device_buffer", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_device/audio_device_buffer.cc", + "modules/audio_device/fine_audio_buffer.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_task_queue__task_queue", + "webrtc_synchronization__mutex", + "webrtc_rtc_base__rtc_task_queue", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_system_wrappers__metrics", + "webrtc_common_audio__common_audio_c", + ], +} + +cc_library_static { + name: "webrtc_audio__audio_frame_api", + defaults: ["webrtc_defaults"], + srcs: [ + "api/audio/audio_frame.cc", + "api/audio/channel_layout.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_api__rtp_packet_info", + ], +} + +cc_library_static { + name: "webrtc_goog_cc__alr_detector", + defaults: ["webrtc_defaults"], + srcs: ["modules/congestion_controller/goog_cc/alr_detector.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__timeutils", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_experiments__field_trial_parser", + "webrtc_logging__rtc_event_pacing", + "webrtc_transport__field_trial_based_config", + "webrtc_experiments__alr_experiment", + "webrtc_pacing__interval_budget", + ], +} + +filegroup { + name: "webrtc_transport__stun_types", + srcs: ["api/transport/stun.cc"], +} + +cc_library_static { + name: "webrtc_video__video_frame", + defaults: ["webrtc_defaults"], 
+ srcs: [ + "api/video/video_frame.cc", + "api/video/video_frame_buffer.cc", + "api/video/video_source_interface.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_video__video_rtp_headers", + "webrtc_api__rtp_packet_info", + ], +} + +cc_library_static { + name: "webrtc_audio_processing__apm_logging", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_processing/logging/apm_data_dumper.cc"], + host_supported: true, + cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"], + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_common_audio__common_audio", + ], +} + +cc_library_static { + name: "webrtc_audio_coding__ilbc_c", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_coding/codecs/ilbc/abs_quant.c", + "modules/audio_coding/codecs/ilbc/abs_quant_loop.c", + "modules/audio_coding/codecs/ilbc/augmented_cb_corr.c", + "modules/audio_coding/codecs/ilbc/bw_expand.c", + "modules/audio_coding/codecs/ilbc/cb_construct.c", + "modules/audio_coding/codecs/ilbc/cb_mem_energy.c", + "modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c", + "modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c", + "modules/audio_coding/codecs/ilbc/cb_search.c", + "modules/audio_coding/codecs/ilbc/cb_search_core.c", + "modules/audio_coding/codecs/ilbc/cb_update_best_index.c", + "modules/audio_coding/codecs/ilbc/chebyshev.c", + "modules/audio_coding/codecs/ilbc/comp_corr.c", + "modules/audio_coding/codecs/ilbc/constants.c", + "modules/audio_coding/codecs/ilbc/create_augmented_vec.c", + "modules/audio_coding/codecs/ilbc/decode.c", + "modules/audio_coding/codecs/ilbc/decode_residual.c", + "modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c", + "modules/audio_coding/codecs/ilbc/do_plc.c", + "modules/audio_coding/codecs/ilbc/encode.c", + "modules/audio_coding/codecs/ilbc/energy_inverse.c", + "modules/audio_coding/codecs/ilbc/enh_upsample.c", + 
"modules/audio_coding/codecs/ilbc/enhancer.c", + "modules/audio_coding/codecs/ilbc/enhancer_interface.c", + "modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c", + "modules/audio_coding/codecs/ilbc/frame_classify.c", + "modules/audio_coding/codecs/ilbc/gain_dequant.c", + "modules/audio_coding/codecs/ilbc/gain_quant.c", + "modules/audio_coding/codecs/ilbc/get_cd_vec.c", + "modules/audio_coding/codecs/ilbc/get_lsp_poly.c", + "modules/audio_coding/codecs/ilbc/get_sync_seq.c", + "modules/audio_coding/codecs/ilbc/hp_input.c", + "modules/audio_coding/codecs/ilbc/hp_output.c", + "modules/audio_coding/codecs/ilbc/ilbc.c", + "modules/audio_coding/codecs/ilbc/index_conv_dec.c", + "modules/audio_coding/codecs/ilbc/index_conv_enc.c", + "modules/audio_coding/codecs/ilbc/init_decode.c", + "modules/audio_coding/codecs/ilbc/init_encode.c", + "modules/audio_coding/codecs/ilbc/interpolate.c", + "modules/audio_coding/codecs/ilbc/interpolate_samples.c", + "modules/audio_coding/codecs/ilbc/lpc_encode.c", + "modules/audio_coding/codecs/ilbc/lsf_check.c", + "modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c", + "modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c", + "modules/audio_coding/codecs/ilbc/lsf_to_lsp.c", + "modules/audio_coding/codecs/ilbc/lsf_to_poly.c", + "modules/audio_coding/codecs/ilbc/lsp_to_lsf.c", + "modules/audio_coding/codecs/ilbc/my_corr.c", + "modules/audio_coding/codecs/ilbc/nearest_neighbor.c", + "modules/audio_coding/codecs/ilbc/pack_bits.c", + "modules/audio_coding/codecs/ilbc/poly_to_lsf.c", + "modules/audio_coding/codecs/ilbc/poly_to_lsp.c", + "modules/audio_coding/codecs/ilbc/refiner.c", + "modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c", + "modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c", + "modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c", + "modules/audio_coding/codecs/ilbc/simple_lsf_quant.c", + "modules/audio_coding/codecs/ilbc/smooth.c", + "modules/audio_coding/codecs/ilbc/smooth_out_data.c", + 
"modules/audio_coding/codecs/ilbc/sort_sq.c", + "modules/audio_coding/codecs/ilbc/split_vq.c", + "modules/audio_coding/codecs/ilbc/state_construct.c", + "modules/audio_coding/codecs/ilbc/state_search.c", + "modules/audio_coding/codecs/ilbc/swap_bytes.c", + "modules/audio_coding/codecs/ilbc/unpack_bits.c", + "modules/audio_coding/codecs/ilbc/vq3.c", + "modules/audio_coding/codecs/ilbc/vq4.c", + "modules/audio_coding/codecs/ilbc/window32_w32.c", + "modules/audio_coding/codecs/ilbc/xcorr_coef.c", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_common_audio__common_audio_c", + "webrtc_common_audio__common_audio", + ], +} + +cc_library_static { + name: "webrtc_opus__audio_encoder_multiopus", + defaults: ["webrtc_defaults"], + srcs: ["api/audio_codecs/opus/audio_encoder_multi_channel_opus.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__rtc_base_approved", + "webrtc_opus__audio_encoder_opus_config", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__webrtc_multiopus", + ], +} + +cc_library_static { + name: "webrtc_utility__utility", + defaults: ["webrtc_defaults"], + srcs: ["modules/utility/source/process_thread_impl.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_task_queue__task_queue", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_common_audio__common_audio", + ], +} + +cc_library_static { + name: "webrtc_video_coding__nack_module", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/video_coding/histogram.cc", + "modules/video_coding/nack_module2.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_synchronization__sequence_checker", + "webrtc_units__time_delta", + "webrtc_units__timestamp", + 
"webrtc_task_utils__pending_task_safety_flag", + "webrtc_system_wrappers__field_trial", + "webrtc_experiments__field_trial_parser", + "webrtc_rtc_base__rtc_task_queue", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_rtc_base__rtc_numerics", + "webrtc_system_wrappers__system_wrappers", + "webrtc_task_utils__repeating_task", + "webrtc_utility__utility", + ], +} + +cc_library_static { + name: "webrtc_g722__audio_encoder_g722", + defaults: ["webrtc_defaults"], + srcs: ["api/audio_codecs/g722/audio_encoder_g722.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__g722", + ], +} + +cc_library_static { + name: "webrtc_audio_coding__isac_c", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_coding/codecs/isac/main/source/arith_routines.c", + "modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c", + "modules/audio_coding/codecs/isac/main/source/arith_routines_logist.c", + "modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.c", + "modules/audio_coding/codecs/isac/main/source/crc.c", + "modules/audio_coding/codecs/isac/main/source/decode.c", + "modules/audio_coding/codecs/isac/main/source/decode_bwe.c", + "modules/audio_coding/codecs/isac/main/source/encode.c", + "modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.c", + "modules/audio_coding/codecs/isac/main/source/entropy_coding.c", + "modules/audio_coding/codecs/isac/main/source/filterbanks.c", + "modules/audio_coding/codecs/isac/main/source/intialize.c", + "modules/audio_coding/codecs/isac/main/source/isac.c", + "modules/audio_coding/codecs/isac/main/source/lattice.c", + "modules/audio_coding/codecs/isac/main/source/lpc_analysis.c", + "modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.c", + "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.c", + "modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.c", + 
"modules/audio_coding/codecs/isac/main/source/lpc_tables.c", + "modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.c", + "modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.c", + "modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c", + "modules/audio_coding/codecs/isac/main/source/transform.c", + ], + host_supported: true, + static_libs: [ + "webrtc_fft__fft", + "webrtc_rtc_base__checks", + "webrtc_audio_coding__isac_vad", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_common_audio__common_audio_c", + "webrtc_common_audio__common_audio", + ], +} + +cc_library_static { + name: "webrtc_audio_coding__g711", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_coding/codecs/g711/audio_decoder_pcm.cc", + "modules/audio_coding/codecs/g711/audio_encoder_pcm.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_audio_coding__g711_c", + "webrtc_rtc_base__checks", + "webrtc_units__time_delta", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__legacy_encoded_audio_frame", + ], +} + +cc_library_static { + name: "webrtc_opus__audio_decoder_multiopus", + defaults: ["webrtc_defaults"], + srcs: ["api/audio_codecs/opus/audio_decoder_multi_channel_opus.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__webrtc_multiopus", + ], +} + +cc_library_static { + name: "webrtc_common_audio__fir_filter_factory", + defaults: ["webrtc_defaults"], + srcs: [ + "common_audio/fir_filter_c.cc", + "common_audio/fir_filter_factory.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_common_audio__common_audio_sse2", + ], +} + +cc_library_static { + name: "webrtc_audio_coding__ilbc", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.cc", + 
"modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_units__time_delta", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__legacy_encoded_audio_frame", + "webrtc_common_audio__common_audio", + "webrtc_audio_coding__ilbc_c", + ], +} + +filegroup { + name: "webrtc_neteq__neteq_api", + srcs: ["api/neteq/neteq.cc"], +} + +cc_library_static { + name: "webrtc_audio_coding__audio_encoder_cng", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_coding/codecs/cng/audio_encoder_cng.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_units__time_delta", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__webrtc_cng", + "webrtc_common_audio__common_audio", + ], +} + +cc_library_static { + name: "webrtc_agc2__fixed_digital", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_processing/agc2/fixed_digital_level_estimator.cc", + "modules/audio_processing/agc2/interpolated_gain_curve.cc", + "modules/audio_processing/agc2/limiter.cc", + ], + host_supported: true, + cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"], + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_agc2__common", + "webrtc_system_wrappers__metrics", + "webrtc_common_audio__common_audio", + "webrtc_audio_processing__apm_logging", + ], +} + +cc_library_static { + name: "webrtc_logging__rtc_event_audio", + defaults: ["webrtc_defaults"], + srcs: [ + "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.cc", + "logging/rtc_event_log/events/rtc_event_audio_playout.cc", + "logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.cc", + "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_audio_coding__audio_network_adaptor_config", + "webrtc_rtc_base__checks", + 
"webrtc_rtc_event_log__rtc_event_log", + "webrtc_logging__rtc_stream_config", + ], +} + +cc_library_static { + name: "webrtc_experiments__min_video_bitrate_experiment", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/experiments/min_video_bitrate_experiment.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__logging", + "webrtc_units__data_rate", + "webrtc_system_wrappers__field_trial", + "webrtc_experiments__field_trial_parser", + "webrtc_video__video_frame", + ], +} + +cc_library_static { + name: "webrtc_video__encoded_image", + defaults: ["webrtc_defaults"], + srcs: ["api/video/encoded_image.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_video__video_rtp_headers", + "webrtc_api__rtp_packet_info", + "webrtc_video__video_frame", + ], +} + +cc_library_static { + name: "webrtc_agc__legacy_agc", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_processing/agc/legacy/analog_agc.cc", + "modules/audio_processing/agc/legacy/digital_agc.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_ooura__fft_size_256", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_common_audio__common_audio_c", + "webrtc_common_audio__common_audio", + ], +} + +cc_library_static { + name: "webrtc_g722__audio_decoder_g722", + defaults: ["webrtc_defaults"], + srcs: ["api/audio_codecs/g722/audio_decoder_g722.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__g722", + ], +} + +cc_library_static { + name: "webrtc_audio_coding__pcm16b", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc", + "modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.cc", + "modules/audio_coding/codecs/pcm16b/pcm16b_common.cc", + ], + host_supported: true, + static_libs: [ + 
"webrtc_audio_coding__pcm16b_c", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__legacy_encoded_audio_frame", + "webrtc_audio_coding__g711", + ], +} + +cc_library_static { + name: "webrtc_audio_coding__red", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_coding/codecs/red/audio_encoder_copy_red.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_units__time_delta", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_common_audio__common_audio", + ], +} + +cc_library_static { + name: "webrtc_utility__audio_frame_operations", + defaults: ["webrtc_defaults"], + srcs: [ + "audio/utility/audio_frame_operations.cc", + "audio/utility/channel_mixer.cc", + "audio/utility/channel_mixing_matrix.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_system_wrappers__field_trial", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_common_audio__common_audio", + "webrtc_audio__audio_frame_api", + ], +} + +cc_library_static { + name: "webrtc_audio_coding__isac", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_coding/codecs/isac/main/source/audio_decoder_isac.cc", + "modules/audio_coding/codecs/isac/main/source/audio_encoder_isac.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__isac_c", + ], +} + +cc_library_static { + name: "webrtc_deprecated__nack_module", + defaults: ["webrtc_defaults"], + srcs: ["modules/video_coding/deprecated/nack_module.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_rtc_base__criticalsection", + "webrtc_synchronization__mutex", + "webrtc_units__time_delta", + "webrtc_units__timestamp", + "webrtc_rtc_base__logging", + "webrtc_system_wrappers__field_trial", + "webrtc_experiments__field_trial_parser", + 
"webrtc_rtc_base__rtc_numerics", + "webrtc_system_wrappers__system_wrappers", + "webrtc_utility__utility", + "webrtc_video_coding__nack_module", + ], +} + +cc_library_static { + name: "webrtc_video__video_frame_i420", + defaults: ["webrtc_defaults"], + srcs: ["api/video/i420_buffer.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_memory__aligned_malloc", + "webrtc_video__video_rtp_headers", + "webrtc_rtc_base__rtc_base", + "webrtc_video__video_frame", + ], +} + +cc_library_static { + name: "webrtc_isac__audio_encoder_isac_float", + defaults: ["webrtc_defaults"], + srcs: ["api/audio_codecs/isac/audio_encoder_isac_float.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__isac", + ], +} + +cc_library_static { + name: "webrtc_audio_processing__api", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_processing/include/audio_processing.cc"], + host_supported: true, + static_libs: [ + "webrtc_audio_processing__audio_processing_statistics", + "webrtc_system__file_wrapper", + "webrtc_audio_processing__config", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio__aec3_config", + "webrtc_audio__audio_frame_api", + ], +} + +cc_library_static { + name: "webrtc_transient__transient_suppressor_impl", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_processing/transient/moving_moments.cc", + "modules/audio_processing/transient/transient_detector.cc", + "modules/audio_processing/transient/transient_suppressor_impl.cc", + "modules/audio_processing/transient/wpd_node.cc", + "modules/audio_processing/transient/wpd_tree.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_ooura__fft_size_256", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__logging", + "webrtc_common_audio__common_audio_c", + "webrtc_common_audio__common_audio", + "webrtc_common_audio__fir_filter_factory", + ], +} + +cc_library_static { + name: 
"webrtc_ilbc__audio_encoder_ilbc", + defaults: ["webrtc_defaults"], + srcs: ["api/audio_codecs/ilbc/audio_encoder_ilbc.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__ilbc", + ], +} + +cc_library_static { + name: "webrtc_rtp_rtcp__rtp_video_header", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/rtp_rtcp/source/rtp_video_header.cc", + ":webrtc_rtp__dependency_descriptor", + ], + host_supported: true, + static_libs: [ + "webrtc_video__video_rtp_headers", + "webrtc_video__video_frame", + ], +} + +cc_library_static { + name: "webrtc_agc2__noise_level_estimator", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_processing/agc2/down_sampler.cc", + "modules/audio_processing/agc2/noise_level_estimator.cc", + "modules/audio_processing/agc2/noise_spectrum_estimator.cc", + "modules/audio_processing/agc2/signal_classifier.cc", + ], + host_supported: true, + cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"], + static_libs: [ + "webrtc_ooura__fft_size_128", + "webrtc_rtc_base__checks", + "webrtc_agc2__biquad_filter", + "webrtc_common_audio__common_audio", + "webrtc_audio_processing__apm_logging", + ], +} + +cc_library_static { + name: "webrtc_audio_processing__audio_buffer", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_processing/audio_buffer.cc", + "modules/audio_processing/splitting_filter.cc", + "modules/audio_processing/three_band_filter_bank.cc", + ], + host_supported: true, + cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"], + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_common_audio__common_audio_c", + "webrtc_common_audio__common_audio", + "webrtc_audio_processing__api", + ], +} + +cc_library_static { + name: "webrtc_isac__audio_decoder_isac_float", + defaults: ["webrtc_defaults"], + srcs: ["api/audio_codecs/isac/audio_decoder_isac_float.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__rtc_base_approved", + 
"webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__isac", + ], +} + +cc_library_static { + name: "webrtc_vad__vad", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_processing/vad/gmm.cc", + "modules/audio_processing/vad/pitch_based_vad.cc", + "modules/audio_processing/vad/pitch_internal.cc", + "modules/audio_processing/vad/pole_zero_filter.cc", + "modules/audio_processing/vad/standalone_vad.cc", + "modules/audio_processing/vad/vad_audio_proc.cc", + "modules/audio_processing/vad/vad_circular_buffer.cc", + "modules/audio_processing/vad/voice_activity_detector.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_ooura__fft_size_256", + "webrtc_rtc_base__checks", + "webrtc_audio_coding__isac_vad", + "webrtc_common_audio__common_audio_c", + "webrtc_common_audio__common_audio", + "webrtc_utility__audio_frame_operations", + ], +} + +cc_library_static { + name: "webrtc_audio_device__audio_device_generic", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_device/audio_device_generic.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_device__audio_device_buffer", + ], +} + +cc_library_static { + name: "webrtc_audio_processing__high_pass_filter", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_processing/high_pass_filter.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_utility__cascaded_biquad_filter", + "webrtc_audio_processing__audio_buffer", + ], +} + +cc_library_static { + name: "webrtc_ns__ns", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_processing/ns/fast_math.cc", + "modules/audio_processing/ns/histograms.cc", + "modules/audio_processing/ns/noise_estimator.cc", + "modules/audio_processing/ns/noise_suppressor.cc", + "modules/audio_processing/ns/ns_fft.cc", + "modules/audio_processing/ns/prior_signal_model.cc", + "modules/audio_processing/ns/prior_signal_model_estimator.cc", + 
"modules/audio_processing/ns/quantile_noise_estimator.cc", + "modules/audio_processing/ns/signal_model.cc", + "modules/audio_processing/ns/signal_model_estimator.cc", + "modules/audio_processing/ns/speech_probability_estimator.cc", + "modules/audio_processing/ns/suppression_params.cc", + "modules/audio_processing/ns/wiener_filter.cc", + ], + host_supported: true, + cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"], + static_libs: [ + "webrtc_ooura__fft_size_256", + "webrtc_ooura__fft_size_128", + "webrtc_rtc_base__checks", + "webrtc_utility__cascaded_biquad_filter", + "webrtc_system_wrappers__field_trial", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__metrics", + "webrtc_common_audio__common_audio_c", + "webrtc_audio_processing__apm_logging", + "webrtc_audio_processing__audio_buffer", + "webrtc_audio_processing__high_pass_filter", + ], +} + +cc_library_static { + name: "webrtc_common_video__common_video", + defaults: ["webrtc_defaults"], + srcs: [ + "common_video/bitrate_adjuster.cc", + "common_video/frame_rate_estimator.cc", + "common_video/h264/h264_bitstream_parser.cc", + "common_video/h264/h264_common.cc", + "common_video/h264/pps_parser.cc", + "common_video/h264/sps_parser.cc", + "common_video/h264/sps_vui_rewriter.cc", + "common_video/i420_buffer_pool.cc", + "common_video/incoming_video_stream.cc", + "common_video/libyuv/webrtc_libyuv.cc", + "common_video/video_frame_buffer.cc", + "common_video/video_render_frames.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_task_queue__task_queue", + "webrtc_synchronization__mutex", + "webrtc_video__video_bitrate_allocation", + "webrtc_units__time_delta", + "webrtc_units__timestamp", + "webrtc_video__video_bitrate_allocator", + "webrtc_rtc_base__rtc_task_queue", + "webrtc_video__video_rtp_headers", + "webrtc_system_wrappers__metrics", + "webrtc_rtc_base__rtc_base", + "webrtc_media__rtc_h264_profile_id", + "webrtc_video__video_frame", + "webrtc_video__encoded_image", + 
"webrtc_video__video_frame_i420", + ], +} + +cc_library_static { + name: "webrtc_g711__audio_encoder_g711", + defaults: ["webrtc_defaults"], + srcs: ["api/audio_codecs/g711/audio_encoder_g711.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__g711", + ], +} + +cc_library_static { + name: "webrtc_agc2__adaptive_digital", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_processing/agc2/adaptive_agc.cc", + "modules/audio_processing/agc2/adaptive_digital_gain_applier.cc", + "modules/audio_processing/agc2/adaptive_mode_level_estimator.cc", + "modules/audio_processing/agc2/saturation_protector.cc", + ], + host_supported: true, + cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"], + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_agc2__common", + "webrtc_system_wrappers__metrics", + "webrtc_agc2__gain_applier", + "webrtc_common_audio__common_audio", + "webrtc_agc2__rnn_vad_with_level", + "webrtc_audio_processing__apm_logging", + "webrtc_audio_processing__api", + "webrtc_agc2__noise_level_estimator", + ], +} + +cc_library_static { + name: "webrtc_L16__audio_encoder_L16", + defaults: ["webrtc_defaults"], + srcs: ["api/audio_codecs/L16/audio_encoder_L16.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__pcm16b", + ], +} + +cc_library_static { + name: "webrtc_audio_processing__audio_frame_proxies", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_processing/include/audio_frame_proxies.cc"], + host_supported: true, + static_libs: [ + "webrtc_audio__audio_frame_api", + "webrtc_audio_processing__api", + ], +} + +cc_library_static { + name: "webrtc_ilbc__audio_decoder_ilbc", + defaults: ["webrtc_defaults"], + srcs: ["api/audio_codecs/ilbc/audio_decoder_ilbc.cc"], + host_supported: true, + static_libs: [ + 
"webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__ilbc", + ], +} + +cc_library_static { + name: "webrtc_g711__audio_decoder_g711", + defaults: ["webrtc_defaults"], + srcs: ["api/audio_codecs/g711/audio_decoder_g711.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__g711", + ], +} + +cc_library_static { + name: "webrtc_audio_processing__optionally_built_submodule_creators", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_processing/optionally_built_submodule_creators.cc"], + host_supported: true, + static_libs: ["webrtc_transient__transient_suppressor_impl"], +} + +cc_library_static { + name: "webrtc_video__video_frame_i010", + defaults: ["webrtc_defaults"], + srcs: ["api/video/i010_buffer.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_memory__aligned_malloc", + "webrtc_video__video_rtp_headers", + "webrtc_rtc_base__rtc_base", + "webrtc_video__video_frame", + "webrtc_video__video_frame_i420", + ], +} + +filegroup { + name: "webrtc_video__video_frame_metadata", + srcs: ["api/video/video_frame_metadata.cc"], +} + +cc_library_static { + name: "webrtc_L16__audio_decoder_L16", + defaults: ["webrtc_defaults"], + srcs: ["api/audio_codecs/L16/audio_decoder_L16.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__pcm16b", + ], +} + +cc_library_static { + name: "webrtc_video_codecs__video_codecs_api", + defaults: ["webrtc_defaults"], + srcs: [ + "api/video_codecs/sdp_video_format.cc", + "api/video_codecs/video_codec.cc", + "api/video_codecs/video_decoder.cc", + "api/video_codecs/video_decoder_factory.cc", + "api/video_codecs/video_encoder.cc", + "api/video_codecs/video_encoder_config.cc", + "api/video_codecs/vp8_frame_config.cc", + "api/video_codecs/vp8_temporal_layers.cc", 
+ ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_video__video_bitrate_allocation", + "webrtc_units__data_rate", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_video__video_rtp_headers", + "webrtc_video__video_frame", + "webrtc_video__encoded_image", + ], +} + +cc_library_static { + name: "webrtc_audio_coding__audio_network_adaptor", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.cc", + "modules/audio_coding/audio_network_adaptor/bitrate_controller.cc", + "modules/audio_coding/audio_network_adaptor/channel_controller.cc", + "modules/audio_coding/audio_network_adaptor/controller.cc", + "modules/audio_coding/audio_network_adaptor/controller_manager.cc", + "modules/audio_coding/audio_network_adaptor/debug_dump_writer.cc", + "modules/audio_coding/audio_network_adaptor/dtx_controller.cc", + "modules/audio_coding/audio_network_adaptor/event_log_writer.cc", + "modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.cc", + "modules/audio_coding/audio_network_adaptor/frame_length_controller.cc", + "modules/audio_coding/audio_network_adaptor/frame_length_controller_v2.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_audio_coding__audio_network_adaptor_config", + "webrtc_rtc_base__checks", + "webrtc_system__file_wrapper", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_system_wrappers__field_trial", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_common_audio__common_audio", + "webrtc_logging__rtc_event_audio", + ], +} + +cc_library_static { + name: "webrtc_agc__level_estimation", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_processing/agc/agc.cc", + "modules/audio_processing/agc/loudness_histogram.cc", + "modules/audio_processing/agc/utility.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_vad__vad", 
+ ], +} + +cc_library_static { + name: "webrtc_api__media_stream_interface", + defaults: ["webrtc_defaults"], + srcs: ["api/media_stream_interface.cc"], + host_supported: true, + static_libs: [ + "webrtc_audio_processing__audio_processing_statistics", + "webrtc_rtc_base__checks", + "webrtc_api__rtp_parameters", + "webrtc_api__audio_options_api", + "webrtc_video__video_frame", + ], +} + +cc_library_static { + name: "webrtc_audio_mixer__audio_frame_manipulator", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_mixer/audio_frame_manipulator.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio__audio_frame_api", + "webrtc_utility__audio_frame_operations", + ], +} + +cc_library_static { + name: "webrtc_experiments__quality_scaling_experiment", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/experiments/quality_scaling_experiment.cc"], + host_supported: true, + static_libs: [ + "webrtc_system_wrappers__field_trial", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_video_codecs__video_codecs_api", + ], +} + +cc_library_static { + name: "webrtc_audio_coding__webrtc_opus", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_coding/codecs/opus/audio_decoder_opus.cc", + "modules/audio_coding/codecs/opus/audio_encoder_opus.cc", + ], + host_supported: true, cflags: [ - "-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0", + "-DWEBRTC_CODEC_ILBC", + "-DWEBRTC_CODEC_OPUS", + "-DWEBRTC_OPUS_SUPPORT_120MS_PTIME=1", + "-DWEBRTC_CODEC_ISAC", ], static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_rtc_event_log", - "webrtc_network_control", - "webrtc_field_trial_parser", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers_metrics", - "webrtc_remote_bitrate_estimator", - "webrtc_rtc_event_bwe", - "webrtc_goog_cc_estimators", - "webrtc_pacing", + "webrtc_rtc_base__checks", + "webrtc_system_wrappers__field_trial", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_rtc_base__rtc_numerics", + 
"webrtc_opus__audio_encoder_opus_config", + "webrtc_audio_coding__webrtc_opus_wrapper", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__audio_coding_opus_common", + "webrtc_common_audio__common_audio", + "webrtc_audio_coding__audio_network_adaptor", ], } +filegroup { + name: "webrtc_audio_processing__aec_dump_interface", + srcs: ["modules/audio_processing/include/aec_dump.cc"], +} cc_library_static { - name: "webrtc_video_coding_utility", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/video_coding/utility/decoded_frames_history.cc", - "modules/video_coding/utility/frame_dropper.cc", - "modules/video_coding/utility/framerate_controller.cc", - "modules/video_coding/utility/ivf_file_reader.cc", - "modules/video_coding/utility/ivf_file_writer.cc", - "modules/video_coding/utility/quality_scaler.cc", - "modules/video_coding/utility/simulcast_rate_allocator.cc", - "modules/video_coding/utility/simulcast_utility.cc", - "modules/video_coding/utility/vp8_header_parser.cc", - "modules/video_coding/utility/vp9_uncompressed_header_parser.cc", - ], + name: "webrtc_audio_processing__voice_detection", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_processing/voice_detection.cc"], + host_supported: true, static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_video_adaptation_counters", - "webrtc_module_api", - "webrtc_file_wrapper", - "webrtc_sequence_checker", - "webrtc_video_bitrate_allocation", - "webrtc_weak_ptr", - "webrtc_field_trial", - "webrtc_repeating_task", - "webrtc_video_bitrate_allocator", - "webrtc_rtc_task_queue", - "webrtc_rtc_base_approved", - "webrtc_rtc_numerics", - "webrtc_quality_scaler_settings", - "webrtc_encoded_image", - "webrtc_common_video", - "webrtc_video_codecs_api", - "webrtc_quality_scaling_experiment", - "webrtc_rate_control_settings", - "webrtc_video_codec_interface", - "webrtc_rtp_rtcp_format", - "webrtc_stable_target_rate_experiment", - "webrtc_api_video_encoded_frame", - "webrtc_rtp_rtcp", + 
"webrtc_rtc_base__checks", + "webrtc_common_audio__common_audio_c", + "webrtc_audio__audio_frame_api", + "webrtc_audio_processing__api", + "webrtc_audio_processing__audio_buffer", ], } - cc_library_static { - name: "webrtc_ice_transport_factory", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/ice_transport_factory.cc", - ], + name: "webrtc_media__rtc_vp9_profile", + defaults: ["webrtc_defaults"], + srcs: ["media/base/vp9_profile.cc"], + host_supported: true, static_libs: [ - "webrtc_rtc_event_log", - "webrtc_rtc_base", - "webrtc_libjingle_peerconnection_api", - "webrtc_rtc_p2p", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_video_codecs__video_codecs_api", ], } - cc_library_static { - name: "webrtc_resource_adaptation", - defaults: [ - "webrtc_defaults", - ], + name: "webrtc_aec3__aec3", + defaults: ["webrtc_defaults"], srcs: [ - "call/adaptation/encoder_settings.cc", - "call/adaptation/resource.cc", - "call/adaptation/resource_adaptation_processor.cc", - "call/adaptation/resource_adaptation_processor_interface.cc", - "call/adaptation/video_source_restrictions.cc", - "call/adaptation/video_stream_adapter.cc", - "call/adaptation/video_stream_input_state.cc", - "call/adaptation/video_stream_input_state_provider.cc", + "modules/audio_processing/aec3/adaptive_fir_filter.cc", + "modules/audio_processing/aec3/adaptive_fir_filter_erl.cc", + "modules/audio_processing/aec3/aec3_common.cc", + "modules/audio_processing/aec3/aec3_fft.cc", + "modules/audio_processing/aec3/aec_state.cc", + "modules/audio_processing/aec3/alignment_mixer.cc", + "modules/audio_processing/aec3/api_call_jitter_metrics.cc", + "modules/audio_processing/aec3/block_buffer.cc", + "modules/audio_processing/aec3/block_delay_buffer.cc", + "modules/audio_processing/aec3/block_framer.cc", + "modules/audio_processing/aec3/block_processor.cc", + "modules/audio_processing/aec3/block_processor_metrics.cc", + "modules/audio_processing/aec3/clockdrift_detector.cc", + 
"modules/audio_processing/aec3/coarse_filter_update_gain.cc", + "modules/audio_processing/aec3/comfort_noise_generator.cc", + "modules/audio_processing/aec3/decimator.cc", + "modules/audio_processing/aec3/dominant_nearend_detector.cc", + "modules/audio_processing/aec3/downsampled_render_buffer.cc", + "modules/audio_processing/aec3/echo_audibility.cc", + "modules/audio_processing/aec3/echo_canceller3.cc", + "modules/audio_processing/aec3/echo_path_delay_estimator.cc", + "modules/audio_processing/aec3/echo_path_variability.cc", + "modules/audio_processing/aec3/echo_remover.cc", + "modules/audio_processing/aec3/echo_remover_metrics.cc", + "modules/audio_processing/aec3/erl_estimator.cc", + "modules/audio_processing/aec3/erle_estimator.cc", + "modules/audio_processing/aec3/fft_buffer.cc", + "modules/audio_processing/aec3/filter_analyzer.cc", + "modules/audio_processing/aec3/frame_blocker.cc", + "modules/audio_processing/aec3/fullband_erle_estimator.cc", + "modules/audio_processing/aec3/matched_filter.cc", + "modules/audio_processing/aec3/matched_filter_lag_aggregator.cc", + "modules/audio_processing/aec3/moving_average.cc", + "modules/audio_processing/aec3/refined_filter_update_gain.cc", + "modules/audio_processing/aec3/render_buffer.cc", + "modules/audio_processing/aec3/render_delay_buffer.cc", + "modules/audio_processing/aec3/render_delay_controller.cc", + "modules/audio_processing/aec3/render_delay_controller_metrics.cc", + "modules/audio_processing/aec3/render_signal_analyzer.cc", + "modules/audio_processing/aec3/residual_echo_estimator.cc", + "modules/audio_processing/aec3/reverb_decay_estimator.cc", + "modules/audio_processing/aec3/reverb_frequency_response.cc", + "modules/audio_processing/aec3/reverb_model.cc", + "modules/audio_processing/aec3/reverb_model_estimator.cc", + "modules/audio_processing/aec3/signal_dependent_erle_estimator.cc", + "modules/audio_processing/aec3/spectrum_buffer.cc", + "modules/audio_processing/aec3/stationarity_estimator.cc", + 
"modules/audio_processing/aec3/subband_erle_estimator.cc", + "modules/audio_processing/aec3/subband_nearend_detector.cc", + "modules/audio_processing/aec3/subtractor.cc", + "modules/audio_processing/aec3/subtractor_output.cc", + "modules/audio_processing/aec3/subtractor_output_analyzer.cc", + "modules/audio_processing/aec3/suppression_filter.cc", + "modules/audio_processing/aec3/suppression_gain.cc", ], + host_supported: true, + cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"], static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_video_adaptation_counters", - "webrtc_sequence_checker", - "webrtc_rtp_parameters", - "webrtc_rtc_task_queue", - "webrtc_rtc_base_approved", - "webrtc_video_frame", - "webrtc_video_codecs_api", - "webrtc_balanced_degradation_settings", - "webrtc_video_coding_utility", + "webrtc_ooura__fft_size_128", + "webrtc_rtc_base__checks", + "webrtc_utility__cascaded_biquad_filter", + "webrtc_system_wrappers__field_trial", + "webrtc_experiments__field_trial_parser", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio__aec3_config", + "webrtc_system_wrappers__metrics", + "webrtc_common_audio__common_audio_c", + "webrtc_audio_processing__apm_logging", + "webrtc_audio_processing__audio_buffer", + "webrtc_audio_processing__high_pass_filter", ], } - cc_library_static { - name: "webrtc_control_handler", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/congestion_controller/rtp/control_handler.cc", - ], + name: "webrtc_opus__audio_decoder_opus", + defaults: ["webrtc_defaults"], + srcs: ["api/audio_codecs/opus/audio_decoder_opus.cc"], + host_supported: true, static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_sequence_checker", - "webrtc_data_size", - "webrtc_time_delta", - "webrtc_data_rate", - "webrtc_field_trial", - "webrtc_network_control", - "webrtc_rtc_base", - "webrtc_pacing", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__webrtc_opus", ], } - cc_library_static { - name: 
"webrtc_vp8_temporal_layers", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/video_coding/codecs/vp8/default_temporal_layers.cc", - "modules/video_coding/codecs/vp8/screenshare_layers.cc", - "modules/video_coding/codecs/vp8/temporal_layers_checker.cc", - ], + name: "webrtc_experiments__rate_control_settings", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/experiments/rate_control_settings.cc"], + host_supported: true, static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_module_api", - "webrtc_field_trial", - "webrtc_rtc_base_approved", - "webrtc_rtc_numerics", - "webrtc_system_wrappers_metrics", - "webrtc_video_codecs_api", - "webrtc_video_codec_interface", - "webrtc_video_coding_utility", + "webrtc_units__data_size", + "webrtc_system_wrappers__field_trial", + "webrtc_experiments__field_trial_parser", + "webrtc_transport__field_trial_based_config", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_video_codecs__video_codecs_api", ], } +cc_library_static { + name: "webrtc_video_coding__video_codec_interface", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/video_coding/include/video_codec_interface.cc", + "modules/video_coding/video_coding_defines.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_generic_frame_descriptor__generic_frame_descriptor", + "webrtc_modules__module_api", + "webrtc_video__video_rtp_headers", + "webrtc_video__video_frame", + "webrtc_common_video__common_video", + "webrtc_video_codecs__video_codecs_api", + ], +} cc_library_static { - name: "webrtc_congestion_controller", - defaults: [ - "webrtc_defaults", - ], + name: "webrtc_av1__libaom_av1_encoder", + defaults: ["webrtc_defaults"], srcs: [ - "modules/congestion_controller/receive_side_congestion_controller.cc", + "modules/video_coding/codecs/av1/libaom_av1_encoder.cc", + ":webrtc_av1__scalable_video_controller", ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__logging", + "webrtc_video__video_frame", + 
"webrtc_video__encoded_image", + "webrtc_video__video_frame_i420", + "webrtc_common_video__common_video", + "webrtc_video_codecs__video_codecs_api", + "webrtc_video_coding__video_codec_interface", + ], +} + +cc_library_static { + name: "webrtc_audio_device__audio_device_impl", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_device/dummy/audio_device_dummy.cc", + "modules/audio_device/dummy/file_audio_device.cc", + "modules/audio_device/include/test_audio_device.cc", + "modules/audio_device/audio_device_data_observer.cc", + "modules/audio_device/audio_device_impl.cc", + "modules/audio_device/linux/alsasymboltable_linux.cc", + "modules/audio_device/linux/audio_device_alsa_linux.cc", + "modules/audio_device/linux/audio_mixer_manager_alsa_linux.cc", + "modules/audio_device/linux/latebindingsymboltable_linux.cc", + "modules/audio_device/linux/audio_device_pulse_linux.cc", + "modules/audio_device/linux/audio_mixer_manager_pulse_linux.cc", + "modules/audio_device/linux/pulseaudiosymboltable_linux.cc", + "modules/audio_device/dummy/file_audio_device_factory.cc", + ], + host_supported: true, + cflags: ["-DWEBRTC_DUMMY_FILE_DEVICES"], + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_task_queue__task_queue", + "webrtc_system__file_wrapper", + "webrtc_synchronization__mutex", + "webrtc_system_wrappers__field_trial", + "webrtc_rtc_base__rtc_task_queue", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_system_wrappers__metrics", + "webrtc_rtc_base__rtc_base", + "webrtc_common_audio__common_audio_c", + "webrtc_task_utils__repeating_task", + "webrtc_common_audio__common_audio", + "webrtc_audio_device__audio_device_buffer", + "webrtc_utility__utility", + "webrtc_audio_device__audio_device_generic", + ], +} + +cc_library_static { + name: "webrtc_av1__libaom_av1_decoder", + defaults: ["webrtc_defaults"], + srcs: ["modules/video_coding/codecs/av1/libaom_av1_decoder.cc"], + host_supported: true, + static_libs: [ + 
"webrtc_rtc_base__logging", + "webrtc_video__encoded_image", + "webrtc_video__video_frame_i420", + "webrtc_common_video__common_video", + "webrtc_video_codecs__video_codecs_api", + "webrtc_video_coding__video_codec_interface", + ], +} + +cc_library_static { + name: "webrtc_audio_coding__neteq", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_coding/neteq/accelerate.cc", + "modules/audio_coding/neteq/audio_multi_vector.cc", + "modules/audio_coding/neteq/audio_vector.cc", + "modules/audio_coding/neteq/background_noise.cc", + "modules/audio_coding/neteq/buffer_level_filter.cc", + "modules/audio_coding/neteq/comfort_noise.cc", + "modules/audio_coding/neteq/cross_correlation.cc", + "modules/audio_coding/neteq/decision_logic.cc", + "modules/audio_coding/neteq/decoder_database.cc", + "modules/audio_coding/neteq/delay_manager.cc", + "modules/audio_coding/neteq/dsp_helper.cc", + "modules/audio_coding/neteq/dtmf_buffer.cc", + "modules/audio_coding/neteq/dtmf_tone_generator.cc", + "modules/audio_coding/neteq/expand.cc", + "modules/audio_coding/neteq/expand_uma_logger.cc", + "modules/audio_coding/neteq/histogram.cc", + "modules/audio_coding/neteq/merge.cc", + "modules/audio_coding/neteq/nack_tracker.cc", + "modules/audio_coding/neteq/neteq_impl.cc", + "modules/audio_coding/neteq/normal.cc", + "modules/audio_coding/neteq/packet.cc", + "modules/audio_coding/neteq/packet_buffer.cc", + "modules/audio_coding/neteq/post_decode_vad.cc", + "modules/audio_coding/neteq/preemptive_expand.cc", + "modules/audio_coding/neteq/random_vector.cc", + "modules/audio_coding/neteq/red_payload_splitter.cc", + "modules/audio_coding/neteq/statistics_calculator.cc", + "modules/audio_coding/neteq/sync_buffer.cc", + "modules/audio_coding/neteq/time_stretch.cc", + "modules/audio_coding/neteq/timestamp_scaler.cc", + ":webrtc_neteq__tick_timer", + ":webrtc_neteq__neteq_api", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + 
"webrtc_synchronization__mutex", + "webrtc_system_wrappers__field_trial", + "webrtc_experiments__field_trial_parser", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_system_wrappers__metrics", + "webrtc_common_audio__common_audio_c", + "webrtc_rtc_base__audio_format_to_string", + "webrtc_api__rtp_headers", + "webrtc_api__rtp_packet_info", + "webrtc_audio_coding__webrtc_cng", + "webrtc_common_audio__common_audio", + "webrtc_audio__audio_frame_api", + ], +} + +cc_library_static { + name: "webrtc_goog_cc__pushback_controller", + defaults: ["webrtc_defaults"], + srcs: ["modules/congestion_controller/goog_cc/congestion_window_pushback_controller.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_units__data_size", + "webrtc_transport__network_control", + "webrtc_experiments__rate_control_settings", + ], +} + +cc_library_static { + name: "webrtc_video_processing__video_processing", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/video_processing/util/denoiser_filter.cc", + "modules/video_processing/util/denoiser_filter_c.cc", + "modules/video_processing/util/noise_estimation.cc", + "modules/video_processing/util/skin_detection.cc", + "modules/video_processing/video_denoiser.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_video__video_rtp_headers", + "webrtc_video_processing__video_processing_sse2", + "webrtc_common_audio__common_audio", + "webrtc_video__video_frame", + "webrtc_utility__utility", + "webrtc_video__video_frame_i420", + "webrtc_common_video__common_video", + ], +} + +cc_library_static { + name: "webrtc_rtp_rtcp__rtp_rtcp_format", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/rtp_rtcp/include/report_block_data.cc", + "modules/rtp_rtcp/include/rtp_rtcp_defines.cc", + "modules/rtp_rtcp/source/rtcp_packet.cc", + 
"modules/rtp_rtcp/source/rtcp_packet/app.cc", + "modules/rtp_rtcp/source/rtcp_packet/bye.cc", + "modules/rtp_rtcp/source/rtcp_packet/common_header.cc", + "modules/rtp_rtcp/source/rtcp_packet/compound_packet.cc", + "modules/rtp_rtcp/source/rtcp_packet/dlrr.cc", + "modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.cc", + "modules/rtp_rtcp/source/rtcp_packet/extended_reports.cc", + "modules/rtp_rtcp/source/rtcp_packet/fir.cc", + "modules/rtp_rtcp/source/rtcp_packet/loss_notification.cc", + "modules/rtp_rtcp/source/rtcp_packet/nack.cc", + "modules/rtp_rtcp/source/rtcp_packet/pli.cc", + "modules/rtp_rtcp/source/rtcp_packet/psfb.cc", + "modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.cc", + "modules/rtp_rtcp/source/rtcp_packet/receiver_report.cc", + "modules/rtp_rtcp/source/rtcp_packet/remb.cc", + "modules/rtp_rtcp/source/rtcp_packet/remote_estimate.cc", + "modules/rtp_rtcp/source/rtcp_packet/report_block.cc", + "modules/rtp_rtcp/source/rtcp_packet/rrtr.cc", + "modules/rtp_rtcp/source/rtcp_packet/rtpfb.cc", + "modules/rtp_rtcp/source/rtcp_packet/sdes.cc", + "modules/rtp_rtcp/source/rtcp_packet/sender_report.cc", + "modules/rtp_rtcp/source/rtcp_packet/target_bitrate.cc", + "modules/rtp_rtcp/source/rtcp_packet/tmmb_item.cc", + "modules/rtp_rtcp/source/rtcp_packet/tmmbn.cc", + "modules/rtp_rtcp/source/rtcp_packet/tmmbr.cc", + "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.cc", + "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.cc", + "modules/rtp_rtcp/source/rtp_dependency_descriptor_reader.cc", + "modules/rtp_rtcp/source/rtp_dependency_descriptor_writer.cc", + "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.cc", + "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.cc", + "modules/rtp_rtcp/source/rtp_header_extension_map.cc", + "modules/rtp_rtcp/source/rtp_header_extensions.cc", + "modules/rtp_rtcp/source/rtp_packet.cc", + "modules/rtp_rtcp/source/rtp_packet_received.cc", + 
"modules/rtp_rtcp/source/rtp_packet_to_send.cc", + ":webrtc_rtp__dependency_descriptor", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_api__rtp_parameters", + "webrtc_units__time_delta", + "webrtc_transport__network_control", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_video__video_rtp_headers", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_api__rtp_headers", + "webrtc_video__video_frame", + "webrtc_common_video__common_video", + ], +} + +filegroup { + name: "webrtc_neteq__default_neteq_controller_factory", + srcs: ["api/neteq/default_neteq_controller_factory.cc"], +} + +cc_library_static { + name: "webrtc_experiments__balanced_degradation_settings", + defaults: ["webrtc_defaults"], + srcs: ["rtc_base/experiments/balanced_degradation_settings.cc"], + host_supported: true, + static_libs: [ + "webrtc_system_wrappers__field_trial", + "webrtc_experiments__field_trial_parser", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_video_codecs__video_codecs_api", + ], +} + +filegroup { + name: "webrtc_audio_coding__default_neteq_factory", + srcs: ["modules/audio_coding/neteq/default_neteq_factory.cc"], +} + +cc_library_static { + name: "webrtc_video_coding__encoded_frame", + defaults: ["webrtc_defaults"], + srcs: ["modules/video_coding/encoded_frame.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_video__video_rtp_headers", + "webrtc_experiments__alr_experiment", + "webrtc_experiments__rtt_mult_experiment", + "webrtc_video__video_frame", + "webrtc_video__encoded_image", + "webrtc_video__video_frame_i420", + "webrtc_rtp_rtcp__rtp_video_header", + "webrtc_video_coding__video_codec_interface", + ], +} + +cc_library_static { + name: "webrtc_experiments__stable_target_rate_experiment", + defaults: 
["webrtc_defaults"], + srcs: ["rtc_base/experiments/stable_target_rate_experiment.cc"], + host_supported: true, + static_libs: [ + "webrtc_experiments__field_trial_parser", + "webrtc_transport__field_trial_based_config", + "webrtc_experiments__rate_control_settings", + ], +} + +cc_library_static { + name: "webrtc_audio_mixer__audio_mixer_impl", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_mixer/audio_mixer_impl.cc", + "modules/audio_mixer/default_output_rate_calculator.cc", + "modules/audio_mixer/frame_combiner.cc", + ], + host_supported: true, + cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"], + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_synchronization__mutex", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_system_wrappers__metrics", + "webrtc_common_audio__common_audio", + "webrtc_audio__audio_frame_api", + "webrtc_audio_processing__apm_logging", + "webrtc_agc2__fixed_digital", + "webrtc_utility__audio_frame_operations", + "webrtc_audio_processing__api", + "webrtc_audio_mixer__audio_frame_manipulator", + ], +} + +cc_library_static { + name: "webrtc_agc2__level_estimation_agc", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_processing/agc2/adaptive_mode_level_estimator_agc.cc"], + host_supported: true, + cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"], + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_agc2__common", + "webrtc_agc2__gain_applier", + "webrtc_common_audio__common_audio", + "webrtc_agc2__rnn_vad_with_level", + "webrtc_audio_processing__apm_logging", + "webrtc_audio_processing__api", + "webrtc_agc2__noise_level_estimator", + "webrtc_vad__vad", + "webrtc_agc2__adaptive_digital", + "webrtc_agc__level_estimation", + ], +} + +cc_library_static { + name: "webrtc_remote_bitrate_estimator__remote_bitrate_estimator", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/remote_bitrate_estimator/aimd_rate_control.cc", + 
"modules/remote_bitrate_estimator/bwe_defines.cc", + "modules/remote_bitrate_estimator/inter_arrival.cc", + "modules/remote_bitrate_estimator/overuse_detector.cc", + "modules/remote_bitrate_estimator/overuse_estimator.cc", + "modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc", + "modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc", + "modules/remote_bitrate_estimator/remote_estimator_proxy.cc", + ], + host_supported: true, + cflags: ["-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0"], + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_synchronization__mutex", + "webrtc_units__timestamp", + "webrtc_units__data_rate", + "webrtc_system_wrappers__field_trial", + "webrtc_transport__network_control", + "webrtc_experiments__field_trial_parser", + "webrtc_transport__field_trial_based_config", + "webrtc_goog_cc__link_capacity_estimator", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_rtc_base__rtc_numerics", + "webrtc_system_wrappers__system_wrappers", + "webrtc_system_wrappers__metrics", + "webrtc_api__rtp_headers", + "webrtc_rtp_rtcp__rtp_rtcp_format", + ], +} + +cc_library_static { + name: "webrtc_agc__agc", + defaults: ["webrtc_defaults"], + srcs: ["modules/audio_processing/agc/agc_manager_direct.cc"], + host_supported: true, + cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"], + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__logging", + "webrtc_system_wrappers__field_trial", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__metrics", + "webrtc_common_audio__common_audio_c", + "webrtc_common_audio__common_audio", + "webrtc_audio_processing__apm_logging", + "webrtc_audio_processing__audio_buffer", + "webrtc_vad__vad", + "webrtc_agc__level_estimation", + "webrtc_agc2__level_estimation_agc", + ], +} + +cc_library_static { + name: "webrtc_opus__audio_encoder_opus", + defaults: ["webrtc_defaults"], + srcs: ["api/audio_codecs/opus/audio_encoder_opus.cc"], + host_supported: 
true, + static_libs: [ + "webrtc_rtc_base__rtc_base_approved", + "webrtc_opus__audio_encoder_opus_config", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_audio_coding__webrtc_opus", + ], +} + +cc_library_static { + name: "webrtc_rtp__transport_feedback", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/congestion_controller/rtp/transport_feedback_adapter.cc", + "modules/congestion_controller/rtp/transport_feedback_demuxer.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_network__sent_packet", + "webrtc_rtc_base__checks", + "webrtc_synchronization__mutex", + "webrtc_units__data_size", + "webrtc_units__timestamp", + "webrtc_system_wrappers__field_trial", + "webrtc_transport__network_control", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_rtc_base__rtc_base", + "webrtc_rtp_rtcp__rtp_rtcp_format", + ], +} + +cc_library_static { + name: "webrtc_audio_codecs__builtin_audio_decoder_factory", + defaults: ["webrtc_defaults"], + srcs: ["api/audio_codecs/builtin_audio_decoder_factory.cc"], + host_supported: true, cflags: [ - "-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0", + "-DWEBRTC_USE_BUILTIN_ILBC=1", + "-DWEBRTC_USE_BUILTIN_OPUS=1", + "-DWEBRTC_USE_BUILTIN_ISAC_FIX=0", + "-DWEBRTC_USE_BUILTIN_ISAC_FLOAT=1", ], static_libs: [ - "webrtc_module_api", - "webrtc_network_control", - "webrtc_field_trial_based_config", - "webrtc_rtc_base", - "webrtc_rtp_rtcp_format", - "webrtc_remote_bitrate_estimator", - "webrtc_pacing", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_opus__audio_decoder_multiopus", + "webrtc_g722__audio_decoder_g722", + "webrtc_ilbc__audio_decoder_ilbc", + "webrtc_g711__audio_decoder_g711", + "webrtc_L16__audio_decoder_L16", + "webrtc_opus__audio_decoder_opus", ], } - cc_library_static { - name: "webrtc_multiplex", - defaults: [ - "webrtc_defaults", - ], + name: "webrtc_audio_coding__audio_coding", + defaults: ["webrtc_defaults"], srcs: [ - 
"modules/video_coding/codecs/multiplex/augmented_video_frame_buffer.cc", - "modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc", - "modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc", - "modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc", + "modules/audio_coding/acm2/acm_receiver.cc", + "modules/audio_coding/acm2/acm_remixing.cc", + "modules/audio_coding/acm2/acm_resampler.cc", + "modules/audio_coding/acm2/audio_coding_module.cc", + "modules/audio_coding/acm2/call_statistics.cc", + ":webrtc_neteq__neteq_api", + ":webrtc_audio_coding__default_neteq_factory", ], + host_supported: true, static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_module_api", - "webrtc_video_rtp_headers", - "webrtc_rtc_base", - "webrtc_video_frame", - "webrtc_encoded_image", - "webrtc_video_frame_i420", - "webrtc_video_frame_i010", - "webrtc_common_video", - "webrtc_video_codecs_api", - "webrtc_video_codec_interface", - "webrtc_rtp_rtcp_format", - "webrtc_rtc_media_base", - "webrtc_video_coding_utility", + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_synchronization__mutex", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_system_wrappers__metrics", + "webrtc_common_audio__common_audio_c", + "webrtc_rtc_base__audio_format_to_string", + "webrtc_common_audio__common_audio", + "webrtc_audio__audio_frame_api", + "webrtc_audio_coding__neteq", ], } - cc_library_static { - name: "webrtc_builtin_video_bitrate_allocator_factory", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/video/builtin_video_bitrate_allocator_factory.cc", - ], + name: "webrtc_audio__aec3_factory", + defaults: ["webrtc_defaults"], + srcs: ["api/audio/echo_canceller3_factory.cc"], + host_supported: true, + cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"], static_libs: [ - "webrtc_video_bitrate_allocation", - "webrtc_video_bitrate_allocator", - 
"webrtc_video_codecs_api", - "webrtc_vp9_helpers", - "webrtc_rtc_media_base", - "webrtc_video_coding_utility", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio__aec3_config", + "webrtc_aec3__aec3", ], } - cc_library_static { - name: "webrtc_rtc_software_fallback_wrappers", - defaults: [ - "webrtc_defaults", - ], + name: "webrtc_logging__rtc_event_rtp_rtcp", + defaults: ["webrtc_defaults"], srcs: [ - "api/video_codecs/video_decoder_software_fallback_wrapper.cc", - "api/video_codecs/video_encoder_software_fallback_wrapper.cc", + "logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.cc", + "logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.cc", + "logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.cc", + "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.cc", ], + host_supported: true, static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_video_bitrate_allocation", - "webrtc_field_trial", - "webrtc_rtc_base_approved", - "webrtc_video_rtp_headers", - "webrtc_system_wrappers_metrics", - "webrtc_rtc_h264_profile_id", - "webrtc_video_frame", - "webrtc_encoded_image", - "webrtc_video_frame_i420", - "webrtc_video_frame_i010", - "webrtc_video_codecs_api", - "webrtc_video_codec_interface", - "webrtc_rtc_media_base", - "webrtc_video_coding_utility", + "webrtc_rtc_base__checks", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_rtp_rtcp__rtp_rtcp_format", ], } +cc_library_static { + name: "webrtc_video_coding__webrtc_vp9_helpers", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/video_coding/codecs/vp9/svc_config.cc", + "modules/video_coding/codecs/vp9/svc_rate_allocator.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_video__video_bitrate_allocation", + "webrtc_rtc_base__logging", + "webrtc_video__video_bitrate_allocator", + "webrtc_common_video__common_video", + "webrtc_video_codecs__video_codecs_api", + "webrtc_video_coding__video_codec_interface", + 
"webrtc_experiments__stable_target_rate_experiment", + ], +} cc_library_static { - name: "webrtc_congestion_controller_goog_cc", - defaults: [ - "webrtc_defaults", - ], + name: "webrtc_aec_dump__null_aec_dump_factory", + defaults: ["webrtc_defaults"], srcs: [ - "modules/congestion_controller/goog_cc/goog_cc_network_control.cc", + "modules/audio_processing/aec_dump/null_aec_dump_factory.cc", + ":webrtc_audio_processing__aec_dump_interface", ], + host_supported: true, +} + +cc_library_static { + name: "webrtc_video__encoded_frame", + defaults: ["webrtc_defaults"], + srcs: ["api/video/encoded_frame.cc"], + host_supported: true, + static_libs: ["webrtc_video_coding__encoded_frame"], +} + +cc_library_static { + name: "webrtc_logging__rtc_event_bwe", + defaults: ["webrtc_defaults"], + srcs: [ + "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc", + "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.cc", + "logging/rtc_event_log/events/rtc_event_probe_cluster_created.cc", + "logging/rtc_event_log/events/rtc_event_probe_result_failure.cc", + "logging/rtc_event_log/events/rtc_event_probe_result_success.cc", + "logging/rtc_event_log/events/rtc_event_route_change.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_units__data_rate", + "webrtc_remote_bitrate_estimator__remote_bitrate_estimator", + ], +} + +cc_library_static { + name: "webrtc_audio_codecs__builtin_audio_encoder_factory", + defaults: ["webrtc_defaults"], + srcs: ["api/audio_codecs/builtin_audio_encoder_factory.cc"], + host_supported: true, cflags: [ - "-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0", + "-DWEBRTC_USE_BUILTIN_ILBC=1", + "-DWEBRTC_USE_BUILTIN_OPUS=1", + "-DWEBRTC_USE_BUILTIN_ISAC_FIX=0", + "-DWEBRTC_USE_BUILTIN_ISAC_FLOAT=1", ], static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_module_api", - "webrtc_data_size", - "webrtc_time_delta", - "webrtc_timestamp", - "webrtc_rtc_event_log", - "webrtc_logging", - "webrtc_data_rate", - 
"webrtc_network_control", - "webrtc_field_trial_parser", - "webrtc_rtc_event_pacing", - "webrtc_field_trial_based_config", - "webrtc_system_wrappers", - "webrtc_alr_experiment", - "webrtc_alr_detector", - "webrtc_rate_control_settings", - "webrtc_pushback_controller", - "webrtc_remote_bitrate_estimator", - "webrtc_rtc_event_bwe", - "webrtc_probe_controller", - "webrtc_goog_cc_estimators", - "webrtc_loss_based_controller", - "webrtc_delay_based_bwe", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_opus__audio_encoder_multiopus", + "webrtc_g722__audio_encoder_g722", + "webrtc_ilbc__audio_encoder_ilbc", + "webrtc_g711__audio_encoder_g711", + "webrtc_L16__audio_encoder_L16", + "webrtc_opus__audio_encoder_opus", ], } +cc_library_static { + name: "webrtc_audio_processing__audio_processing", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/audio_processing/audio_processing_builder_impl.cc", + "modules/audio_processing/audio_processing_impl.cc", + "modules/audio_processing/echo_control_mobile_impl.cc", + "modules/audio_processing/echo_detector/circular_buffer.cc", + "modules/audio_processing/echo_detector/mean_variance_estimator.cc", + "modules/audio_processing/echo_detector/moving_max.cc", + "modules/audio_processing/echo_detector/normalized_covariance_estimator.cc", + "modules/audio_processing/gain_control_impl.cc", + "modules/audio_processing/gain_controller2.cc", + "modules/audio_processing/level_estimator.cc", + "modules/audio_processing/residual_echo_detector.cc", + "modules/audio_processing/typing_detection.cc", + ":webrtc_audio_processing__rms_level", + ":webrtc_audio_processing__aec_dump_interface", + ], + host_supported: true, + cflags: ["-DWEBRTC_APM_DEBUG_DUMP=0"], + static_libs: [ + "webrtc_ooura__fft_size_256", + "webrtc_audio_processing__audio_processing_statistics", + "webrtc_rtc_base__checks", + "webrtc_synchronization__mutex", + "webrtc_audio_processing__config", + "webrtc_system_wrappers__field_trial", + 
"webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_audio__aec3_config", + "webrtc_system_wrappers__metrics", + "webrtc_common_audio__common_audio_c", + "webrtc_aecm__aecm_core", + "webrtc_agc2__gain_applier", + "webrtc_common_audio__common_audio", + "webrtc_audio__audio_frame_api", + "webrtc_audio_processing__apm_logging", + "webrtc_common_audio__fir_filter_factory", + "webrtc_agc2__fixed_digital", + "webrtc_agc__legacy_agc", + "webrtc_utility__audio_frame_operations", + "webrtc_audio_processing__api", + "webrtc_audio_processing__audio_buffer", + "webrtc_vad__vad", + "webrtc_audio_processing__high_pass_filter", + "webrtc_ns__ns", + "webrtc_agc2__adaptive_digital", + "webrtc_audio_processing__audio_frame_proxies", + "webrtc_audio_processing__optionally_built_submodule_creators", + "webrtc_audio_processing__voice_detection", + "webrtc_aec3__aec3", + "webrtc_agc__agc", + "webrtc_aec_dump__null_aec_dump_factory", + ], +} cc_library_static { - name: "webrtc_vp9", - defaults: [ - "webrtc_defaults", + name: "webrtc_goog_cc__probe_controller", + defaults: ["webrtc_defaults"], + srcs: ["modules/congestion_controller/goog_cc/probe_controller.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_units__time_delta", + "webrtc_units__timestamp", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_rtc_base__logging", + "webrtc_units__data_rate", + "webrtc_transport__network_control", + "webrtc_experiments__field_trial_parser", + "webrtc_logging__rtc_event_pacing", + "webrtc_system_wrappers__metrics", + "webrtc_logging__rtc_event_bwe", + ], +} + +cc_library_static { + name: "webrtc_goog_cc__loss_based_controller", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc", + "modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc", + ], + host_supported: true, + cflags: ["-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0"], + static_libs: [ 
+ "webrtc_rtc_base__checks", + "webrtc_units__time_delta", + "webrtc_units__timestamp", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_rtc_base__logging", + "webrtc_units__data_rate", + "webrtc_system_wrappers__field_trial", + "webrtc_transport__network_control", + "webrtc_experiments__field_trial_parser", + "webrtc_system_wrappers__metrics", + "webrtc_remote_bitrate_estimator__remote_bitrate_estimator", + "webrtc_logging__rtc_event_bwe", + ], +} + +cc_library_static { + name: "webrtc_goog_cc__estimators", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.cc", + "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.cc", + "modules/congestion_controller/goog_cc/bitrate_estimator.cc", + "modules/congestion_controller/goog_cc/probe_bitrate_estimator.cc", + "modules/congestion_controller/goog_cc/robust_throughput_estimator.cc", + "modules/congestion_controller/goog_cc/trendline_estimator.cc", + ], + host_supported: true, + cflags: ["-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0"], + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_units__timestamp", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_rtc_base__logging", + "webrtc_units__data_rate", + "webrtc_transport__network_control", + "webrtc_experiments__field_trial_parser", + "webrtc_rtc_base__rtc_numerics", + "webrtc_remote_bitrate_estimator__remote_bitrate_estimator", + "webrtc_logging__rtc_event_bwe", + ], +} + +cc_library_static { + name: "webrtc_call__rtp_interfaces", + defaults: ["webrtc_defaults"], + srcs: ["call/rtp_config.cc"], + host_supported: true, + static_libs: [ + "webrtc_transport__bitrate_settings", + "webrtc_rtc_base__checks", + "webrtc_api__rtp_parameters", + "webrtc_units__timestamp", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_api__rtp_headers", + "webrtc_crypto__options", + "webrtc_rtp_rtcp__rtp_rtcp_format", + ], +} + +cc_library_static { + name: 
"webrtc_call__video_stream_api", + defaults: ["webrtc_defaults"], + srcs: [ + "call/video_receive_stream.cc", + "call/video_send_stream.cc", + ":webrtc_adaptation__resource_adaptation_api", + ], + host_supported: true, + static_libs: [ + "webrtc_api__transport_api", + "webrtc_rtc_base__checks", + "webrtc_api__rtp_parameters", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_video__video_rtp_headers", + "webrtc_api__rtp_headers", + "webrtc_crypto__options", + "webrtc_video__video_frame", + "webrtc_common_video__common_video", + "webrtc_video_codecs__video_codecs_api", + "webrtc_rtp_rtcp__rtp_rtcp_format", + "webrtc_call__rtp_interfaces", + ], +} + +cc_library_static { + name: "webrtc_call__call_interfaces", + defaults: ["webrtc_defaults"], + srcs: [ + "call/audio_receive_stream.cc", + "call/audio_state.cc", + "call/call_config.cc", + "call/flexfec_receive_stream.cc", + "call/syncable.cc", + "call/audio_send_stream.cc", + ":webrtc_adaptation__resource_adaptation_api", + ":webrtc_neteq__neteq_api", + ], + host_supported: true, + static_libs: [ + "webrtc_network__sent_packet", + "webrtc_api__transport_api", + "webrtc_audio_processing__audio_processing_statistics", + "webrtc_transport__bitrate_settings", + "webrtc_rtc_base__checks", + "webrtc_task_queue__task_queue", + "webrtc_api__rtp_parameters", + "webrtc_transport__network_control", + "webrtc_api__rtc_error", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_rtc_base__rtc_base", + "webrtc_rtc_base__audio_format_to_string", + "webrtc_api__rtp_headers", + "webrtc_crypto__options", + "webrtc_utility__utility", + "webrtc_audio_processing__api", + "webrtc_rtp_rtcp__rtp_rtcp_format", + "webrtc_audio_processing__audio_processing", + "webrtc_call__rtp_interfaces", + "webrtc_call__video_stream_api", + ], +} + +cc_library_static { + name: "webrtc_media__rtc_media_base", + defaults: ["webrtc_defaults"], + srcs: [ + "media/base/adapted_video_track_source.cc", + "media/base/codec.cc", 
+ "media/base/media_channel.cc", + "media/base/media_constants.cc", + "media/base/media_engine.cc", + "media/base/rid_description.cc", + "media/base/rtp_data_engine.cc", + "media/base/rtp_utils.cc", + "media/base/stream_params.cc", + "media/base/turn_utils.cc", + "media/base/video_adapter.cc", + "media/base/video_broadcaster.cc", + "media/base/video_common.cc", + "media/base/video_source_base.cc", + ":webrtc_transport__stun_types", + ], + host_supported: true, + static_libs: [ + "webrtc_sigslot__sigslot", + "webrtc_audio_processing__audio_processing_statistics", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__stringutils", + "webrtc_system__file_wrapper", + "webrtc_synchronization__mutex", + "webrtc_synchronization__sequence_checker", + "webrtc_video__video_bitrate_allocation", + "webrtc_api__rtp_parameters", + "webrtc_api__audio_options_api", + "webrtc_system_wrappers__field_trial", + "webrtc_api__rtc_error", + "webrtc_rtc_base__rtc_task_queue", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_video__video_rtp_headers", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_rtc_base__rtc_base", + "webrtc_crypto__options", + "webrtc_media__rtc_h264_profile_id", + "webrtc_video__video_frame", + "webrtc_video__video_frame_i420", + "webrtc_common_video__common_video", + "webrtc_video_codecs__video_codecs_api", + "webrtc_api__media_stream_interface", + "webrtc_media__rtc_vp9_profile", + "webrtc_rtp_rtcp__rtp_rtcp_format", + "webrtc_call__video_stream_api", + "webrtc_call__call_interfaces", + ], +} + +cc_library_static { + name: "webrtc_video_capture__video_capture_module", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/video_capture/device_info_impl.cc", + "modules/video_capture/video_capture_factory.cc", + "modules/video_capture/video_capture_impl.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_modules__module_api", + "webrtc_rtc_base__stringutils", + "webrtc_synchronization__mutex", + "webrtc_synchronization__rw_lock_wrapper", + 
"webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_video__video_rtp_headers", + "webrtc_video__video_frame", + "webrtc_video__video_frame_i420", + "webrtc_common_video__common_video", + "webrtc_media__rtc_media_base", + ], +} + +cc_library_static { + name: "webrtc_call__fake_network", + defaults: ["webrtc_defaults"], + srcs: ["call/fake_network_pipe.cc"], + host_supported: true, + static_libs: [ + "webrtc_api__transport_api", + "webrtc_rtc_base__checks", + "webrtc_synchronization__mutex", + "webrtc_synchronization__sequence_checker", + "webrtc_api__rtp_parameters", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_call__simulated_network", + "webrtc_utility__utility", + "webrtc_call__call_interfaces", + ], +} + +cc_library_static { + name: "webrtc_api__libjingle_peerconnection_api", + defaults: ["webrtc_defaults"], + srcs: [ + "api/candidate.cc", + "api/data_channel_interface.cc", + "api/dtls_transport_interface.cc", + "api/jsep.cc", + "api/jsep_ice_candidate.cc", + "api/peer_connection_interface.cc", + "api/proxy.cc", + "api/rtp_receiver_interface.cc", + "api/rtp_sender_interface.cc", + "api/rtp_transceiver_interface.cc", + "api/sctp_transport_interface.cc", + "api/stats_types.cc", + ":webrtc_adaptation__resource_adaptation_api", + ":webrtc_neteq__neteq_api", + ], + host_supported: true, + static_libs: [ + "webrtc_audio_processing__audio_processing_statistics", + "webrtc_transport__bitrate_settings", + "webrtc_rtc_base__checks", + "webrtc_task_queue__task_queue", + "webrtc_api__rtp_parameters", + "webrtc_api__audio_options_api", + "webrtc_units__timestamp", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_units__data_rate", + "webrtc_transport__network_control", + "webrtc_api__rtc_error", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_video__video_rtp_headers", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_rtc_base__rtc_base", + "webrtc_api__rtp_packet_info", + 
"webrtc_crypto__options", + "webrtc_video__video_frame", + "webrtc_video__encoded_image", + "webrtc_api__media_stream_interface", + "webrtc_media__rtc_media_base", + ], +} + +filegroup { + name: "webrtc_video_capture__video_capture_internal_impl", + srcs: [ + "modules/video_capture/linux/device_info_linux.cc", + "modules/video_capture/linux/video_capture_linux.cc", + ], +} + +cc_library_static { + name: "webrtc_logging__ice_log", + defaults: ["webrtc_defaults"], + srcs: [ + "logging/rtc_event_log/events/rtc_event_dtls_transport_state.cc", + "logging/rtc_event_log/events/rtc_event_dtls_writable_state.cc", + "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.cc", + "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.cc", + "logging/rtc_event_log/ice_logger.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_api__libjingle_peerconnection_api", + ], +} + +cc_library_static { + name: "webrtc_call__bitrate_configurator", + defaults: ["webrtc_defaults"], + srcs: ["call/rtp_bitrate_configurator.cc"], + host_supported: true, + static_libs: [ + "webrtc_transport__bitrate_settings", + "webrtc_rtc_base__checks", + "webrtc_units__data_rate", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_call__rtp_interfaces", + "webrtc_api__libjingle_peerconnection_api", + ], +} + +cc_library_static { + name: "webrtc_rtp_rtcp__rtp_rtcp", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/rtp_rtcp/source/absolute_capture_time_receiver.cc", + "modules/rtp_rtcp/source/absolute_capture_time_sender.cc", + "modules/rtp_rtcp/source/active_decode_targets_helper.cc", + "modules/rtp_rtcp/source/create_video_rtp_depacketizer.cc", + "modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc", + "modules/rtp_rtcp/source/dtmf_queue.cc", + "modules/rtp_rtcp/source/fec_private_tables_bursty.cc", + "modules/rtp_rtcp/source/fec_private_tables_random.cc", + 
"modules/rtp_rtcp/source/flexfec_header_reader_writer.cc", + "modules/rtp_rtcp/source/flexfec_receiver.cc", + "modules/rtp_rtcp/source/flexfec_sender.cc", + "modules/rtp_rtcp/source/forward_error_correction.cc", + "modules/rtp_rtcp/source/forward_error_correction_internal.cc", + "modules/rtp_rtcp/source/packet_loss_stats.cc", + "modules/rtp_rtcp/source/receive_statistics_impl.cc", + "modules/rtp_rtcp/source/remote_ntp_time_estimator.cc", + "modules/rtp_rtcp/source/rtcp_nack_stats.cc", + "modules/rtp_rtcp/source/rtcp_receiver.cc", + "modules/rtp_rtcp/source/rtcp_sender.cc", + "modules/rtp_rtcp/source/rtp_descriptor_authentication.cc", + "modules/rtp_rtcp/source/rtp_format.cc", + "modules/rtp_rtcp/source/rtp_format_h264.cc", + "modules/rtp_rtcp/source/rtp_format_video_generic.cc", + "modules/rtp_rtcp/source/rtp_format_vp8.cc", + "modules/rtp_rtcp/source/rtp_format_vp9.cc", + "modules/rtp_rtcp/source/rtp_header_extension_size.cc", + "modules/rtp_rtcp/source/rtp_packet_history.cc", + "modules/rtp_rtcp/source/rtp_packetizer_av1.cc", + "modules/rtp_rtcp/source/rtp_rtcp_impl.cc", + "modules/rtp_rtcp/source/rtp_rtcp_impl2.cc", + "modules/rtp_rtcp/source/rtp_sender.cc", + "modules/rtp_rtcp/source/rtp_sender_audio.cc", + "modules/rtp_rtcp/source/rtp_sender_egress.cc", + "modules/rtp_rtcp/source/rtp_sender_video.cc", + "modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc", + "modules/rtp_rtcp/source/rtp_sequence_number_map.cc", + "modules/rtp_rtcp/source/rtp_utility.cc", + "modules/rtp_rtcp/source/source_tracker.cc", + "modules/rtp_rtcp/source/time_util.cc", + "modules/rtp_rtcp/source/tmmbr_help.cc", + "modules/rtp_rtcp/source/ulpfec_generator.cc", + "modules/rtp_rtcp/source/ulpfec_header_reader_writer.cc", + "modules/rtp_rtcp/source/ulpfec_receiver_impl.cc", + "modules/rtp_rtcp/source/video_rtp_depacketizer.cc", + "modules/rtp_rtcp/source/video_rtp_depacketizer_av1.cc", + "modules/rtp_rtcp/source/video_rtp_depacketizer_generic.cc", + 
"modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc", + "modules/rtp_rtcp/source/video_rtp_depacketizer_raw.cc", + "modules/rtp_rtcp/source/video_rtp_depacketizer_vp8.cc", + "modules/rtp_rtcp/source/video_rtp_depacketizer_vp9.cc", + ":webrtc_rtp__dependency_descriptor", + ], + host_supported: true, + cflags: ["-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0"], + static_libs: [ + "webrtc_api__transport_api", + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_task_queue__task_queue", + "webrtc_synchronization__mutex", + "webrtc_time__timestamp_extrapolator", + "webrtc_synchronization__sequence_checker", + "webrtc_video__video_bitrate_allocation", + "webrtc_api__rtp_parameters", + "webrtc_units__time_delta", + "webrtc_units__timestamp", + "webrtc_task_utils__pending_task_safety_flag", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_units__data_rate", + "webrtc_experiments__field_trial_parser", + "webrtc_transport__field_trial_based_config", + "webrtc_video__video_bitrate_allocator", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_rtc_base__rtc_numerics", + "webrtc_system_wrappers__system_wrappers", + "webrtc_video__video_rtp_headers", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_system_wrappers__metrics", + "webrtc_task_utils__repeating_task", + "webrtc_api__rtp_headers", + "webrtc_rtc_base__rate_limiter", + "webrtc_api__rtp_packet_info", + "webrtc_video__video_frame", + "webrtc_logging__rtc_event_audio", + "webrtc_video__encoded_image", + "webrtc_rtp_rtcp__rtp_video_header", + "webrtc_common_video__common_video", + "webrtc_video_codecs__video_codecs_api", + "webrtc_rtp_rtcp__rtp_rtcp_format", + "webrtc_remote_bitrate_estimator__remote_bitrate_estimator", + "webrtc_logging__rtc_event_rtp_rtcp", + "webrtc_video__encoded_frame", + "webrtc_call__rtp_interfaces", + "webrtc_api__libjingle_peerconnection_api", + ], +} + +cc_library_static { + name: "webrtc_call__rtp_receiver", + defaults: ["webrtc_defaults"], + srcs: [ + "call/rtp_demuxer.cc", 
+ "call/rtp_stream_receiver_controller.cc", + "call/rtx_receive_stream.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_api__rtp_headers", + "webrtc_rtp_rtcp__rtp_rtcp_format", + "webrtc_call__rtp_interfaces", + "webrtc_rtp_rtcp__rtp_rtcp", + ], +} + +cc_library_static { + name: "webrtc_p2p__rtc_p2p", + defaults: ["webrtc_defaults"], + srcs: [ + "p2p/base/async_stun_tcp_socket.cc", + "p2p/base/basic_async_resolver_factory.cc", + "p2p/base/basic_ice_controller.cc", + "p2p/base/basic_packet_socket_factory.cc", + "p2p/base/connection.cc", + "p2p/base/connection_info.cc", + "p2p/base/default_ice_transport_factory.cc", + "p2p/base/dtls_transport.cc", + "p2p/base/dtls_transport_internal.cc", + "p2p/base/ice_controller_interface.cc", + "p2p/base/ice_credentials_iterator.cc", + "p2p/base/ice_transport_internal.cc", + "p2p/base/mdns_message.cc", + "p2p/base/p2p_constants.cc", + "p2p/base/p2p_transport_channel.cc", + "p2p/base/packet_transport_internal.cc", + "p2p/base/port.cc", + "p2p/base/port_allocator.cc", + "p2p/base/port_interface.cc", + "p2p/base/pseudo_tcp.cc", + "p2p/base/regathering_controller.cc", + "p2p/base/stun_port.cc", + "p2p/base/stun_request.cc", + "p2p/base/tcp_port.cc", + "p2p/base/transport_description.cc", + "p2p/base/transport_description_factory.cc", + "p2p/base/turn_port.cc", + "p2p/client/basic_port_allocator.cc", + "p2p/client/turn_port_factory.cc", + ":webrtc_transport__stun_types", + ], + host_supported: true, + static_libs: [ + "webrtc_sigslot__sigslot", + "webrtc_network__sent_packet", + "webrtc_base64__base64", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__weak_ptr", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_system_wrappers__field_trial", + "webrtc_experiments__field_trial_parser", + "webrtc_api__rtc_error", + "webrtc_rtc_base__rtc_numerics", + "webrtc_system_wrappers__metrics", + "webrtc_rtc_base__rtc_base", + "webrtc_memory__fifo_buffer", + 
"webrtc_crypto__options", + "webrtc_api__libjingle_peerconnection_api", + "webrtc_logging__ice_log", + ], +} + +cc_library_static { + name: "webrtc_pacing__pacing", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/pacing/bitrate_prober.cc", + "modules/pacing/paced_sender.cc", + "modules/pacing/pacing_controller.cc", + "modules/pacing/packet_router.cc", + "modules/pacing/round_robin_packet_queue.cc", + "modules/pacing/task_queue_paced_sender.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_task_queue__task_queue", + "webrtc_synchronization__mutex", + "webrtc_synchronization__sequence_checker", + "webrtc_units__data_size", + "webrtc_units__time_delta", + "webrtc_units__timestamp", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_units__data_rate", + "webrtc_transport__network_control", + "webrtc_experiments__field_trial_parser", + "webrtc_logging__rtc_event_pacing", + "webrtc_transport__field_trial_based_config", + "webrtc_rtc_base__rtc_task_queue", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_system_wrappers__metrics", + "webrtc_pacing__interval_budget", + "webrtc_utility__utility", + "webrtc_rtp_rtcp__rtp_rtcp_format", + "webrtc_remote_bitrate_estimator__remote_bitrate_estimator", + "webrtc_logging__rtc_event_bwe", + "webrtc_rtp_rtcp__rtp_rtcp", + ], +} + +cc_library_static { + name: "webrtc_media__rtc_data", + defaults: ["webrtc_defaults"], + srcs: ["media/sctp/sctp_transport.cc"], + host_supported: true, + static_libs: [ + "webrtc_sigslot__sigslot", + "webrtc_api__transport_api", + "webrtc_synchronization__mutex", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_rtc_base__rtc_base", + "webrtc_media__rtc_media_base", + "webrtc_p2p__rtc_p2p", + ], +} + +cc_library_static { + name: "webrtc_goog_cc__delay_based_bwe", + defaults: ["webrtc_defaults"], + srcs: 
["modules/congestion_controller/goog_cc/delay_based_bwe.cc"], + host_supported: true, + cflags: ["-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0"], + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_transport__network_control", + "webrtc_experiments__field_trial_parser", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__metrics", + "webrtc_remote_bitrate_estimator__remote_bitrate_estimator", + "webrtc_logging__rtc_event_bwe", + "webrtc_goog_cc__estimators", + "webrtc_pacing__pacing", + ], +} + +cc_library_static { + name: "webrtc_video_coding__video_coding_utility", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/video_coding/utility/decoded_frames_history.cc", + "modules/video_coding/utility/frame_dropper.cc", + "modules/video_coding/utility/framerate_controller.cc", + "modules/video_coding/utility/ivf_file_reader.cc", + "modules/video_coding/utility/ivf_file_writer.cc", + "modules/video_coding/utility/quality_scaler.cc", + "modules/video_coding/utility/simulcast_rate_allocator.cc", + "modules/video_coding/utility/simulcast_utility.cc", + "modules/video_coding/utility/vp8_header_parser.cc", + "modules/video_coding/utility/vp9_uncompressed_header_parser.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_system__file_wrapper", + "webrtc_synchronization__sequence_checker", + "webrtc_video__video_bitrate_allocation", + "webrtc_video__video_adaptation", + "webrtc_rtc_base__weak_ptr", + "webrtc_system_wrappers__field_trial", + "webrtc_video__video_bitrate_allocator", + "webrtc_rtc_base__rtc_task_queue", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_rtc_base__rtc_numerics", + "webrtc_experiments__quality_scaler_settings", + "webrtc_task_utils__repeating_task", + "webrtc_video__video_frame", + "webrtc_video__encoded_image", + "webrtc_common_video__common_video", + "webrtc_video_codecs__video_codecs_api", + 
"webrtc_experiments__quality_scaling_experiment", + "webrtc_experiments__rate_control_settings", + "webrtc_video_coding__video_codec_interface", + "webrtc_rtp_rtcp__rtp_rtcp_format", + "webrtc_experiments__stable_target_rate_experiment", + "webrtc_video__encoded_frame", + "webrtc_rtp_rtcp__rtp_rtcp", + ], +} + +cc_library_static { + name: "webrtc_audio__audio", + defaults: ["webrtc_defaults"], + srcs: [ + "audio/audio_level.cc", + "audio/audio_receive_stream.cc", + "audio/audio_send_stream.cc", + "audio/audio_state.cc", + "audio/audio_transport_impl.cc", + "audio/channel_receive.cc", + "audio/channel_receive_frame_transformer_delegate.cc", + "audio/channel_send.cc", + "audio/channel_send_frame_transformer_delegate.cc", + "audio/null_audio_poller.cc", + "audio/remix_resample.cc", + ":webrtc_audio_processing__rms_level", + ":webrtc_neteq__neteq_api", + ], + host_supported: true, + static_libs: [ + "webrtc_audio_coding__audio_network_adaptor_config", + "webrtc_api__transport_api", + "webrtc_rtc_base__checks", + "webrtc_task_queue__task_queue", + "webrtc_synchronization__mutex", + "webrtc_synchronization__sequence_checker", + "webrtc_api__rtp_parameters", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_system_wrappers__field_trial", + "webrtc_experiments__field_trial_parser", + "webrtc_rtc_base__rtc_task_queue", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_system_wrappers__metrics", + "webrtc_rtc_base__rtc_base", + "webrtc_common_audio__common_audio_c", + "webrtc_rtc_base__audio_format_to_string", + "webrtc_api__rtp_headers", + "webrtc_rtc_base__rate_limiter", + "webrtc_logging__rtc_stream_config", + "webrtc_crypto__options", + "webrtc_common_audio__common_audio", + "webrtc_call__bitrate_allocator", + "webrtc_audio__audio_frame_api", + "webrtc_utility__utility", + "webrtc_audio_coding__audio_encoder_cng", + "webrtc_logging__rtc_event_audio", + "webrtc_audio_coding__red", + 
"webrtc_utility__audio_frame_operations", + "webrtc_audio_processing__api", + "webrtc_audio_processing__audio_frame_proxies", + "webrtc_rtp_rtcp__rtp_rtcp_format", + "webrtc_remote_bitrate_estimator__remote_bitrate_estimator", + "webrtc_audio_coding__audio_coding", + "webrtc_audio__aec3_factory", + "webrtc_audio_processing__audio_processing", + "webrtc_call__rtp_interfaces", + "webrtc_call__call_interfaces", + "webrtc_rtp_rtcp__rtp_rtcp", + "webrtc_pacing__pacing", + ], +} + +cc_library_static { + name: "webrtc_api__ice_transport_factory", + defaults: ["webrtc_defaults"], + srcs: ["api/ice_transport_factory.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_rtc_base__rtc_base", + "webrtc_api__libjingle_peerconnection_api", + "webrtc_p2p__rtc_p2p", + ], +} + +cc_library_static { + name: "webrtc_adaptation__resource_adaptation", + defaults: ["webrtc_defaults"], + srcs: [ + "call/adaptation/adaptation_constraint.cc", + "call/adaptation/adaptation_listener.cc", + "call/adaptation/broadcast_resource_listener.cc", + "call/adaptation/degradation_preference_provider.cc", + "call/adaptation/encoder_settings.cc", + "call/adaptation/resource_adaptation_processor.cc", + "call/adaptation/resource_adaptation_processor_interface.cc", + "call/adaptation/video_source_restrictions.cc", + "call/adaptation/video_stream_adapter.cc", + "call/adaptation/video_stream_input_state.cc", + "call/adaptation/video_stream_input_state_provider.cc", + ":webrtc_adaptation__resource_adaptation_api", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_task_queue__task_queue", + "webrtc_synchronization__mutex", + "webrtc_synchronization__sequence_checker", + "webrtc_api__rtp_parameters", + "webrtc_video__video_adaptation", + "webrtc_rtc_base__rtc_task_queue", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_video__video_frame", + "webrtc_video_codecs__video_codecs_api", + 
"webrtc_experiments__balanced_degradation_settings", + "webrtc_video_coding__video_coding_utility", + ], +} + +cc_library_static { + name: "webrtc_rtp__control_handler", + defaults: ["webrtc_defaults"], + srcs: ["modules/congestion_controller/rtp/control_handler.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_synchronization__sequence_checker", + "webrtc_units__data_size", + "webrtc_units__time_delta", + "webrtc_units__data_rate", + "webrtc_system_wrappers__field_trial", + "webrtc_transport__network_control", + "webrtc_rtc_base__rtc_base", + "webrtc_pacing__pacing", + ], +} + +cc_library_static { + name: "webrtc_video_coding__webrtc_vp8_temporal_layers", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/video_coding/codecs/vp8/default_temporal_layers.cc", + "modules/video_coding/codecs/vp8/screenshare_layers.cc", + "modules/video_coding/codecs/vp8/temporal_layers_checker.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_system_wrappers__field_trial", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_rtc_base__rtc_numerics", + "webrtc_system_wrappers__metrics", + "webrtc_video_codecs__video_codecs_api", + "webrtc_video_coding__video_codec_interface", + "webrtc_video_coding__video_coding_utility", + ], +} + +cc_library_static { + name: "webrtc_congestion_controller__congestion_controller", + defaults: ["webrtc_defaults"], + srcs: ["modules/congestion_controller/receive_side_congestion_controller.cc"], + host_supported: true, + cflags: ["-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0"], + static_libs: [ + "webrtc_modules__module_api", + "webrtc_synchronization__mutex", + "webrtc_transport__network_control", + "webrtc_transport__field_trial_based_config", + "webrtc_rtc_base__rtc_base", + "webrtc_rtp_rtcp__rtp_rtcp_format", + "webrtc_remote_bitrate_estimator__remote_bitrate_estimator", + "webrtc_pacing__pacing", + ], +} + +cc_library_static { + name: 
"webrtc_video_coding__webrtc_multiplex", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/video_coding/codecs/multiplex/augmented_video_frame_buffer.cc", + "modules/video_coding/codecs/multiplex/multiplex_decoder_adapter.cc", + "modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.cc", + "modules/video_coding/codecs/multiplex/multiplex_encoder_adapter.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_synchronization__mutex", + "webrtc_video__video_rtp_headers", + "webrtc_rtc_base__rtc_base", + "webrtc_video__video_frame", + "webrtc_video__encoded_image", + "webrtc_video__video_frame_i420", + "webrtc_common_video__common_video", + "webrtc_video_codecs__video_codecs_api", + "webrtc_video_coding__video_codec_interface", + "webrtc_rtp_rtcp__rtp_rtcp_format", + "webrtc_media__rtc_media_base", + "webrtc_video_coding__video_coding_utility", + ], +} + +cc_library_static { + name: "webrtc_video__builtin_video_bitrate_allocator_factory", + defaults: ["webrtc_defaults"], + srcs: ["api/video/builtin_video_bitrate_allocator_factory.cc"], + host_supported: true, + static_libs: [ + "webrtc_video__video_bitrate_allocation", + "webrtc_video__video_bitrate_allocator", + "webrtc_video_codecs__video_codecs_api", + "webrtc_video_coding__webrtc_vp9_helpers", + "webrtc_media__rtc_media_base", + "webrtc_video_coding__video_coding_utility", + ], +} + +cc_library_static { + name: "webrtc_video_codecs__rtc_software_fallback_wrappers", + defaults: ["webrtc_defaults"], + srcs: [ + "api/video_codecs/video_decoder_software_fallback_wrapper.cc", + "api/video_codecs/video_encoder_software_fallback_wrapper.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_video__video_bitrate_allocation", + "webrtc_system_wrappers__field_trial", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_video__video_rtp_headers", + "webrtc_system_wrappers__metrics", + 
"webrtc_media__rtc_h264_profile_id", + "webrtc_video__video_frame", + "webrtc_video__encoded_image", + "webrtc_video__video_frame_i420", + "webrtc_video_codecs__video_codecs_api", + "webrtc_video_coding__video_codec_interface", + "webrtc_media__rtc_media_base", + "webrtc_video_coding__video_coding_utility", + ], +} + +cc_library_static { + name: "webrtc_goog_cc__goog_cc", + defaults: ["webrtc_defaults"], + srcs: ["modules/congestion_controller/goog_cc/goog_cc_network_control.cc"], + host_supported: true, + cflags: ["-DBWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0"], + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_units__data_size", + "webrtc_units__time_delta", + "webrtc_units__timestamp", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_rtc_base__logging", + "webrtc_units__data_rate", + "webrtc_transport__network_control", + "webrtc_experiments__field_trial_parser", + "webrtc_logging__rtc_event_pacing", + "webrtc_transport__field_trial_based_config", + "webrtc_system_wrappers__system_wrappers", + "webrtc_experiments__alr_experiment", + "webrtc_goog_cc__alr_detector", + "webrtc_experiments__rate_control_settings", + "webrtc_goog_cc__pushback_controller", + "webrtc_remote_bitrate_estimator__remote_bitrate_estimator", + "webrtc_logging__rtc_event_bwe", + "webrtc_goog_cc__probe_controller", + "webrtc_goog_cc__loss_based_controller", + "webrtc_goog_cc__estimators", + "webrtc_goog_cc__delay_based_bwe", + ], +} + +cc_library_static { + name: "webrtc_video_coding__webrtc_vp9", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/video_coding/codecs/vp9/vp9.cc", + "modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc", + "modules/video_coding/codecs/vp9/vp9_impl.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_synchronization__mutex", + "webrtc_system_wrappers__field_trial", + "webrtc_video__video_rtp_headers", + "webrtc_rtc_base__rtc_base", + 
"webrtc_video__video_frame", + "webrtc_common_video__common_video", + "webrtc_video__video_frame_i010", + "webrtc_video_codecs__video_codecs_api", + "webrtc_media__rtc_vp9_profile", + "webrtc_experiments__rate_control_settings", + "webrtc_video_coding__video_codec_interface", + "webrtc_rtp_rtcp__rtp_rtcp_format", + "webrtc_video_coding__webrtc_vp9_helpers", + "webrtc_media__rtc_media_base", + "webrtc_video_coding__video_coding_utility", + ], +} + +cc_library_static { + name: "webrtc_video_coding__webrtc_h264", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/video_coding/codecs/h264/h264.cc", + "modules/video_coding/codecs/h264/h264_color_space.cc", + "modules/video_coding/codecs/h264/h264_decoder_impl.cc", + "modules/video_coding/codecs/h264/h264_encoder_impl.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_system_wrappers__field_trial", + "webrtc_video__video_rtp_headers", + "webrtc_system_wrappers__metrics", + "webrtc_rtc_base__rtc_base", + "webrtc_media__rtc_h264_profile_id", + "webrtc_video__video_frame", + "webrtc_video__video_frame_i420", + "webrtc_common_video__common_video", + "webrtc_video__video_frame_i010", + "webrtc_video_codecs__video_codecs_api", + "webrtc_video_coding__video_codec_interface", + "webrtc_media__rtc_media_base", + "webrtc_video_coding__video_coding_utility", + ], +} + +cc_library_static { + name: "webrtc_media__rtc_simulcast_encoder_adapter", + defaults: ["webrtc_defaults"], + srcs: ["media/engine/simulcast_encoder_adapter.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_synchronization__sequence_checker", + "webrtc_system_wrappers__field_trial", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_video__video_rtp_headers", + "webrtc_video__video_frame", + "webrtc_video__video_frame_i420", + "webrtc_video_codecs__video_codecs_api", + "webrtc_experiments__rate_control_settings", + 
"webrtc_video_coding__video_codec_interface", + "webrtc_call__video_stream_api", + "webrtc_media__rtc_media_base", + "webrtc_video_coding__video_coding_utility", + "webrtc_video_codecs__rtc_software_fallback_wrappers", + ], +} + +cc_library_static { + name: "webrtc_adaptation__video_adaptation", + defaults: ["webrtc_defaults"], + srcs: [ + "video/adaptation/encode_usage_resource.cc", + "video/adaptation/overuse_frame_detector.cc", + "video/adaptation/quality_rampup_experiment_helper.cc", + "video/adaptation/quality_scaler_resource.cc", + "video/adaptation/video_stream_encoder_resource.cc", + "video/adaptation/video_stream_encoder_resource_manager.cc", + ":webrtc_adaptation__resource_adaptation_api", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_task_queue__task_queue", + "webrtc_synchronization__mutex", + "webrtc_synchronization__sequence_checker", + "webrtc_api__rtp_parameters", + "webrtc_video__video_adaptation", + "webrtc_rtc_base__timeutils", + "webrtc_rtc_base__rtc_event", + "webrtc_rtc_base__logging", + "webrtc_units__data_rate", + "webrtc_system_wrappers__field_trial", + "webrtc_experiments__field_trial_parser", + "webrtc_rtc_base__rtc_task_queue", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_rtc_base__rtc_numerics", + "webrtc_system_wrappers__system_wrappers", + "webrtc_experiments__quality_scaler_settings", + "webrtc_experiments__quality_rampup_experiment", + "webrtc_task_utils__repeating_task", + "webrtc_video__video_frame", + "webrtc_video_codecs__video_codecs_api", + "webrtc_experiments__balanced_degradation_settings", + "webrtc_video_coding__video_coding_utility", + "webrtc_adaptation__resource_adaptation", + ], +} + +cc_library_static { + name: "webrtc_transport__goog_cc", + defaults: ["webrtc_defaults"], + srcs: ["api/transport/goog_cc_factory.cc"], + host_supported: true, + static_libs: [ + "webrtc_transport__network_control", + "webrtc_goog_cc__goog_cc", + ], +} + +cc_library_static { + name: 
"webrtc_video_codecs__vp8_temporal_layers_factory", + defaults: ["webrtc_defaults"], + srcs: ["api/video_codecs/vp8_temporal_layers_factory.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_video_codecs__video_codecs_api", + "webrtc_video_coding__video_coding_utility", + "webrtc_video_coding__webrtc_vp8_temporal_layers", + ], +} + +cc_library_static { + name: "webrtc_test__fake_video_codecs", + defaults: ["webrtc_defaults"], + srcs: [ + "test/configurable_frame_size_encoder.cc", + "test/fake_decoder.cc", + "test/fake_encoder.cc", + "test/fake_vp8_decoder.cc", + "test/fake_vp8_encoder.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_task_queue__task_queue", + "webrtc_rtc_base__criticalsection", + "webrtc_synchronization__mutex", + "webrtc_synchronization__sequence_checker", + "webrtc_video__video_bitrate_allocation", + "webrtc_rtc_base__timeutils", + "webrtc_rtc_base__rtc_task_queue", + "webrtc_system_wrappers__system_wrappers", + "webrtc_video__video_rtp_headers", + "webrtc_video__video_frame", + "webrtc_video__encoded_image", + "webrtc_video__video_frame_i420", + "webrtc_video_codecs__video_codecs_api", + "webrtc_video_coding__video_codec_interface", + "webrtc_video_coding__video_coding_utility", + "webrtc_video_codecs__vp8_temporal_layers_factory", + ], +} + +cc_library_static { + name: "webrtc_media__rtc_encoder_simulcast_proxy", + defaults: ["webrtc_defaults"], + srcs: ["media/engine/encoder_simulcast_proxy.cc"], + host_supported: true, + static_libs: [ + "webrtc_video__video_bitrate_allocation", + "webrtc_video__video_rtp_headers", + "webrtc_video__video_frame", + "webrtc_video_codecs__video_codecs_api", + "webrtc_video_coding__video_codec_interface", + "webrtc_media__rtc_simulcast_encoder_adapter", + ], +} + +cc_library_static { + name: "webrtc_pc__rtc_pc_base", + defaults: ["webrtc_defaults"], + srcs: [ + "pc/channel.cc", + "pc/channel_manager.cc", + 
"pc/composite_rtp_transport.cc", + "pc/dtls_srtp_transport.cc", + "pc/dtls_transport.cc", + "pc/external_hmac.cc", + "pc/ice_transport.cc", + "pc/jsep_transport.cc", + "pc/jsep_transport_controller.cc", + "pc/media_session.cc", + "pc/rtcp_mux_filter.cc", + "pc/rtp_media_utils.cc", + "pc/rtp_transport.cc", + "pc/sctp_data_channel_transport.cc", + "pc/sctp_transport.cc", + "pc/sctp_utils.cc", + "pc/session_description.cc", + "pc/simulcast_description.cc", + "pc/srtp_filter.cc", + "pc/srtp_session.cc", + "pc/srtp_transport.cc", + "pc/transport_stats.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_sigslot__sigslot", + "webrtc_pc__media_protocol_names", + "webrtc_base64__base64", + "webrtc_rtc_base__checks", + "webrtc_rtc_base__stringutils", + "webrtc_system__file_wrapper", + "webrtc_synchronization__mutex", + "webrtc_api__rtp_parameters", + "webrtc_api__audio_options_api", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_system_wrappers__field_trial", + "webrtc_api__rtc_error", + "webrtc_rtc_base__rtc_task_queue", + "webrtc_video__video_rtp_headers", + "webrtc_system_wrappers__metrics", + "webrtc_rtc_base__rtc_base", + "webrtc_api__rtp_headers", + "webrtc_crypto__options", + "webrtc_media__rtc_h264_profile_id", + "webrtc_video__video_frame", + "webrtc_common_video__common_video", + "webrtc_rtp_rtcp__rtp_rtcp_format", + "webrtc_call__rtp_interfaces", + "webrtc_call__call_interfaces", + "webrtc_media__rtc_media_base", + "webrtc_api__libjingle_peerconnection_api", + "webrtc_logging__ice_log", + "webrtc_rtp_rtcp__rtp_rtcp", + "webrtc_call__rtp_receiver", + "webrtc_p2p__rtc_p2p", + "webrtc_media__rtc_data", + "webrtc_api__ice_transport_factory", + "webrtc_video__builtin_video_bitrate_allocator_factory", + ], +} + +cc_library_static { + name: "webrtc_call__rtp_sender", + defaults: ["webrtc_defaults"], + srcs: [ + "call/rtp_payload_params.cc", + "call/rtp_transport_controller_send.cc", + "call/rtp_video_sender.cc", + ], + host_supported: true, + static_libs: [ 
+ "webrtc_api__transport_api", + "webrtc_rtc_base__checks", + "webrtc_synchronization__mutex", + "webrtc_api__rtp_parameters", + "webrtc_units__time_delta", + "webrtc_units__timestamp", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_units__data_rate", + "webrtc_video_coding__chain_diff_calculator", + "webrtc_transport__network_control", + "webrtc_transport__field_trial_based_config", + "webrtc_video_coding__frame_dependencies_calculator", + "webrtc_rtc_base__rtc_task_queue", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_video__video_rtp_headers", + "webrtc_rtc_base__rtc_base", + "webrtc_task_utils__repeating_task", + "webrtc_rtc_base__rate_limiter", + "webrtc_video__video_frame", + "webrtc_utility__utility", + "webrtc_rtp_rtcp__rtp_video_header", + "webrtc_video_codecs__video_codecs_api", + "webrtc_video_coding__video_codec_interface", + "webrtc_rtp_rtcp__rtp_rtcp_format", + "webrtc_rtp__transport_feedback", + "webrtc_logging__rtc_event_bwe", + "webrtc_call__rtp_interfaces", + "webrtc_call__bitrate_configurator", + "webrtc_rtp_rtcp__rtp_rtcp", + "webrtc_pacing__pacing", + "webrtc_rtp__control_handler", + "webrtc_congestion_controller__congestion_controller", + "webrtc_transport__goog_cc", + ], +} + +cc_library_static { + name: "webrtc_video_coding__video_coding", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/video_coding/codec_timer.cc", + "modules/video_coding/decoder_database.cc", + "modules/video_coding/fec_controller_default.cc", + "modules/video_coding/frame_buffer2.cc", + "modules/video_coding/frame_object.cc", + "modules/video_coding/generic_decoder.cc", + "modules/video_coding/h264_sprop_parameter_sets.cc", + "modules/video_coding/h264_sps_pps_tracker.cc", + "modules/video_coding/inter_frame_delay.cc", + "modules/video_coding/jitter_estimator.cc", + "modules/video_coding/loss_notification_controller.cc", + "modules/video_coding/media_opt_util.cc", + "modules/video_coding/packet_buffer.cc", + 
"modules/video_coding/rtp_frame_reference_finder.cc", + "modules/video_coding/rtt_filter.cc", + "modules/video_coding/timestamp_map.cc", + "modules/video_coding/timing.cc", + "modules/video_coding/unique_timestamp_counter.cc", + "modules/video_coding/video_codec_initializer.cc", + "modules/video_coding/video_receiver2.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_base64__base64", + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_synchronization__mutex", + "webrtc_time__timestamp_extrapolator", + "webrtc_synchronization__sequence_checker", + "webrtc_video__video_bitrate_allocation", + "webrtc_video__video_adaptation", + "webrtc_units__time_delta", + "webrtc_units__data_rate", + "webrtc_system_wrappers__field_trial", + "webrtc_experiments__field_trial_parser", + "webrtc_video__video_bitrate_allocator", + "webrtc_rtc_base__rtc_task_queue", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_experiments__jitter_upper_bound_experiment", + "webrtc_rtc_base__rtc_numerics", + "webrtc_system_wrappers__system_wrappers", + "webrtc_video__video_rtp_headers", + "webrtc_experiments__alr_experiment", + "webrtc_system_wrappers__metrics", + "webrtc_experiments__rtt_mult_experiment", + "webrtc_rtc_base__rtc_base", + "webrtc_task_utils__repeating_task", + "webrtc_api__rtp_headers", + "webrtc_api__rtp_packet_info", + "webrtc_video__video_frame", + "webrtc_experiments__min_video_bitrate_experiment", + "webrtc_video__encoded_image", + "webrtc_video__video_frame_i420", + "webrtc_rtp_rtcp__rtp_video_header", + "webrtc_common_video__common_video", + "webrtc_video_codecs__video_codecs_api", + "webrtc_experiments__rate_control_settings", + "webrtc_video_coding__video_codec_interface", + "webrtc_rtp_rtcp__rtp_rtcp_format", + "webrtc_video_coding__encoded_frame", + "webrtc_video_coding__webrtc_vp9_helpers", + "webrtc_video__encoded_frame", + "webrtc_rtp_rtcp__rtp_rtcp", + "webrtc_video_coding__video_coding_utility", + 
"webrtc_video__builtin_video_bitrate_allocator_factory", + ], +} + +cc_library_static { + name: "webrtc_video_coding__webrtc_vp8", + defaults: ["webrtc_defaults"], + srcs: [ + "modules/video_coding/codecs/vp8/libvpx_interface.cc", + "modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc", + "modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_system_wrappers__field_trial", + "webrtc_experiments__field_trial_parser", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_rtc_base__rtc_numerics", + "webrtc_experiments__cpu_speed_experiment", + "webrtc_video__video_rtp_headers", + "webrtc_system_wrappers__metrics", + "webrtc_video__video_frame", + "webrtc_video__encoded_image", + "webrtc_video__video_frame_i420", + "webrtc_common_video__common_video", + "webrtc_video_codecs__video_codecs_api", + "webrtc_experiments__rate_control_settings", + "webrtc_video_coding__video_codec_interface", + "webrtc_video_coding__video_coding_utility", + "webrtc_video_coding__webrtc_vp8_temporal_layers", + "webrtc_video_codecs__vp8_temporal_layers_factory", + ], +} + +cc_library_static { + name: "webrtc_media__rtc_internal_video_codecs", + defaults: ["webrtc_defaults"], + srcs: [ + "media/engine/fake_video_codec_factory.cc", + "media/engine/internal_decoder_factory.cc", + "media/engine/internal_encoder_factory.cc", + "media/engine/multiplex_codec_factory.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_media__rtc_constants", + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_video__video_bitrate_allocation", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_video__video_rtp_headers", + "webrtc_media__rtc_h264_profile_id", + "webrtc_video__video_frame", + "webrtc_video__encoded_image", + "webrtc_video_codecs__video_codecs_api", + "webrtc_video_coding__video_codec_interface", + "webrtc_av1__libaom_av1_encoder", + 
"webrtc_av1__libaom_av1_decoder", + "webrtc_call__video_stream_api", + "webrtc_call__call_interfaces", + "webrtc_media__rtc_media_base", + "webrtc_video_coding__webrtc_multiplex", + "webrtc_video_codecs__rtc_software_fallback_wrappers", + "webrtc_video_coding__webrtc_vp9", + "webrtc_video_coding__webrtc_h264", + "webrtc_media__rtc_simulcast_encoder_adapter", + "webrtc_test__fake_video_codecs", + "webrtc_media__rtc_encoder_simulcast_proxy", + "webrtc_video_coding__webrtc_vp8", + ], +} + +cc_library_static { + name: "webrtc_video_codecs__builtin_video_encoder_factory", + defaults: ["webrtc_defaults"], + srcs: ["api/video_codecs/builtin_video_encoder_factory.cc"], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_video_codecs__video_codecs_api", + "webrtc_media__rtc_media_base", + "webrtc_media__rtc_encoder_simulcast_proxy", + "webrtc_media__rtc_internal_video_codecs", + ], +} + +cc_library_static { + name: "webrtc_video__frame_dumping_decoder", + defaults: ["webrtc_defaults"], + srcs: ["video/frame_dumping_decoder.cc"], + host_supported: true, + static_libs: [ + "webrtc_system__file_wrapper", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_video__encoded_image", + "webrtc_video_codecs__video_codecs_api", + "webrtc_video_coding__video_codec_interface", + "webrtc_video__encoded_frame", + "webrtc_video_coding__video_coding_utility", + "webrtc_video_coding__video_coding", + ], +} + +cc_library_static { + name: "webrtc_pc__peerconnection", + defaults: ["webrtc_defaults"], + srcs: [ + "pc/audio_rtp_receiver.cc", + "pc/audio_track.cc", + "pc/data_channel_controller.cc", + "pc/data_channel_utils.cc", + "pc/dtmf_sender.cc", + "pc/ice_server_parsing.cc", + "pc/jitter_buffer_delay.cc", + "pc/jsep_ice_candidate.cc", + "pc/jsep_session_description.cc", + "pc/local_audio_source.cc", + "pc/media_stream.cc", + "pc/media_stream_observer.cc", + "pc/peer_connection.cc", + "pc/peer_connection_factory.cc", + "pc/remote_audio_source.cc", + 
"pc/rtc_stats_collector.cc", + "pc/rtc_stats_traversal.cc", + "pc/rtp_data_channel.cc", + "pc/rtp_parameters_conversion.cc", + "pc/rtp_receiver.cc", + "pc/rtp_sender.cc", + "pc/rtp_transceiver.cc", + "pc/sctp_data_channel.cc", + "pc/sdp_serializer.cc", + "pc/sdp_utils.cc", + "pc/stats_collector.cc", + "pc/track_media_info_map.cc", + "pc/video_rtp_receiver.cc", + "pc/video_rtp_track_source.cc", + "pc/video_track.cc", + "pc/video_track_source.cc", + "pc/webrtc_sdp.cc", + "pc/webrtc_session_description_factory.cc", + ":webrtc_rtc_base__rtc_operations_chain", + ], + host_supported: true, + static_libs: [ + "webrtc_sigslot__sigslot", + "webrtc_base64__base64", + "webrtc_rtc_base__checks", + "webrtc_task_queue__task_queue", + "webrtc_system__file_wrapper", + "webrtc_synchronization__mutex", + "webrtc_api__rtp_parameters", + "webrtc_api__audio_options_api", + "webrtc_rtc_base__weak_ptr", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_units__data_rate", + "webrtc_system_wrappers__field_trial", + "webrtc_experiments__field_trial_parser", + "webrtc_transport__field_trial_based_config", + "webrtc_api__rtc_error", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_api__rtc_event_log_output_file", + "webrtc_system_wrappers__system_wrappers", + "webrtc_video__video_rtp_headers", + "webrtc_system_wrappers__metrics", + "webrtc_rtc_base__rtc_base", + "webrtc_video__video_frame", + "webrtc_common_video__common_video", + "webrtc_video_codecs__video_codecs_api", + "webrtc_api__media_stream_interface", + "webrtc_rtp_rtcp__rtp_rtcp_format", + "webrtc_call__call_interfaces", + "webrtc_media__rtc_media_base", + "webrtc_api__libjingle_peerconnection_api", + "webrtc_logging__ice_log", + "webrtc_p2p__rtc_p2p", + "webrtc_media__rtc_data", + "webrtc_api__ice_transport_factory", + "webrtc_video__builtin_video_bitrate_allocator_factory", + "webrtc_pc__rtc_pc_base", + ], +} + +cc_library_static { + name: "webrtc_video__video_stream_encoder_impl", + defaults: ["webrtc_defaults"], + srcs: [ + 
"video/encoder_bitrate_adjuster.cc", + "video/encoder_overshoot_detector.cc", + "video/frame_encode_metadata_writer.cc", + "video/video_source_sink_controller.cc", + "video/video_stream_encoder.cc", + ":webrtc_adaptation__resource_adaptation_api", + ], + host_supported: true, + static_libs: [ + "webrtc_rtc_base__checks", + "webrtc_task_queue__task_queue", + "webrtc_rtc_base__criticalsection", + "webrtc_synchronization__mutex", + "webrtc_synchronization__sequence_checker", + "webrtc_video__video_bitrate_allocation", + "webrtc_api__rtp_parameters", + "webrtc_video__video_adaptation", + "webrtc_rtc_base__timeutils", + "webrtc_rtc_base__rtc_event", + "webrtc_rtc_base__logging", + "webrtc_units__data_rate", + "webrtc_system_wrappers__field_trial", + "webrtc_experiments__field_trial_parser", + "webrtc_video__video_bitrate_allocator", + "webrtc_rtc_base__rtc_task_queue", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_rtc_base__rtc_numerics", + "webrtc_system_wrappers__system_wrappers", + "webrtc_video__video_rtp_headers", + "webrtc_experiments__alr_experiment", + "webrtc_experiments__quality_scaler_settings", + "webrtc_experiments__quality_rampup_experiment", + "webrtc_task_utils__repeating_task", + "webrtc_video__video_frame", + "webrtc_video__encoded_image", + "webrtc_video__video_frame_i420", + "webrtc_common_video__common_video", + "webrtc_video_codecs__video_codecs_api", + "webrtc_experiments__quality_scaling_experiment", + "webrtc_experiments__rate_control_settings", + "webrtc_video_coding__video_codec_interface", + "webrtc_experiments__balanced_degradation_settings", + "webrtc_video_coding__webrtc_vp9_helpers", + "webrtc_video_coding__video_coding_utility", + "webrtc_adaptation__resource_adaptation", + "webrtc_adaptation__video_adaptation", + "webrtc_video_coding__video_coding", + ], +} + +cc_library_static { + name: "webrtc_video__video_stream_encoder_create", + defaults: ["webrtc_defaults"], + srcs: ["api/video/video_stream_encoder_create.cc"], + 
host_supported: true, + static_libs: [ + "webrtc_task_queue__task_queue", + "webrtc_video__video_frame", + "webrtc_video_codecs__video_codecs_api", + "webrtc_adaptation__video_adaptation", + "webrtc_video__video_stream_encoder_impl", + ], +} + +cc_library_static { + name: "webrtc_video_codecs__builtin_video_decoder_factory", + defaults: ["webrtc_defaults"], + srcs: ["api/video_codecs/builtin_video_decoder_factory.cc"], + host_supported: true, + static_libs: [ + "webrtc_video_codecs__video_codecs_api", + "webrtc_media__rtc_internal_video_codecs", + ], +} + +cc_library_static { + name: "webrtc_video__video", + defaults: ["webrtc_defaults"], + srcs: [ + "video/buffered_frame_decryptor.cc", + "video/call_stats.cc", + "video/call_stats2.cc", + "video/encoder_rtcp_feedback.cc", + "video/quality_limitation_reason_tracker.cc", + "video/quality_threshold.cc", + "video/receive_statistics_proxy.cc", + "video/receive_statistics_proxy2.cc", + "video/report_block_stats.cc", + "video/rtp_streams_synchronizer.cc", + "video/rtp_streams_synchronizer2.cc", + "video/rtp_video_stream_receiver.cc", + "video/rtp_video_stream_receiver2.cc", + "video/rtp_video_stream_receiver_frame_transformer_delegate.cc", + "video/send_delay_stats.cc", + "video/send_statistics_proxy.cc", + "video/stats_counter.cc", + "video/stream_synchronization.cc", + "video/transport_adapter.cc", + "video/video_quality_observer.cc", + "video/video_quality_observer2.cc", + "video/video_receive_stream.cc", + "video/video_receive_stream2.cc", + "video/video_send_stream.cc", + "video/video_send_stream_impl.cc", + "video/video_stream_decoder.cc", + "video/video_stream_decoder2.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_api__transport_api", + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_task_queue__task_queue", + "webrtc_rtc_base__stringutils", + "webrtc_synchronization__mutex", + "webrtc_time__timestamp_extrapolator", + "webrtc_synchronization__sequence_checker", + 
"webrtc_video__video_bitrate_allocation", + "webrtc_api__rtp_parameters", + "webrtc_units__timestamp", + "webrtc_rtc_base__weak_ptr", + "webrtc_task_utils__pending_task_safety_flag", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_system_wrappers__field_trial", + "webrtc_experiments__field_trial_parser", + "webrtc_video__video_bitrate_allocator", + "webrtc_experiments__keyframe_interval_settings_experiment", + "webrtc_rtc_base__rtc_task_queue", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_rtc_base__rtc_numerics", + "webrtc_system_wrappers__system_wrappers", + "webrtc_video__video_rtp_headers", + "webrtc_experiments__alr_experiment", + "webrtc_system_wrappers__metrics", + "webrtc_rtc_base__rtc_base", + "webrtc_task_utils__repeating_task", + "webrtc_rtc_base__rate_limiter", + "webrtc_crypto__options", + "webrtc_media__rtc_h264_profile_id", + "webrtc_call__bitrate_allocator", + "webrtc_video__video_frame", + "webrtc_utility__utility", + "webrtc_video_coding__nack_module", + "webrtc_experiments__min_video_bitrate_experiment", + "webrtc_video__encoded_image", + "webrtc_deprecated__nack_module", + "webrtc_video__video_frame_i420", + "webrtc_rtp_rtcp__rtp_video_header", + "webrtc_common_video__common_video", + "webrtc_video_codecs__video_codecs_api", + "webrtc_experiments__quality_scaling_experiment", + "webrtc_experiments__rate_control_settings", + "webrtc_video_coding__video_codec_interface", + "webrtc_video_processing__video_processing", + "webrtc_rtp_rtcp__rtp_rtcp_format", + "webrtc_remote_bitrate_estimator__remote_bitrate_estimator", + "webrtc_call__rtp_interfaces", + "webrtc_call__video_stream_api", + "webrtc_call__call_interfaces", + "webrtc_media__rtc_media_base", + "webrtc_api__libjingle_peerconnection_api", + "webrtc_rtp_rtcp__rtp_rtcp", + "webrtc_call__rtp_receiver", + "webrtc_pacing__pacing", + "webrtc_video_coding__video_coding_utility", + "webrtc_call__rtp_sender", + "webrtc_video_coding__video_coding", + "webrtc_video__frame_dumping_decoder", + 
"webrtc_video__video_stream_encoder_create", + ], +} + +cc_library_static { + name: "webrtc_call__call", + defaults: ["webrtc_defaults"], + srcs: [ + "call/call.cc", + "call/call_factory.cc", + "call/degraded_call.cc", + "call/flexfec_receive_stream_impl.cc", + "call/receive_time_calculator.cc", + ], + host_supported: true, + static_libs: [ + "webrtc_network__sent_packet", + "webrtc_api__transport_api", + "webrtc_rtc_base__checks", + "webrtc_modules__module_api", + "webrtc_synchronization__sequence_checker", + "webrtc_api__rtp_parameters", + "webrtc_units__time_delta", + "webrtc_task_utils__pending_task_safety_flag", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_system_wrappers__field_trial", + "webrtc_transport__network_control", + "webrtc_experiments__field_trial_parser", + "webrtc_rtc_base__rtc_task_queue", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_system_wrappers__system_wrappers", + "webrtc_system_wrappers__metrics", + "webrtc_api__rtp_headers", + "webrtc_rtc_base__rate_limiter", + "webrtc_logging__rtc_stream_config", + "webrtc_logging__rtc_event_video", + "webrtc_call__simulated_network", + "webrtc_call__bitrate_allocator", + "webrtc_utility__utility", + "webrtc_logging__rtc_event_audio", + "webrtc_video_codecs__video_codecs_api", + "webrtc_rtp_rtcp__rtp_rtcp_format", + "webrtc_logging__rtc_event_rtp_rtcp", + "webrtc_call__rtp_interfaces", + "webrtc_call__video_stream_api", + "webrtc_call__call_interfaces", + "webrtc_call__fake_network", + "webrtc_rtp_rtcp__rtp_rtcp", + "webrtc_call__rtp_receiver", + "webrtc_pacing__pacing", + "webrtc_audio__audio", + "webrtc_adaptation__resource_adaptation", + "webrtc_congestion_controller__congestion_controller", + "webrtc_call__rtp_sender", + "webrtc_video_coding__video_coding", + "webrtc_video__video", + ], +} + +cc_library_static { + name: "webrtc_media__rtc_audio_video", + defaults: ["webrtc_defaults"], + srcs: [ + "media/engine/adm_helpers.cc", + "media/engine/payload_type_mapper.cc", + 
"media/engine/simulcast.cc", + "media/engine/unhandled_packets_buffer.cc", + "media/engine/webrtc_media_engine.cc", + "media/engine/webrtc_video_engine.cc", + "media/engine/webrtc_voice_engine.cc", + ":webrtc_video_capture__video_capture_internal_impl", + ], + host_supported: true, + cflags: ["-DHAVE_WEBRTC_VIDEO"], + static_libs: [ + "webrtc_media__rtc_constants", + "webrtc_api__transport_api", + "webrtc_transport__bitrate_settings", + "webrtc_base64__base64", + "webrtc_rtc_base__checks", + "webrtc_task_queue__task_queue", + "webrtc_rtc_base__stringutils", + "webrtc_synchronization__mutex", + "webrtc_video__video_bitrate_allocation", + "webrtc_api__rtp_parameters", + "webrtc_units__data_rate", + "webrtc_system_wrappers__field_trial", + "webrtc_experiments__field_trial_parser", + "webrtc_rtc_base__rtc_task_queue", + "webrtc_system_wrappers__system_wrappers", + "webrtc_video__video_rtp_headers", + "webrtc_experiments__normalize_simulcast_size_experiment", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_system_wrappers__metrics", + "webrtc_rtc_base__rtc_base", + "webrtc_rtc_base__audio_format_to_string", + "webrtc_video__video_frame", + "webrtc_experiments__min_video_bitrate_experiment", + "webrtc_video__video_frame_i420", + "webrtc_audio_processing__api", + "webrtc_common_video__common_video", + "webrtc_video_codecs__video_codecs_api", + "webrtc_api__media_stream_interface", + "webrtc_experiments__rate_control_settings", + "webrtc_video_coding__video_codec_interface", + "webrtc_audio_device__audio_device_impl", + "webrtc_audio_mixer__audio_mixer_impl", + "webrtc_aec_dump__null_aec_dump_factory", + "webrtc_call__video_stream_api", + "webrtc_call__call_interfaces", + "webrtc_media__rtc_media_base", + "webrtc_api__libjingle_peerconnection_api", + "webrtc_video_coding__video_coding_utility", + "webrtc_video_codecs__rtc_software_fallback_wrappers", + "webrtc_video_coding__video_coding", + "webrtc_call__call", + ], +} + +cc_library_static { + name: 
"webrtc_api__create_peerconnection_factory", + defaults: ["webrtc_defaults"], + srcs: ["api/create_peerconnection_factory.cc"], + host_supported: true, + cflags: ["-DHAVE_WEBRTC_VIDEO"], + static_libs: [ + "webrtc_rtc_event_log__rtc_event_log_factory", + "webrtc_task_queue__default_task_queue_factory", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_rtc_base__rtc_base", + "webrtc_audio_processing__api", + "webrtc_video_codecs__video_codecs_api", + "webrtc_media__rtc_media_base", + "webrtc_api__libjingle_peerconnection_api", + "webrtc_pc__peerconnection", + "webrtc_media__rtc_audio_video", + ], +} + +cc_library_static { + name: "libwebrtc", + defaults: ["webrtc_defaults"], + export_include_dirs: ["."], + whole_static_libs: [ + "webrtc_spl_sqrt_floor__spl_sqrt_floor", + "webrtc_fft__fft", + "webrtc_ooura__fft_size_256", + "webrtc_audio_coding__audio_network_adaptor_config", + "webrtc_audio_coding__pcm16b_c", + "webrtc_sigslot__sigslot", + "webrtc_network__sent_packet", + "webrtc_pc__media_protocol_names", + "webrtc_g722__g722_3p", + "webrtc_media__rtc_constants", + "webrtc_api__transport_api", + "webrtc_synchronization__yield", + "webrtc_g711__g711_3p", + "webrtc_audio_processing__audio_processing_statistics", + "webrtc_transport__bitrate_settings", + "webrtc_base64__base64", + "webrtc_audio_coding__g711_c", + "webrtc_ooura__fft_size_128", + "webrtc_rtc_base__checks", + "webrtc_audio_coding__isac_vad", + "webrtc_memory__aligned_malloc", + "webrtc_audio_coding__g722_c", + "webrtc_system_wrappers__cpu_features_linux", + "webrtc_generic_frame_descriptor__generic_frame_descriptor", + "webrtc_rtc_base__platform_thread_types", + "webrtc_modules__module_api", + "webrtc_task_queue__task_queue", + "webrtc_utility__pffft_wrapper", + "webrtc_utility__cascaded_biquad_filter", + "webrtc_synchronization__yield_policy", + "webrtc_rtc_base__stringutils", + "webrtc_rtc_base__criticalsection", + "webrtc_system__file_wrapper", + 
"webrtc_synchronization__mutex", + "webrtc_synchronization__rw_lock_wrapper", + "webrtc_time__timestamp_extrapolator", + "webrtc_utility__legacy_delay_estimator", + "webrtc_synchronization__sequence_checker", + "webrtc_video__video_bitrate_allocation", + "webrtc_api__rtp_parameters", + "webrtc_video__video_adaptation", + "webrtc_audio_processing__config", + "webrtc_api__audio_options_api", + "webrtc_units__data_size", + "webrtc_rtc_base__timeutils", + "webrtc_units__time_delta", + "webrtc_rtc_base__rtc_event", + "webrtc_units__timestamp", + "webrtc_units__frequency", + "webrtc_rtc_base__weak_ptr", + "webrtc_rtc_base__platform_thread", + "webrtc_task_utils__pending_task_safety_flag", + "webrtc_rtc_event_log__rtc_event_log", + "webrtc_rtc_base__logging", + "webrtc_units__data_rate", + "webrtc_system_wrappers__field_trial", + "webrtc_video_coding__chain_diff_calculator", + "webrtc_transport__network_control", + "webrtc_experiments__field_trial_parser", + "webrtc_logging__rtc_event_pacing", + "webrtc_transport__field_trial_based_config", + "webrtc_video_coding__frame_dependencies_calculator", + "webrtc_rtc_base__rtc_task_queue_libevent", + "webrtc_api__rtc_error", + "webrtc_rtc_event_log__rtc_event_log_factory", + "webrtc_goog_cc__link_capacity_estimator", + "webrtc_video__video_bitrate_allocator", + "webrtc_experiments__keyframe_interval_settings_experiment", + "webrtc_rtc_base__rtc_task_queue", + "webrtc_task_queue__default_task_queue_factory", + "webrtc_rtc_base__rtc_base_approved", + "webrtc_api__rtc_event_log_output_file", + "webrtc_experiments__jitter_upper_bound_experiment", + "webrtc_agc2__biquad_filter", + "webrtc_rtc_base__rtc_numerics", + "webrtc_experiments__cpu_speed_experiment", + "webrtc_system_wrappers__system_wrappers", + "webrtc_video__video_rtp_headers", + "webrtc_opus__audio_encoder_opus_config", + "webrtc_audio__aec3_config", + "webrtc_audio_coding__webrtc_opus_wrapper", + "webrtc_agc2__common", + "webrtc_experiments__alr_experiment", + 
"webrtc_experiments__quality_scaler_settings", + "webrtc_experiments__normalize_simulcast_size_experiment", + "webrtc_audio_codecs__audio_codecs_api", + "webrtc_experiments__quality_rampup_experiment", + "webrtc_stats__rtc_stats", + "webrtc_system_wrappers__metrics", + "webrtc_experiments__rtt_mult_experiment", + "webrtc_rnn_vad__rnn_vad", + "webrtc_rtc_base__rtc_base", + "webrtc_common_audio__common_audio_cc", + "webrtc_pacing__interval_budget", + "webrtc_common_audio__common_audio_c", + "webrtc_aecm__aecm_core", + "webrtc_video_processing__video_processing_sse2", + "webrtc_agc2__gain_applier", + "webrtc_task_utils__repeating_task", + "webrtc_rtc_base__audio_format_to_string", + "webrtc_memory__fifo_buffer", + "webrtc_api__rtp_headers", + "webrtc_rtc_base__rate_limiter", + "webrtc_audio_coding__audio_coding_opus_common", + "webrtc_logging__rtc_stream_config", + "webrtc_audio_coding__legacy_encoded_audio_frame", + "webrtc_audio_coding__webrtc_multiopus", + "webrtc_api__rtp_packet_info", + "webrtc_crypto__options", + "webrtc_media__rtc_h264_profile_id", + "webrtc_audio_coding__webrtc_cng", + "webrtc_common_audio__common_audio_sse2", + "webrtc_logging__rtc_event_video", + "webrtc_common_audio__common_audio", + "webrtc_call__simulated_network", + "webrtc_call__bitrate_allocator", + "webrtc_agc2__rnn_vad_with_level", + "webrtc_audio_coding__g722", + "webrtc_audio_device__audio_device_buffer", + "webrtc_audio__audio_frame_api", + "webrtc_goog_cc__alr_detector", + "webrtc_video__video_frame", + "webrtc_audio_processing__apm_logging", + "webrtc_audio_coding__ilbc_c", + "webrtc_opus__audio_encoder_multiopus", + "webrtc_utility__utility", + "webrtc_video_coding__nack_module", + "webrtc_g722__audio_encoder_g722", + "webrtc_audio_coding__isac_c", + "webrtc_audio_coding__g711", + "webrtc_opus__audio_decoder_multiopus", + "webrtc_common_audio__fir_filter_factory", + "webrtc_audio_coding__ilbc", + "webrtc_audio_coding__audio_encoder_cng", + "webrtc_agc2__fixed_digital", + 
"webrtc_logging__rtc_event_audio", + "webrtc_experiments__min_video_bitrate_experiment", + "webrtc_video__encoded_image", + "webrtc_agc__legacy_agc", + "webrtc_g722__audio_decoder_g722", + "webrtc_audio_coding__pcm16b", + "webrtc_audio_coding__red", + "webrtc_utility__audio_frame_operations", + "webrtc_audio_coding__isac", + "webrtc_deprecated__nack_module", + "webrtc_video__video_frame_i420", + "webrtc_isac__audio_encoder_isac_float", + "webrtc_audio_processing__api", + "webrtc_transient__transient_suppressor_impl", + "webrtc_ilbc__audio_encoder_ilbc", + "webrtc_rtp_rtcp__rtp_video_header", + "webrtc_agc2__noise_level_estimator", + "webrtc_audio_processing__audio_buffer", + "webrtc_isac__audio_decoder_isac_float", + "webrtc_vad__vad", + "webrtc_audio_device__audio_device_generic", + "webrtc_audio_processing__high_pass_filter", + "webrtc_ns__ns", + "webrtc_common_video__common_video", + "webrtc_g711__audio_encoder_g711", + "webrtc_agc2__adaptive_digital", + "webrtc_L16__audio_encoder_L16", + "webrtc_audio_processing__audio_frame_proxies", + "webrtc_ilbc__audio_decoder_ilbc", + "webrtc_g711__audio_decoder_g711", + "webrtc_audio_processing__optionally_built_submodule_creators", + "webrtc_video__video_frame_i010", + "webrtc_L16__audio_decoder_L16", + "webrtc_video_codecs__video_codecs_api", + "webrtc_audio_coding__audio_network_adaptor", + "webrtc_agc__level_estimation", + "webrtc_api__media_stream_interface", + "webrtc_audio_mixer__audio_frame_manipulator", + "webrtc_experiments__quality_scaling_experiment", + "webrtc_audio_coding__webrtc_opus", + "webrtc_audio_processing__voice_detection", + "webrtc_media__rtc_vp9_profile", + "webrtc_aec3__aec3", + "webrtc_opus__audio_decoder_opus", + "webrtc_experiments__rate_control_settings", + "webrtc_video_coding__video_codec_interface", + "webrtc_av1__libaom_av1_encoder", + "webrtc_audio_device__audio_device_impl", + "webrtc_av1__libaom_av1_decoder", + "webrtc_audio_coding__neteq", + "webrtc_goog_cc__pushback_controller", + 
"webrtc_video_processing__video_processing", + "webrtc_rtp_rtcp__rtp_rtcp_format", + "webrtc_experiments__balanced_degradation_settings", + "webrtc_video_coding__encoded_frame", + "webrtc_experiments__stable_target_rate_experiment", + "webrtc_audio_mixer__audio_mixer_impl", + "webrtc_agc2__level_estimation_agc", + "webrtc_remote_bitrate_estimator__remote_bitrate_estimator", + "webrtc_agc__agc", + "webrtc_opus__audio_encoder_opus", + "webrtc_rtp__transport_feedback", + "webrtc_audio_codecs__builtin_audio_decoder_factory", + "webrtc_audio_coding__audio_coding", + "webrtc_audio__aec3_factory", + "webrtc_logging__rtc_event_rtp_rtcp", + "webrtc_video_coding__webrtc_vp9_helpers", + "webrtc_aec_dump__null_aec_dump_factory", + "webrtc_video__encoded_frame", + "webrtc_logging__rtc_event_bwe", + "webrtc_audio_codecs__builtin_audio_encoder_factory", + "webrtc_audio_processing__audio_processing", + "webrtc_goog_cc__probe_controller", + "webrtc_goog_cc__loss_based_controller", + "webrtc_goog_cc__estimators", + "webrtc_call__rtp_interfaces", + "webrtc_call__video_stream_api", + "webrtc_call__call_interfaces", + "webrtc_media__rtc_media_base", + "webrtc_video_capture__video_capture_module", + "webrtc_call__fake_network", + "webrtc_api__libjingle_peerconnection_api", + "webrtc_logging__ice_log", + "webrtc_call__bitrate_configurator", + "webrtc_rtp_rtcp__rtp_rtcp", + "webrtc_call__rtp_receiver", + "webrtc_p2p__rtc_p2p", + "webrtc_pacing__pacing", + "webrtc_media__rtc_data", + "webrtc_goog_cc__delay_based_bwe", + "webrtc_video_coding__video_coding_utility", + "webrtc_audio__audio", + "webrtc_api__ice_transport_factory", + "webrtc_adaptation__resource_adaptation", + "webrtc_rtp__control_handler", + "webrtc_video_coding__webrtc_vp8_temporal_layers", + "webrtc_congestion_controller__congestion_controller", + "webrtc_video_coding__webrtc_multiplex", + "webrtc_video__builtin_video_bitrate_allocator_factory", + "webrtc_video_codecs__rtc_software_fallback_wrappers", + 
"webrtc_goog_cc__goog_cc", + "webrtc_video_coding__webrtc_vp9", + "webrtc_video_coding__webrtc_h264", + "webrtc_media__rtc_simulcast_encoder_adapter", + "webrtc_adaptation__video_adaptation", + "webrtc_transport__goog_cc", + "webrtc_video_codecs__vp8_temporal_layers_factory", + "webrtc_test__fake_video_codecs", + "webrtc_media__rtc_encoder_simulcast_proxy", + "webrtc_pc__rtc_pc_base", + "webrtc_call__rtp_sender", + "webrtc_video_coding__video_coding", + "webrtc_video_coding__webrtc_vp8", + "webrtc_media__rtc_internal_video_codecs", + "webrtc_video_codecs__builtin_video_encoder_factory", + "webrtc_video__frame_dumping_decoder", + "webrtc_pc__peerconnection", + "webrtc_video__video_stream_encoder_impl", + "webrtc_video__video_stream_encoder_create", + "webrtc_video_codecs__builtin_video_decoder_factory", + "webrtc_video__video", + "webrtc_call__call", + "webrtc_media__rtc_audio_video", + "webrtc_api__create_peerconnection_factory", + "libpffft", + "rnnoise_rnn_vad", + "usrsctplib", ], srcs: [ - "modules/video_coding/codecs/vp9/vp9.cc", - "modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc", - "modules/video_coding/codecs/vp9/vp9_impl.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_module_api", - "webrtc_field_trial", - "webrtc_video_rtp_headers", - "webrtc_rtc_base", - "webrtc_video_frame", - "webrtc_common_video", - "webrtc_video_codecs_api", - "webrtc_rtc_vp9_profile", - "webrtc_rate_control_settings", - "webrtc_video_codec_interface", - "webrtc_rtp_rtcp_format", - "webrtc_vp9_helpers", - "webrtc_rtc_media_base", - "webrtc_video_coding_utility", - "webrtc_video_frame_i010", - "libvpx", + ":webrtc_rtp__dependency_descriptor", + ":webrtc_audio_processing__rms_level", + ":webrtc_rtc_base__rtc_operations_chain", + ":webrtc_av1__scalable_video_controller", + ":webrtc_adaptation__resource_adaptation_api", + ":webrtc_neteq__tick_timer", + ":webrtc_transport__stun_types", + ":webrtc_neteq__neteq_api", + ":webrtc_video__video_frame_metadata", + 
":webrtc_audio_processing__aec_dump_interface", + ":webrtc_neteq__default_neteq_controller_factory", + ":webrtc_audio_coding__default_neteq_factory", + ":webrtc_video_capture__video_capture_internal_impl", ], } - -cc_library_static { - name: "webrtc_h264", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/video_coding/codecs/h264/h264.cc", - "modules/video_coding/codecs/h264/h264_color_space.cc", - "modules/video_coding/codecs/h264/h264_decoder_impl.cc", - "modules/video_coding/codecs/h264/h264_encoder_impl.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_field_trial", - "webrtc_video_rtp_headers", - "webrtc_system_wrappers_metrics", - "webrtc_rtc_base", - "webrtc_rtc_h264_profile_id", - "webrtc_video_frame", - "webrtc_video_frame_i420", - "webrtc_video_frame_i010", - "webrtc_common_video", - "webrtc_video_codecs_api", - "webrtc_video_codec_interface", - "webrtc_rtc_media_base", - "webrtc_video_coding_utility", - ], -} - - -cc_library_static { - name: "webrtc_rtc_simulcast_encoder_adapter", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "media/engine/simulcast_encoder_adapter.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_sequence_checker", - "webrtc_field_trial", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_video_rtp_headers", - "webrtc_video_frame", - "webrtc_video_frame_i420", - "webrtc_video_frame_i010", - "webrtc_video_codecs_api", - "webrtc_rate_control_settings", - "webrtc_video_codec_interface", - "webrtc_video_stream_api", - "webrtc_rtc_media_base", - "webrtc_video_coding_utility", - "webrtc_rtc_software_fallback_wrappers", - ], -} - - -cc_library_static { - name: "webrtc_video_adaptation", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "video/adaptation/encode_usage_resource.cc", - "video/adaptation/overuse_frame_detector.cc", - "video/adaptation/quality_scaler_resource.cc", - "video/adaptation/video_stream_encoder_resource_manager.cc", - ], - static_libs: [ - 
"webrtc_rtc_base_checks", - "webrtc_video_adaptation_counters", - "webrtc_task_queue", - "webrtc_sequence_checker", - "webrtc_rtp_parameters", - "webrtc_timeutils", - "webrtc_rtc_event", - "webrtc_logging", - "webrtc_field_trial", - "webrtc_field_trial_parser", - "webrtc_repeating_task", - "webrtc_rtc_task_queue", - "webrtc_rtc_base_approved", - "webrtc_rtc_numerics", - "webrtc_system_wrappers", - "webrtc_quality_scaler_settings", - "webrtc_quality_rampup_experiment", - "webrtc_video_frame", - "webrtc_video_codecs_api", - "webrtc_balanced_degradation_settings", - "webrtc_video_coding_utility", - "webrtc_resource_adaptation", - ], -} - - -cc_library_static { - name: "webrtc_transport_goog_cc", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/transport/goog_cc_factory.cc", - ], - static_libs: [ - "webrtc_network_control", - "webrtc_congestion_controller_goog_cc", - ], -} - - -cc_library_static { - name: "webrtc_vp8_temporal_layers_factory", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/video_codecs/vp8_temporal_layers_factory.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_video_codecs_api", - "webrtc_video_coding_utility", - "webrtc_vp8_temporal_layers", - ], -} - - -cc_library_static { - name: "webrtc_fake_video_codecs", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "test/configurable_frame_size_encoder.cc", - "test/fake_decoder.cc", - "test/fake_encoder.cc", - "test/fake_vp8_decoder.cc", - "test/fake_vp8_encoder.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_module_api", - "webrtc_task_queue", - "webrtc_criticalsection", - "webrtc_sequence_checker", - "webrtc_video_bitrate_allocation", - "webrtc_timeutils", - "webrtc_rtc_task_queue", - "webrtc_system_wrappers", - "webrtc_video_rtp_headers", - "webrtc_video_frame", - "webrtc_encoded_image", - "webrtc_video_frame_i420", - "webrtc_video_frame_i010", - "webrtc_video_codecs_api", - "webrtc_video_codec_interface", - "webrtc_video_coding_utility", - 
"webrtc_vp8_temporal_layers_factory", - ], -} - - -cc_library_static { - name: "webrtc_rtc_encoder_simulcast_proxy", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "media/engine/encoder_simulcast_proxy.cc", - ], - static_libs: [ - "webrtc_video_bitrate_allocation", - "webrtc_video_rtp_headers", - "webrtc_video_frame", - "webrtc_video_codecs_api", - "webrtc_video_codec_interface", - "webrtc_rtc_simulcast_encoder_adapter", - ], -} - - -cc_library_static { - name: "webrtc_rtp_sender", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "call/rtp_payload_params.cc", - "call/rtp_transport_controller_send.cc", - "call/rtp_video_sender.cc", - ], - static_libs: [ - "webrtc_transport_api", - "webrtc_rtc_base_checks", - "webrtc_rtp_parameters", - "webrtc_time_delta", - "webrtc_timestamp", - "webrtc_rtc_event_log", - "webrtc_data_rate", - "webrtc_network_control", - "webrtc_field_trial_based_config", - "webrtc_frame_dependencies_calculator", - "webrtc_repeating_task", - "webrtc_rtc_task_queue", - "webrtc_rtc_base_approved", - "webrtc_video_rtp_headers", - "webrtc_rtc_base", - "webrtc_rate_limiter", - "webrtc_video_frame", - "webrtc_utility", - "webrtc_rtp_video_header", - "webrtc_video_codecs_api", - "webrtc_video_codec_interface", - "webrtc_rtp_rtcp_format", - "webrtc_transport_feedback", - "webrtc_rtc_event_bwe", - "webrtc_rtp_interfaces", - "webrtc_bitrate_configurator", - "webrtc_rtp_rtcp", - "webrtc_pacing", - "webrtc_control_handler", - "webrtc_congestion_controller", - "webrtc_transport_goog_cc", - ], -} - - -cc_library_static { - name: "webrtc_video_coding", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "modules/video_coding/codec_timer.cc", - "modules/video_coding/decoder_database.cc", - "modules/video_coding/fec_controller_default.cc", - "modules/video_coding/frame_buffer2.cc", - "modules/video_coding/frame_object.cc", - "modules/video_coding/generic_decoder.cc", - "modules/video_coding/h264_sprop_parameter_sets.cc", - 
"modules/video_coding/h264_sps_pps_tracker.cc", - "modules/video_coding/inter_frame_delay.cc", - "modules/video_coding/jitter_estimator.cc", - "modules/video_coding/loss_notification_controller.cc", - "modules/video_coding/media_opt_util.cc", - "modules/video_coding/packet_buffer.cc", - "modules/video_coding/rtp_frame_reference_finder.cc", - "modules/video_coding/rtt_filter.cc", - "modules/video_coding/timestamp_map.cc", - "modules/video_coding/timing.cc", - "modules/video_coding/unique_timestamp_counter.cc", - "modules/video_coding/video_codec_initializer.cc", - "modules/video_coding/video_receiver2.cc", - ], - static_libs: [ - "webrtc_base64", - "webrtc_rtc_base_checks", - "webrtc_video_adaptation_counters", - "webrtc_module_api", - "webrtc_timestamp_extrapolator", - "webrtc_sequence_checker", - "webrtc_video_bitrate_allocation", - "webrtc_time_delta", - "webrtc_data_rate", - "webrtc_field_trial", - "webrtc_field_trial_parser", - "webrtc_repeating_task", - "webrtc_video_bitrate_allocator", - "webrtc_rtc_task_queue", - "webrtc_rtc_base_approved", - "webrtc_jitter_upper_bound_experiment", - "webrtc_rtc_numerics", - "webrtc_system_wrappers", - "webrtc_video_rtp_headers", - "webrtc_alr_experiment", - "webrtc_rtt_mult_experiment", - "webrtc_system_wrappers_metrics", - "webrtc_rtc_base", - "webrtc_rtp_headers", - "webrtc_rtp_packet_info", - "webrtc_video_frame", - "webrtc_min_video_bitrate_experiment", - "webrtc_encoded_image", - "webrtc_video_frame_i420", - "webrtc_video_frame_i010", - "webrtc_rtp_video_header", - "webrtc_common_video", - "webrtc_video_codecs_api", - "webrtc_rate_control_settings", - "webrtc_video_codec_interface", - "webrtc_rtp_rtcp_format", - "webrtc_encoded_frame", - "webrtc_vp9_helpers", - "webrtc_api_video_encoded_frame", - "webrtc_rtp_rtcp", - "webrtc_video_coding_utility", - "webrtc_builtin_video_bitrate_allocator_factory", - ], -} - - -cc_library_static { - name: "webrtc_vp8", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - 
"modules/video_coding/codecs/vp8/libvpx_interface.cc", - "modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc", - "modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_module_api", - "webrtc_field_trial", - "webrtc_field_trial_parser", - "webrtc_rtc_base_approved", - "webrtc_rtc_numerics", - "webrtc_cpu_speed_experiment", - "webrtc_video_rtp_headers", - "webrtc_system_wrappers_metrics", - "webrtc_video_frame", - "webrtc_encoded_image", - "webrtc_video_frame_i420", - "webrtc_video_frame_i010", - "webrtc_common_video", - "webrtc_video_codecs_api", - "webrtc_rate_control_settings", - "webrtc_video_codec_interface", - "webrtc_video_coding_utility", - "webrtc_vp8_temporal_layers", - "webrtc_vp8_temporal_layers_factory", - "libvpx", - "libyuv", - ], -} - - -cc_library_static { - name: "webrtc_rtc_internal_video_codecs", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "media/engine/fake_video_codec_factory.cc", - "media/engine/internal_decoder_factory.cc", - "media/engine/internal_encoder_factory.cc", - "media/engine/multiplex_codec_factory.cc", - ], - static_libs: [ - "webrtc_rtc_constants", - "webrtc_rtc_base_checks", - "webrtc_module_api", - "webrtc_video_bitrate_allocation", - "webrtc_rtc_base_approved", - "webrtc_video_rtp_headers", - "webrtc_rtc_h264_profile_id", - "webrtc_video_frame", - "webrtc_encoded_image", - "webrtc_video_codecs_api", - "webrtc_video_codec_interface", - "webrtc_libaom_av1_encoder", - "webrtc_libaom_av1_decoder", - "webrtc_video_stream_api", - "webrtc_call_interfaces", - "webrtc_rtc_media_base", - "webrtc_multiplex", - "webrtc_rtc_software_fallback_wrappers", - "webrtc_vp9", - "webrtc_h264", - "webrtc_rtc_simulcast_encoder_adapter", - "webrtc_fake_video_codecs", - "webrtc_rtc_encoder_simulcast_proxy", - "webrtc_vp8", - ], -} - - -cc_library_static { - name: "webrtc_builtin_video_encoder_factory", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - 
"api/video_codecs/builtin_video_encoder_factory.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_video_codecs_api", - "webrtc_rtc_media_base", - "webrtc_rtc_encoder_simulcast_proxy", - "webrtc_rtc_internal_video_codecs", - ], -} - - -cc_library_static { - name: "webrtc_frame_dumping_decoder", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "video/frame_dumping_decoder.cc", - ], - static_libs: [ - "webrtc_file_wrapper", - "webrtc_rtc_base_approved", - "webrtc_encoded_image", - "webrtc_video_codecs_api", - "webrtc_video_codec_interface", - "webrtc_api_video_encoded_frame", - "webrtc_video_coding_utility", - "webrtc_video_coding", - ], -} - - -cc_library_static { - name: "webrtc_video_stream_encoder_impl", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "video/encoder_bitrate_adjuster.cc", - "video/encoder_overshoot_detector.cc", - "video/frame_encode_metadata_writer.cc", - "video/video_source_sink_controller.cc", - "video/video_stream_encoder.cc", - ], - static_libs: [ - "webrtc_rtc_base_checks", - "webrtc_video_adaptation_counters", - "webrtc_task_queue", - "webrtc_criticalsection", - "webrtc_sequence_checker", - "webrtc_video_bitrate_allocation", - "webrtc_rtp_parameters", - "webrtc_timeutils", - "webrtc_rtc_event", - "webrtc_logging", - "webrtc_data_rate", - "webrtc_field_trial", - "webrtc_field_trial_parser", - "webrtc_repeating_task", - "webrtc_video_bitrate_allocator", - "webrtc_rtc_task_queue", - "webrtc_rtc_base_approved", - "webrtc_rtc_numerics", - "webrtc_system_wrappers", - "webrtc_video_rtp_headers", - "webrtc_alr_experiment", - "webrtc_quality_scaler_settings", - "webrtc_quality_rampup_experiment", - "webrtc_video_frame", - "webrtc_encoded_image", - "webrtc_video_frame_i420", - "webrtc_video_frame_i010", - "webrtc_common_video", - "webrtc_video_codecs_api", - "webrtc_quality_scaling_experiment", - "webrtc_rate_control_settings", - "webrtc_video_codec_interface", - "webrtc_balanced_degradation_settings", - "webrtc_vp9_helpers", - 
"webrtc_video_coding_utility", - "webrtc_resource_adaptation", - "webrtc_video_adaptation", - "webrtc_video_coding", - ], -} - - -cc_library_static { - name: "webrtc_video_stream_encoder_create", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/video/video_stream_encoder_create.cc", - ], - static_libs: [ - "webrtc_task_queue", - "webrtc_video_frame", - "webrtc_video_codecs_api", - "webrtc_video_adaptation", - "webrtc_video_stream_encoder_impl", - ], -} - - -cc_library_static { - name: "webrtc_builtin_video_decoder_factory", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/video_codecs/builtin_video_decoder_factory.cc", - ], - static_libs: [ - "webrtc_video_codecs_api", - "webrtc_rtc_internal_video_codecs", - ], -} - - -cc_library_static { - name: "webrtc_video", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "video/buffered_frame_decryptor.cc", - "video/call_stats.cc", - "video/call_stats2.cc", - "video/encoder_rtcp_feedback.cc", - "video/quality_limitation_reason_tracker.cc", - "video/quality_threshold.cc", - "video/receive_statistics_proxy.cc", - "video/receive_statistics_proxy2.cc", - "video/report_block_stats.cc", - "video/rtp_streams_synchronizer.cc", - "video/rtp_streams_synchronizer2.cc", - "video/rtp_video_stream_receiver.cc", - "video/rtp_video_stream_receiver_frame_transformer_delegate.cc", - "video/send_delay_stats.cc", - "video/send_statistics_proxy.cc", - "video/stats_counter.cc", - "video/stream_synchronization.cc", - "video/transport_adapter.cc", - "video/video_quality_observer.cc", - "video/video_quality_observer2.cc", - "video/video_receive_stream.cc", - "video/video_receive_stream2.cc", - "video/video_send_stream.cc", - "video/video_send_stream_impl.cc", - "video/video_stream_decoder.cc", - "video/video_stream_decoder2.cc", - ], - static_libs: [ - "webrtc_transport_api", - "webrtc_rtc_base_checks", - "webrtc_module_api", - "webrtc_task_queue", - "webrtc_stringutils", - "webrtc_timestamp_extrapolator", - 
"webrtc_sequence_checker", - "webrtc_video_bitrate_allocation", - "webrtc_rtp_parameters", - "webrtc_timestamp", - "webrtc_weak_ptr", - "webrtc_pending_task_safety_flag", - "webrtc_rtc_event_log", - "webrtc_field_trial", - "webrtc_field_trial_parser", - "webrtc_repeating_task", - "webrtc_video_bitrate_allocator", - "webrtc_keyframe_interval_settings_experiment", - "webrtc_rtc_task_queue", - "webrtc_rtc_base_approved", - "webrtc_rtc_numerics", - "webrtc_system_wrappers", - "webrtc_video_rtp_headers", - "webrtc_alr_experiment", - "webrtc_system_wrappers_metrics", - "webrtc_rtc_base", - "webrtc_rate_limiter", - "webrtc_api_crypto_options", - "webrtc_rtc_h264_profile_id", - "webrtc_bitrate_allocator", - "webrtc_video_frame", - "webrtc_utility", - "webrtc_nack_module", - "webrtc_min_video_bitrate_experiment", - "webrtc_encoded_image", - "webrtc_video_frame_i420", - "webrtc_video_frame_i010", - "webrtc_media_transport_interface", - "webrtc_rtp_video_header", - "webrtc_common_video", - "webrtc_video_codecs_api", - "webrtc_quality_scaling_experiment", - "webrtc_rate_control_settings", - "webrtc_video_codec_interface", - "webrtc_video_processing", - "webrtc_rtp_rtcp_format", - "webrtc_remote_bitrate_estimator", - "webrtc_rtp_interfaces", - "webrtc_video_stream_api", - "webrtc_call_interfaces", - "webrtc_rtc_media_base", - "webrtc_libjingle_peerconnection_api", - "webrtc_rtp_rtcp", - "webrtc_rtp_receiver", - "webrtc_pacing", - "webrtc_video_coding_utility", - "webrtc_rtp_sender", - "webrtc_video_coding", - "webrtc_frame_dumping_decoder", - "webrtc_video_stream_encoder_create", - ], -} - - -cc_library_static { - name: "webrtc_call", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "call/call.cc", - "call/call_factory.cc", - "call/degraded_call.cc", - "call/flexfec_receive_stream_impl.cc", - "call/receive_time_calculator.cc", - ], - static_libs: [ - "webrtc_sent_packet", - "webrtc_transport_api", - "webrtc_rtc_base_checks", - "webrtc_module_api", - "webrtc_rw_lock_wrapper", 
- "webrtc_sequence_checker", - "webrtc_rtp_parameters", - "webrtc_time_delta", - "webrtc_rtc_event_log", - "webrtc_field_trial", - "webrtc_network_control", - "webrtc_field_trial_parser", - "webrtc_rtc_task_queue", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_system_wrappers_metrics", - "webrtc_rtp_headers", - "webrtc_rate_limiter", - "webrtc_rtc_stream_config", - "webrtc_rtc_event_video", - "webrtc_simulated_network", - "webrtc_bitrate_allocator", - "webrtc_utility", - "webrtc_rtc_event_audio", - "webrtc_video_codecs_api", - "webrtc_rtp_rtcp_format", - "webrtc_rtc_event_rtp_rtcp", - "webrtc_rtp_interfaces", - "webrtc_video_stream_api", - "webrtc_call_interfaces", - "webrtc_fake_network", - "webrtc_rtp_rtcp", - "webrtc_rtp_receiver", - "webrtc_pacing", - "webrtc_audio", - "webrtc_congestion_controller", - "webrtc_rtp_sender", - "webrtc_video_coding", - "webrtc_video", - ], -} - - -cc_library_static { - name: "webrtc_rtc_audio_video", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "media/engine/adm_helpers.cc", - "media/engine/payload_type_mapper.cc", - "media/engine/simulcast.cc", - "media/engine/unhandled_packets_buffer.cc", - "media/engine/webrtc_media_engine.cc", - "media/engine/webrtc_video_engine.cc", - "media/engine/webrtc_voice_engine.cc", - ":webrtc_video_capture_internal_impl", - ], - cflags: [ - "-DHAVE_WEBRTC_VIDEO", - ], - static_libs: [ - "webrtc_rtc_constants", - "webrtc_transport_api", - "webrtc_bitrate_settings", - "webrtc_base64", - "webrtc_rtc_base_checks", - "webrtc_task_queue", - "webrtc_stringutils", - "webrtc_video_bitrate_allocation", - "webrtc_rtp_parameters", - "webrtc_data_rate", - "webrtc_field_trial", - "webrtc_field_trial_parser", - "webrtc_rtc_task_queue", - "webrtc_system_wrappers", - "webrtc_video_rtp_headers", - "webrtc_normalize_simulcast_size_experiment", - "webrtc_audio_codecs_api", - "webrtc_system_wrappers_metrics", - "webrtc_rtc_base", - "webrtc_audio_format_to_string", - "webrtc_video_frame", - 
"webrtc_min_video_bitrate_experiment", - "webrtc_video_frame_i420", - "webrtc_video_frame_i010", - "webrtc_media_transport_interface", - "webrtc_audio_processing_api", - "webrtc_common_video", - "webrtc_video_codecs_api", - "webrtc_media_stream_interface", - "webrtc_rate_control_settings", - "webrtc_video_codec_interface", - "webrtc_audio_device_impl", - "webrtc_audio_mixer_impl", - "webrtc_null_aec_dump_factory", - "webrtc_video_stream_api", - "webrtc_call_interfaces", - "webrtc_rtc_media_base", - "webrtc_libjingle_peerconnection_api", - "webrtc_video_coding_utility", - "webrtc_rtc_software_fallback_wrappers", - "webrtc_video_coding", - "webrtc_call", - ], -} - -cc_library_static { - name: "webrtc_rtc_data", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "media/sctp/sctp_transport.cc", - ], - static_libs: [ - "webrtc_sigslot", - "webrtc_transport_api", - "webrtc_rtc_base_approved", - "webrtc_system_wrappers", - "webrtc_rtc_base", - "webrtc_rtc_media_base", - "webrtc_rtc_p2p", - "usrsctplib", - ], -} - - -cc_library_static { - name: "webrtc_rtc_pc_base", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "pc/channel.cc", - "pc/channel_manager.cc", - "pc/composite_data_channel_transport.cc", - "pc/composite_rtp_transport.cc", - "pc/datagram_rtp_transport.cc", - "pc/dtls_srtp_transport.cc", - "pc/dtls_transport.cc", - "pc/external_hmac.cc", - "pc/ice_transport.cc", - "pc/jsep_transport.cc", - "pc/jsep_transport_controller.cc", - "pc/media_session.cc", - "pc/rtcp_mux_filter.cc", - "pc/rtp_media_utils.cc", - "pc/rtp_transport.cc", - "pc/sctp_data_channel_transport.cc", - "pc/sctp_transport.cc", - "pc/sctp_utils.cc", - "pc/session_description.cc", - "pc/simulcast_description.cc", - "pc/srtp_filter.cc", - "pc/srtp_session.cc", - "pc/srtp_transport.cc", - "pc/transport_stats.cc", - ], - cflags: [ - "-DHAVE_CONFIG_H", - "-DOPENSSL", - "-DHAVE_STDLIB_H", - "-DHAVE_STRING_H", - "-DHAVE_STDINT_H", - "-DHAVE_INTTYPES_H", - "-DHAVE_INT16_T", - "-DHAVE_INT32_T", - 
"-DHAVE_INT8_T", - "-DHAVE_UINT16_T", - "-DHAVE_UINT32_T", - "-DHAVE_UINT64_T", - "-DHAVE_UINT8_T", - "-DHAVE_ARPA_INET_H", - "-DHAVE_NETINET_IN_H", - "-DHAVE_SYS_TYPES_H", - "-DHAVE_UNISTD_H", - ], - static_libs: [ - "webrtc_sigslot", - "webrtc_media_protocol_names", - "webrtc_base64", - "webrtc_rtc_base_checks", - "webrtc_stringutils", - "webrtc_file_wrapper", - "webrtc_rtp_parameters", - "webrtc_audio_options_api", - "webrtc_rtc_event_log", - "webrtc_field_trial", - "webrtc_rtc_error", - "webrtc_rtc_task_queue", - "webrtc_video_rtp_headers", - "webrtc_system_wrappers_metrics", - "webrtc_rtc_base", - "webrtc_rtp_headers", - "webrtc_api_crypto_options", - "webrtc_rtc_h264_profile_id", - "webrtc_video_frame", - "webrtc_media_transport_interface", - "webrtc_common_video", - "webrtc_rtp_rtcp_format", - "webrtc_rtp_interfaces", - "webrtc_call_interfaces", - "webrtc_rtc_media_base", - "webrtc_libjingle_peerconnection_api", - "webrtc_ice_log", - "webrtc_rtp_rtcp", - "webrtc_rtp_receiver", - "webrtc_rtc_p2p", - "webrtc_rtc_data", - "webrtc_ice_transport_factory", - "webrtc_builtin_video_bitrate_allocator_factory", - "libsrtp2", - ], -} - - -cc_library_static { - name: "webrtc_peerconnection", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "pc/audio_rtp_receiver.cc", - "pc/audio_track.cc", - "pc/data_channel.cc", - "pc/data_channel_controller.cc", - "pc/dtmf_sender.cc", - "pc/ice_server_parsing.cc", - "pc/jitter_buffer_delay.cc", - "pc/jsep_ice_candidate.cc", - "pc/jsep_session_description.cc", - "pc/local_audio_source.cc", - "pc/media_stream.cc", - "pc/media_stream_observer.cc", - "pc/peer_connection.cc", - "pc/peer_connection_factory.cc", - "pc/remote_audio_source.cc", - "pc/rtc_stats_collector.cc", - "pc/rtc_stats_traversal.cc", - "pc/rtp_parameters_conversion.cc", - "pc/rtp_receiver.cc", - "pc/rtp_sender.cc", - "pc/rtp_transceiver.cc", - "pc/sdp_serializer.cc", - "pc/sdp_utils.cc", - "pc/stats_collector.cc", - "pc/track_media_info_map.cc", - 
"pc/video_rtp_receiver.cc", - "pc/video_rtp_track_source.cc", - "pc/video_track.cc", - "pc/video_track_source.cc", - "pc/webrtc_sdp.cc", - "pc/webrtc_session_description_factory.cc", - ":webrtc_rtc_operations_chain", - ], - static_libs: [ - "webrtc_sigslot", - "webrtc_base64", - "webrtc_rtc_base_checks", - "webrtc_task_queue", - "webrtc_file_wrapper", - "webrtc_rtp_parameters", - "webrtc_audio_options_api", - "webrtc_weak_ptr", - "webrtc_rtc_event_log", - "webrtc_data_rate", - "webrtc_field_trial", - "webrtc_field_trial_parser", - "webrtc_field_trial_based_config", - "webrtc_rtc_error", - "webrtc_rtc_base_approved", - "webrtc_rtc_event_log_output_file", - "webrtc_system_wrappers", - "webrtc_video_rtp_headers", - "webrtc_system_wrappers_metrics", - "webrtc_rtc_base", - "webrtc_video_frame", - "webrtc_media_transport_interface", - "webrtc_common_video", - "webrtc_video_codecs_api", - "webrtc_media_stream_interface", - "webrtc_rtp_rtcp_format", - "webrtc_call_interfaces", - "webrtc_rtc_media_base", - "webrtc_libjingle_peerconnection_api", - "webrtc_ice_log", - "webrtc_rtc_p2p", - "webrtc_rtc_data", - "webrtc_ice_transport_factory", - "webrtc_builtin_video_bitrate_allocator_factory", - "webrtc_rtc_pc_base", - ], -} - - -cc_library_static { - name: "webrtc_create_peerconnection_factory", - defaults: [ - "webrtc_defaults", - ], - srcs: [ - "api/create_peerconnection_factory.cc", - ], - cflags: [ - "-DHAVE_WEBRTC_VIDEO", - ], - static_libs: [ - "webrtc_rtc_event_log_factory", - "webrtc_default_task_queue_factory", - "webrtc_rtc_base_approved", - "webrtc_audio_codecs_api", - "webrtc_rtc_base", - "webrtc_audio_processing_api", - "webrtc_video_codecs_api", - "webrtc_rtc_media_base", - "webrtc_libjingle_peerconnection_api", - "webrtc_peerconnection", - "webrtc_rtc_audio_video", - ], -} - -cc_library_static { - name: "libwebrtc", - defaults: [ - "webrtc_defaults", - ], - whole_static_libs: [ - "webrtc_spl_sqrt_floor", - "webrtc_fft", - "webrtc_ooura_fft_size_256", - 
"webrtc_audio_network_adaptor_config", - "webrtc_pcm16b_c", - "webrtc_sigslot", - "webrtc_sent_packet", - "webrtc_media_protocol_names", - "webrtc_g722_3p", - "webrtc_rtc_constants", - "webrtc_transport_api", - "webrtc_platform_thread_types", - "webrtc_g711_3p", - "webrtc_audio_processing_statistics", - "webrtc_bitrate_settings", - "webrtc_base64", - "webrtc_g711_c", - "webrtc_rtc_base_checks", - "webrtc_isac_vad", - "webrtc_aligned_malloc", - "webrtc_video_adaptation_counters", - "webrtc_g722_c", - "webrtc_cpu_features_linux", - "webrtc_module_api", - "webrtc_task_queue", - "webrtc_cascaded_biquad_filter", - "webrtc_yield_policy", - "webrtc_stringutils", - "webrtc_criticalsection", - "webrtc_file_wrapper", - "webrtc_rw_lock_wrapper", - "webrtc_timestamp_extrapolator", - "webrtc_legacy_delay_estimator", - "webrtc_sequence_checker", - "webrtc_video_bitrate_allocation", - "webrtc_rtp_parameters", - "webrtc_audio_processing_config", - "webrtc_generic_frame_descriptor", - "webrtc_audio_options_api", - "webrtc_data_size", - "webrtc_audio_interfaces", - "webrtc_timeutils", - "webrtc_time_delta", - "webrtc_rtc_event", - "webrtc_timestamp", - "webrtc_frequency", - "webrtc_weak_ptr", - "webrtc_platform_thread", - "webrtc_pending_task_safety_flag", - "webrtc_rtc_event_log", - "webrtc_logging", - "webrtc_data_rate", - "webrtc_field_trial", - "webrtc_network_control", - "webrtc_field_trial_parser", - "webrtc_rtc_event_pacing", - "webrtc_field_trial_based_config", - "webrtc_frame_dependencies_calculator", - "webrtc_rtc_task_queue_libevent", - "webrtc_rtc_error", - "webrtc_repeating_task", - "webrtc_rtc_event_log_factory", - "webrtc_link_capacity_estimator", - "webrtc_video_bitrate_allocator", - "webrtc_keyframe_interval_settings_experiment", - "webrtc_rtc_task_queue", - "webrtc_default_task_queue_factory", - "webrtc_rtc_base_approved", - "webrtc_rtc_event_log_output_file", - "webrtc_jitter_upper_bound_experiment", - "webrtc_biquad_filter", - "webrtc_rtc_numerics", - 
"webrtc_cpu_speed_experiment", - "webrtc_system_wrappers", - "webrtc_video_rtp_headers", - "webrtc_audio_encoder_opus_config", - "webrtc_aec3_config", - "webrtc_opus_wrapper", - "webrtc_agc2_common", - "webrtc_alr_experiment", - "webrtc_quality_scaler_settings", - "webrtc_normalize_simulcast_size_experiment", - "webrtc_audio_codecs_api", - "webrtc_rtt_mult_experiment", - "webrtc_quality_rampup_experiment", - "webrtc_rtc_stats", - "webrtc_system_wrappers_metrics", - "webrtc_rtc_base", - "webrtc_common_audio_cc", - "webrtc_interval_budget", - "webrtc_common_audio_c", - "webrtc_aecm_core", - "webrtc_video_processing_sse2", - "webrtc_gain_applier", - "webrtc_audio_format_to_string", - "webrtc_fifo_buffer", - "webrtc_rtp_headers", - "webrtc_rate_limiter", - "webrtc_audio_coding_opus_common", - "webrtc_rtc_stream_config", - "webrtc_legacy_encoded_audio_frame", - "webrtc_multiopus", - "webrtc_rtp_packet_info", - "webrtc_api_crypto_options", - "webrtc_rtc_h264_profile_id", - "webrtc_cng", - "webrtc_common_audio_sse2", - "webrtc_rtc_event_video", - "webrtc_common_audio", - "webrtc_simulated_network", - "webrtc_bitrate_allocator", - "webrtc_g722", - "webrtc_audio_device_buffer", - "webrtc_audio_frame_api", - "webrtc_alr_detector", - "webrtc_video_frame", - "webrtc_apm_logging", - "webrtc_ilbc_c", - "webrtc_audio_encoder_multiopus", - "webrtc_utility", - "webrtc_nack_module", - "webrtc_audio_encoder_g722", - "webrtc_audio_coding_isac_c", - "webrtc_g711", - "webrtc_audio_decoder_multiopus", - "webrtc_fir_filter_factory", - "webrtc_ilbc", - "webrtc_audio_encoder_cng", - "webrtc_fixed_digital", - "webrtc_rtc_event_audio", - "webrtc_min_video_bitrate_experiment", - "webrtc_encoded_image", - "webrtc_legacy_agc", - "webrtc_audio_decoder_g722", - "webrtc_pcm16b", - "webrtc_audio_coding_isac", - "webrtc_video_interfaces", - "webrtc_video_frame_i420", - "webrtc_video_frame_i010", - "webrtc_media_transport_interface", - "webrtc_audio_encoder_isac_float", - 
"webrtc_audio_frame_operations", - "webrtc_audio_processing_api", - "webrtc_transient_suppressor_impl", - "webrtc_audio_encoder_ilbc", - "webrtc_rtp_video_header", - "webrtc_ooura_fft_size_128", - "webrtc_noise_level_estimator", - "webrtc_audio_buffer", - "webrtc_audio_decoder_isac_float", - "webrtc_vad", - "webrtc_audio_device_generic", - "webrtc_high_pass_filter", - "webrtc_ns", - "webrtc_common_video", - "webrtc_audio_encoder_g711", - "webrtc_audio_encoder_L16", - "webrtc_audio_frame_proxies", - "webrtc_audio_decoder_ilbc", - "webrtc_audio_decoder_g711", - "webrtc_optionally_built_submodule_creators", - "webrtc_audio_decoder_L16", - "webrtc_video_codecs_api", - "webrtc_audio_network_adaptor", - "webrtc_level_estimation", - "webrtc_media_stream_interface", - "webrtc_audio_frame_manipulator", - "webrtc_quality_scaling_experiment", - "webrtc_opus", - "webrtc_voice_detection", - "webrtc_rtc_vp9_profile", - "webrtc_aec3", - "webrtc_audio_decoder_opus", - "webrtc_rate_control_settings", - "webrtc_video_codec_interface", - "webrtc_libaom_av1_encoder", - "webrtc_audio_device_impl", - "webrtc_libaom_av1_decoder", - "webrtc_neteq", - "webrtc_pushback_controller", - "webrtc_video_processing", - "webrtc_rtp_rtcp_format", - "webrtc_balanced_degradation_settings", - "webrtc_encoded_frame", - "webrtc_stable_target_rate_experiment", - "webrtc_audio_mixer_impl", - "webrtc_pffft_wrapper", - "webrtc_agc2_rnn_vad", - "webrtc_agc2_rnn_vad_with_level", - "webrtc_adaptive_digital", - "webrtc_level_estimation_agc", - "webrtc_remote_bitrate_estimator", - "webrtc_agc", - "webrtc_audio_encoder_opus", - "webrtc_transport_feedback", - "webrtc_builtin_audio_decoder_factory", - "webrtc_audio_coding", - "webrtc_aec3_factory", - "webrtc_rtc_event_rtp_rtcp", - "webrtc_vp9_helpers", - "webrtc_null_aec_dump_factory", - "webrtc_api_video_encoded_frame", - "webrtc_rtc_event_bwe", - "webrtc_builtin_audio_encoder_factory", - "webrtc_audio_processing", - "webrtc_probe_controller", - 
"webrtc_goog_cc_estimators", - "webrtc_loss_based_controller", - "webrtc_rtp_interfaces", - "webrtc_video_stream_api", - "webrtc_call_interfaces", - "webrtc_rtc_media_base", - "webrtc_video_capture_module", - "webrtc_fake_network", - "webrtc_libjingle_peerconnection_api", - "webrtc_ice_log", - "webrtc_bitrate_configurator", - "webrtc_rtp_rtcp", - "webrtc_rtp_receiver", - "webrtc_rtc_p2p", - "webrtc_pacing", - "webrtc_audio", - "webrtc_delay_based_bwe", - "webrtc_video_coding_utility", - "webrtc_ice_transport_factory", - "webrtc_resource_adaptation", - "webrtc_control_handler", - "webrtc_vp8_temporal_layers", - "webrtc_congestion_controller", - "webrtc_multiplex", - "webrtc_builtin_video_bitrate_allocator_factory", - "webrtc_rtc_software_fallback_wrappers", - "webrtc_congestion_controller_goog_cc", - "webrtc_vp9", - "webrtc_h264", - "webrtc_rtc_simulcast_encoder_adapter", - "webrtc_video_adaptation", - "webrtc_transport_goog_cc", - "webrtc_vp8_temporal_layers_factory", - "webrtc_fake_video_codecs", - "webrtc_rtc_encoder_simulcast_proxy", - "webrtc_rtp_sender", - "webrtc_video_coding", - "webrtc_vp8", - "webrtc_rtc_internal_video_codecs", - "webrtc_builtin_video_encoder_factory", - "webrtc_frame_dumping_decoder", - "webrtc_video_stream_encoder_impl", - "webrtc_video_stream_encoder_create", - "webrtc_builtin_video_decoder_factory", - "webrtc_video", - "webrtc_call", - "webrtc_rtc_audio_video", - "webrtc_rtc_data", - "webrtc_rtc_pc_base", - "webrtc_peerconnection", - "webrtc_create_peerconnection_factory", - "libpffft", - "usrsctplib", - "rnnoise_rnn_vad", - ], - static_libs: [ - "libaom", - "libevent", - "libopus", - "libsrtp2", - "libvpx", - "libyuv", - ], - shared_libs: [ - "libcrypto", - "libssl", - ], - export_include_dirs: [ - ".", - ], -} - diff --git a/BUILD.gn b/BUILD.gn index f7d15f47a9..b676d1e15e 100644 --- a/BUILD.gn +++ b/BUILD.gn @@ -265,6 +265,10 @@ config("common_config") { defines += [ "WEBRTC_USE_H264" ] } + if (rtc_use_absl_mutex) { + defines += [ 
"WEBRTC_ABSL_MUTEX" ] + } + if (rtc_disable_logging) { defines += [ "RTC_DISABLE_LOGGING" ] } @@ -410,7 +414,7 @@ config("common_config") { } config("common_objc") { - libs = [ "Foundation.framework" ] + frameworks = [ "Foundation.framework" ] if (rtc_use_metal_rendering) { defines = [ "RTC_SUPPORTS_METAL" ] @@ -580,6 +584,14 @@ if (rtc_include_tests) { } } + rtc_test("benchmarks") { + testonly = true + deps = [ + "rtc_base/synchronization:mutex_benchmark", + "test:benchmark_main", + ] + } + # This runs tests that must run in real time and therefore can take some # time to execute. They are in a separate executable to avoid making the # regular unittest suite too slow to run frequently. diff --git a/DEPS b/DEPS index 2cda0d3ed6..6b5c55de2e 100644 --- a/DEPS +++ b/DEPS @@ -1,44 +1,51 @@ # This file contains dependencies for WebRTC. gclient_gn_args_file = 'src/build/config/gclient_args.gni' -gclient_gn_args = [] +gclient_gn_args = [ + 'mac_xcode_version', +] vars = { # By default, we should check out everything needed to run on the main # chromium waterfalls. More info at: crbug.com/570091. 'checkout_configuration': 'default', 'checkout_instrumented_libraries': 'checkout_linux and checkout_configuration == "default"', - 'chromium_revision': 'a775f4aeaa5e5847ca65467a39d5b4a2d1d54d6f', + 'chromium_revision': '6b2bcf62a8d67c7a80bbb0ac1ef11921b7c57d79', + + # This can be overridden, e.g. with custom_vars, to download a nonstandard + # Xcode version in build/mac_toolchain.py + # instead of downloading the prebuilt pinned revision. + 'mac_xcode_version': 'default', } deps = { # TODO(kjellander): Move this to be Android-only once the libevent dependency # in base/third_party/libevent is solved. 
'src/base': - 'https://chromium.googlesource.com/chromium/src/base@2e2abc3c3844639a5f8d8ec3762875f01f9c1384', + 'https://chromium.googlesource.com/chromium/src/base@db84a3cbe5b7a0402ded5e1836ecbd1ed7e681a5', 'src/build': - 'https://chromium.googlesource.com/chromium/src/build@212b25dc86151e32abc71fa5d129614937666d69', + 'https://chromium.googlesource.com/chromium/src/build@124d030a73694c96efe4675a1c0df607e86516fe', 'src/buildtools': - 'https://chromium.googlesource.com/chromium/src/buildtools@2c41dfb19abe40908834803b6fed797b0f341fe1', + 'https://chromium.googlesource.com/chromium/src/buildtools@9e121212d42be62a7cce38072f925f8398d11e49', # Gradle 4.3-rc4. Used for testing Android Studio project generation for WebRTC. 'src/examples/androidtests/third_party/gradle': { 'url': 'https://chromium.googlesource.com/external/github.com/gradle/gradle.git@89af43c4d0506f69980f00dde78c97b2f81437f8', 'condition': 'checkout_android', }, 'src/ios': { - 'url': 'https://chromium.googlesource.com/chromium/src/ios@2f8d00d1cb5b73ec4329dff7ec1ababf6a05e628', + 'url': 'https://chromium.googlesource.com/chromium/src/ios@d51d66a3c98ad30c027bd11966507bf51e6b6a5f', 'condition': 'checkout_ios', }, 'src/testing': - 'https://chromium.googlesource.com/chromium/src/testing@e5ced5141379ee8ae28b4f93d3c02df039d2b052', + 'https://chromium.googlesource.com/chromium/src/testing@0411afc27fd137a2d82348d439aa4daff078117b', 'src/third_party': - 'https://chromium.googlesource.com/chromium/src/third_party@76bcf1e4994827b25b4539a9ef5eed382d2de992', + 'https://chromium.googlesource.com/chromium/src/third_party@98978dde2ae4cebc3c99203ca4674a58f762f1b5', 'src/buildtools/linux64': { 'packages': [ { 'package': 'gn/gn/linux-amd64', - 'version': 'git_revision:5ed3c9cc67b090d5e311e4bd2aba072173e82db9', + 'version': 'git_revision:3028c6a426a4aaf6da91c4ebafe716ae370225fe', } ], 'dep_type': 'cipd', @@ -48,7 +55,7 @@ deps = { 'packages': [ { 'package': 'gn/gn/mac-amd64', - 'version': 
'git_revision:5ed3c9cc67b090d5e311e4bd2aba072173e82db9', + 'version': 'git_revision:3028c6a426a4aaf6da91c4ebafe716ae370225fe', } ], 'dep_type': 'cipd', @@ -58,7 +65,7 @@ deps = { 'packages': [ { 'package': 'gn/gn/windows-amd64', - 'version': 'git_revision:5ed3c9cc67b090d5e311e4bd2aba072173e82db9', + 'version': 'git_revision:3028c6a426a4aaf6da91c4ebafe716ae370225fe', } ], 'dep_type': 'cipd', @@ -72,7 +79,7 @@ deps = { 'src/buildtools/third_party/libc++abi/trunk': 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git@196ba1aaa8ac285d94f4ea8d9836390a45360533', 'src/buildtools/third_party/libunwind/trunk': - 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git@43bb9f872232f531bac80093ceb4de61c64b9ab7', + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git@d999d54f4bca789543a2eb6c995af2d9b5a1f3ed', 'src/tools/clang/dsymutil': { 'packages': [ @@ -100,7 +107,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_build_tools/aapt2', - 'version': 'LKH_DI44rZhQ4RkScMFQLGSJ4jZyuPcff0llITnq-i4C', + 'version': 'R2k5wwOlIaS6sjv2TIyHotiPJod-6KqnZO8NH-KFK8sC', }, ], 'condition': 'checkout_android', @@ -119,20 +126,20 @@ deps = { }, 'src/third_party/boringssl/src': - 'https://boringssl.googlesource.com/boringssl.git@f9e0cda2d81858d10ceeadb0d21f4026f8602cf7', + 'https://boringssl.googlesource.com/boringssl.git@eda849d2e6e6a15a5a4dc728568ec12f21ebfb6d', 'src/third_party/breakpad/breakpad': - 'https://chromium.googlesource.com/breakpad/breakpad.git@f32b83eb08e9ee158d3037b2114357187fd45a05', + 'https://chromium.googlesource.com/breakpad/breakpad.git@2757a2c9c819fcae3784576aef0c8400c7ad06d7', 'src/third_party/catapult': - 'https://chromium.googlesource.com/catapult.git@4b4e8791324284c92a5e6a84d93ed9ccdbbd375e', + 'https://chromium.googlesource.com/catapult.git@a30bbb68c2e324a8ab6b3f54ab17ab47fca46298', 'src/third_party/ced/src': { 'url': 
'https://chromium.googlesource.com/external/github.com/google/compact_enc_det.git@ba412eaaacd3186085babcd901679a48863c7dd5', }, 'src/third_party/colorama/src': 'https://chromium.googlesource.com/external/colorama.git@799604a1041e9b3bc5d2789ecbd7e8db2e18e6b8', 'src/third_party/depot_tools': - 'https://chromium.googlesource.com/chromium/tools/depot_tools.git@e1a9c8db7e7cf6ba7d70b06ec3f3cf6d74451680', + 'https://chromium.googlesource.com/chromium/tools/depot_tools.git@9949ab7a4b1cb3c342b38129b4e0bfcfb2ef5749', 'src/third_party/ffmpeg': - 'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@31886e8f39a47a9d7107d4c937bb053dcf5699ce', + 'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@d2dd36c03501e995e8ce2d792d834392b2e62bfe', 'src/third_party/findbugs': { 'url': 'https://chromium.googlesource.com/chromium/deps/findbugs.git@4275d9ac8610db6b1bc9a5e887f97e41b33fac67', 'condition': 'checkout_android', @@ -143,12 +150,15 @@ deps = { 'condition': 'checkout_linux', }, 'src/third_party/freetype/src': - 'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@11beee855e29757a07320fd60e85de2e8da4e037', + 'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@b7c467b6efa5a91945854de81632be45d6f360ff', 'src/third_party/harfbuzz-ng/src': - 'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@0d5695983e8bf3184ecd4cb92f737b9dfe5d6d25', + 'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@e3af529e511ca492284cdd9f4584666b88a9e00f', + 'src/third_party/google_benchmark/src': { + 'url': 'https://chromium.googlesource.com/external/github.com/google/benchmark.git@367119482ff4abc3d73e4a109b410090fc281337', + }, # WebRTC-only dependency (not present in Chromium). 
'src/third_party/gtest-parallel': - 'https://chromium.googlesource.com/external/github.com/google/gtest-parallel@df0b4e476f98516cea7d593e5dbb0fca44f6ee7f', + 'https://chromium.googlesource.com/external/github.com/google/gtest-parallel@aabba21acd68a8814c70a6c2937f1625de715411', 'src/third_party/google-truth': { 'packages': [ { @@ -160,13 +170,9 @@ deps = { 'dep_type': 'cipd', }, 'src/third_party/googletest/src': - 'https://chromium.googlesource.com/external/github.com/google/googletest.git@e3f0319d89f4cbf32993de595d984183b1a9fc57', + 'https://chromium.googlesource.com/external/github.com/google/googletest.git@4fe018038f87675c083d0cfb6a6b57c274fb1753', 'src/third_party/icu': { - 'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@13cfcd5874f6c39c34ec57fa5295e7910ae90b8d', - }, - 'src/third_party/jsr-305/src': { - 'url': 'https://chromium.googlesource.com/external/jsr-305.git@642c508235471f7220af6d5df2d3210e3bfc0919', - 'condition': 'checkout_android', + 'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@79326efe26e5440f530963704c3c0ff965b3a4ac', }, 'src/third_party/jdk': { 'packages': [ @@ -196,9 +202,15 @@ deps = { 'src/third_party/libsrtp': 'https://chromium.googlesource.com/chromium/deps/libsrtp.git@650611720ecc23e0e6b32b0e3100f8b4df91696c', 'src/third_party/libaom/source/libaom': - 'https://aomedia.googlesource.com/aom.git@611c58e511042782869dfcb6e0450587b30fc5f2', + 'https://aomedia.googlesource.com/aom.git@0a7c8715d79222adf6cfc4c1cdc3fcb8fb3951f3', + 'src/third_party/libunwindstack': { + 'url': 'https://chromium.googlesource.com/chromium/src/third_party/libunwindstack.git@11659d420a71e7323b379ea8781f07c6f384bc7e', + 'condition': 'checkout_android', + }, + 'src/third_party/perfetto': + 'https://android.googlesource.com/platform/external/perfetto.git@026b1e2be09a8cd8ced37f99e771eca00ea439b0', 'src/third_party/libvpx/source/libvpx': - 'https://chromium.googlesource.com/webm/libvpx.git@8dc6f353c6d04329cf59529f41a6f46d9dbfcafa', + 
'https://chromium.googlesource.com/webm/libvpx.git@a1cee8dc919df1980d802e1a9bce1259ec34cba8', 'src/third_party/libyuv': 'https://chromium.googlesource.com/libyuv/libyuv.git@6afd9becdf58822b1da6770598d8597c583ccfad', 'src/third_party/lss': { @@ -212,16 +224,16 @@ deps = { # Used by boringssl. 'src/third_party/nasm': { - 'url': 'https://chromium.googlesource.com/chromium/deps/nasm.git@4fa54ca5f7fc3a15a8c78ac94688e64d3e4e4fa1' + 'url': 'https://chromium.googlesource.com/chromium/deps/nasm.git@19f3fad68da99277b2882939d3b2fa4c4b8d51d9' }, 'src/third_party/openh264/src': - 'https://chromium.googlesource.com/external/github.com/cisco/openh264@6f26bce0b1c4e8ce0e13332f7c0083788def5fdf', + 'https://chromium.googlesource.com/external/github.com/cisco/openh264@a5473711f3e20c6bd1c33d81b6c7b9a0618aa18f', 'src/third_party/r8': { 'packages': [ { 'package': 'chromium/third_party/r8', - 'version': 'I91wspV6GMc7l_m-k9v3-ooP-CBrK76OVc3rfnB5T7kC', + 'version': 'vvymFSkKtWKWNmfz0PL_0H8MD8V40P--A9aUfxfpF6QC', }, ], 'condition': 'checkout_android', @@ -241,27 +253,21 @@ deps = { 'url': 'https://chromium.googlesource.com/external/github.com/kennethreitz/requests.git@f172b30356d821d180fa4ecfa3e71c7274a32de4', 'condition': 'checkout_android', }, - 'src/third_party/robolectric/robolectric': { - 'url': 'https://chromium.googlesource.com/external/robolectric.git@f2df0efb033bb402399ebfb9bf58aefee5cced05', - 'condition': 'checkout_android', - }, 'src/third_party/ub-uiautomator/lib': { 'url': 'https://chromium.googlesource.com/chromium/third_party/ub-uiautomator.git@00270549ce3161ae72ceb24712618ea28b4f9434', 'condition': 'checkout_android', }, 'src/third_party/usrsctp/usrsctplib': - 'https://chromium.googlesource.com/external/github.com/sctplab/usrsctp@bee946a606752a443bd70bca1cb296527fed706d', + 'https://chromium.googlesource.com/external/github.com/sctplab/usrsctp@a8c51df76caae94254b1e59999405f739467490e', # Dependency used by libjpeg-turbo. 
'src/third_party/yasm/binaries': { 'url': 'https://chromium.googlesource.com/chromium/deps/yasm/binaries.git@52f9b3f4b0aa06da24ef8b123058bb61ee468881', 'condition': 'checkout_win', }, - 'src/third_party/yasm/source/patched-yasm': - 'https://chromium.googlesource.com/chromium/deps/yasm/patched-yasm.git@720b70524a4424b15fc57e82263568c8ba0496ad', 'src/tools': - 'https://chromium.googlesource.com/chromium/src/tools@3f15275073fc339e14f39df61fbace5bad82e93d', + 'https://chromium.googlesource.com/chromium/src/tools@d50c1e9d058bfd361917e50ca9a66d486a8c6d3b', 'src/tools/swarming_client': - 'https://chromium.googlesource.com/infra/luci/client-py.git@160b445a44e0daacf6f3f8570ca2707ec451f374', + 'https://chromium.googlesource.com/infra/luci/client-py.git@4c095d04179dc725a300085ae21fe3b79900d072', 'src/third_party/accessibility_test_framework': { 'packages': [ @@ -333,7 +339,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/espresso', - 'version': 'c92dcfc4e894555a0b3c309f2b7939640eb1fee4', + 'version': 'y8fIfH8Leo2cPm7iGCYnBxZpwOlgLv8rm2mlcmJlvGsC', }, ], 'condition': 'checkout_android', @@ -344,7 +350,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/guava', - 'version': 'a6fba501f3a0de88b9be1daa2052632de5b96a46', + 'version': 'y8Zx7cKTiOunLhOrfC4hOt5kDQrLJ_Rq7ISDmXkPdYsC', }, ], 'condition': 'checkout_android', @@ -397,13 +403,9 @@ deps = { 'package': 'chromium/third_party/android_sdk/public/sources/android-29', 'version': '4gxhM8E62bvZpQs7Q3d0DinQaW0RLCIefhXrQBFkNy8C', }, - { - 'package': 'chromium/third_party/android_sdk/public/tools-lint', - 'version': '89hXqZYzCum3delB5RV7J_QyWkaRodqdtQS0s3LMh3wC', - }, { 'package': 'chromium/third_party/android_sdk/public/cmdline-tools', - 'version': 'CR25ixsRhwuRnhdgDpGFyl9S0C_0HO9SUgFrwX46zq8C', + 'version': 'uM0XtAW9BHh8phcbhBDA9GfzP3bku2SP7AiMahhimnoC', }, ], 'condition': 'checkout_android', @@ -469,7 +471,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/turbine', - 'version': 
'3UJ600difG3ThRhtYrN9AfZ5kh8wCYtBiii1-NMlCrMC', + 'version': 'O_jNDJ4VdwYKBSDbd2BJ3mknaTFoVkvE7Po8XIiKy8sC', }, ], 'condition': 'checkout_android', @@ -477,7 +479,7 @@ deps = { }, 'src/third_party/turbine/src': { - 'url': 'https://chromium.googlesource.com/external/github.com/google/turbine.git' + '@' + '95f6fb6f1e962e8b6ec672905b0b04233f002dc2', + 'url': 'https://chromium.googlesource.com/external/github.com/google/turbine.git' + '@' + '0f2a5024fe4a9bb745bcd5ac7c655cebe11649bc', 'condition': 'checkout_android', }, @@ -496,15 +498,15 @@ deps = { 'packages': [ { 'package': 'infra/tools/luci/isolate/${{platform}}', - 'version': 'git_revision:513b1319d7d855f6c42bc01471562df910fd61b3', + 'version': 'git_revision:56ae79476e3caf14da59d75118408aa778637936', }, { 'package': 'infra/tools/luci/isolated/${{platform}}', - 'version': 'git_revision:513b1319d7d855f6c42bc01471562df910fd61b3', + 'version': 'git_revision:56ae79476e3caf14da59d75118408aa778637936', }, { 'package': 'infra/tools/luci/swarming/${{platform}}', - 'version': 'git_revision:513b1319d7d855f6c42bc01471562df910fd61b3', + 'version': 'git_revision:56ae79476e3caf14da59d75118408aa778637936', }, ], 'dep_type': 'cipd', @@ -601,6 +603,17 @@ deps = { 'dep_type': 'cipd', }, + 'src/third_party/android_deps/libs/androidx_activity_activity': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_activity_activity', + 'version': 'version:1.1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/android_deps/libs/androidx_annotation_annotation': { 'packages': [ { @@ -612,11 +625,33 @@ deps = { 'dep_type': 'cipd', }, + 'src/third_party/android_deps/libs/androidx_annotation_annotation_experimental': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_annotation_annotation_experimental', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 
'src/third_party/android_deps/libs/androidx_appcompat_appcompat': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_appcompat_appcompat', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.2.0-beta01-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_appcompat_appcompat_resources': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_appcompat_appcompat_resources', + 'version': 'version:1.2.0-beta01-cr0', }, ], 'condition': 'checkout_android', @@ -627,7 +662,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_arch_core_core_common', - 'version': 'version:2.0.0-cr0', + 'version': 'version:2.1.0-cr0', }, ], 'condition': 'checkout_android', @@ -638,7 +673,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_arch_core_core_runtime', - 'version': 'version:2.0.0-cr0', + 'version': 'version:2.1.0-cr0', }, ], 'condition': 'checkout_android', @@ -671,7 +706,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_collection_collection', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.1.0-cr0', }, ], 'condition': 'checkout_android', @@ -693,7 +728,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_coordinatorlayout_coordinatorlayout', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.1.0-cr0', }, ], 'condition': 'checkout_android', @@ -704,7 +739,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_core_core', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.3.0-beta01-cr0', }, ], 'condition': 'checkout_android', @@ -755,11 +790,22 @@ deps = { 'dep_type': 'cipd', }, + 'src/third_party/android_deps/libs/androidx_exifinterface_exifinterface': { + 'packages': [ + { + 'package': 
'chromium/third_party/android_deps/libs/androidx_exifinterface_exifinterface', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/android_deps/libs/androidx_fragment_fragment': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_fragment_fragment', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.2.5-cr0', }, ], 'condition': 'checkout_android', @@ -869,7 +915,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_common', - 'version': 'version:2.0.0-cr0', + 'version': 'version:2.2.0-cr0', }, ], 'condition': 'checkout_android', @@ -902,7 +948,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_livedata_core', - 'version': 'version:2.0.0-cr0', + 'version': 'version:2.2.0-cr0', }, ], 'condition': 'checkout_android', @@ -913,7 +959,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_runtime', - 'version': 'version:2.0.0-cr0', + 'version': 'version:2.2.0-cr0', }, ], 'condition': 'checkout_android', @@ -924,7 +970,18 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_viewmodel', - 'version': 'version:2.0.0-cr0', + 'version': 'version:2.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_lifecycle_lifecycle_viewmodel_savedstate': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_viewmodel_savedstate', + 'version': 'version:2.2.0-cr0', }, ], 'condition': 'checkout_android', @@ -1001,7 +1058,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_preference_preference', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.1.1-cr0', }, ], 'condition': 'checkout_android', @@ 
-1023,6 +1080,17 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_recyclerview_recyclerview', + 'version': 'version:1.1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_savedstate_savedstate': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_savedstate_savedstate', 'version': 'version:1.0.0-cr0', }, ], @@ -1063,11 +1131,22 @@ deps = { 'dep_type': 'cipd', }, + 'src/third_party/android_deps/libs/androidx_test_espresso_espresso_contrib': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_contrib', + 'version': 'version:3.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/android_deps/libs/androidx_test_espresso_espresso_core': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_core', - 'version': 'version:3.1.0-cr0', + 'version': 'version:3.2.0-cr0', }, ], 'condition': 'checkout_android', @@ -1078,7 +1157,29 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_idling_resource', - 'version': 'version:3.1.0-cr0', + 'version': 'version:3.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_test_espresso_espresso_intents': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_intents', + 'version': 'version:3.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_test_espresso_espresso_web': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_web', + 'version': 'version:3.2.0-cr0', }, ], 'condition': 'checkout_android', @@ -1144,7 +1245,18 @@ deps = { 
'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_transition_transition', - 'version': 'version:1.0.0-rc02-cr0', + 'version': 'version:1.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_tvprovider_tvprovider': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_tvprovider_tvprovider', + 'version': 'version:1.0.0-cr0', }, ], 'condition': 'checkout_android', @@ -1155,7 +1267,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_vectordrawable_vectordrawable', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.1.0-cr0', }, ], 'condition': 'checkout_android', @@ -1166,7 +1278,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_vectordrawable_vectordrawable_animated', - 'version': 'version:1.0.0-cr0', + 'version': 'version:1.1.0-cr0', }, ], 'condition': 'checkout_android', @@ -1177,6 +1289,17 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/androidx_versionedparcelable_versionedparcelable', + 'version': 'version:1.1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_viewpager2_viewpager2': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_viewpager2_viewpager2', 'version': 'version:1.0.0-cr0', }, ], @@ -1195,6 +1318,28 @@ deps = { 'dep_type': 'cipd', }, + 'src/third_party/android_deps/libs/backport_util_concurrent_backport_util_concurrent': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/backport_util_concurrent_backport_util_concurrent', + 'version': 'version:3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/classworlds_classworlds': { + 'packages': [ + { + 'package': 
'chromium/third_party/android_deps/libs/classworlds_classworlds', + 'version': 'version:1.1-alpha-2-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/android_deps/libs/com_android_support_animated_vector_drawable': { 'packages': [ { @@ -1635,11 +1780,22 @@ deps = { 'dep_type': 'cipd', }, + 'src/third_party/android_deps/libs/com_android_tools_desugar_jdk_libs': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_tools_desugar_jdk_libs', + 'version': 'version:1.0.5-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/android_deps/libs/com_github_ben_manes_caffeine_caffeine': { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_github_ben_manes_caffeine_caffeine', - 'version': 'version:2.7.0-cr0', + 'version': 'version:2.8.0-cr0', }, ], 'condition': 'checkout_android', @@ -1881,7 +2037,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_android_material_material', - 'version': 'version:1.0.0-rc02-cr0', + 'version': 'version:1.2.0-alpha06-cr0', }, ], 'condition': 'checkout_android', @@ -1921,6 +2077,17 @@ deps = { 'dep_type': 'cipd', }, + 'src/third_party/android_deps/libs/com_google_auto_value_auto_value_annotations': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_auto_value_auto_value_annotations', + 'version': 'version:1.7-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/android_deps/libs/com_google_code_findbugs_jFormatString': { 'packages': [ { @@ -2002,7 +2169,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_annotation', - 'version': 'version:2.3.4-cr0', + 'version': 'version:2.4.0-cr0', }, ], 'condition': 'checkout_android', @@ -2013,7 +2180,7 @@ deps = { 'packages': [ { 'package': 
'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_annotations', - 'version': 'version:2.3.4-cr0', + 'version': 'version:2.4.0-cr0', }, ], 'condition': 'checkout_android', @@ -2024,7 +2191,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_check_api', - 'version': 'version:2.3.4-cr0', + 'version': 'version:2.4.0-cr0', }, ], 'condition': 'checkout_android', @@ -2035,7 +2202,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_core', - 'version': 'version:2.3.4-cr0', + 'version': 'version:2.4.0-cr0', }, ], 'condition': 'checkout_android', @@ -2046,7 +2213,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_type_annotations', - 'version': 'version:2.3.4-cr0', + 'version': 'version:2.4.0-cr0', }, ], 'condition': 'checkout_android', @@ -2145,7 +2312,7 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/com_google_protobuf_protobuf_javalite', - 'version': 'version:3.11.4-cr0', + 'version': 'version:3.12.2-cr0', }, ], 'condition': 'checkout_android', @@ -2229,6 +2396,28 @@ deps = { 'dep_type': 'cipd', }, + 'src/third_party/android_deps/libs/nekohtml_nekohtml': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/nekohtml_nekohtml', + 'version': 'version:1.9.6.2-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/nekohtml_xercesMinimal': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/nekohtml_xercesminimal', + 'version': 'version:1.9.6.2-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/android_deps/libs/net_ltgt_gradle_incap_incap': { 'packages': [ { @@ -2251,6 +2440,193 @@ deps = { 'dep_type': 'cipd', }, + 'src/third_party/android_deps/libs/org_apache_ant_ant': { + 'packages': 
[ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_ant_ant', + 'version': 'version:1.8.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_ant_ant_launcher': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_ant_ant_launcher', + 'version': 'version:1.8.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_ant_tasks': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_ant_tasks', + 'version': 'version:2.1.3-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_artifact': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_artifact', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_artifact_manager': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_artifact_manager', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_error_diagnostics': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_error_diagnostics', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_model': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_model', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 
'src/third_party/android_deps/libs/org_apache_maven_maven_plugin_registry': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_plugin_registry', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_profile': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_profile', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_project': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_project', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_repository_metadata': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_repository_metadata', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_settings': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_settings', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_wagon_wagon_file': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_wagon_wagon_file', + 'version': 'version:1.0-beta-6-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_wagon_wagon_http_lightweight': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_wagon_wagon_http_lightweight', + 'version': 
'version:1.0-beta-6-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_wagon_wagon_http_shared': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_wagon_wagon_http_shared', + 'version': 'version:1.0-beta-6-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_wagon_wagon_provider_api': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_wagon_wagon_provider_api', + 'version': 'version:1.0-beta-6-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_ccil_cowan_tagsoup_tagsoup': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_ccil_cowan_tagsoup_tagsoup', + 'version': 'version:1.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/android_deps/libs/org_checkerframework_checker_compat_qual': { 'packages': [ { @@ -2266,29 +2642,18 @@ deps = { 'packages': [ { 'package': 'chromium/third_party/android_deps/libs/org_checkerframework_checker_qual', - 'version': 'version:3.0.0-cr0', + 'version': 'version:2.10.0-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_checkerframework_dataflow': { + 'src/third_party/android_deps/libs/org_checkerframework_dataflow_shaded': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_checkerframework_dataflow', - 'version': 'version:3.0.0-cr0', - }, - ], - 'condition': 'checkout_android', - 'dep_type': 'cipd', - }, - - 'src/third_party/android_deps/libs/org_checkerframework_javacutil': { - 'packages': [ - { - 'package': 'chromium/third_party/android_deps/libs/org_checkerframework_javacutil', - 'version': 'version:3.0.0-cr0', + 'package': 
'chromium/third_party/android_deps/libs/org_checkerframework_dataflow_shaded', + 'version': 'version:3.1.2-cr0', }, ], 'condition': 'checkout_android', @@ -2306,6 +2671,39 @@ deps = { 'dep_type': 'cipd', }, + 'src/third_party/android_deps/libs/org_codehaus_plexus_plexus_container_default': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_codehaus_plexus_plexus_container_default', + 'version': 'version:1.0-alpha-9-stable-1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_codehaus_plexus_plexus_interpolation': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_codehaus_plexus_plexus_interpolation', + 'version': 'version:1.11-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_codehaus_plexus_plexus_utils': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_codehaus_plexus_plexus_utils', + 'version': 'version:1.5.15-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/android_deps/libs/org_jdom_jdom2': { 'packages': [ { @@ -2427,33 +2825,154 @@ deps = { 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_plumelib_plume_util': { + 'src/third_party/android_deps/libs/org_robolectric_annotations': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_plumelib_plume_util', - 'version': 'version:1.0.6-cr0', + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_annotations', + 'version': 'version:4.3.1-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_plumelib_reflection_util': { + 'src/third_party/android_deps/libs/org_robolectric_junit': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_plumelib_reflection_util', - 'version': 'version:0.0.2-cr0', + 'package': 
'chromium/third_party/android_deps/libs/org_robolectric_junit', + 'version': 'version:4.3.1-cr0', }, ], 'condition': 'checkout_android', 'dep_type': 'cipd', }, - 'src/third_party/android_deps/libs/org_plumelib_require_javadoc': { + 'src/third_party/android_deps/libs/org_robolectric_pluginapi': { 'packages': [ { - 'package': 'chromium/third_party/android_deps/libs/org_plumelib_require_javadoc', - 'version': 'version:0.1.0-cr0', + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_pluginapi', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_plugins_maven_dependency_resolver': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_plugins_maven_dependency_resolver', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_resources': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_resources', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_robolectric': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_robolectric', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_sandbox': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_sandbox', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_shadowapi': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadowapi', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 
'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_shadows_framework': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_framework', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_shadows_multidex': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_multidex', + 'version': 'version:4.3.1-cr1', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_shadows_playservices': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_playservices', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_utils': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_utils', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_utils_reflector': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_utils_reflector', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_threeten_threeten_extra': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_threeten_threeten_extra', + 'version': 'version:1.5.0-cr0', }, ], 'condition': 'checkout_android', @@ -2712,11 +3231,12 @@ include_rules = [ "+test", "+rtc_tools", - # Abseil whitelist. Keep this in sync with abseil-in-webrtc.md. + # Abseil allowlist. Keep this in sync with abseil-in-webrtc.md. 
"+absl/algorithm/algorithm.h", "+absl/algorithm/container.h", "+absl/base/attributes.h", "+absl/base/config.h", + "+absl/base/const_init.h", "+absl/base/macros.h", "+absl/container/inlined_vector.h", "+absl/memory/memory.h", diff --git a/PRESUBMIT.py b/PRESUBMIT.py index 247b78eaa0..b5d4534c0b 100755 --- a/PRESUBMIT.py +++ b/PRESUBMIT.py @@ -14,7 +14,7 @@ from collections import defaultdict from contextlib import contextmanager # Files and directories that are *skipped* by cpplint in the presubmit script. -CPPLINT_BLACKLIST = [ +CPPLINT_EXCEPTIONS = [ 'api/video_codecs/video_decoder.h', 'common_types.cc', 'common_types.h', @@ -45,12 +45,15 @@ CPPLINT_BLACKLIST = [ # # Justifications for each filter: # - build/c++11 : Rvalue ref checks are unreliable (false positives), -# include file and feature blacklists are +# include file and feature blocklists are # google3-specific. +# - runtime/references : Mutable references are not banned by the Google +# C++ style guide anymore (starting from May 2020). # - whitespace/operators: Same as above (doesn't seem sufficient to eliminate # all move-related errors). -BLACKLIST_LINT_FILTERS = [ +DISABLED_LINT_FILTERS = [ '-build/c++11', + '-runtime/references', '-whitespace/operators', ] @@ -94,15 +97,20 @@ LEGACY_API_DIRS = ( API_DIRS = NATIVE_API_DIRS[:] + LEGACY_API_DIRS[:] # TARGET_RE matches a GN target, and extracts the target name and the contents. -TARGET_RE = re.compile(r'(?P\s*)\w+\("(?P\w+)"\) {' - r'(?P.*?)' - r'(?P=indent)}', - re.MULTILINE | re.DOTALL) +TARGET_RE = re.compile( + r'(?P\s*)(?P\w+)\("(?P\w+)"\) {' + r'(?P.*?)' + r'(?P=indent)}', + re.MULTILINE | re.DOTALL) # SOURCES_RE matches a block of sources inside a GN target. SOURCES_RE = re.compile(r'sources \+?= \[(?P.*?)\]', re.MULTILINE | re.DOTALL) +# DEPS_RE matches a block of sources inside a GN target. +DEPS_RE = re.compile(r'\bdeps \+?= \[(?P.*?)\]', + re.MULTILINE | re.DOTALL) + # FILE_PATH_RE matchies a file path. 
FILE_PATH_RE = re.compile(r'"(?P(\w|\/)+)(?P\.\w+)"') @@ -168,7 +176,7 @@ def CheckNativeApiHeaderChanges(input_api, output_api): """Checks to remind proper changing of native APIs.""" files = [] source_file_filter = lambda x: input_api.FilterSourceFile( - x, white_list=[r'.+\.(gn|gni|h)$']) + x, allow_list=[r'.+\.(gn|gni|h)$']) for f in input_api.AffectedSourceFiles(source_file_filter): for path in API_DIRS: dn = os.path.dirname(f.LocalPath()) @@ -254,9 +262,9 @@ def CheckNoFRIEND_TEST(input_api, output_api, # pylint: disable=invalid-name 'use FRIEND_TEST_ALL_PREFIXES() instead.\n' + '\n'.join(problems))] -def IsLintBlacklisted(blacklist_paths, file_path): - """ Checks if a file is blacklisted for lint check.""" - for path in blacklist_paths: +def IsLintDisabled(disabled_paths, file_path): + """ Checks if a file is disabled for lint check.""" + for path in disabled_paths: if file_path == path or os.path.dirname(file_path).startswith(path): return True return False @@ -264,7 +272,7 @@ def IsLintBlacklisted(blacklist_paths, file_path): def CheckApprovedFilesLintClean(input_api, output_api, source_file_filter=None): - """Checks that all new or non-blacklisted .cc and .h files pass cpplint.py. + """Checks that all new or non-exempt .cc and .h files pass cpplint.py. This check is based on CheckChangeLintsClean in depot_tools/presubmit_canned_checks.py but has less filters and only checks added files.""" @@ -277,22 +285,22 @@ def CheckApprovedFilesLintClean(input_api, output_api, cpplint._cpplint_state.ResetErrorCounts() lint_filters = cpplint._Filters() - lint_filters.extend(BLACKLIST_LINT_FILTERS) + lint_filters.extend(DISABLED_LINT_FILTERS) cpplint._SetFilters(','.join(lint_filters)) - # Create a platform independent blacklist for cpplint. - blacklist_paths = [input_api.os_path.join(*path.split('/')) - for path in CPPLINT_BLACKLIST] + # Create a platform independent exempt list for cpplint. 
+ disabled_paths = [input_api.os_path.join(*path.split('/')) + for path in CPPLINT_EXCEPTIONS] # Use the strictest verbosity level for cpplint.py (level 1) which is the # default when running cpplint.py from command line. To make it possible to # work with not-yet-converted code, we're only applying it to new (or - # moved/renamed) files and files not listed in CPPLINT_BLACKLIST. + # moved/renamed) files and files not listed in CPPLINT_EXCEPTIONS. verbosity_level = 1 files = [] for f in input_api.AffectedSourceFiles(source_file_filter): # Note that moved/renamed files also count as added. - if f.Action() == 'A' or not IsLintBlacklisted(blacklist_paths, + if f.Action() == 'A' or not IsLintDisabled(disabled_paths, f.LocalPath()): files.append(f.AbsoluteLocalPath()) @@ -338,6 +346,37 @@ def CheckNoSourcesAbove(input_api, gn_files, output_api): return [] +def CheckAbseilDependencies(input_api, gn_files, output_api): + """Checks that Abseil dependencies are declared in `absl_deps`.""" + absl_re = re.compile(r'third_party/abseil-cpp', re.MULTILINE | re.DOTALL) + target_types_to_check = [ + 'rtc_library', + 'rtc_source_set', + 'rtc_static_library', + 'webrtc_fuzzer_test', + ] + error_msg = ('Abseil dependencies in target "%s" (file: %s) ' + 'should be moved to the "absl_deps" parameter.') + errors = [] + + for gn_file in gn_files: + gn_file_content = input_api.ReadFile(gn_file) + for target_match in TARGET_RE.finditer(gn_file_content): + target_type = target_match.group('target_type') + target_name = target_match.group('target_name') + target_contents = target_match.group('target_contents') + if target_type in target_types_to_check: + for deps_match in DEPS_RE.finditer(target_contents): + deps = deps_match.group('deps').splitlines() + for dep in deps: + if re.search(absl_re, dep): + errors.append( + output_api.PresubmitError(error_msg % (target_name, + gn_file.LocalPath()))) + break # no need to warn more than once per target + return errors + + def 
CheckNoMixingSources(input_api, gn_files, output_api): """Disallow mixing C, C++ and Obj-C/Obj-C++ in the same target. @@ -566,8 +605,8 @@ def CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api): def CheckGnChanges(input_api, output_api): file_filter = lambda x: (input_api.FilterSourceFile( - x, white_list=(r'.+\.(gn|gni)$',), - black_list=(r'.*/presubmit_checks_lib/testdata/.*',))) + x, allow_list=(r'.+\.(gn|gni)$',), + block_list=(r'.*/presubmit_checks_lib/testdata/.*',))) gn_files = [] for f in input_api.AffectedSourceFiles(file_filter): @@ -577,6 +616,7 @@ def CheckGnChanges(input_api, output_api): if gn_files: result.extend(CheckNoSourcesAbove(input_api, gn_files, output_api)) result.extend(CheckNoMixingSources(input_api, gn_files, output_api)) + result.extend(CheckAbseilDependencies(input_api, gn_files, output_api)) result.extend(CheckNoPackageBoundaryViolations(input_api, gn_files, output_api)) result.extend(CheckPublicDepsIsNotUsed(gn_files, input_api, output_api)) @@ -756,7 +796,7 @@ def RunPythonTests(input_api, output_api): input_api, output_api, directory, - whitelist=[r'.+_test\.py$'])) + allowlist=[r'.+_test\.py$'])) return input_api.RunTests(tests, parallel=True) @@ -810,17 +850,18 @@ def CommonChecks(input_api, output_api): results = [] # Filter out files that are in objc or ios dirs from being cpplint-ed since # they do not follow C++ lint rules. 
- black_list = input_api.DEFAULT_BLACK_LIST + ( + exception_list = input_api.DEFAULT_BLACK_LIST + ( r".*\bobjc[\\\/].*", r".*objc\.[hcm]+$", ) - source_file_filter = lambda x: input_api.FilterSourceFile(x, None, black_list) + source_file_filter = lambda x: input_api.FilterSourceFile(x, None, + exception_list) results.extend(CheckApprovedFilesLintClean( input_api, output_api, source_file_filter)) results.extend(input_api.canned_checks.CheckLicense( input_api, output_api, _LicenseHeader(input_api))) results.extend(input_api.canned_checks.RunPylint(input_api, output_api, - black_list=(r'^base[\\\/].*\.py$', + block_list=(r'^base[\\\/].*\.py$', r'^build[\\\/].*\.py$', r'^buildtools[\\\/].*\.py$', r'^infra[\\\/].*\.py$', @@ -847,12 +888,12 @@ def CommonChecks(input_api, output_api): # Also we will skip most checks for third_party directory. third_party_filter_list = (r'^third_party[\\\/].+',) eighty_char_sources = lambda x: input_api.FilterSourceFile(x, - black_list=build_file_filter_list + objc_filter_list + + block_list=build_file_filter_list + objc_filter_list + third_party_filter_list) hundred_char_sources = lambda x: input_api.FilterSourceFile(x, - white_list=objc_filter_list) + allow_list=objc_filter_list) non_third_party_sources = lambda x: input_api.FilterSourceFile(x, - black_list=third_party_filter_list) + block_list=third_party_filter_list) results.extend(input_api.canned_checks.CheckLongLines( input_api, output_api, maxlen=80, source_file_filter=eighty_char_sources)) @@ -900,6 +941,8 @@ def CommonChecks(input_api, output_api): input_api, output_api, non_third_party_sources)) results.extend(CheckBannedAbslMakeUnique( input_api, output_api, non_third_party_sources)) + results.extend(CheckObjcApiSymbols( + input_api, output_api, non_third_party_sources)) return results @@ -976,6 +1019,35 @@ def CheckBannedAbslMakeUnique(input_api, output_api, source_file_filter): files)] return [] +def CheckObjcApiSymbols(input_api, output_api, source_file_filter): + 
rtc_objc_export = re.compile(r'RTC_OBJC_EXPORT(.|\n){26}', + re.MULTILINE | re.DOTALL) + file_filter = lambda f: (f.LocalPath().endswith(('.h')) + and source_file_filter(f)) + + files = [] + file_filter = lambda x: (input_api.FilterSourceFile(x) + and source_file_filter(x)) + for f in input_api.AffectedSourceFiles(file_filter): + if not f.LocalPath().endswith('.h') or not 'sdk/objc' in f.LocalPath(): + continue + contents = input_api.ReadFile(f) + for match in rtc_objc_export.finditer(contents): + export_block = match.group(0) + if 'RTC_OBJC_TYPE' not in export_block: + files.append(f.LocalPath()) + + if len(files): + return [output_api.PresubmitError( + 'RTC_OBJC_EXPORT types must be wrapped into an RTC_OBJC_TYPE() ' + + 'macro.\n\n' + + 'For example:\n' + + 'RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE(RtcFoo)\n\n' + + 'RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE(RtcFoo)\n\n' + + 'Please fix the following files:', + files)] + return [] + def CheckAbslMemoryInclude(input_api, output_api, source_file_filter): pattern = input_api.re.compile( r'^#include\s*"absl/memory/memory.h"', input_api.re.MULTILINE) @@ -1033,7 +1105,7 @@ def CheckOrphanHeaders(input_api, output_api, source_file_filter): # eval-ed and thus doesn't have __file__. 
error_msg = """{} should be listed in {}.""" results = [] - orphan_blacklist = [ + exempt_paths = [ os.path.join('tools_webrtc', 'ios', 'SDK'), ] with _AddToPath(input_api.os_path.join( @@ -1042,7 +1114,7 @@ def CheckOrphanHeaders(input_api, output_api, source_file_filter): from check_orphan_headers import IsHeaderInBuildGn file_filter = lambda x: input_api.FilterSourceFile( - x, black_list=orphan_blacklist) and source_file_filter(x) + x, block_list=exempt_paths) and source_file_filter(x) for f in input_api.AffectedSourceFiles(file_filter): if f.LocalPath().endswith('.h'): file_path = os.path.abspath(f.LocalPath()) @@ -1061,7 +1133,7 @@ def CheckNewlineAtTheEndOfProtoFiles(input_api, output_api, source_file_filter): error_msg = 'File {} must end with exactly one newline.' results = [] file_filter = lambda x: input_api.FilterSourceFile( - x, white_list=(r'.+\.proto$',)) and source_file_filter(x) + x, allow_list=(r'.+\.proto$',)) and source_file_filter(x) for f in input_api.AffectedSourceFiles(file_filter): file_path = f.LocalPath() with open(file_path) as f: diff --git a/README.chromium b/README.chromium index 246c13dc09..58c8da8403 100644 --- a/README.chromium +++ b/README.chromium @@ -1,13 +1,14 @@ -Name: WebRTC -URL: http://www.webrtc.org -Version: 90 -License: BSD -License File: LICENSE - -Description: -WebRTC provides real time voice and video processing -functionality to enable the implementation of -PeerConnection/MediaStream. - -Third party code used in this project is described -in the file LICENSE_THIRD_PARTY. +Name: WebRTC +URL: http://www.webrtc.org +Version: 90 +CPEPrefix: cpe:/a:webrtc_project:webrtc:90 +License: BSD +License File: LICENSE + +Description: +WebRTC provides real time voice and video processing +functionality to enable the implementation of +PeerConnection/MediaStream. + +Third party code used in this project is described +in the file LICENSE_THIRD_PARTY. 
diff --git a/abseil-in-webrtc.md b/abseil-in-webrtc.md index 0541d3c7a5..da03af07b1 100644 --- a/abseil-in-webrtc.md +++ b/abseil-in-webrtc.md @@ -23,9 +23,11 @@ adds the first use. * `absl::variant` and related stuff from `absl/types/variant.h`. * The functions in `absl/algorithm/algorithm.h` and `absl/algorithm/container.h`. +* `absl/base/const_init.h` for mutex initialization. * The macros in `absl/base/attributes.h`, `absl/base/config.h` and `absl/base/macros.h`. + ## **Disallowed** ### `absl::make_unique` @@ -34,7 +36,7 @@ adds the first use. ### `absl::Mutex` -*Use `rtc::CriticalSection` instead.* +*Use `webrtc::Mutex` instead.* Chromium has a ban on new static initializers, and `absl::Mutex` uses one. To make `absl::Mutex` available, we would need to nicely ask the @@ -61,3 +63,12 @@ has decided if they will change `absl::Span` to match. These are optimized for speed, not binary size. Even `StrCat` calls with a modest number of arguments can easily add several hundred bytes to the binary. + +## How to depend on Abseil + +For build targets `rtc_library`, `rtc_source_set` and `rtc_static_library`, +dependencies on Abseil need to be listed in `absl_deps` instead of `deps`. + +This is needed in order to support the Abseil component build in Chromium. In +such build mode, WebRTC will depend on a unique Abseil build target what will +generate a shared library. 
diff --git a/api/BUILD.gn b/api/BUILD.gn index 8d3ee8f460..0d4ba2ca46 100644 --- a/api/BUILD.gn +++ b/api/BUILD.gn @@ -71,8 +71,8 @@ rtc_library("rtp_headers") { "..:webrtc_common", "units:timestamp", "video:video_rtp_headers", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("rtp_packet_info") { @@ -90,8 +90,8 @@ rtc_library("rtp_packet_info") { "..:webrtc_common", "../rtc_base:rtc_base_approved", "../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("media_stream_interface") { @@ -111,8 +111,8 @@ rtc_library("media_stream_interface") { "../rtc_base/system:rtc_export", "video:recordable_encoded_frame", "video:video_frame", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("libjingle_peerconnection_api") { @@ -166,12 +166,14 @@ rtc_library("libjingle_peerconnection_api") { ":media_stream_interface", ":network_state_predictor_api", ":packet_socket_factory", + ":priority", ":rtc_error", ":rtc_stats_api", ":rtp_packet_info", ":rtp_parameters", ":rtp_transceiver_direction", ":scoped_refptr", + "adaptation:resource_adaptation_api", "audio:audio_mixer_api", "audio_codecs:audio_codecs_api", "crypto:frame_decryptor_interface", @@ -181,23 +183,15 @@ rtc_library("libjingle_peerconnection_api") { "rtc_event_log", "task_queue", "transport:bitrate_settings", - "transport:datagram_transport_interface", "transport:enums", "transport:network_control", "transport:webrtc_key_value_config", - "transport/media:audio_interfaces", - "transport/media:media_transport_interface", - "transport/media:video_interfaces", "transport/rtp:rtp_source", "units:data_rate", "units:timestamp", "video:encoded_image", "video:video_frame", "video:video_rtp_headers", - "//third_party/abseil-cpp/absl/algorithm:container", - 
"//third_party/abseil-cpp/absl/memory", - "//third_party/abseil-cpp/absl/strings", - "//third_party/abseil-cpp/absl/types:optional", # Basically, don't add stuff here. You might break sensitive downstream # targets like pnacl. API should not depend on anything outside of this @@ -212,6 +206,12 @@ rtc_library("libjingle_peerconnection_api") { "../rtc_base:rtc_base_approved", "../rtc_base/system:rtc_export", ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] } rtc_source_set("frame_transformer_interface") { @@ -221,6 +221,7 @@ rtc_source_set("frame_transformer_interface") { ":scoped_refptr", "../rtc_base:refcount", "video:encoded_frame", + "video:video_frame_metadata", ] } @@ -235,8 +236,8 @@ rtc_library("rtc_error") { "../rtc_base:logging", "../rtc_base:macromagic", "../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("packet_socket_factory") { @@ -272,7 +273,6 @@ rtc_source_set("video_quality_test_fixture_api") { "../test:video_test_common", "transport:bitrate_settings", "transport:network_control", - "transport/media:media_transport_interface", "video_codecs:video_codecs_api", ] } @@ -283,18 +283,23 @@ rtc_source_set("video_quality_analyzer_api") { sources = [ "test/video_quality_analyzer_interface.h" ] deps = [ + ":array_view", ":stats_observer_interface", "video:encoded_image", "video:video_frame", "video:video_rtp_headers", "video_codecs:video_codecs_api", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] } -rtc_source_set("track_id_stream_label_map") { +rtc_source_set("track_id_stream_info_map") { visibility = [ "*" ] - sources = [ "test/track_id_stream_label_map.h" ] + sources = [ "test/track_id_stream_info_map.h" ] + absl_deps 
= [ "//third_party/abseil-cpp/absl/strings" ] } rtc_source_set("rtp_transceiver_direction") { @@ -302,6 +307,10 @@ rtc_source_set("rtp_transceiver_direction") { sources = [ "rtp_transceiver_direction.h" ] } +rtc_source_set("priority") { + sources = [ "priority.h" ] +} + rtc_library("rtp_parameters") { visibility = [ "*" ] sources = [ @@ -312,18 +321,21 @@ rtc_library("rtp_parameters") { ] deps = [ ":array_view", + ":priority", ":rtp_transceiver_direction", "../rtc_base:checks", "../rtc_base:stringutils", "../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] } if (is_android) { - java_cpp_enum("rtp_parameters_enums") { - sources = [ "rtp_parameters.h" ] + java_cpp_enum("priority_enums") { + sources = [ "priority.h" ] } } @@ -334,7 +346,7 @@ rtc_source_set("audio_quality_analyzer_api") { deps = [ ":stats_observer_interface", - ":track_id_stream_label_map", + ":track_id_stream_info_map", ] } @@ -343,11 +355,9 @@ rtc_source_set("stats_observer_interface") { testonly = true sources = [ "test/stats_observer_interface.h" ] - deps = [ - # For api/stats_types.h - ":libjingle_peerconnection_api", - ":rtp_parameters", - ] + deps = [ ":rtc_stats_api" ] + + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_source_set("peer_connection_quality_test_fixture_api") { @@ -365,18 +375,21 @@ rtc_source_set("peer_connection_quality_test_fixture_api") { ":media_stream_interface", ":network_state_predictor_api", ":packet_socket_factory", + ":rtp_parameters", ":simulated_network_api", ":stats_observer_interface", + ":track_id_stream_info_map", ":video_quality_analyzer_api", "../media:rtc_media_base", "../rtc_base:rtc_base", "rtc_event_log", "task_queue", "transport:network_control", - "transport/media:media_transport_interface", "units:time_delta", "video:video_frame", "video_codecs:video_codecs_api", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory", 
"//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", @@ -391,8 +404,8 @@ rtc_source_set("frame_generator_api") { deps = [ ":scoped_refptr", "video:video_frame", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("test_dependency_factory") { @@ -405,7 +418,7 @@ rtc_library("test_dependency_factory") { deps = [ ":video_quality_test_fixture_api", "../rtc_base:checks", - "../rtc_base:thread_checker", + "../rtc_base:platform_thread_types", ] } @@ -451,6 +464,7 @@ if (rtc_include_tests) { deps = [ ":audio_quality_analyzer_api", ":peer_connection_quality_test_fixture_api", + ":time_controller", ":video_quality_analyzer_api", "../test/pc/e2e:peerconnection_quality_test", ] @@ -469,8 +483,8 @@ rtc_library("create_frame_generator") { "../rtc_base:checks", "../system_wrappers", "../test:frame_generator_impl", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("create_peer_connection_quality_test_frame_generator") { @@ -486,8 +500,8 @@ rtc_library("create_peer_connection_quality_test_frame_generator") { ":peer_connection_quality_test_fixture_api", "../rtc_base:checks", "../test:fileutils", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("libjingle_logging_api") { @@ -540,8 +554,8 @@ rtc_library("audio_options_api") { ":array_view", "../rtc_base:stringutils", "../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("transport_api") { @@ -565,11 +579,8 @@ rtc_source_set("bitrate_allocation") { rtc_source_set("simulated_network_api") { visibility = [ "*" ] sources = [ "test/simulated_network.h" ] - deps = [ - "../rtc_base", - "../rtc_base:criticalsection", - 
"//third_party/abseil-cpp/absl/types:optional", - ] + deps = [ "../rtc_base" ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } # TODO(srte): Move to network_emulation sub directory. @@ -702,6 +713,8 @@ if (rtc_include_tests) { "../modules/audio_coding:neteq_test_factory", "../rtc_base:checks", "neteq:neteq_api", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag", "//third_party/abseil-cpp/absl/flags:parse", "//third_party/abseil-cpp/absl/strings", @@ -852,6 +865,7 @@ if (rtc_include_tests) { } rtc_source_set("mock_peerconnectioninterface") { + visibility = [ "*" ] testonly = true sources = [ "test/mock_peerconnectioninterface.h" ] @@ -861,6 +875,17 @@ if (rtc_include_tests) { ] } + rtc_source_set("mock_peer_connection_factory_interface") { + visibility = [ "*" ] + testonly = true + sources = [ "test/mock_peer_connection_factory_interface.h" ] + + deps = [ + ":libjingle_peerconnection_api", + "../test:test_support", + ] + } + rtc_source_set("mock_rtp") { testonly = true sources = [ @@ -874,6 +899,16 @@ if (rtc_include_tests) { ] } + rtc_source_set("mock_transformable_video_frame") { + testonly = true + sources = [ "test/mock_transformable_video_frame.h" ] + + deps = [ + ":frame_transformer_interface", + "../test:test_support", + ] + } + rtc_source_set("mock_video_bitrate_allocator") { testonly = true sources = [ "test/mock_video_bitrate_allocator.h" ] @@ -931,39 +966,6 @@ if (rtc_include_tests) { ] } - rtc_source_set("fake_media_transport") { - testonly = true - - sources = [ - "test/fake_datagram_transport.h", - "test/fake_media_transport.h", - ] - - deps = [ - "../rtc_base:checks", - "transport:datagram_transport_interface", - "transport/media:media_transport_interface", - "//third_party/abseil-cpp/absl/algorithm:container", - ] - } - - rtc_library("loopback_media_transport") { - testonly = true - - sources = [ - "test/loopback_media_transport.cc", - "test/loopback_media_transport.h", - ] - - deps = [ - "../rtc_base", - 
"../rtc_base:checks", - "transport:datagram_transport_interface", - "transport/media:media_transport_interface", - "//third_party/abseil-cpp/absl/algorithm:container", - ] - } - rtc_library("create_time_controller") { visibility = [ "*" ] testonly = true @@ -994,7 +996,6 @@ if (rtc_include_tests) { "rtp_parameters_unittest.cc", "scoped_refptr_unittest.cc", "test/create_time_controller_unittest.cc", - "test/loopback_media_transport_unittest.cc", ] deps = [ @@ -1002,7 +1003,6 @@ if (rtc_include_tests) { ":create_time_controller", ":function_view", ":libjingle_peerconnection_api", - ":loopback_media_transport", ":rtc_error", ":rtc_event_log_output_file", ":rtp_packet_info", @@ -1033,13 +1033,13 @@ if (rtc_include_tests) { ":dummy_peer_connection", ":fake_frame_decryptor", ":fake_frame_encryptor", - ":fake_media_transport", - ":loopback_media_transport", ":mock_audio_mixer", ":mock_frame_decryptor", ":mock_frame_encryptor", + ":mock_peer_connection_factory_interface", ":mock_peerconnectioninterface", ":mock_rtp", + ":mock_transformable_video_frame", ":mock_video_bitrate_allocator", ":mock_video_bitrate_allocator_factory", ":mock_video_codec_factory", diff --git a/api/DEPS b/api/DEPS index 1e92b12281..220b30b3cf 100644 --- a/api/DEPS +++ b/api/DEPS @@ -115,11 +115,6 @@ specific_include_rules = { "+rtc_base/ref_count.h", ], - "media_transport_interface\.h": [ - "+rtc_base/copy_on_write_buffer.h", # As used by datachannelinterface.h - "+rtc_base/network_route.h", - ], - "packet_socket_factory\.h": [ "+rtc_base/proxy_info.h", "+rtc_base/async_packet_socket.h", @@ -246,6 +241,10 @@ specific_include_rules = { "+modules/audio_processing/include/audio_processing.h", ], + "echo_detector_creator\.h": [ + "+modules/audio_processing/include/audio_processing.h", + ], + "fake_frame_decryptor\.h": [ "+rtc_base/ref_counted_object.h", ], @@ -259,7 +258,6 @@ specific_include_rules = { ], "simulated_network\.h": [ - "+rtc_base/critical_section.h", "+rtc_base/random.h", 
"+rtc_base/thread_annotations.h", ], diff --git a/api/adaptation/BUILD.gn b/api/adaptation/BUILD.gn new file mode 100644 index 0000000000..dc4c73711e --- /dev/null +++ b/api/adaptation/BUILD.gn @@ -0,0 +1,23 @@ +# Copyright(c) 2020 The WebRTC project authors.All Rights Reserved. +# +# Use of this source code is governed by a BSD - style license +# that can be found in the LICENSE file in the root of the source +# tree.An additional intellectual property rights grant can be found +# in the file PATENTS.All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../webrtc.gni") + +rtc_source_set("resource_adaptation_api") { + visibility = [ "*" ] + sources = [ + "resource.cc", + "resource.h", + ] + deps = [ + "../../api:scoped_refptr", + "../../rtc_base:refcount", + "../../rtc_base:rtc_base_approved", + "../../rtc_base/system:rtc_export", + ] +} diff --git a/api/adaptation/DEPS b/api/adaptation/DEPS new file mode 100644 index 0000000000..cab7fb8e14 --- /dev/null +++ b/api/adaptation/DEPS @@ -0,0 +1,7 @@ +specific_include_rules = { + "resource\.h": [ + # ref_count.h is a public_deps of rtc_base_approved. Necessary because of + # rtc::RefCountInterface. + "+rtc_base/ref_count.h", + ], +} \ No newline at end of file diff --git a/api/adaptation/resource.cc b/api/adaptation/resource.cc new file mode 100644 index 0000000000..0a9c83a311 --- /dev/null +++ b/api/adaptation/resource.cc @@ -0,0 +1,30 @@ +/* + * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/adaptation/resource.h" + +namespace webrtc { + +const char* ResourceUsageStateToString(ResourceUsageState usage_state) { + switch (usage_state) { + case ResourceUsageState::kOveruse: + return "kOveruse"; + case ResourceUsageState::kUnderuse: + return "kUnderuse"; + } +} + +ResourceListener::~ResourceListener() {} + +Resource::Resource() {} + +Resource::~Resource() {} + +} // namespace webrtc diff --git a/api/adaptation/resource.h b/api/adaptation/resource.h new file mode 100644 index 0000000000..9b3968055f --- /dev/null +++ b/api/adaptation/resource.h @@ -0,0 +1,67 @@ +/* + * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_ADAPTATION_RESOURCE_H_ +#define API_ADAPTATION_RESOURCE_H_ + +#include + +#include "api/scoped_refptr.h" +#include "rtc_base/ref_count.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +class Resource; + +enum class ResourceUsageState { + // Action is needed to minimze the load on this resource. + kOveruse, + // Increasing the load on this resource is desired, if possible. + kUnderuse, +}; + +RTC_EXPORT const char* ResourceUsageStateToString( + ResourceUsageState usage_state); + +class RTC_EXPORT ResourceListener { + public: + virtual ~ResourceListener(); + + virtual void OnResourceUsageStateMeasured( + rtc::scoped_refptr resource, + ResourceUsageState usage_state) = 0; +}; + +// A Resource monitors an implementation-specific resource. It may report +// kOveruse or kUnderuse when resource usage is high or low enough that we +// should perform some sort of mitigation to fulfil the resource's constraints. 
+// +// The methods on this interface are invoked on the adaptation task queue. +// Resource usage measurements may be performed on an any task queue. +// +// The Resource is reference counted to prevent use-after-free when posting +// between task queues. As such, the implementation MUST NOT make any +// assumptions about which task queue Resource is destructed on. +class RTC_EXPORT Resource : public rtc::RefCountInterface { + public: + Resource(); + // Destruction may happen on any task queue. + ~Resource() override; + + virtual std::string Name() const = 0; + // The |listener| may be informed of resource usage measurements on any task + // queue, but not after this method is invoked with the null argument. + virtual void SetResourceListener(ResourceListener* listener) = 0; +}; + +} // namespace webrtc + +#endif // API_ADAPTATION_RESOURCE_H_ diff --git a/api/array_view_unittest.cc b/api/array_view_unittest.cc index 8aa858805f..0357f68aa2 100644 --- a/api/array_view_unittest.cc +++ b/api/array_view_unittest.cc @@ -38,7 +38,7 @@ void CallFixed(ArrayView av) {} } // namespace -TEST(ArrayViewTest, TestConstructFromPtrAndArray) { +TEST(ArrayViewDeathTest, TestConstructFromPtrAndArray) { char arr[] = "Arrr!"; const char carr[] = "Carrr!"; EXPECT_EQ(6u, Call(arr)); @@ -409,7 +409,7 @@ TEST(FixArrayViewTest, TestSwapFixed) { // swap(x, w); // Compile error, because different sizes. 
} -TEST(ArrayViewTest, TestIndexing) { +TEST(ArrayViewDeathTest, TestIndexing) { char arr[] = "abcdefg"; ArrayView x(arr); const ArrayView y(arr); diff --git a/api/audio/BUILD.gn b/api/audio/BUILD.gn index 2405d9d041..117e5cc0ab 100644 --- a/api/audio/BUILD.gn +++ b/api/audio/BUILD.gn @@ -61,8 +61,8 @@ rtc_library("aec3_config_json") { "../../rtc_base:rtc_base_approved", "../../rtc_base:rtc_json", "../../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/strings", ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("aec3_factory") { @@ -87,3 +87,17 @@ rtc_source_set("echo_control") { sources = [ "echo_control.h" ] deps = [ "../../rtc_base:checks" ] } + +rtc_source_set("echo_detector_creator") { + visibility = [ "*" ] + sources = [ + "echo_detector_creator.cc", + "echo_detector_creator.h", + ] + deps = [ + "../../api:scoped_refptr", + "../../modules/audio_processing:api", + "../../modules/audio_processing:audio_processing", + "../../rtc_base:refcount", + ] +} diff --git a/api/audio/audio_frame.cc b/api/audio/audio_frame.cc index 47459ac333..c6e5cf4dd6 100644 --- a/api/audio/audio_frame.cc +++ b/api/audio/audio_frame.cc @@ -11,6 +11,8 @@ #include "api/audio/audio_frame.h" #include +#include +#include #include "rtc_base/checks.h" #include "rtc_base/time_utils.h" @@ -22,6 +24,28 @@ AudioFrame::AudioFrame() { static_assert(sizeof(data_) == kMaxDataSizeBytes, "kMaxDataSizeBytes"); } +void swap(AudioFrame& a, AudioFrame& b) { + using std::swap; + swap(a.timestamp_, b.timestamp_); + swap(a.elapsed_time_ms_, b.elapsed_time_ms_); + swap(a.ntp_time_ms_, b.ntp_time_ms_); + swap(a.samples_per_channel_, b.samples_per_channel_); + swap(a.sample_rate_hz_, b.sample_rate_hz_); + swap(a.num_channels_, b.num_channels_); + swap(a.channel_layout_, b.channel_layout_); + swap(a.speech_type_, b.speech_type_); + swap(a.vad_activity_, b.vad_activity_); + swap(a.profile_timestamp_ms_, b.profile_timestamp_ms_); + swap(a.packet_infos_, b.packet_infos_); + const 
size_t length_a = a.samples_per_channel_ * a.num_channels_; + const size_t length_b = b.samples_per_channel_ * b.num_channels_; + RTC_DCHECK_LE(length_a, AudioFrame::kMaxDataSizeSamples); + RTC_DCHECK_LE(length_b, AudioFrame::kMaxDataSizeSamples); + std::swap_ranges(a.data_, a.data_ + std::max(length_a, length_b), b.data_); + swap(a.muted_, b.muted_); + swap(a.absolute_capture_timestamp_ms_, b.absolute_capture_timestamp_ms_); +} + void AudioFrame::Reset() { ResetWithoutMuting(); muted_ = true; diff --git a/api/audio/audio_frame.h b/api/audio/audio_frame.h index 06b0b28b38..78539f57eb 100644 --- a/api/audio/audio_frame.h +++ b/api/audio/audio_frame.h @@ -14,6 +14,8 @@ #include #include +#include + #include "api/audio/channel_layout.h" #include "api/rtp_packet_infos.h" #include "rtc_base/constructor_magic.h" @@ -58,6 +60,8 @@ class AudioFrame { AudioFrame(); + friend void swap(AudioFrame& a, AudioFrame& b); + // Resets all members to their default state. void Reset(); // Same as Reset(), but leaves mute state unchanged. Muting a frame requires diff --git a/api/audio/echo_detector_creator.cc b/api/audio/echo_detector_creator.cc new file mode 100644 index 0000000000..4c3d9e61fe --- /dev/null +++ b/api/audio/echo_detector_creator.cc @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "api/audio/echo_detector_creator.h" + +#include "modules/audio_processing/residual_echo_detector.h" +#include "rtc_base/ref_counted_object.h" + +namespace webrtc { + +rtc::scoped_refptr CreateEchoDetector() { + return new rtc::RefCountedObject(); +} + +} // namespace webrtc diff --git a/api/audio/echo_detector_creator.h b/api/audio/echo_detector_creator.h new file mode 100644 index 0000000000..5ba171de97 --- /dev/null +++ b/api/audio/echo_detector_creator.h @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_ECHO_DETECTOR_CREATOR_H_ +#define API_AUDIO_ECHO_DETECTOR_CREATOR_H_ + +#include "api/scoped_refptr.h" +#include "modules/audio_processing/include/audio_processing.h" + +namespace webrtc { + +// Returns an instance of the WebRTC implementation of a residual echo detector. +// It can be provided to the webrtc::AudioProcessingBuilder to obtain the +// usual residual echo metrics. 
+rtc::scoped_refptr CreateEchoDetector(); + +} // namespace webrtc + +#endif // API_AUDIO_ECHO_DETECTOR_CREATOR_H_ diff --git a/api/audio/test/audio_frame_unittest.cc b/api/audio/test/audio_frame_unittest.cc index dbf45ceabc..f8d3318274 100644 --- a/api/audio/test/audio_frame_unittest.cc +++ b/api/audio/test/audio_frame_unittest.cc @@ -133,4 +133,54 @@ TEST(AudioFrameTest, CopyFrom) { EXPECT_EQ(0, memcmp(frame2.data(), frame1.data(), sizeof(samples))); } +TEST(AudioFrameTest, SwapFrames) { + AudioFrame frame1, frame2; + int16_t samples1[kNumChannelsMono * kSamplesPerChannel]; + for (size_t i = 0; i < kNumChannelsMono * kSamplesPerChannel; ++i) { + samples1[i] = i; + } + frame1.UpdateFrame(kTimestamp, samples1, kSamplesPerChannel, kSampleRateHz, + AudioFrame::kPLC, AudioFrame::kVadActive, + kNumChannelsMono); + frame1.set_absolute_capture_timestamp_ms(12345678); + const auto frame1_channel_layout = frame1.channel_layout(); + + int16_t samples2[(kNumChannelsMono + 1) * (kSamplesPerChannel + 1)]; + for (size_t i = 0; i < (kNumChannelsMono + 1) * (kSamplesPerChannel + 1); + ++i) { + samples2[i] = 1000 + i; + } + frame2.UpdateFrame(kTimestamp + 1, samples2, kSamplesPerChannel + 1, + kSampleRateHz + 1, AudioFrame::kNormalSpeech, + AudioFrame::kVadPassive, kNumChannelsMono + 1); + const auto frame2_channel_layout = frame2.channel_layout(); + + swap(frame1, frame2); + + EXPECT_EQ(kTimestamp + 1, frame1.timestamp_); + ASSERT_EQ(kSamplesPerChannel + 1, frame1.samples_per_channel_); + EXPECT_EQ(kSampleRateHz + 1, frame1.sample_rate_hz_); + EXPECT_EQ(AudioFrame::kNormalSpeech, frame1.speech_type_); + EXPECT_EQ(AudioFrame::kVadPassive, frame1.vad_activity_); + ASSERT_EQ(kNumChannelsMono + 1, frame1.num_channels_); + for (size_t i = 0; i < (kNumChannelsMono + 1) * (kSamplesPerChannel + 1); + ++i) { + EXPECT_EQ(samples2[i], frame1.data()[i]); + } + EXPECT_FALSE(frame1.absolute_capture_timestamp_ms()); + EXPECT_EQ(frame2_channel_layout, frame1.channel_layout()); + + 
EXPECT_EQ(kTimestamp, frame2.timestamp_); + ASSERT_EQ(kSamplesPerChannel, frame2.samples_per_channel_); + EXPECT_EQ(kSampleRateHz, frame2.sample_rate_hz_); + EXPECT_EQ(AudioFrame::kPLC, frame2.speech_type_); + EXPECT_EQ(AudioFrame::kVadActive, frame2.vad_activity_); + ASSERT_EQ(kNumChannelsMono, frame2.num_channels_); + for (size_t i = 0; i < kNumChannelsMono * kSamplesPerChannel; ++i) { + EXPECT_EQ(samples1[i], frame2.data()[i]); + } + EXPECT_EQ(12345678, frame2.absolute_capture_timestamp_ms()); + EXPECT_EQ(frame1_channel_layout, frame2.channel_layout()); +} + } // namespace webrtc diff --git a/api/audio_codecs/BUILD.gn b/api/audio_codecs/BUILD.gn index 987e20f178..b6292de570 100644 --- a/api/audio_codecs/BUILD.gn +++ b/api/audio_codecs/BUILD.gn @@ -38,6 +38,8 @@ rtc_library("audio_codecs_api") { "../../rtc_base:sanitizer", "../../rtc_base/system:rtc_export", "../units:time_delta", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] diff --git a/api/audio_codecs/L16/BUILD.gn b/api/audio_codecs/L16/BUILD.gn index bef671237e..1f7a1e5a0b 100644 --- a/api/audio_codecs/L16/BUILD.gn +++ b/api/audio_codecs/L16/BUILD.gn @@ -25,6 +25,8 @@ rtc_library("audio_encoder_L16") { "../../../rtc_base:rtc_base_approved", "../../../rtc_base:safe_minmax", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -42,6 +44,8 @@ rtc_library("audio_decoder_L16") { "../../../modules/audio_coding:pcm16b", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] diff --git a/api/audio_codecs/g711/BUILD.gn b/api/audio_codecs/g711/BUILD.gn index ba0586b901..92d77bed9f 100644 --- a/api/audio_codecs/g711/BUILD.gn +++ b/api/audio_codecs/g711/BUILD.gn @@ -25,6 +25,8 @@ rtc_library("audio_encoder_g711") { 
"../../../rtc_base:rtc_base_approved", "../../../rtc_base:safe_minmax", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -42,6 +44,8 @@ rtc_library("audio_decoder_g711") { "../../../modules/audio_coding:g711", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] diff --git a/api/audio_codecs/g722/BUILD.gn b/api/audio_codecs/g722/BUILD.gn index 8738ef889a..a186eabbb7 100644 --- a/api/audio_codecs/g722/BUILD.gn +++ b/api/audio_codecs/g722/BUILD.gn @@ -31,6 +31,8 @@ rtc_library("audio_encoder_g722") { "../../../rtc_base:rtc_base_approved", "../../../rtc_base:safe_minmax", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -48,6 +50,8 @@ rtc_library("audio_decoder_g722") { "../../../modules/audio_coding:g722", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] diff --git a/api/audio_codecs/ilbc/BUILD.gn b/api/audio_codecs/ilbc/BUILD.gn index 066a73cff2..b6a5045eaf 100644 --- a/api/audio_codecs/ilbc/BUILD.gn +++ b/api/audio_codecs/ilbc/BUILD.gn @@ -30,6 +30,8 @@ rtc_library("audio_encoder_ilbc") { "../../../modules/audio_coding:ilbc", "../../../rtc_base:rtc_base_approved", "../../../rtc_base:safe_minmax", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -46,6 +48,8 @@ rtc_library("audio_decoder_ilbc") { "..:audio_codecs_api", "../../../modules/audio_coding:ilbc", "../../../rtc_base:rtc_base_approved", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] diff --git 
a/api/audio_codecs/isac/BUILD.gn b/api/audio_codecs/isac/BUILD.gn index 9eb32147e1..6ff6e5f092 100644 --- a/api/audio_codecs/isac/BUILD.gn +++ b/api/audio_codecs/isac/BUILD.gn @@ -68,6 +68,8 @@ rtc_library("audio_encoder_isac_fix") { "../../../modules/audio_coding:isac_fix", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -85,6 +87,8 @@ rtc_library("audio_decoder_isac_fix") { "../../../modules/audio_coding:isac_fix", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -102,6 +106,8 @@ rtc_library("audio_encoder_isac_float") { "../../../modules/audio_coding:isac", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -119,6 +125,8 @@ rtc_library("audio_decoder_isac_float") { "../../../modules/audio_coding:isac", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] diff --git a/api/audio_codecs/opus/BUILD.gn b/api/audio_codecs/opus/BUILD.gn index 5fb626d990..586e9b3dd8 100644 --- a/api/audio_codecs/opus/BUILD.gn +++ b/api/audio_codecs/opus/BUILD.gn @@ -23,8 +23,8 @@ rtc_library("audio_encoder_opus_config") { deps = [ "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] defines = [] if (rtc_opus_variable_complexity) { defines += [ "WEBRTC_OPUS_VARIABLE_COMPLEXITY=1" ] @@ -49,6 +49,8 @@ rtc_library("audio_encoder_opus") { "../../../modules/audio_coding:webrtc_opus", 
"../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -66,6 +68,8 @@ rtc_library("audio_decoder_opus") { "../../../modules/audio_coding:webrtc_opus", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] @@ -82,8 +86,8 @@ rtc_library("audio_encoder_multiopus") { "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", "../opus:audio_encoder_opus_config", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("audio_decoder_multiopus") { @@ -99,6 +103,8 @@ rtc_library("audio_decoder_multiopus") { "../../../modules/audio_coding:webrtc_multiopus", "../../../rtc_base:rtc_base_approved", "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", diff --git a/api/audio_options.h b/api/audio_options.h index b714998c6b..1b0d1ad0bd 100644 --- a/api/audio_options.h +++ b/api/audio_options.h @@ -75,6 +75,8 @@ struct RTC_EXPORT AudioOptions { // and check if any other AudioOptions members are unused. absl::optional combined_audio_video_bwe; // Enable audio network adaptor. + // TODO(webrtc:11717): Remove this API in favor of adaptivePtime in + // RtpEncodingParameters. absl::optional audio_network_adaptor; // Config string for audio network adaptor. 
absl::optional audio_network_adaptor_config; diff --git a/api/data_channel_interface.h b/api/data_channel_interface.h index e08830feaf..5b2b1263ab 100644 --- a/api/data_channel_interface.h +++ b/api/data_channel_interface.h @@ -20,6 +20,7 @@ #include #include "absl/types/optional.h" +#include "api/priority.h" #include "api/rtc_error.h" #include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" @@ -61,6 +62,9 @@ struct DataChannelInit { // The stream id, or SID, for SCTP data channels. -1 if unset (see above). int id = -1; + + // https://w3c.github.io/webrtc-priority/#new-rtcdatachannelinit-member + absl::optional priority; }; // At the JavaScript level, data can be passed in as a string or a blob, so @@ -154,6 +158,7 @@ class RTC_EXPORT DataChannelInterface : public rtc::RefCountInterface { // If negotiated in-band, this ID will be populated once the DTLS role is // determined, and until then this will return -1. virtual int id() const = 0; + virtual Priority priority() const { return Priority::kLow; } virtual DataState state() const = 0; // When state is kClosed, and the DataChannel was not closed using // the closing procedure, returns the error information about the closing. diff --git a/api/frame_transformer_interface.h b/api/frame_transformer_interface.h index e712b3c190..2cfe6edb88 100644 --- a/api/frame_transformer_interface.h +++ b/api/frame_transformer_interface.h @@ -16,6 +16,7 @@ #include "api/scoped_refptr.h" #include "api/video/encoded_frame.h" +#include "api/video/video_frame_metadata.h" #include "rtc_base/ref_count.h" namespace webrtc { @@ -48,6 +49,8 @@ class TransformableVideoFrameInterface : public TransformableFrameInterface { // TODO(bugs.webrtc.org/11380) remove from interface once // webrtc::RtpDescriptorAuthentication is exposed in api/. 
virtual std::vector GetAdditionalData() const = 0; + + virtual const VideoFrameMetadata& GetMetadata() const = 0; }; // Extends the TransformableFrameInterface to expose audio-specific information. diff --git a/api/neteq/BUILD.gn b/api/neteq/BUILD.gn index 1ab02ec92b..4e85c4d268 100644 --- a/api/neteq/BUILD.gn +++ b/api/neteq/BUILD.gn @@ -23,8 +23,8 @@ rtc_source_set("neteq_api") { "../../rtc_base:rtc_base_approved", "../../system_wrappers:system_wrappers", "../audio_codecs:audio_codecs_api", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("custom_neteq_factory") { @@ -56,8 +56,8 @@ rtc_source_set("neteq_controller_api") { ":tick_timer", "../../rtc_base:rtc_base_approved", "../../system_wrappers:system_wrappers", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("default_neteq_controller_factory") { diff --git a/api/neteq/neteq.cc b/api/neteq/neteq.cc index 155ddf2cf3..e8ef4dbd39 100644 --- a/api/neteq/neteq.cc +++ b/api/neteq/neteq.cc @@ -30,7 +30,8 @@ std::string NetEq::Config::ToString() const { << ", min_delay_ms=" << min_delay_ms << ", enable_fast_accelerate=" << (enable_fast_accelerate ? "true" : "false") << ", enable_muted_state=" << (enable_muted_state ? "true" : "false") - << ", enable_rtx_handling=" << (enable_rtx_handling ? "true" : "false"); + << ", enable_rtx_handling=" << (enable_rtx_handling ? "true" : "false") + << ", extra_output_delay_ms=" << extra_output_delay_ms; return ss.str(); } diff --git a/api/neteq/neteq.h b/api/neteq/neteq.h index f62d3795f0..15ad3aac0e 100644 --- a/api/neteq/neteq.h +++ b/api/neteq/neteq.h @@ -138,6 +138,10 @@ class NetEq { bool enable_rtx_handling = false; absl::optional codec_pair_id; bool for_test_no_time_stretching = false; // Use only for testing. + // Adds extra delay to the output of NetEq, without affecting jitter or + // loss behavior. 
This is mainly for testing. Value must be a non-negative + // multiple of 10 ms. + int extra_output_delay_ms = 0; }; enum ReturnCodes { kOK = 0, kFail = -1 }; diff --git a/api/peer_connection_interface.cc b/api/peer_connection_interface.cc index 0c25405784..f82e84b80f 100644 --- a/api/peer_connection_interface.cc +++ b/api/peer_connection_interface.cc @@ -53,27 +53,6 @@ RTCError PeerConnectionInterface::SetConfiguration( return RTCError(); } -RTCError PeerConnectionInterface::SetBitrate(const BitrateSettings& bitrate) { - BitrateParameters bitrate_parameters; - bitrate_parameters.min_bitrate_bps = bitrate.min_bitrate_bps; - bitrate_parameters.current_bitrate_bps = bitrate.start_bitrate_bps; - bitrate_parameters.max_bitrate_bps = bitrate.max_bitrate_bps; - return SetBitrate(bitrate_parameters); -} - -RTCError PeerConnectionInterface::SetBitrate( - const BitrateParameters& bitrate_parameters) { - BitrateSettings bitrate; - bitrate.min_bitrate_bps = bitrate_parameters.min_bitrate_bps; - bitrate.start_bitrate_bps = bitrate_parameters.current_bitrate_bps; - bitrate.max_bitrate_bps = bitrate_parameters.max_bitrate_bps; - return SetBitrate(bitrate); -} - -PeerConnectionInterface::BitrateParameters::BitrateParameters() = default; - -PeerConnectionInterface::BitrateParameters::~BitrateParameters() = default; - PeerConnectionDependencies::PeerConnectionDependencies( PeerConnectionObserver* observer_in) : observer(observer_in) {} diff --git a/api/peer_connection_interface.h b/api/peer_connection_interface.h index 1d81de74d8..fd4d2df6a7 100644 --- a/api/peer_connection_interface.h +++ b/api/peer_connection_interface.h @@ -73,6 +73,7 @@ #include #include +#include "api/adaptation/resource.h" #include "api/async_resolver_factory.h" #include "api/audio/audio_mixer.h" #include "api/audio_codecs/audio_decoder_factory.h" @@ -102,7 +103,6 @@ #include "api/task_queue/task_queue_factory.h" #include "api/transport/bitrate_settings.h" #include "api/transport/enums.h" -#include 
"api/transport/media/media_transport_interface.h" #include "api/transport/network_control.h" #include "api/transport/webrtc_key_value_config.h" #include "api/turn_customizer.h" @@ -613,34 +613,6 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // correctly. This flag will be deprecated soon. Do not rely on it. bool active_reset_srtp_params = false; - // DEPRECATED. Do not use. This option is ignored by peer connection. - // TODO(webrtc:9719): Delete this option. - bool use_media_transport = false; - - // DEPRECATED. Do not use. This option is ignored by peer connection. - // TODO(webrtc:9719): Delete this option. - bool use_media_transport_for_data_channels = false; - - // If MediaTransportFactory is provided in PeerConnectionFactory, this flag - // informs PeerConnection that it should use the DatagramTransportInterface - // for packets instead DTLS. It's invalid to set it to |true| if the - // MediaTransportFactory wasn't provided. - absl::optional use_datagram_transport; - - // If MediaTransportFactory is provided in PeerConnectionFactory, this flag - // informs PeerConnection that it should use the DatagramTransport's - // implementation of DataChannelTransportInterface for data channels instead - // of SCTP-DTLS. - absl::optional use_datagram_transport_for_data_channels; - - // If true, this PeerConnection will only use datagram transport for data - // channels when receiving an incoming offer that includes datagram - // transport parameters. It will not request use of a datagram transport - // when it creates the initial, outgoing offer. - // This setting only applies when |use_datagram_transport_for_data_channels| - // is true. - absl::optional use_datagram_transport_for_data_channels_receive_only; - // Defines advanced optional cryptographic settings related to SRTP and // frame encryption for native WebRTC. Setting this will overwrite any // settings set in PeerConnectionFactory (which is deprecated). 
@@ -666,8 +638,6 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // Whether network condition based codec switching is allowed. absl::optional allow_codec_switching; - bool enable_simulcast_stats = true; - // // Don't forget to update operator== if adding something. // @@ -1045,28 +1015,13 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { virtual bool RemoveIceCandidates( const std::vector& candidates) = 0; - // 0 <= min <= current <= max should hold for set parameters. - struct BitrateParameters { - BitrateParameters(); - ~BitrateParameters(); - - absl::optional min_bitrate_bps; - absl::optional current_bitrate_bps; - absl::optional max_bitrate_bps; - }; - // SetBitrate limits the bandwidth allocated for all RTP streams sent by // this PeerConnection. Other limitations might affect these limits and // are respected (for example "b=AS" in SDP). // // Setting |current_bitrate_bps| will reset the current bitrate estimate // to the provided value. - virtual RTCError SetBitrate(const BitrateSettings& bitrate); - - // TODO(nisse): Deprecated - use version above. These two default - // implementations require subclasses to implement one or the other - // of the methods. - virtual RTCError SetBitrate(const BitrateParameters& bitrate_parameters); + virtual RTCError SetBitrate(const BitrateSettings& bitrate) = 0; // Enable/disable playout of received audio streams. Enabled by default. Note // that even if playout is enabled, streams will only be played out if the @@ -1118,6 +1073,14 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { return absl::nullopt; } + // When a resource is overused, the PeerConnection will try to reduce the load + // on the sysem, for example by reducing the resolution or frame rate of + // encoded streams. The Resource API allows injecting platform-specific usage + // measurements. The conditions to trigger kOveruse or kUnderuse are up to the + // implementation. 
+ // TODO(hbos): Make pure virtual when implemented by downstream projects. + virtual void AddAdaptationResource(rtc::scoped_refptr resource) {} + // Start RtcEventLog using an existing output-sink. Takes ownership of // |output| and passes it on to Call, which will take the ownership. If the // operation fails the output will be closed and deallocated. The event log @@ -1332,7 +1295,6 @@ struct RTC_EXPORT PeerConnectionFactoryDependencies final { std::unique_ptr network_state_predictor_factory; std::unique_ptr network_controller_factory; - std::unique_ptr media_transport_factory; std::unique_ptr neteq_factory; std::unique_ptr trials; }; diff --git a/api/peer_connection_proxy.h b/api/peer_connection_proxy.h index c278308ccb..23887e53da 100644 --- a/api/peer_connection_proxy.h +++ b/api/peer_connection_proxy.h @@ -132,6 +132,7 @@ PROXY_METHOD0(IceConnectionState, standardized_ice_connection_state) PROXY_METHOD0(PeerConnectionState, peer_connection_state) PROXY_METHOD0(IceGatheringState, ice_gathering_state) PROXY_METHOD0(absl::optional, can_trickle_ice_candidates) +PROXY_METHOD1(void, AddAdaptationResource, rtc::scoped_refptr) PROXY_METHOD2(bool, StartRtcEventLog, std::unique_ptr, diff --git a/test/fuzzers/rtp_rtcp_demuxer_helper_fuzzer.cc b/api/priority.h similarity index 56% rename from test/fuzzers/rtp_rtcp_demuxer_helper_fuzzer.cc rename to api/priority.h index f7403b9567..4953e453a3 100644 --- a/test/fuzzers/rtp_rtcp_demuxer_helper_fuzzer.cc +++ b/api/priority.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * Copyright 2020 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source @@ -8,16 +8,19 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include -#include - -#include "api/array_view.h" -#include "call/rtp_rtcp_demuxer_helper.h" +#ifndef API_PRIORITY_H_ +#define API_PRIORITY_H_ namespace webrtc { -void FuzzOneInput(const uint8_t* data, size_t size) { - ParseRtcpPacketSenderSsrc(rtc::MakeArrayView(data, size)); -} +// GENERATED_JAVA_ENUM_PACKAGE: org.webrtc +enum class Priority { + kVeryLow, + kLow, + kMedium, + kHigh, +}; } // namespace webrtc + +#endif // API_PRIORITY_H_ diff --git a/api/proxy.h b/api/proxy.h index 385992e659..b1ebe31acd 100644 --- a/api/proxy.h +++ b/api/proxy.h @@ -55,6 +55,7 @@ #include #include #include +#include #include #include "api/scoped_refptr.h" @@ -396,6 +397,16 @@ class ConstMethodCall : public rtc::Message, public rtc::MessageHandler { return call.Marshal(RTC_FROM_HERE, worker_thread_); \ } +// For use when returning purely const state (set during construction). +// Use with caution. This method should only be used when the return value will +// always be the same. +#define BYPASS_PROXY_CONSTMETHOD0(r, method) \ + r method() const override { \ + static_assert(!std::is_pointer::value, "Type is a pointer"); \ + static_assert(!std::is_reference::value, "Type is a reference"); \ + return c_->method(); \ + } + } // namespace webrtc #endif // API_PROXY_H_ diff --git a/api/rtc_event_log_output_file_unittest.cc b/api/rtc_event_log_output_file_unittest.cc index 071909b2c5..4274215491 100644 --- a/api/rtc_event_log_output_file_unittest.cc +++ b/api/rtc_event_log_output_file_unittest.cc @@ -141,14 +141,16 @@ TEST_F(RtcEventLogOutputFileTest, AllowReasonableFileSizeLimits) { } #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST_F(RtcEventLogOutputFileTest, WritingToInactiveFileForbidden) { +class RtcEventLogOutputFileDeathTest : public RtcEventLogOutputFileTest {}; + +TEST_F(RtcEventLogOutputFileDeathTest, WritingToInactiveFileForbidden) { RtcEventLogOutputFile output_file(output_file_name_, 2); ASSERT_FALSE(output_file.Write("abc")); 
ASSERT_FALSE(output_file.IsActive()); EXPECT_DEATH(output_file.Write("abc"), ""); } -TEST_F(RtcEventLogOutputFileTest, DisallowUnreasonableFileSizeLimits) { +TEST_F(RtcEventLogOutputFileDeathTest, DisallowUnreasonableFileSizeLimits) { // Keeping in a temporary unique_ptr to make it clearer that the death is // triggered by construction, not destruction. std::unique_ptr output_file; diff --git a/api/rtp_headers.cc b/api/rtp_headers.cc index bf973b6fe5..e0ad9eb26e 100644 --- a/api/rtp_headers.cc +++ b/api/rtp_headers.cc @@ -26,9 +26,7 @@ RTPHeaderExtension::RTPHeaderExtension() videoRotation(kVideoRotation_0), hasVideoContentType(false), videoContentType(VideoContentType::UNSPECIFIED), - has_video_timing(false), - has_frame_marking(false), - frame_marking({false, false, false, false, false, 0xFF, 0, 0}) {} + has_video_timing(false) {} RTPHeaderExtension::RTPHeaderExtension(const RTPHeaderExtension& other) = default; diff --git a/api/rtp_headers.h b/api/rtp_headers.h index 163347f675..454149ca6e 100644 --- a/api/rtp_headers.h +++ b/api/rtp_headers.h @@ -21,10 +21,9 @@ #include "api/units/timestamp.h" #include "api/video/color_space.h" #include "api/video/video_content_type.h" -#include "api/video/video_frame_marking.h" #include "api/video/video_rotation.h" #include "api/video/video_timing.h" -#include "common_types.h" // NOLINT(build/include) +#include "common_types.h" // NOLINT (build/include) namespace webrtc { @@ -143,9 +142,6 @@ struct RTPHeaderExtension { bool has_video_timing; VideoSendTiming video_timing; - bool has_frame_marking; - FrameMarking frame_marking; - PlayoutDelay playout_delay = {-1, -1}; // For identification of a stream when ssrc is not signaled. 
See diff --git a/api/rtp_parameters.cc b/api/rtp_parameters.cc index a05b2bfa7b..28acb68be6 100644 --- a/api/rtp_parameters.cc +++ b/api/rtp_parameters.cc @@ -18,6 +18,20 @@ namespace webrtc { +const char* DegradationPreferenceToString( + DegradationPreference degradation_preference) { + switch (degradation_preference) { + case DegradationPreference::DISABLED: + return "disabled"; + case DegradationPreference::MAINTAIN_FRAMERATE: + return "maintain-framerate"; + case DegradationPreference::MAINTAIN_RESOLUTION: + return "maintain-resolution"; + case DegradationPreference::BALANCED: + return "balanced"; + } +} + const double kDefaultBitratePriority = 1.0; RtcpFeedback::RtcpFeedback() = default; @@ -105,7 +119,6 @@ constexpr char RtpExtension::kAbsoluteCaptureTimeUri[]; constexpr char RtpExtension::kVideoRotationUri[]; constexpr char RtpExtension::kVideoContentTypeUri[]; constexpr char RtpExtension::kVideoTimingUri[]; -constexpr char RtpExtension::kFrameMarkingUri[]; constexpr char RtpExtension::kGenericFrameDescriptorUri00[]; constexpr char RtpExtension::kDependencyDescriptorUri[]; constexpr char RtpExtension::kTransportSequenceNumberUri[]; @@ -144,7 +157,6 @@ bool RtpExtension::IsSupportedForVideo(absl::string_view uri) { uri == webrtc::RtpExtension::kVideoContentTypeUri || uri == webrtc::RtpExtension::kVideoTimingUri || uri == webrtc::RtpExtension::kMidUri || - uri == webrtc::RtpExtension::kFrameMarkingUri || uri == webrtc::RtpExtension::kGenericFrameDescriptorUri00 || uri == webrtc::RtpExtension::kDependencyDescriptorUri || uri == webrtc::RtpExtension::kColorSpaceUri || diff --git a/api/rtp_parameters.h b/api/rtp_parameters.h index 49c1e0c885..b667bf812c 100644 --- a/api/rtp_parameters.h +++ b/api/rtp_parameters.h @@ -20,6 +20,7 @@ #include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/media_types.h" +#include "api/priority.h" #include "api/rtp_transceiver_direction.h" #include "rtc_base/system/rtc_export.h" @@ -91,15 +92,10 @@ enum 
class DegradationPreference { BALANCED, }; -RTC_EXPORT extern const double kDefaultBitratePriority; +RTC_EXPORT const char* DegradationPreferenceToString( + DegradationPreference degradation_preference); -// GENERATED_JAVA_ENUM_PACKAGE: org.webrtc -enum class Priority { - kVeryLow, - kLow, - kMedium, - kHigh, -}; +RTC_EXPORT extern const double kDefaultBitratePriority; struct RTC_EXPORT RtcpFeedback { RtcpFeedbackType type = RtcpFeedbackType::CCM; @@ -226,7 +222,7 @@ struct RTC_EXPORT RtpHeaderExtensionCapability { bool preferred_encrypt = false; // The direction of the extension. The kStopped value is only used with - // RtpTransceiverInterface::header_extensions_offered() and + // RtpTransceiverInterface::HeaderExtensionsToOffer() and // SetOfferedRtpHeaderExtensions(). RtpTransceiverDirection direction = RtpTransceiverDirection::kSendRecv; @@ -314,10 +310,6 @@ struct RTC_EXPORT RtpExtension { static constexpr char kVideoTimingUri[] = "http://www.webrtc.org/experiments/rtp-hdrext/video-timing"; - // Header extension for video frame marking. - static constexpr char kFrameMarkingUri[] = - "http://tools.ietf.org/html/draft-ietf-avtext-framemarking-07"; - // Experimental codec agnostic frame descriptor. static constexpr char kGenericFrameDescriptorUri00[] = "http://www.webrtc.org/experiments/rtp-hdrext/" @@ -481,6 +473,10 @@ struct RTC_EXPORT RtpEncodingParameters { // Called "encodingId" in ORTC. 
std::string rid; + // Allow dynamic frame length changes for audio: + // https://w3c.github.io/webrtc-extensions/#dom-rtcrtpencodingparameters-adaptiveptime + bool adaptive_ptime = false; + bool operator==(const RtpEncodingParameters& o) const { return ssrc == o.ssrc && bitrate_priority == o.bitrate_priority && network_priority == o.network_priority && @@ -489,7 +485,8 @@ struct RTC_EXPORT RtpEncodingParameters { max_framerate == o.max_framerate && num_temporal_layers == o.num_temporal_layers && scale_resolution_down_by == o.scale_resolution_down_by && - active == o.active && rid == o.rid; + active == o.active && rid == o.rid && + adaptive_ptime == o.adaptive_ptime; } bool operator!=(const RtpEncodingParameters& o) const { return !(*this == o); diff --git a/api/rtp_transceiver_interface.cc b/api/rtp_transceiver_interface.cc index d4e2b26e33..e795e51dfb 100644 --- a/api/rtp_transceiver_interface.cc +++ b/api/rtp_transceiver_interface.cc @@ -41,4 +41,10 @@ RtpTransceiverInterface::HeaderExtensionsToOffer() const { return {}; } +webrtc::RTCError RtpTransceiverInterface::SetOfferedRtpHeaderExtensions( + rtc::ArrayView + header_extensions_to_offer) { + return webrtc::RTCError(webrtc::RTCErrorType::UNSUPPORTED_OPERATION); +} + } // namespace webrtc diff --git a/api/rtp_transceiver_interface.h b/api/rtp_transceiver_interface.h index 9dbafd46ec..13277d9a50 100644 --- a/api/rtp_transceiver_interface.h +++ b/api/rtp_transceiver_interface.h @@ -133,6 +133,13 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface { virtual std::vector HeaderExtensionsToOffer() const; + // The SetOfferedRtpHeaderExtensions method modifies the next SDP negotiation + // so that it negotiates use of header extensions which are not kStopped. 
+ // https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface + virtual webrtc::RTCError SetOfferedRtpHeaderExtensions( + rtc::ArrayView + header_extensions_to_offer); + protected: ~RtpTransceiverInterface() override = default; }; diff --git a/api/stats/rtc_stats.h b/api/stats/rtc_stats.h index d45902e0a5..5de5b7fbb0 100644 --- a/api/stats/rtc_stats.h +++ b/api/stats/rtc_stats.h @@ -319,6 +319,14 @@ class RTCStatsMember : public RTCStatsMemberInterface { std::string ValueToString() const override; std::string ValueToJson() const override; + template + inline T ValueOrDefault(U default_value) const { + if (is_defined()) { + return *(*this); + } + return default_value; + } + // Assignment operators. T& operator=(const T& value) { value_ = value; diff --git a/api/stats/rtcstats_objects.h b/api/stats/rtcstats_objects.h index 28d841db09..7d8f5f5f9a 100644 --- a/api/stats/rtcstats_objects.h +++ b/api/stats/rtcstats_objects.h @@ -134,7 +134,7 @@ class RTC_EXPORT RTCDataChannelStats final : public RTCStats { RTCStatsMember label; RTCStatsMember protocol; - RTCStatsMember datachannelid; + RTCStatsMember data_channel_identifier; // TODO(hbos): Support enum types? "RTCStatsMember"? RTCStatsMember state; RTCStatsMember messages_sent; @@ -419,6 +419,18 @@ class RTC_EXPORT RTCInboundRTPStreamStats final : public RTCRTPStreamStats { // TODO(hbos): Collect and populate this value for both "audio" and "video", // currently not collected for "video". 
https://bugs.webrtc.org/7065 RTCStatsMember jitter; + RTCStatsMember jitter_buffer_delay; + RTCStatsMember jitter_buffer_emitted_count; + RTCStatsMember total_samples_received; + RTCStatsMember concealed_samples; + RTCStatsMember silent_concealed_samples; + RTCStatsMember concealment_events; + RTCStatsMember inserted_samples_for_deceleration; + RTCStatsMember removed_samples_for_acceleration; + RTCStatsMember audio_level; + RTCStatsMember total_audio_energy; + RTCStatsMember total_samples_duration; + RTCStatsMember frames_received; // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 RTCStatsMember round_trip_time; // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 @@ -441,8 +453,13 @@ class RTC_EXPORT RTCInboundRTPStreamStats final : public RTCRTPStreamStats { RTCStatsMember gap_loss_rate; // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 RTCStatsMember gap_discard_rate; + RTCStatsMember frame_width; + RTCStatsMember frame_height; + RTCStatsMember frame_bit_depth; + RTCStatsMember frames_per_second; RTCStatsMember frames_decoded; RTCStatsMember key_frames_decoded; + RTCStatsMember frames_dropped; RTCStatsMember total_decode_time; RTCStatsMember total_inter_frame_delay; RTCStatsMember total_squared_inter_frame_delay; @@ -602,7 +619,9 @@ class RTC_EXPORT RTCTransportStats final : public RTCStats { ~RTCTransportStats() override; RTCStatsMember bytes_sent; + RTCStatsMember packets_sent; RTCStatsMember bytes_received; + RTCStatsMember packets_received; RTCStatsMember rtcp_transport_stats_id; // TODO(hbos): Support enum types? "RTCStatsMember"? 
RTCStatsMember dtls_state; diff --git a/api/task_queue/BUILD.gn b/api/task_queue/BUILD.gn index 4c9f591ec1..1072057e3f 100644 --- a/api/task_queue/BUILD.gn +++ b/api/task_queue/BUILD.gn @@ -21,6 +21,8 @@ rtc_library("task_queue") { "../../rtc_base:checks", "../../rtc_base:macromagic", "../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/base:config", "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/strings", @@ -51,6 +53,8 @@ rtc_library("task_queue_test") { deps = [ "../../../webrtc_overrides:webrtc_component", "../../test:test_support", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", ] @@ -62,6 +66,8 @@ rtc_library("task_queue_test") { "../../rtc_base:timeutils", "../../rtc_base/task_utils:to_queued_task", "../../test:test_support", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/strings", ] diff --git a/api/task_queue/task_queue_test.cc b/api/task_queue/task_queue_test.cc index a8a799f11b..0d411d2d9c 100644 --- a/api/task_queue/task_queue_test.cc +++ b/api/task_queue/task_queue_test.cc @@ -37,9 +37,11 @@ TEST_P(TaskQueueTest, PostAndCheckCurrent) { rtc::Event event; auto queue = CreateTaskQueue(factory, "PostAndCheckCurrent"); - // We're not running a task, so there shouldn't be a current queue. + // We're not running a task, so |queue| shouldn't be current. + // Note that because rtc::Thread also supports the TQ interface and + // TestMainImpl::Init wraps the main test thread (bugs.webrtc.org/9714), that + // means that TaskQueueBase::Current() will still return a valid value. 
EXPECT_FALSE(queue->IsCurrent()); - EXPECT_FALSE(TaskQueueBase::Current()); queue->PostTask(ToQueuedTask([&event, &queue] { EXPECT_TRUE(queue->IsCurrent()); @@ -269,5 +271,10 @@ TEST_P(TaskQueueTest, PostTwoWithSharedUnprotectedState) { EXPECT_TRUE(done.Wait(1000)); } +// TaskQueueTest is a set of tests for any implementation of the TaskQueueBase. +// Tests are instantiated next to the concrete implementation(s). +// https://github.com/google/googletest/blob/master/googletest/docs/advanced.md#creating-value-parameterized-abstract-tests +GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(TaskQueueTest); + } // namespace } // namespace webrtc diff --git a/api/test/DEPS b/api/test/DEPS index 1a02bf16e9..4f985965fc 100644 --- a/api/test/DEPS +++ b/api/test/DEPS @@ -13,7 +13,6 @@ specific_include_rules = { ], "loopback_media_transport\.h": [ "+rtc_base/async_invoker.h", - "+rtc_base/critical_section.h", "+rtc_base/thread.h", "+rtc_base/thread_checker.h", ], diff --git a/api/test/audio_quality_analyzer_interface.h b/api/test/audio_quality_analyzer_interface.h index 88392d7fd2..c1044795d1 100644 --- a/api/test/audio_quality_analyzer_interface.h +++ b/api/test/audio_quality_analyzer_interface.h @@ -14,7 +14,7 @@ #include #include "api/test/stats_observer_interface.h" -#include "api/test/track_id_stream_label_map.h" +#include "api/test/track_id_stream_info_map.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -31,7 +31,7 @@ class AudioQualityAnalyzerInterface : public StatsObserverInterface { // stream_id matching. The caller is responsible for ensuring the // AnalyzerHelper outlives the instance of the AudioQualityAnalyzerInterface. virtual void Start(std::string test_case_name, - TrackIdStreamLabelMap* analyzer_helper) = 0; + TrackIdStreamInfoMap* analyzer_helper) = 0; // Will be called by the framework at the end of the test. The analyzer // has to finalize all its stats and it should report them. 
diff --git a/api/test/audioproc_float.cc b/api/test/audioproc_float.cc index bba9c622a1..c8d7ff7193 100644 --- a/api/test/audioproc_float.cc +++ b/api/test/audioproc_float.cc @@ -17,6 +17,12 @@ namespace webrtc { namespace test { +int AudioprocFloat(rtc::scoped_refptr audio_processing, + int argc, + char* argv[]) { + return AudioprocFloatImpl(std::move(audio_processing), argc, argv); +} + int AudioprocFloat(std::unique_ptr ap_builder, int argc, char* argv[]) { diff --git a/api/test/audioproc_float.h b/api/test/audioproc_float.h index 2625e6ad9a..fec2ad11fa 100644 --- a/api/test/audioproc_float.h +++ b/api/test/audioproc_float.h @@ -19,6 +19,22 @@ namespace webrtc { namespace test { +// This is an interface for the audio processing simulation utility. This +// utility can be used to simulate the audioprocessing module using a recording +// (either an AEC dump or wav files), and generate the output as a wav file. +// Any audio_processing object specified in the input is used for the +// simulation. The optional |audio_processing| object provides the +// AudioProcessing instance that is used during the simulation. Note that when +// the audio_processing object is specified all functionality that relies on +// using the AudioProcessingBuilder is deactivated, since the AudioProcessing +// object is already created and the builder is not used in the simulation. It +// is needed to pass the command line flags as |argc| and |argv|, so these can +// be interpreted properly by the utility. To see a list of all supported +// command line flags, run the executable with the '--help' flag. +int AudioprocFloat(rtc::scoped_refptr audio_processing, + int argc, + char* argv[]); + // This is an interface for the audio processing simulation utility. This // utility can be used to simulate the audioprocessing module using a recording // (either an AEC dump or wav files), and generate the output as a wav file. 
diff --git a/api/test/compile_all_headers.cc b/api/test/compile_all_headers.cc index 47c5c6ec84..4cece5b286 100644 --- a/api/test/compile_all_headers.cc +++ b/api/test/compile_all_headers.cc @@ -27,16 +27,17 @@ // "api/test/videocodec_test_fixture.h" // "api/test/videocodec_test_stats.h" +#include "api/test/dummy_peer_connection.h" #include "api/test/fake_frame_decryptor.h" #include "api/test/fake_frame_encryptor.h" -#include "api/test/fake_media_transport.h" -#include "api/test/loopback_media_transport.h" #include "api/test/mock_audio_mixer.h" #include "api/test/mock_frame_decryptor.h" #include "api/test/mock_frame_encryptor.h" +#include "api/test/mock_peer_connection_factory_interface.h" #include "api/test/mock_peerconnectioninterface.h" #include "api/test/mock_rtpreceiver.h" #include "api/test/mock_rtpsender.h" +#include "api/test/mock_transformable_video_frame.h" #include "api/test/mock_video_bitrate_allocator.h" #include "api/test/mock_video_bitrate_allocator_factory.h" #include "api/test/mock_video_decoder.h" diff --git a/api/test/create_network_emulation_manager.h b/api/test/create_network_emulation_manager.h index c57c34874c..f444743786 100644 --- a/api/test/create_network_emulation_manager.h +++ b/api/test/create_network_emulation_manager.h @@ -1,4 +1,3 @@ - /* * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. * @@ -18,6 +17,7 @@ namespace webrtc { +// Returns a non-null NetworkEmulationManager instance. 
std::unique_ptr CreateNetworkEmulationManager( TimeMode mode = TimeMode::kRealTime); diff --git a/api/test/create_peerconnection_quality_test_fixture.cc b/api/test/create_peerconnection_quality_test_fixture.cc index 1e027bf31a..2d9d0821fc 100644 --- a/api/test/create_peerconnection_quality_test_fixture.cc +++ b/api/test/create_peerconnection_quality_test_fixture.cc @@ -13,6 +13,7 @@ #include #include +#include "api/test/time_controller.h" #include "test/pc/e2e/peer_connection_quality_test.h" namespace webrtc { @@ -21,11 +22,12 @@ namespace webrtc_pc_e2e { std::unique_ptr CreatePeerConnectionE2EQualityTestFixture( std::string test_case_name, + TimeController& time_controller, std::unique_ptr audio_quality_analyzer, std::unique_ptr video_quality_analyzer) { return std::make_unique( - std::move(test_case_name), std::move(audio_quality_analyzer), - std::move(video_quality_analyzer)); + std::move(test_case_name), time_controller, + std::move(audio_quality_analyzer), std::move(video_quality_analyzer)); } } // namespace webrtc_pc_e2e diff --git a/api/test/create_peerconnection_quality_test_fixture.h b/api/test/create_peerconnection_quality_test_fixture.h index 330d86de02..95b9ced5d2 100644 --- a/api/test/create_peerconnection_quality_test_fixture.h +++ b/api/test/create_peerconnection_quality_test_fixture.h @@ -15,19 +15,25 @@ #include "api/test/audio_quality_analyzer_interface.h" #include "api/test/peerconnection_quality_test_fixture.h" +#include "api/test/time_controller.h" #include "api/test/video_quality_analyzer_interface.h" namespace webrtc { namespace webrtc_pc_e2e { // API is in development. Can be changed/removed without notice. + // Create test fixture to establish test call between Alice and Bob. // During the test Alice will be caller and Bob will answer the call. // |test_case_name| is a name of test case, that will be used for all metrics // reporting. +// |time_controller| is used to manage all rtc::Thread's and TaskQueue +// instances. 
Instance of |time_controller| have to outlive created fixture. +// Returns a non-null PeerConnectionE2EQualityTestFixture instance. std::unique_ptr CreatePeerConnectionE2EQualityTestFixture( std::string test_case_name, + TimeController& time_controller, std::unique_ptr audio_quality_analyzer, std::unique_ptr video_quality_analyzer); diff --git a/api/test/create_time_controller.cc b/api/test/create_time_controller.cc index d3b046bd61..a2c0cb713f 100644 --- a/api/test/create_time_controller.cc +++ b/api/test/create_time_controller.cc @@ -35,13 +35,18 @@ std::unique_ptr CreateTimeControllerBasedCallFactory( explicit TimeControllerBasedCallFactory(TimeController* time_controller) : time_controller_(time_controller) {} Call* CreateCall(const Call::Config& config) override { - return Call::Create(config, time_controller_->GetClock(), - time_controller_->CreateProcessThread("CallModules"), + if (!module_thread_) { + module_thread_ = SharedModuleThread::Create( + time_controller_->CreateProcessThread("CallModules"), + [this]() { module_thread_ = nullptr; }); + } + return Call::Create(config, time_controller_->GetClock(), module_thread_, time_controller_->CreateProcessThread("Pacer")); } private: TimeController* time_controller_; + rtc::scoped_refptr module_thread_; }; return std::make_unique(time_controller); } diff --git a/api/test/dummy_peer_connection.h b/api/test/dummy_peer_connection.h index 102b0684c0..97a97d0c81 100644 --- a/api/test/dummy_peer_connection.h +++ b/api/test/dummy_peer_connection.h @@ -194,10 +194,6 @@ class DummyPeerConnection : public PeerConnectionInterface { return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); } - RTCError SetBitrate(const BitrateParameters& bitrate_parameters) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); - } - void SetAudioPlayout(bool playout) override { FATAL() << "Not implemented"; } void SetAudioRecording(bool recording) override { FATAL() << "Not implemented"; diff 
--git a/api/test/fake_datagram_transport.h b/api/test/fake_datagram_transport.h deleted file mode 100644 index 847b4d842a..0000000000 --- a/api/test/fake_datagram_transport.h +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_TEST_FAKE_DATAGRAM_TRANSPORT_H_ -#define API_TEST_FAKE_DATAGRAM_TRANSPORT_H_ - -#include -#include - -#include "api/transport/datagram_transport_interface.h" -#include "api/transport/media/media_transport_interface.h" - -namespace webrtc { - -// Maxmum size of datagrams sent by |FakeDatagramTransport|. -constexpr size_t kMaxFakeDatagramSize = 1000; - -// Fake datagram transport. Does not support making an actual connection -// or sending data. Only used for tests that need to stub out a transport. -class FakeDatagramTransport : public DatagramTransportInterface { - public: - FakeDatagramTransport( - const MediaTransportSettings& settings, - std::string transport_parameters, - const std::function& - are_parameters_compatible) - : settings_(settings), - transport_parameters_(transport_parameters), - are_parameters_compatible_(are_parameters_compatible) {} - - ~FakeDatagramTransport() override { RTC_DCHECK(!state_callback_); } - - void Connect(rtc::PacketTransportInternal* packet_transport) override { - packet_transport_ = packet_transport; - } - - CongestionControlInterface* congestion_control() override { - return nullptr; // Datagram interface doesn't provide this yet. 
- } - - void SetTransportStateCallback( - MediaTransportStateCallback* callback) override { - state_callback_ = callback; - } - - RTCError SendDatagram(rtc::ArrayView data, - DatagramId datagram_id) override { - return RTCError::OK(); - } - - size_t GetLargestDatagramSize() const override { - return kMaxFakeDatagramSize; - } - - void SetDatagramSink(DatagramSinkInterface* sink) override {} - - std::string GetTransportParameters() const override { - if (settings_.remote_transport_parameters) { - return *settings_.remote_transport_parameters; - } - return transport_parameters_; - } - - RTCError SetRemoteTransportParameters( - absl::string_view remote_parameters) override { - if (are_parameters_compatible_(GetTransportParameters(), - remote_parameters)) { - return RTCError::OK(); - } - return RTCError(RTCErrorType::UNSUPPORTED_PARAMETER, - "Incompatible remote transport parameters"); - } - - RTCError OpenChannel(int channel_id) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION); - } - - RTCError SendData(int channel_id, - const SendDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION); - } - - RTCError CloseChannel(int channel_id) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION); - } - - void SetDataSink(DataChannelSink* /*sink*/) override {} - - bool IsReadyToSend() const override { return false; } - - rtc::PacketTransportInternal* packet_transport() { return packet_transport_; } - - void set_state(webrtc::MediaTransportState state) { - if (state_callback_) { - state_callback_->OnStateChanged(state); - } - } - - const MediaTransportSettings& settings() { return settings_; } - - private: - const MediaTransportSettings settings_; - const std::string transport_parameters_; - const std::function - are_parameters_compatible_; - - rtc::PacketTransportInternal* packet_transport_ = nullptr; - MediaTransportStateCallback* state_callback_ = nullptr; -}; - -} // namespace 
webrtc - -#endif // API_TEST_FAKE_DATAGRAM_TRANSPORT_H_ diff --git a/api/test/fake_media_transport.h b/api/test/fake_media_transport.h deleted file mode 100644 index 530394710a..0000000000 --- a/api/test/fake_media_transport.h +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_TEST_FAKE_MEDIA_TRANSPORT_H_ -#define API_TEST_FAKE_MEDIA_TRANSPORT_H_ - -#include -#include -#include -#include - -#include "absl/algorithm/container.h" -#include "api/test/fake_datagram_transport.h" -#include "api/transport/media/media_transport_interface.h" - -namespace webrtc { - -// Fake media transport factory creates fake media transport. -// Also creates fake datagram transport, since both media and datagram -// transports are created by |MediaTransportFactory|. 
-class FakeMediaTransportFactory : public MediaTransportFactory { - public: - explicit FakeMediaTransportFactory( - const absl::optional& transport_offer = "") - : transport_offer_(transport_offer) {} - ~FakeMediaTransportFactory() = default; - - std::string GetTransportName() const override { return "fake"; } - - RTCErrorOr> CreateMediaTransport( - rtc::PacketTransportInternal* packet_transport, - rtc::Thread* network_thread, - const MediaTransportSettings& settings) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION); - } - - RTCErrorOr> CreateMediaTransport( - rtc::Thread* network_thread, - const MediaTransportSettings& settings) override { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION); - } - - RTCErrorOr> - CreateDatagramTransport(rtc::Thread* network_thread, - const MediaTransportSettings& settings) override { - return std::unique_ptr( - new FakeDatagramTransport(settings, transport_offer_.value_or(""), - transport_parameters_comparison_)); - } - - void set_transport_parameters_comparison( - std::function comparison) { - transport_parameters_comparison_ = std::move(comparison); - } - - private: - const absl::optional transport_offer_; - std::function - transport_parameters_comparison_ = - [](absl::string_view local, absl::string_view remote) { - return local == remote; - }; -}; - -} // namespace webrtc - -#endif // API_TEST_FAKE_MEDIA_TRANSPORT_H_ diff --git a/api/test/loopback_media_transport.cc b/api/test/loopback_media_transport.cc deleted file mode 100644 index 18ce93cd7e..0000000000 --- a/api/test/loopback_media_transport.cc +++ /dev/null @@ -1,373 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "api/test/loopback_media_transport.h" - -#include - -#include "absl/algorithm/container.h" -#include "rtc_base/time_utils.h" - -namespace webrtc { - -namespace { - -constexpr size_t kLoopbackMaxDatagramSize = 1200; - -class WrapperDatagramTransport : public DatagramTransportInterface { - public: - explicit WrapperDatagramTransport(DatagramTransportInterface* wrapped) - : wrapped_(wrapped) {} - - // Datagram transport overrides. - void Connect(rtc::PacketTransportInternal* packet_transport) override { - return wrapped_->Connect(packet_transport); - } - - CongestionControlInterface* congestion_control() override { - return wrapped_->congestion_control(); - } - - void SetTransportStateCallback( - MediaTransportStateCallback* callback) override { - return wrapped_->SetTransportStateCallback(callback); - } - - RTCError SendDatagram(rtc::ArrayView data, - DatagramId datagram_id) override { - return wrapped_->SendDatagram(data, datagram_id); - } - - size_t GetLargestDatagramSize() const override { - return wrapped_->GetLargestDatagramSize(); - } - - void SetDatagramSink(DatagramSinkInterface* sink) override { - return wrapped_->SetDatagramSink(sink); - } - - std::string GetTransportParameters() const override { - return wrapped_->GetTransportParameters(); - } - - RTCError SetRemoteTransportParameters(absl::string_view parameters) override { - return wrapped_->SetRemoteTransportParameters(parameters); - } - - // Data channel overrides. 
- RTCError OpenChannel(int channel_id) override { - return wrapped_->OpenChannel(channel_id); - } - - RTCError SendData(int channel_id, - const SendDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) override { - return wrapped_->SendData(channel_id, params, buffer); - } - - RTCError CloseChannel(int channel_id) override { - return wrapped_->CloseChannel(channel_id); - } - - void SetDataSink(DataChannelSink* sink) override { - wrapped_->SetDataSink(sink); - } - - bool IsReadyToSend() const override { return wrapped_->IsReadyToSend(); } - - private: - DatagramTransportInterface* wrapped_; -}; - -} // namespace - -WrapperMediaTransportFactory::WrapperMediaTransportFactory( - DatagramTransportInterface* wrapped_datagram_transport) - : wrapped_datagram_transport_(wrapped_datagram_transport) {} - -WrapperMediaTransportFactory::WrapperMediaTransportFactory( - MediaTransportFactory* wrapped) - : wrapped_factory_(wrapped) {} - -RTCErrorOr> -WrapperMediaTransportFactory::CreateMediaTransport( - rtc::PacketTransportInternal* packet_transport, - rtc::Thread* network_thread, - const MediaTransportSettings& settings) { - return RTCError(RTCErrorType::UNSUPPORTED_OPERATION); -} - -RTCErrorOr> -WrapperMediaTransportFactory::CreateDatagramTransport( - rtc::Thread* network_thread, - const MediaTransportSettings& settings) { - created_transport_count_++; - if (wrapped_factory_) { - return wrapped_factory_->CreateDatagramTransport(network_thread, settings); - } - return { - std::make_unique(wrapped_datagram_transport_)}; -} - -std::string WrapperMediaTransportFactory::GetTransportName() const { - if (wrapped_factory_) { - return wrapped_factory_->GetTransportName(); - } - return "wrapped-transport"; -} - -int WrapperMediaTransportFactory::created_transport_count() const { - return created_transport_count_; -} - -RTCErrorOr> -WrapperMediaTransportFactory::CreateMediaTransport( - rtc::Thread* network_thread, - const MediaTransportSettings& settings) { - return 
RTCError(RTCErrorType::UNSUPPORTED_OPERATION); -} - -MediaTransportPair::MediaTransportPair(rtc::Thread* thread) - : first_datagram_transport_(thread), - second_datagram_transport_(thread), - first_factory_(&first_datagram_transport_), - second_factory_(&second_datagram_transport_) { - first_datagram_transport_.Connect(&second_datagram_transport_); - second_datagram_transport_.Connect(&first_datagram_transport_); -} - -MediaTransportPair::~MediaTransportPair() = default; - -MediaTransportPair::LoopbackDataChannelTransport::LoopbackDataChannelTransport( - rtc::Thread* thread) - : thread_(thread) {} - -MediaTransportPair::LoopbackDataChannelTransport:: - ~LoopbackDataChannelTransport() { - RTC_CHECK(data_sink_ == nullptr); -} - -void MediaTransportPair::LoopbackDataChannelTransport::Connect( - LoopbackDataChannelTransport* other) { - other_ = other; -} - -RTCError MediaTransportPair::LoopbackDataChannelTransport::OpenChannel( - int channel_id) { - // No-op. No need to open channels for the loopback. 
- return RTCError::OK(); -} - -RTCError MediaTransportPair::LoopbackDataChannelTransport::SendData( - int channel_id, - const SendDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) { - invoker_.AsyncInvoke(RTC_FROM_HERE, thread_, - [this, channel_id, params, buffer] { - other_->OnData(channel_id, params.type, buffer); - }); - return RTCError::OK(); -} - -RTCError MediaTransportPair::LoopbackDataChannelTransport::CloseChannel( - int channel_id) { - invoker_.AsyncInvoke(RTC_FROM_HERE, thread_, [this, channel_id] { - other_->OnRemoteCloseChannel(channel_id); - rtc::CritScope lock(&sink_lock_); - if (data_sink_) { - data_sink_->OnChannelClosed(channel_id); - } - }); - return RTCError::OK(); -} - -void MediaTransportPair::LoopbackDataChannelTransport::SetDataSink( - DataChannelSink* sink) { - rtc::CritScope lock(&sink_lock_); - data_sink_ = sink; - if (data_sink_ && ready_to_send_) { - data_sink_->OnReadyToSend(); - } -} - -bool MediaTransportPair::LoopbackDataChannelTransport::IsReadyToSend() const { - rtc::CritScope lock(&sink_lock_); - return ready_to_send_; -} - -void MediaTransportPair::LoopbackDataChannelTransport::FlushAsyncInvokes() { - invoker_.Flush(thread_); -} - -void MediaTransportPair::LoopbackDataChannelTransport::OnData( - int channel_id, - DataMessageType type, - const rtc::CopyOnWriteBuffer& buffer) { - rtc::CritScope lock(&sink_lock_); - if (data_sink_) { - data_sink_->OnDataReceived(channel_id, type, buffer); - } -} - -void MediaTransportPair::LoopbackDataChannelTransport::OnRemoteCloseChannel( - int channel_id) { - rtc::CritScope lock(&sink_lock_); - if (data_sink_) { - data_sink_->OnChannelClosing(channel_id); - data_sink_->OnChannelClosed(channel_id); - } -} - -void MediaTransportPair::LoopbackDataChannelTransport::OnReadyToSend( - bool ready_to_send) { - invoker_.AsyncInvoke(RTC_FROM_HERE, thread_, [this, ready_to_send] { - rtc::CritScope lock(&sink_lock_); - ready_to_send_ = ready_to_send; - // Propagate state to data channel sink, if 
present. - if (data_sink_ && ready_to_send_) { - data_sink_->OnReadyToSend(); - } - }); -} - -MediaTransportPair::LoopbackDatagramTransport::LoopbackDatagramTransport( - rtc::Thread* thread) - : thread_(thread), dc_transport_(thread) {} - -void MediaTransportPair::LoopbackDatagramTransport::Connect( - LoopbackDatagramTransport* other) { - other_ = other; - dc_transport_.Connect(&other->dc_transport_); -} - -void MediaTransportPair::LoopbackDatagramTransport::Connect( - rtc::PacketTransportInternal* packet_transport) { - if (state_after_connect_) { - SetState(*state_after_connect_); - } -} - -CongestionControlInterface* -MediaTransportPair::LoopbackDatagramTransport::congestion_control() { - return nullptr; -} - -void MediaTransportPair::LoopbackDatagramTransport::SetTransportStateCallback( - MediaTransportStateCallback* callback) { - RTC_DCHECK_RUN_ON(thread_); - state_callback_ = callback; - if (state_callback_) { - state_callback_->OnStateChanged(state_); - } -} - -RTCError MediaTransportPair::LoopbackDatagramTransport::SendDatagram( - rtc::ArrayView data, - DatagramId datagram_id) { - rtc::CopyOnWriteBuffer buffer; - buffer.SetData(data.data(), data.size()); - invoker_.AsyncInvoke( - RTC_FROM_HERE, thread_, [this, datagram_id, buffer = std::move(buffer)] { - RTC_DCHECK_RUN_ON(thread_); - other_->DeliverDatagram(std::move(buffer)); - if (sink_) { - DatagramAck ack; - ack.datagram_id = datagram_id; - ack.receive_timestamp = Timestamp::Micros(rtc::TimeMicros()); - sink_->OnDatagramAcked(ack); - } - }); - return RTCError::OK(); -} - -size_t MediaTransportPair::LoopbackDatagramTransport::GetLargestDatagramSize() - const { - return kLoopbackMaxDatagramSize; -} - -void MediaTransportPair::LoopbackDatagramTransport::SetDatagramSink( - DatagramSinkInterface* sink) { - RTC_DCHECK_RUN_ON(thread_); - sink_ = sink; -} - -std::string -MediaTransportPair::LoopbackDatagramTransport::GetTransportParameters() const { - return transport_parameters_; -} - -RTCError 
-MediaTransportPair::LoopbackDatagramTransport::SetRemoteTransportParameters( - absl::string_view remote_parameters) { - RTC_DCHECK_RUN_ON(thread_); - if (transport_parameters_comparison_(GetTransportParameters(), - remote_parameters)) { - return RTCError::OK(); - } - return RTCError(RTCErrorType::UNSUPPORTED_PARAMETER, - "Incompatible remote transport parameters"); -} - -RTCError MediaTransportPair::LoopbackDatagramTransport::OpenChannel( - int channel_id) { - return dc_transport_.OpenChannel(channel_id); -} - -RTCError MediaTransportPair::LoopbackDatagramTransport::SendData( - int channel_id, - const SendDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) { - return dc_transport_.SendData(channel_id, params, buffer); -} - -RTCError MediaTransportPair::LoopbackDatagramTransport::CloseChannel( - int channel_id) { - return dc_transport_.CloseChannel(channel_id); -} - -void MediaTransportPair::LoopbackDatagramTransport::SetDataSink( - DataChannelSink* sink) { - dc_transport_.SetDataSink(sink); -} - -bool MediaTransportPair::LoopbackDatagramTransport::IsReadyToSend() const { - return dc_transport_.IsReadyToSend(); -} - -void MediaTransportPair::LoopbackDatagramTransport::SetState( - MediaTransportState state) { - invoker_.AsyncInvoke(RTC_FROM_HERE, thread_, [this, state] { - RTC_DCHECK_RUN_ON(thread_); - state_ = state; - if (state_callback_) { - state_callback_->OnStateChanged(state_); - } - }); - dc_transport_.OnReadyToSend(state == MediaTransportState::kWritable); -} - -void MediaTransportPair::LoopbackDatagramTransport::SetStateAfterConnect( - MediaTransportState state) { - state_after_connect_ = state; -} - -void MediaTransportPair::LoopbackDatagramTransport::FlushAsyncInvokes() { - dc_transport_.FlushAsyncInvokes(); -} - -void MediaTransportPair::LoopbackDatagramTransport::DeliverDatagram( - rtc::CopyOnWriteBuffer buffer) { - RTC_DCHECK_RUN_ON(thread_); - if (sink_) { - sink_->OnDatagramReceived(buffer); - } -} - -} // namespace webrtc diff --git 
a/api/test/loopback_media_transport.h b/api/test/loopback_media_transport.h deleted file mode 100644 index 468965ba31..0000000000 --- a/api/test/loopback_media_transport.h +++ /dev/null @@ -1,269 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_TEST_LOOPBACK_MEDIA_TRANSPORT_H_ -#define API_TEST_LOOPBACK_MEDIA_TRANSPORT_H_ - -#include -#include -#include -#include - -#include "api/transport/datagram_transport_interface.h" -#include "api/transport/media/media_transport_interface.h" -#include "rtc_base/async_invoker.h" -#include "rtc_base/critical_section.h" -#include "rtc_base/thread.h" -#include "rtc_base/thread_checker.h" - -namespace webrtc { - -// Wrapper used to hand out unique_ptrs to loopback media -// transport without ownership changes to the underlying -// transport. -// It works in two modes: -// It can either wrap a factory, or it can wrap an existing interface. -// In the former mode, it delegates the work to the wrapped factory. -// In the latter mode, it always returns static instance of the transport -// interface. -// -// Example use: -// Factory wrap_static_interface = Wrapper(media_transport_interface); -// Factory wrap_factory = Wrapper(wrap_static_interface); -// The second factory may be created multiple times, and ownership may be passed -// to the client. 
The first factory counts the number of invocations of -// CreateMediaTransport(); -class WrapperMediaTransportFactory : public MediaTransportFactory { - public: - explicit WrapperMediaTransportFactory( - DatagramTransportInterface* wrapped_datagram_transport); - explicit WrapperMediaTransportFactory(MediaTransportFactory* wrapped); - - RTCErrorOr> CreateMediaTransport( - rtc::PacketTransportInternal* packet_transport, - rtc::Thread* network_thread, - const MediaTransportSettings& settings) override; - - RTCErrorOr> CreateMediaTransport( - rtc::Thread* network_thread, - const MediaTransportSettings& settings) override; - - RTCErrorOr> - CreateDatagramTransport(rtc::Thread* network_thread, - const MediaTransportSettings& settings) override; - - std::string GetTransportName() const override; - - int created_transport_count() const; - - private: - DatagramTransportInterface* wrapped_datagram_transport_ = nullptr; - MediaTransportFactory* wrapped_factory_ = nullptr; - int created_transport_count_ = 0; -}; - -// Contains two MediaTransportsInterfaces that are connected to each other. -// Currently supports audio only. 
-class MediaTransportPair { - public: - struct Stats { - int sent_audio_frames = 0; - int received_audio_frames = 0; - int sent_video_frames = 0; - int received_video_frames = 0; - }; - - explicit MediaTransportPair(rtc::Thread* thread); - ~MediaTransportPair(); - - DatagramTransportInterface* first_datagram_transport() { - return &first_datagram_transport_; - } - DatagramTransportInterface* second_datagram_transport() { - return &second_datagram_transport_; - } - - std::unique_ptr first_factory() { - return std::make_unique(&first_factory_); - } - - std::unique_ptr second_factory() { - return std::make_unique(&second_factory_); - } - - void SetState(MediaTransportState state) { - first_datagram_transport_.SetState(state); - second_datagram_transport_.SetState(state); - } - - void SetFirstState(MediaTransportState state) { - first_datagram_transport_.SetState(state); - } - - void SetSecondStateAfterConnect(MediaTransportState state) { - second_datagram_transport_.SetState(state); - } - - void SetFirstDatagramTransportParameters(const std::string& params) { - first_datagram_transport_.set_transport_parameters(params); - } - - void SetSecondDatagramTransportParameters(const std::string& params) { - second_datagram_transport_.set_transport_parameters(params); - } - - void SetFirstDatagramTransportParametersComparison( - std::function comparison) { - first_datagram_transport_.set_transport_parameters_comparison( - std::move(comparison)); - } - - void SetSecondDatagramTransportParametersComparison( - std::function comparison) { - second_datagram_transport_.set_transport_parameters_comparison( - std::move(comparison)); - } - - void FlushAsyncInvokes() { - first_datagram_transport_.FlushAsyncInvokes(); - second_datagram_transport_.FlushAsyncInvokes(); - } - - int first_factory_transport_count() const { - return first_factory_.created_transport_count(); - } - - int second_factory_transport_count() const { - return second_factory_.created_transport_count(); - } - - private: 
- class LoopbackDataChannelTransport : public DataChannelTransportInterface { - public: - explicit LoopbackDataChannelTransport(rtc::Thread* thread); - ~LoopbackDataChannelTransport() override; - - void Connect(LoopbackDataChannelTransport* other); - - RTCError OpenChannel(int channel_id) override; - - RTCError SendData(int channel_id, - const SendDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) override; - - RTCError CloseChannel(int channel_id) override; - - bool IsReadyToSend() const override; - - void SetDataSink(DataChannelSink* sink) override; - - void OnReadyToSend(bool ready_to_send); - - void FlushAsyncInvokes(); - - private: - void OnData(int channel_id, - DataMessageType type, - const rtc::CopyOnWriteBuffer& buffer); - - void OnRemoteCloseChannel(int channel_id); - - rtc::Thread* const thread_; - rtc::CriticalSection sink_lock_; - DataChannelSink* data_sink_ RTC_GUARDED_BY(sink_lock_) = nullptr; - - bool ready_to_send_ RTC_GUARDED_BY(sink_lock_) = false; - - LoopbackDataChannelTransport* other_; - - rtc::AsyncInvoker invoker_; - }; - - class LoopbackDatagramTransport : public DatagramTransportInterface { - public: - explicit LoopbackDatagramTransport(rtc::Thread* thread); - - void Connect(LoopbackDatagramTransport* other); - - // Datagram transport overrides. - void Connect(rtc::PacketTransportInternal* packet_transport) override; - CongestionControlInterface* congestion_control() override; - void SetTransportStateCallback( - MediaTransportStateCallback* callback) override; - RTCError SendDatagram(rtc::ArrayView data, - DatagramId datagram_id) override; - size_t GetLargestDatagramSize() const override; - void SetDatagramSink(DatagramSinkInterface* sink) override; - std::string GetTransportParameters() const override; - RTCError SetRemoteTransportParameters( - absl::string_view remote_parameters) override; - - // Data channel overrides. 
- RTCError OpenChannel(int channel_id) override; - RTCError SendData(int channel_id, - const SendDataParams& params, - const rtc::CopyOnWriteBuffer& buffer) override; - RTCError CloseChannel(int channel_id) override; - void SetDataSink(DataChannelSink* sink) override; - bool IsReadyToSend() const override; - - // Loopback-specific functionality. - void SetState(MediaTransportState state); - - // When Connect() is called, the datagram transport will enter this state. - // This is useful for mimicking zero-RTT connectivity, for example. - void SetStateAfterConnect(MediaTransportState state); - void FlushAsyncInvokes(); - - void set_transport_parameters(const std::string& value) { - transport_parameters_ = value; - } - - void set_transport_parameters_comparison( - std::function comparison) { - thread_->Invoke( - RTC_FROM_HERE, [this, comparison = std::move(comparison)] { - RTC_DCHECK_RUN_ON(thread_); - transport_parameters_comparison_ = std::move(comparison); - }); - } - - private: - void DeliverDatagram(rtc::CopyOnWriteBuffer buffer); - - rtc::Thread* thread_; - LoopbackDataChannelTransport dc_transport_; - - MediaTransportState state_ RTC_GUARDED_BY(thread_) = - MediaTransportState::kPending; - DatagramSinkInterface* sink_ RTC_GUARDED_BY(thread_) = nullptr; - MediaTransportStateCallback* state_callback_ RTC_GUARDED_BY(thread_) = - nullptr; - LoopbackDatagramTransport* other_; - - std::string transport_parameters_; - std::function - transport_parameters_comparison_ RTC_GUARDED_BY(thread_) = - [](absl::string_view a, absl::string_view b) { return a == b; }; - - absl::optional state_after_connect_; - - rtc::AsyncInvoker invoker_; - }; - - LoopbackDatagramTransport first_datagram_transport_; - LoopbackDatagramTransport second_datagram_transport_; - WrapperMediaTransportFactory first_factory_; - WrapperMediaTransportFactory second_factory_; -}; - -} // namespace webrtc - -#endif // API_TEST_LOOPBACK_MEDIA_TRANSPORT_H_ diff --git 
a/api/test/loopback_media_transport_unittest.cc b/api/test/loopback_media_transport_unittest.cc deleted file mode 100644 index f036de3eae..0000000000 --- a/api/test/loopback_media_transport_unittest.cc +++ /dev/null @@ -1,201 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "api/test/loopback_media_transport.h" - -#include -#include -#include - -#include "test/gmock.h" - -namespace webrtc { - -namespace { - -class MockMediaTransportAudioSinkInterface - : public MediaTransportAudioSinkInterface { - public: - MOCK_METHOD2(OnData, void(uint64_t, MediaTransportEncodedAudioFrame)); -}; - -class MockMediaTransportVideoSinkInterface - : public MediaTransportVideoSinkInterface { - public: - MOCK_METHOD2(OnData, void(uint64_t, MediaTransportEncodedVideoFrame)); -}; - -class MockMediaTransportKeyFrameRequestCallback - : public MediaTransportKeyFrameRequestCallback { - public: - MOCK_METHOD1(OnKeyFrameRequested, void(uint64_t)); -}; - -class MockDataChannelSink : public DataChannelSink { - public: - MOCK_METHOD3(OnDataReceived, - void(int, DataMessageType, const rtc::CopyOnWriteBuffer&)); - MOCK_METHOD1(OnChannelClosing, void(int)); - MOCK_METHOD1(OnChannelClosed, void(int)); - MOCK_METHOD0(OnReadyToSend, void()); -}; - -class MockStateCallback : public MediaTransportStateCallback { - public: - MOCK_METHOD1(OnStateChanged, void(MediaTransportState)); -}; - -} // namespace - -TEST(LoopbackMediaTransport, DataDeliveredToSink) { - std::unique_ptr thread = rtc::Thread::Create(); - thread->Start(); - MediaTransportPair transport_pair(thread.get()); - - MockDataChannelSink sink; - 
transport_pair.first_datagram_transport()->SetDataSink(&sink); - - const int channel_id = 1; - EXPECT_CALL( - sink, OnDataReceived( - channel_id, DataMessageType::kText, - ::testing::Property( - &rtc::CopyOnWriteBuffer::cdata, ::testing::StrEq("foo")))); - - SendDataParams params; - params.type = DataMessageType::kText; - rtc::CopyOnWriteBuffer buffer("foo"); - transport_pair.second_datagram_transport()->SendData(channel_id, params, - buffer); - - transport_pair.FlushAsyncInvokes(); - transport_pair.first_datagram_transport()->SetDataSink(nullptr); -} - -TEST(LoopbackMediaTransport, CloseDeliveredToSink) { - std::unique_ptr thread = rtc::Thread::Create(); - thread->Start(); - MediaTransportPair transport_pair(thread.get()); - - MockDataChannelSink first_sink; - transport_pair.first_datagram_transport()->SetDataSink(&first_sink); - - MockDataChannelSink second_sink; - transport_pair.second_datagram_transport()->SetDataSink(&second_sink); - - const int channel_id = 1; - { - ::testing::InSequence s; - EXPECT_CALL(second_sink, OnChannelClosing(channel_id)); - EXPECT_CALL(second_sink, OnChannelClosed(channel_id)); - EXPECT_CALL(first_sink, OnChannelClosed(channel_id)); - } - - transport_pair.first_datagram_transport()->CloseChannel(channel_id); - - transport_pair.FlushAsyncInvokes(); - transport_pair.first_datagram_transport()->SetDataSink(nullptr); - transport_pair.second_datagram_transport()->SetDataSink(nullptr); -} - -TEST(LoopbackMediaTransport, InitialStateDeliveredWhenCallbackSet) { - std::unique_ptr thread = rtc::Thread::Create(); - thread->Start(); - MediaTransportPair transport_pair(thread.get()); - - MockStateCallback state_callback; - EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kPending)); - - thread->Invoke(RTC_FROM_HERE, [&transport_pair, &state_callback] { - transport_pair.first_datagram_transport()->SetTransportStateCallback( - &state_callback); - }); - transport_pair.FlushAsyncInvokes(); -} - -TEST(LoopbackMediaTransport, 
ChangedStateDeliveredWhenCallbackSet) { - std::unique_ptr thread = rtc::Thread::Create(); - thread->Start(); - MediaTransportPair transport_pair(thread.get()); - - transport_pair.SetState(MediaTransportState::kWritable); - transport_pair.FlushAsyncInvokes(); - - MockStateCallback state_callback; - - EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kWritable)); - thread->Invoke(RTC_FROM_HERE, [&transport_pair, &state_callback] { - transport_pair.first_datagram_transport()->SetTransportStateCallback( - &state_callback); - }); - transport_pair.FlushAsyncInvokes(); -} - -TEST(LoopbackMediaTransport, StateChangeDeliveredToCallback) { - std::unique_ptr thread = rtc::Thread::Create(); - thread->Start(); - MediaTransportPair transport_pair(thread.get()); - - MockStateCallback state_callback; - - EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kPending)); - EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kWritable)); - thread->Invoke(RTC_FROM_HERE, [&transport_pair, &state_callback] { - transport_pair.first_datagram_transport()->SetTransportStateCallback( - &state_callback); - }); - transport_pair.SetState(MediaTransportState::kWritable); - transport_pair.FlushAsyncInvokes(); -} - -TEST(LoopbackMediaTransport, NotReadyToSendWhenDataSinkSet) { - std::unique_ptr thread = rtc::Thread::Create(); - thread->Start(); - MediaTransportPair transport_pair(thread.get()); - - MockDataChannelSink data_channel_sink; - EXPECT_CALL(data_channel_sink, OnReadyToSend()).Times(0); - - transport_pair.first_datagram_transport()->SetDataSink(&data_channel_sink); - transport_pair.FlushAsyncInvokes(); - transport_pair.first_datagram_transport()->SetDataSink(nullptr); -} - -TEST(LoopbackMediaTransport, ReadyToSendWhenDataSinkSet) { - std::unique_ptr thread = rtc::Thread::Create(); - thread->Start(); - MediaTransportPair transport_pair(thread.get()); - - transport_pair.SetState(MediaTransportState::kWritable); - transport_pair.FlushAsyncInvokes(); - - 
MockDataChannelSink data_channel_sink; - EXPECT_CALL(data_channel_sink, OnReadyToSend()); - - transport_pair.first_datagram_transport()->SetDataSink(&data_channel_sink); - transport_pair.FlushAsyncInvokes(); - transport_pair.first_datagram_transport()->SetDataSink(nullptr); -} - -TEST(LoopbackMediaTransport, StateChangeDeliveredToDataSink) { - std::unique_ptr thread = rtc::Thread::Create(); - thread->Start(); - MediaTransportPair transport_pair(thread.get()); - - MockDataChannelSink data_channel_sink; - EXPECT_CALL(data_channel_sink, OnReadyToSend()); - - transport_pair.first_datagram_transport()->SetDataSink(&data_channel_sink); - transport_pair.SetState(MediaTransportState::kWritable); - transport_pair.FlushAsyncInvokes(); - transport_pair.first_datagram_transport()->SetDataSink(nullptr); -} - -} // namespace webrtc diff --git a/api/test/mock_audio_mixer.h b/api/test/mock_audio_mixer.h index aee717bebf..88dc108ca3 100644 --- a/api/test/mock_audio_mixer.h +++ b/api/test/mock_audio_mixer.h @@ -19,12 +19,9 @@ namespace test { class MockAudioMixer : public AudioMixer { public: - MOCK_METHOD(bool, AddSource, (Source * audio_source), (override)); - MOCK_METHOD(void, RemoveSource, (Source * audio_source), (override)); - MOCK_METHOD(void, - Mix, - (size_t number_of_channels, AudioFrame* audio_frame_for_mixing), - (override)); + MOCK_METHOD(bool, AddSource, (Source*), (override)); + MOCK_METHOD(void, RemoveSource, (Source*), (override)); + MOCK_METHOD(void, Mix, (size_t number_of_channels, AudioFrame*), (override)); }; } // namespace test } // namespace webrtc diff --git a/api/test/mock_peer_connection_factory_interface.h b/api/test/mock_peer_connection_factory_interface.h new file mode 100644 index 0000000000..19c3f4063e --- /dev/null +++ b/api/test/mock_peer_connection_factory_interface.h @@ -0,0 +1,75 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_ +#define API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_ + +#include +#include + +#include "api/peer_connection_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockPeerConnectionFactoryInterface final + : public rtc::RefCountedObject { + public: + rtc::scoped_refptr Create() { + return new MockPeerConnectionFactoryInterface(); + } + + MOCK_METHOD(void, SetOptions, (const Options&), (override)); + MOCK_METHOD(rtc::scoped_refptr, + CreatePeerConnection, + (const PeerConnectionInterface::RTCConfiguration&, + PeerConnectionDependencies), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + CreatePeerConnection, + (const PeerConnectionInterface::RTCConfiguration&, + std::unique_ptr, + std::unique_ptr, + PeerConnectionObserver*), + (override)); + MOCK_METHOD(RtpCapabilities, + GetRtpSenderCapabilities, + (cricket::MediaType), + (const override)); + MOCK_METHOD(RtpCapabilities, + GetRtpReceiverCapabilities, + (cricket::MediaType), + (const override)); + MOCK_METHOD(rtc::scoped_refptr, + CreateLocalMediaStream, + (const std::string&), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + CreateAudioSource, + (const cricket::AudioOptions&), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + CreateVideoTrack, + (const std::string&, VideoTrackSourceInterface*), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + CreateAudioTrack, + (const std::string&, AudioSourceInterface*), + (override)); + MOCK_METHOD(bool, StartAecDump, (FILE*, int64_t), (override)); + MOCK_METHOD(void, StopAecDump, (), (override)); + + protected: + MockPeerConnectionFactoryInterface() = 
default; +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_ diff --git a/api/test/mock_peerconnectioninterface.h b/api/test/mock_peerconnectioninterface.h index 6b247b7cee..be34df0b32 100644 --- a/api/test/mock_peerconnectioninterface.h +++ b/api/test/mock_peerconnectioninterface.h @@ -167,7 +167,6 @@ class MockPeerConnectionInterface (const std::vector&), (override)); MOCK_METHOD(RTCError, SetBitrate, (const BitrateSettings&), (override)); - MOCK_METHOD(RTCError, SetBitrate, (const BitrateParameters&), (override)); MOCK_METHOD(void, SetAudioPlayout, (bool), (override)); MOCK_METHOD(void, SetAudioRecording, (bool), (override)); MOCK_METHOD(rtc::scoped_refptr, diff --git a/api/test/mock_transformable_video_frame.h b/api/test/mock_transformable_video_frame.h new file mode 100644 index 0000000000..36798b5d73 --- /dev/null +++ b/api/test/mock_transformable_video_frame.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_ +#define API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_ + +#include + +#include "api/frame_transformer_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockTransformableVideoFrame + : public webrtc::TransformableVideoFrameInterface { + public: + MOCK_METHOD(rtc::ArrayView, GetData, (), (const override)); + MOCK_METHOD(void, SetData, (rtc::ArrayView data), (override)); + MOCK_METHOD(uint32_t, GetTimestamp, (), (const override)); + MOCK_METHOD(uint32_t, GetSsrc, (), (const, override)); + MOCK_METHOD(bool, IsKeyFrame, (), (const, override)); + MOCK_METHOD(std::vector, GetAdditionalData, (), (const, override)); + MOCK_METHOD(const webrtc::VideoFrameMetadata&, + GetMetadata, + (), + (const, override)); +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_ diff --git a/api/test/mock_video_bitrate_allocator_factory.h b/api/test/mock_video_bitrate_allocator_factory.h index c7d883a5b9..16af191970 100644 --- a/api/test/mock_video_bitrate_allocator_factory.h +++ b/api/test/mock_video_bitrate_allocator_factory.h @@ -26,7 +26,7 @@ class MockVideoBitrateAllocatorFactory CreateVideoBitrateAllocator, (const VideoCodec&), (override)); - MOCK_METHOD(void, Die, (), ()); + MOCK_METHOD(void, Die, ()); }; } // namespace webrtc diff --git a/api/test/mock_video_decoder_factory.h b/api/test/mock_video_decoder_factory.h index 1f832a156b..98a5d40eb6 100644 --- a/api/test/mock_video_decoder_factory.h +++ b/api/test/mock_video_decoder_factory.h @@ -27,12 +27,12 @@ class MockVideoDecoderFactory : public webrtc::VideoDecoderFactory { MOCK_METHOD(std::vector, GetSupportedFormats, (), - (const override)); + (const, override)); MOCK_METHOD(std::unique_ptr, CreateVideoDecoder, (const webrtc::SdpVideoFormat&), (override)); - MOCK_METHOD(void, Die, (), ()); + MOCK_METHOD(void, Die, ()); }; } // namespace webrtc diff --git a/api/test/mock_video_encoder.h b/api/test/mock_video_encoder.h index 
c4b6b3e50c..26d758fd6a 100644 --- a/api/test/mock_video_encoder.h +++ b/api/test/mock_video_encoder.h @@ -23,8 +23,8 @@ class MockEncodedImageCallback : public EncodedImageCallback { MOCK_METHOD(Result, OnEncodedImage, (const EncodedImage& encodedImage, - const CodecSpecificInfo* codecSpecificInfo, - const RTPFragmentationHeader* fragmentation), + const CodecSpecificInfo*, + const RTPFragmentationHeader*), (override)); MOCK_METHOD(void, OnDroppedFrame, (DropReason reason), (override)); }; @@ -33,31 +33,41 @@ class MockVideoEncoder : public VideoEncoder { public: MOCK_METHOD(void, SetFecControllerOverride, - (FecControllerOverride * fec_controller_override), + (FecControllerOverride*), (override)); MOCK_METHOD(int32_t, InitEncode, - (const VideoCodec* codecSettings, - int32_t numberOfCores, - size_t maxPayloadSize), + (const VideoCodec*, int32_t numberOfCores, size_t maxPayloadSize), + (override)); + MOCK_METHOD(int32_t, + InitEncode, + (const VideoCodec*, const VideoEncoder::Settings& settings), (override)); - MOCK_METHOD2(InitEncode, - int32_t(const VideoCodec* codecSettings, - const VideoEncoder::Settings& settings)); - MOCK_METHOD2(Encode, - int32_t(const VideoFrame& inputImage, - const std::vector* frame_types)); - MOCK_METHOD1(RegisterEncodeCompleteCallback, - int32_t(EncodedImageCallback* callback)); - MOCK_METHOD0(Release, int32_t()); - MOCK_METHOD0(Reset, int32_t()); - MOCK_METHOD1(SetRates, void(const RateControlParameters& parameters)); - MOCK_METHOD1(OnPacketLossRateUpdate, void(float packet_loss_rate)); - MOCK_METHOD1(OnRttUpdate, void(int64_t rtt_ms)); - MOCK_METHOD1(OnLossNotification, - void(const LossNotification& loss_notification)); - MOCK_CONST_METHOD0(GetEncoderInfo, EncoderInfo(void)); + MOCK_METHOD(int32_t, + Encode, + (const VideoFrame& inputImage, + const std::vector*), + (override)); + MOCK_METHOD(int32_t, + RegisterEncodeCompleteCallback, + (EncodedImageCallback*), + (override)); + MOCK_METHOD(int32_t, Release, (), (override)); + 
MOCK_METHOD(void, + SetRates, + (const RateControlParameters& parameters), + (override)); + MOCK_METHOD(void, + OnPacketLossRateUpdate, + (float packet_loss_rate), + (override)); + MOCK_METHOD(void, OnRttUpdate, (int64_t rtt_ms), (override)); + MOCK_METHOD(void, + OnLossNotification, + (const LossNotification& loss_notification), + (override)); + MOCK_METHOD(EncoderInfo, GetEncoderInfo, (), (const, override)); }; } // namespace webrtc diff --git a/api/test/mock_video_encoder_factory.h b/api/test/mock_video_encoder_factory.h index fa08dbd6bc..1aa14631be 100644 --- a/api/test/mock_video_encoder_factory.h +++ b/api/test/mock_video_encoder_factory.h @@ -27,17 +27,17 @@ class MockVideoEncoderFactory : public webrtc::VideoEncoderFactory { MOCK_METHOD(std::vector, GetSupportedFormats, (), - (const override)); + (const, override)); MOCK_METHOD(CodecInfo, QueryVideoEncoder, (const SdpVideoFormat&), - (const override)); + (const, override)); MOCK_METHOD(std::unique_ptr, CreateVideoEncoder, (const SdpVideoFormat&), (override)); - MOCK_METHOD(void, Die, (), ()); + MOCK_METHOD(void, Die, ()); }; } // namespace webrtc diff --git a/api/test/network_emulation/BUILD.gn b/api/test/network_emulation/BUILD.gn index 5fda1e288a..a3dd961c81 100644 --- a/api/test/network_emulation/BUILD.gn +++ b/api/test/network_emulation/BUILD.gn @@ -23,6 +23,6 @@ rtc_library("network_emulation") { "../../units:data_rate", "../../units:data_size", "../../units:timestamp", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } diff --git a/api/test/network_emulation/network_emulation_interfaces.h b/api/test/network_emulation/network_emulation_interfaces.h index 0986df4a08..260ab0e292 100644 --- a/api/test/network_emulation/network_emulation_interfaces.h +++ b/api/test/network_emulation/network_emulation_interfaces.h @@ -10,6 +10,9 @@ #ifndef API_TEST_NETWORK_EMULATION_NETWORK_EMULATION_INTERFACES_H_ #define 
API_TEST_NETWORK_EMULATION_NETWORK_EMULATION_INTERFACES_H_ +#include +#include + #include "absl/types/optional.h" #include "api/units/data_rate.h" #include "api/units/data_size.h" @@ -56,9 +59,7 @@ class EmulatedNetworkReceiverInterface { virtual void OnPacketReceived(EmulatedIpPacket packet) = 0; }; -struct EmulatedNetworkStats { - int64_t packets_sent = 0; - DataSize bytes_sent = DataSize::Zero(); +struct EmulatedNetworkIncomingStats { // Total amount of packets received with or without destination. int64_t packets_received = 0; // Total amount of bytes in received packets. @@ -69,22 +70,122 @@ struct EmulatedNetworkStats { DataSize bytes_dropped = DataSize::Zero(); DataSize first_received_packet_size = DataSize::Zero(); - DataSize first_sent_packet_size = DataSize::Zero(); - Timestamp first_packet_sent_time = Timestamp::PlusInfinity(); - Timestamp last_packet_sent_time = Timestamp::PlusInfinity(); + // Timestamps are initialized to different infinities for simplifying + // computations. Clients have to assume that it is some infinite value + // if unset. Clients mustn't rely on the sign of the infinite value.
Timestamp first_packet_received_time = Timestamp::PlusInfinity(); - Timestamp last_packet_received_time = Timestamp::PlusInfinity(); + Timestamp last_packet_received_time = Timestamp::MinusInfinity(); + + DataRate AverageReceiveRate() const { + RTC_DCHECK_GE(packets_received, 2); + RTC_DCHECK(first_packet_received_time.IsFinite()); + RTC_DCHECK(last_packet_received_time.IsFinite()); + return (bytes_received - first_received_packet_size) / + (last_packet_received_time - first_packet_received_time); + } +}; + +struct EmulatedNetworkStats { + int64_t packets_sent = 0; + DataSize bytes_sent = DataSize::Zero(); + + DataSize first_sent_packet_size = DataSize::Zero(); + Timestamp first_packet_sent_time = Timestamp::PlusInfinity(); + Timestamp last_packet_sent_time = Timestamp::MinusInfinity(); + + // List of IP addresses that were used to send data considered in this stats + // object. + std::vector local_addresses; + + std::map + incoming_stats_per_source; DataRate AverageSendRate() const { RTC_DCHECK_GE(packets_sent, 2); return (bytes_sent - first_sent_packet_size) / (last_packet_sent_time - first_packet_sent_time); } + + // Total amount of packets received regardless of the destination address. + int64_t PacketsReceived() const { + int64_t packets_received = 0; + for (const auto& incoming_stats : incoming_stats_per_source) { + packets_received += incoming_stats.second.packets_received; + } + return packets_received; + } + + // Total amount of bytes in received packets. + DataSize BytesReceived() const { + DataSize bytes_received = DataSize::Zero(); + for (const auto& incoming_stats : incoming_stats_per_source) { + bytes_received += incoming_stats.second.bytes_received; + } + return bytes_received; + } + + // Total amount of packets that were received, but no destination was found. 
+ int64_t PacketsDropped() const { + int64_t packets_dropped = 0; + for (const auto& incoming_stats : incoming_stats_per_source) { + packets_dropped += incoming_stats.second.packets_dropped; + } + return packets_dropped; + } + + // Total amount of bytes in dropped packets. + DataSize BytesDropped() const { + DataSize bytes_dropped = DataSize::Zero(); + for (const auto& incoming_stats : incoming_stats_per_source) { + bytes_dropped += incoming_stats.second.bytes_dropped; + } + return bytes_dropped; + } + + DataSize FirstReceivedPacketSize() const { + Timestamp first_packet_received_time = Timestamp::PlusInfinity(); + DataSize first_received_packet_size = DataSize::Zero(); + for (const auto& incoming_stats : incoming_stats_per_source) { + if (first_packet_received_time > + incoming_stats.second.first_packet_received_time) { + first_packet_received_time = + incoming_stats.second.first_packet_received_time; + first_received_packet_size = + incoming_stats.second.first_received_packet_size; + } + } + return first_received_packet_size; + } + + Timestamp FirstPacketReceivedTime() const { + Timestamp first_packet_received_time = Timestamp::PlusInfinity(); + for (const auto& incoming_stats : incoming_stats_per_source) { + if (first_packet_received_time > + incoming_stats.second.first_packet_received_time) { + first_packet_received_time = + incoming_stats.second.first_packet_received_time; + } + } + return first_packet_received_time; + } + + Timestamp LastPacketReceivedTime() const { + Timestamp last_packet_received_time = Timestamp::MinusInfinity(); + for (const auto& incoming_stats : incoming_stats_per_source) { + if (last_packet_received_time < + incoming_stats.second.last_packet_received_time) { + last_packet_received_time = + incoming_stats.second.last_packet_received_time; + } + } + return last_packet_received_time; + } + DataRate AverageReceiveRate() const { - RTC_DCHECK_GE(packets_received, 2); - return (bytes_received - first_received_packet_size) / - 
(last_packet_received_time - first_packet_received_time); + RTC_DCHECK_GE(PacketsReceived(), 2); + return (BytesReceived() - FirstReceivedPacketSize()) / + (LastPacketReceivedTime() - FirstPacketReceivedTime()); } }; diff --git a/api/test/peerconnection_quality_test_fixture.h b/api/test/peerconnection_quality_test_fixture.h index d55647a841..f370478956 100644 --- a/api/test/peerconnection_quality_test_fixture.h +++ b/api/test/peerconnection_quality_test_fixture.h @@ -26,13 +26,14 @@ #include "api/media_stream_interface.h" #include "api/peer_connection_interface.h" #include "api/rtc_event_log/rtc_event_log_factory_interface.h" +#include "api/rtp_parameters.h" #include "api/task_queue/task_queue_factory.h" #include "api/test/audio_quality_analyzer_interface.h" #include "api/test/frame_generator_interface.h" #include "api/test/simulated_network.h" #include "api/test/stats_observer_interface.h" +#include "api/test/track_id_stream_info_map.h" #include "api/test/video_quality_analyzer_interface.h" -#include "api/transport/media/media_transport_interface.h" #include "api/transport/network_control.h" #include "api/units/time_delta.h" #include "api/video_codecs/video_decoder_factory.h" @@ -53,6 +54,12 @@ constexpr size_t kDefaultSlidesHeight = 1110; // API is in development. Can be changed/removed without notice. class PeerConnectionE2EQualityTestFixture { public: + // The index of required capturing device in OS provided list of video + // devices. On Linux and Windows the list will be obtained via + // webrtc::VideoCaptureModule::DeviceInfo, on Mac OS via + // [RTCCameraVideoCapturer captureDevices]. + enum class CapturingDeviceIndex : size_t {}; + // Contains parameters for screen share scrolling. 
// // If scrolling is enabled, then it will be done by putting sliding window @@ -116,8 +123,6 @@ class PeerConnectionE2EQualityTestFixture { std::vector slides_yuv_file_names; }; - enum VideoGeneratorType { kDefault, kI420A, kI010 }; - // Config for Vp8 simulcast or Vp9 SVC testing. // // SVC support is limited: @@ -160,6 +165,14 @@ class PeerConnectionE2EQualityTestFixture { // It requires Selective Forwarding Unit (SFU) to be configured in the // network. absl::optional target_spatial_index; + + // Encoding parameters per simulcast layer. If not empty, |encoding_params| + // size have to be equal to |simulcast_streams_count|. Will be used to set + // transceiver send encoding params for simulcast layers. Applicable only + // for codecs that support simulcast (ex. Vp8) and will be ignored + // otherwise. RtpEncodingParameters::rid may be changed by fixture + // implementation to ensure signaling correctness. + std::vector encoding_params; }; // Contains properties of single video stream. @@ -178,12 +191,6 @@ class PeerConnectionE2EQualityTestFixture { // Will be set for current video track. If equals to kText or kDetailed - // screencast in on. absl::optional content_hint; - // If specified this capturing device will be used to get input video. The - // |capturing_device_index| is the index of required capturing device in OS - // provided list of video devices. On Linux and Windows the list will be - // obtained via webrtc::VideoCaptureModule::DeviceInfo, on Mac OS via - // [RTCCameraVideoCapturer captureDevices]. - absl::optional capturing_device_index; // If presented video will be transfered in simulcast/SVC mode depending on // which encoder is used. // @@ -222,8 +229,7 @@ class PeerConnectionE2EQualityTestFixture { bool show_on_screen = false; // If specified, determines a sync group to which this video stream belongs. // According to bugs.webrtc.org/4762 WebRTC supports synchronization only - // for pair of single audio and single video stream. 
Framework won't do any - // enforcements on this field. + // for pair of single audio and single video stream. absl::optional sync_group; }; @@ -250,8 +256,7 @@ class PeerConnectionE2EQualityTestFixture { int sampling_frequency_in_hz = 48000; // If specified, determines a sync group to which this audio stream belongs. // According to bugs.webrtc.org/4762 WebRTC supports synchronization only - // for pair of single audio and single video stream. Framework won't do any - // enforcements on this field. + // for pair of single audio and single video stream. absl::optional sync_group; }; @@ -280,8 +285,6 @@ class PeerConnectionE2EQualityTestFixture { virtual PeerConfigurer* SetNetworkControllerFactory( std::unique_ptr network_controller_factory) = 0; - virtual PeerConfigurer* SetMediaTransportFactory( - std::unique_ptr media_transport_factory) = 0; virtual PeerConfigurer* SetVideoEncoderFactory( std::unique_ptr video_encoder_factory) = 0; virtual PeerConfigurer* SetVideoDecoderFactory( @@ -312,6 +315,11 @@ class PeerConnectionE2EQualityTestFixture { virtual PeerConfigurer* AddVideoConfig( VideoConfig config, std::unique_ptr generator) = 0; + // Add new video stream to the call that will be sent from this peer. + // Capturing device with specified index will be used to get input video. + virtual PeerConfigurer* AddVideoConfig( + VideoConfig config, + CapturingDeviceIndex capturing_device_index) = 0; // Set the audio stream for the call from this peer. If this method won't // be invoked, this peer will send no audio. virtual PeerConfigurer* SetAudioConfig(AudioConfig config) = 0; @@ -325,8 +333,8 @@ class PeerConnectionE2EQualityTestFixture { PeerConnectionInterface::RTCConfiguration configuration) = 0; // Set bitrate parameters on PeerConnection. This constraints will be // applied to all summed RTP streams for this peer. 
- virtual PeerConfigurer* SetBitrateParameters( - PeerConnectionInterface::BitrateParameters bitrate_params) = 0; + virtual PeerConfigurer* SetBitrateSettings( + BitrateSettings bitrate_settings) = 0; }; // Contains configuration for echo emulator. @@ -400,7 +408,14 @@ class PeerConnectionE2EQualityTestFixture { // Invoked by framework after peer connection factory and peer connection // itself will be created but before offer/answer exchange will be started. - virtual void Start(absl::string_view test_case_name) = 0; + // |test_case_name| is name of test case, that should be used to report all + // metrics. + // |reporter_helper| is a pointer to a class that will allow track_id to + // stream_id matching. The caller is responsible for ensuring the + // TrackIdStreamInfoMap will be valid from Start() to + // StopAndReportResults(). + virtual void Start(absl::string_view test_case_name, + const TrackIdStreamInfoMap* reporter_helper) = 0; // Invoked by framework after call is ended and peer connection factory and // peer connection are destroyed. @@ -436,6 +451,12 @@ class PeerConnectionE2EQualityTestFixture { virtual void AddPeer(rtc::Thread* network_thread, rtc::NetworkManager* network_manager, rtc::FunctionView configurer) = 0; + // Runs the media quality test, which includes setting up the call with + // configured participants, running it according to provided |run_params| and + // terminating it properly at the end. During call duration media quality + // metrics are gathered, which are then reported to stdout and (if configured) + // to the json/protobuf output file through the WebRTC perf test results + // reporting system. 
virtual void Run(RunParams run_params) = 0; // Returns real test duration - the time of test execution measured during diff --git a/api/test/simulated_network.h b/api/test/simulated_network.h index 0d5c6613a6..3fba61f74d 100644 --- a/api/test/simulated_network.h +++ b/api/test/simulated_network.h @@ -19,7 +19,6 @@ #include #include "absl/types/optional.h" -#include "rtc_base/critical_section.h" #include "rtc_base/random.h" #include "rtc_base/thread_annotations.h" diff --git a/api/test/stats_observer_interface.h b/api/test/stats_observer_interface.h index 98c8dd937f..ea4d6c23db 100644 --- a/api/test/stats_observer_interface.h +++ b/api/test/stats_observer_interface.h @@ -11,9 +11,8 @@ #ifndef API_TEST_STATS_OBSERVER_INTERFACE_H_ #define API_TEST_STATS_OBSERVER_INTERFACE_H_ -#include - -#include "api/stats_types.h" +#include "absl/strings/string_view.h" +#include "api/stats/rtc_stats_report.h" namespace webrtc { namespace webrtc_pc_e2e { @@ -25,8 +24,9 @@ class StatsObserverInterface { // Method called when stats reports are available for the PeerConnection // identified by |pc_label|. - virtual void OnStatsReports(const std::string& pc_label, - const StatsReports& reports) = 0; + virtual void OnStatsReports( + absl::string_view pc_label, + const rtc::scoped_refptr& report) = 0; }; } // namespace webrtc_pc_e2e diff --git a/api/test/test_dependency_factory.cc b/api/test/test_dependency_factory.cc index e72f55aab5..41ad70cc3f 100644 --- a/api/test/test_dependency_factory.cc +++ b/api/test/test_dependency_factory.cc @@ -14,22 +14,24 @@ #include #include "rtc_base/checks.h" -#include "rtc_base/thread_checker.h" +#include "rtc_base/platform_thread_types.h" namespace webrtc { +namespace { // This checks everything in this file gets called on the same thread. It's // static because it needs to look at the static methods too. 
-rtc::ThreadChecker* GetThreadChecker() { - static rtc::ThreadChecker checker; - return &checker; +bool IsValidTestDependencyFactoryThread() { + const rtc::PlatformThreadRef main_thread = rtc::CurrentThreadRef(); + return rtc::IsThreadRefEqual(main_thread, rtc::CurrentThreadRef()); } +} // namespace std::unique_ptr TestDependencyFactory::instance_ = nullptr; const TestDependencyFactory& TestDependencyFactory::GetInstance() { - RTC_DCHECK(GetThreadChecker()->IsCurrent()); + RTC_DCHECK(IsValidTestDependencyFactoryThread()); if (instance_ == nullptr) { instance_ = std::make_unique(); } @@ -38,14 +40,14 @@ const TestDependencyFactory& TestDependencyFactory::GetInstance() { void TestDependencyFactory::SetInstance( std::unique_ptr instance) { - RTC_DCHECK(GetThreadChecker()->IsCurrent()); + RTC_DCHECK(IsValidTestDependencyFactoryThread()); RTC_CHECK(instance_ == nullptr); instance_ = std::move(instance); } std::unique_ptr TestDependencyFactory::CreateComponents() const { - RTC_DCHECK(GetThreadChecker()->IsCurrent()); + RTC_DCHECK(IsValidTestDependencyFactoryThread()); return nullptr; } diff --git a/api/test/time_controller.h b/api/test/time_controller.h index 4d7f9e6c39..bd3192ddf2 100644 --- a/api/test/time_controller.h +++ b/api/test/time_controller.h @@ -46,6 +46,7 @@ class TimeController { const char* thread_name) = 0; // Creates an rtc::Thread instance. If |socket_server| is nullptr, a default // noop socket server is created. + // Returned thread is not null and started. virtual std::unique_ptr CreateThread( const std::string& name, std::unique_ptr socket_server = nullptr) = 0; @@ -59,6 +60,8 @@ class TimeController { // Waits until condition() == true, polling condition() in small time // intervals. + // Returns true if condition() was evaluated to true before |max_duration| + // elapsed and false otherwise. 
bool Wait(const std::function& condition, TimeDelta max_duration = TimeDelta::Seconds(5)); }; diff --git a/api/test/track_id_stream_info_map.h b/api/test/track_id_stream_info_map.h new file mode 100644 index 0000000000..bb73cfd997 --- /dev/null +++ b/api/test/track_id_stream_info_map.h @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_TRACK_ID_STREAM_INFO_MAP_H_ +#define API_TEST_TRACK_ID_STREAM_INFO_MAP_H_ + +#include "absl/strings/string_view.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +// Instances of |TrackIdStreamInfoMap| provide bookkeeping capabilities that +// are useful to associate stats reports track_ids to the remote stream info. +class TrackIdStreamInfoMap { + public: + virtual ~TrackIdStreamInfoMap() = default; + + // These methods must be called on the same thread where + // StatsObserverInterface::OnStatsReports is invoked. + + // Returns a reference to a stream label owned by the TrackIdStreamInfoMap. + // Precondition: |track_id| must be already mapped to stream label. + virtual absl::string_view GetStreamLabelFromTrackId( + absl::string_view track_id) const = 0; + + // Returns a reference to a sync group name owned by the TrackIdStreamInfoMap. + // Precondition: |track_id| must be already mapped to sync group. 
+ virtual absl::string_view GetSyncGroupLabelFromTrackId( + absl::string_view track_id) const = 0; +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // API_TEST_TRACK_ID_STREAM_INFO_MAP_H_ diff --git a/api/test/track_id_stream_label_map.h b/api/test/track_id_stream_label_map.h deleted file mode 100644 index e8dc947ab1..0000000000 --- a/api/test/track_id_stream_label_map.h +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_TEST_TRACK_ID_STREAM_LABEL_MAP_H_ -#define API_TEST_TRACK_ID_STREAM_LABEL_MAP_H_ - -#include - -namespace webrtc { -namespace webrtc_pc_e2e { - -// Instances of |TrackIdStreamLabelMap| provide bookkeeping capabilities that -// are useful to associate stats reports track_ids to the remote stream_id. -class TrackIdStreamLabelMap { - public: - virtual ~TrackIdStreamLabelMap() = default; - - // This method must be called on the same thread where - // StatsObserverInterface::OnStatsReports is invoked. - // Returns a reference to a stream label owned by the TrackIdStreamLabelMap. - // Precondition: |track_id| must be already mapped to a stream_label. 
- virtual const std::string& GetStreamLabelFromTrackId( - const std::string& track_id) const = 0; -}; - -} // namespace webrtc_pc_e2e -} // namespace webrtc - -#endif // API_TEST_TRACK_ID_STREAM_LABEL_MAP_H_ diff --git a/api/test/video_quality_analyzer_interface.h b/api/test/video_quality_analyzer_interface.h index 0d3f441534..c5370a7089 100644 --- a/api/test/video_quality_analyzer_interface.h +++ b/api/test/video_quality_analyzer_interface.h @@ -14,7 +14,9 @@ #include #include +#include "absl/strings/string_view.h" #include "absl/types/optional.h" +#include "api/array_view.h" #include "api/test/stats_observer_interface.h" #include "api/video/encoded_image.h" #include "api/video/video_frame.h" @@ -76,42 +78,65 @@ class VideoQualityAnalyzerInterface : public StatsObserverInterface { // calculations. Analyzer can perform simple calculations on the calling // thread in each method, but should remember, that it is the same thread, // that is used in video pipeline. - virtual void Start(std::string test_case_name, int max_threads_count) {} + virtual void Start(std::string test_case_name, + rtc::ArrayView peer_names, + int max_threads_count) {} // Will be called when frame was generated from the input stream. + // |peer_name| is name of the peer on which side frame was captured. // Returns frame id, that will be set by framework to the frame. - virtual uint16_t OnFrameCaptured(const std::string& stream_label, + virtual uint16_t OnFrameCaptured(absl::string_view peer_name, + const std::string& stream_label, const VideoFrame& frame) = 0; // Will be called before calling the encoder. - virtual void OnFramePreEncode(const VideoFrame& frame) {} + // |peer_name| is name of the peer on which side frame came to encoder. + virtual void OnFramePreEncode(absl::string_view peer_name, + const VideoFrame& frame) {} // Will be called for each EncodedImage received from encoder. Single // VideoFrame can produce multiple EncodedImages. Each encoded image will // have id from VideoFrame. 
- virtual void OnFrameEncoded(uint16_t frame_id, + // |peer_name| is name of the peer on which side frame was encoded. + virtual void OnFrameEncoded(absl::string_view peer_name, + uint16_t frame_id, const EncodedImage& encoded_image, const EncoderStats& stats) {} // Will be called for each frame dropped by encoder. - virtual void OnFrameDropped(EncodedImageCallback::DropReason reason) {} + // |peer_name| is name of the peer on which side frame drop was detected. + virtual void OnFrameDropped(absl::string_view peer_name, + EncodedImageCallback::DropReason reason) {} // Will be called before calling the decoder. - virtual void OnFramePreDecode(uint16_t frame_id, + // |peer_name| is name of the peer on which side frame was received. + virtual void OnFramePreDecode(absl::string_view peer_name, + uint16_t frame_id, const EncodedImage& encoded_image) {} // Will be called after decoding the frame. - virtual void OnFrameDecoded(const VideoFrame& frame, + // |peer_name| is name of the peer on which side frame was decoded. + virtual void OnFrameDecoded(absl::string_view peer_name, + const VideoFrame& frame, const DecoderStats& stats) {} // Will be called when frame will be obtained from PeerConnection stack. - virtual void OnFrameRendered(const VideoFrame& frame) {} + // |peer_name| is name of the peer on which side frame was rendered. + virtual void OnFrameRendered(absl::string_view peer_name, + const VideoFrame& frame) {} // Will be called if encoder return not WEBRTC_VIDEO_CODEC_OK. // All available codes are listed in // modules/video_coding/include/video_error_codes.h - virtual void OnEncoderError(const VideoFrame& frame, int32_t error_code) {} + // |peer_name| is name of the peer on which side error acquired. + virtual void OnEncoderError(absl::string_view peer_name, + const VideoFrame& frame, + int32_t error_code) {} // Will be called if decoder return not WEBRTC_VIDEO_CODEC_OK. 
// All available codes are listed in // modules/video_coding/include/video_error_codes.h - virtual void OnDecoderError(uint16_t frame_id, int32_t error_code) {} + // |peer_name| is name of the peer on which side error acquired. + virtual void OnDecoderError(absl::string_view peer_name, + uint16_t frame_id, + int32_t error_code) {} // Will be called every time new stats reports are available for the // Peer Connection identified by |pc_label|. - void OnStatsReports(const std::string& pc_label, - const StatsReports& stats_reports) override {} + void OnStatsReports( + absl::string_view pc_label, + const rtc::scoped_refptr& report) override {} // Tells analyzer that analysis complete and it should calculate final // statistics. diff --git a/api/test/video_quality_test_fixture.h b/api/test/video_quality_test_fixture.h index ec07c23cd4..92c398aa54 100644 --- a/api/test/video_quality_test_fixture.h +++ b/api/test/video_quality_test_fixture.h @@ -22,6 +22,7 @@ #include "api/test/simulated_network.h" #include "api/transport/bitrate_settings.h" #include "api/transport/network_control.h" +#include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_config.h" #include "api/video_codecs/video_encoder_factory.h" @@ -31,60 +32,56 @@ namespace webrtc { class VideoQualityTestFixtureInterface { public: // Parameters are grouped into smaller structs to make it easier to set - // the desired elements and skip unused, using aggregate initialization. - // Unfortunately, C++11 (as opposed to C11) doesn't support unnamed structs, - // which makes the implementation of VideoQualityTest a bit uglier. + // the desired elements and skip unused. 
struct Params { - Params(); - ~Params(); struct CallConfig { - bool send_side_bwe; - bool generic_descriptor; + bool send_side_bwe = false; + bool generic_descriptor = false; BitrateConstraints call_bitrate_config; - int num_thumbnails; + int num_thumbnails = 0; // Indicates if secondary_(video|ss|screenshare) structures are used. - bool dual_video; + bool dual_video = false; } call; struct Video { - bool enabled; - size_t width; - size_t height; - int32_t fps; - int min_bitrate_bps; - int target_bitrate_bps; - int max_bitrate_bps; - bool suspend_below_min_bitrate; - std::string codec; - int num_temporal_layers; - int selected_tl; - int min_transmit_bps; - bool ulpfec; - bool flexfec; - bool automatic_scaling; + bool enabled = false; + size_t width = 640; + size_t height = 480; + int32_t fps = 30; + int min_bitrate_bps = 50; + int target_bitrate_bps = 800; + int max_bitrate_bps = 800; + bool suspend_below_min_bitrate = false; + std::string codec = "VP8"; + int num_temporal_layers = 1; + int selected_tl = -1; + int min_transmit_bps = 0; + bool ulpfec = false; + bool flexfec = false; + bool automatic_scaling = false; std::string clip_path; // "Generator" to generate frames instead. 
- size_t capture_device_index; + size_t capture_device_index = 0; SdpVideoFormat::Parameters sdp_params; - double encoder_overshoot_factor; + double encoder_overshoot_factor = 0.0; } video[2]; struct Audio { - bool enabled; - bool sync_video; - bool dtx; - bool use_real_adm; + bool enabled = false; + bool sync_video = false; + bool dtx = false; + bool use_real_adm = false; absl::optional ana_config; } audio; struct Screenshare { - bool enabled; - bool generate_slides; - int32_t slide_change_interval; - int32_t scroll_duration; + bool enabled = false; + bool generate_slides = false; + int32_t slide_change_interval = 10; + int32_t scroll_duration = 0; std::vector slides; } screenshare[2]; struct Analyzer { std::string test_label; - double avg_psnr_threshold; // (*) - double avg_ssim_threshold; // (*) - int test_durations_secs; + double avg_psnr_threshold = 0.0; // (*) + double avg_ssim_threshold = 0.0; // (*) + int test_durations_secs = 0; std::string graph_data_output_filename; std::string graph_title; } analyzer; @@ -95,14 +92,14 @@ class VideoQualityTestFixtureInterface { absl::optional config; struct SS { // Spatial scalability. std::vector streams; // If empty, one stream is assumed. - size_t selected_stream; - int num_spatial_layers; - int selected_sl; - InterLayerPredMode inter_layer_pred; + size_t selected_stream = 0; + int num_spatial_layers = 0; + int selected_sl = -1; + InterLayerPredMode inter_layer_pred = InterLayerPredMode::kOn; // If empty, bitrates are generated in VP9Impl automatically. std::vector spatial_layers; // If set, default parameters will be used instead of |streams|. 
- bool infer_streams; + bool infer_streams = false; } ss[2]; struct Logging { std::string rtc_event_log_name; diff --git a/api/transport/BUILD.gn b/api/transport/BUILD.gn index 0f07301fe4..a4ada07108 100644 --- a/api/transport/BUILD.gn +++ b/api/transport/BUILD.gn @@ -14,10 +14,8 @@ rtc_library("bitrate_settings") { "bitrate_settings.cc", "bitrate_settings.h", ] - deps = [ - "../../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/types:optional", - ] + deps = [ "../../rtc_base/system:rtc_export" ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("enums") { @@ -41,6 +39,8 @@ rtc_library("network_control") { "../units:data_size", "../units:time_delta", "../units:timestamp", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/types:optional", ] @@ -49,10 +49,8 @@ rtc_library("network_control") { rtc_source_set("webrtc_key_value_config") { visibility = [ "*" ] sources = [ "webrtc_key_value_config.h" ] - deps = [ - "../../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/strings", - ] + deps = [ "../../rtc_base/system:rtc_export" ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("field_trial_based_config") { @@ -64,26 +62,20 @@ rtc_library("field_trial_based_config") { deps = [ ":webrtc_key_value_config", "../../system_wrappers:field_trial", - "//third_party/abseil-cpp/absl/strings", ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } +# TODO(nisse): Rename? 
rtc_source_set("datagram_transport_interface") { visibility = [ "*" ] - sources = [ - "congestion_control_interface.h", - "data_channel_transport_interface.h", - "datagram_transport_interface.h", - ] + sources = [ "data_channel_transport_interface.h" ] deps = [ - ":network_control", "..:array_view", "..:rtc_error", "../../rtc_base:rtc_base_approved", - "../units:data_rate", - "../units:timestamp", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("goog_cc") { diff --git a/api/transport/congestion_control_interface.h b/api/transport/congestion_control_interface.h deleted file mode 100644 index 40552cb4ff..0000000000 --- a/api/transport/congestion_control_interface.h +++ /dev/null @@ -1,75 +0,0 @@ -/* Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This is EXPERIMENTAL interface for media and datagram transports. - -#ifndef API_TRANSPORT_CONGESTION_CONTROL_INTERFACE_H_ -#define API_TRANSPORT_CONGESTION_CONTROL_INTERFACE_H_ - -#include -#include -#include - -#include "api/transport/network_control.h" -#include "api/units/data_rate.h" - -namespace webrtc { - -// TODO(nisse): Defined together with MediaTransportInterface. But we should use -// types that aren't tied to media, so that MediaTransportInterface can depend -// on CongestionControlInterface, but not the other way around. -// api/transport/network_control.h may be a reasonable place. 
-class MediaTransportRttObserver; -struct MediaTransportAllocatedBitrateLimits; -struct MediaTransportTargetRateConstraints; - -// Defines congestion control feedback interface for media and datagram -// transports. -class CongestionControlInterface { - public: - virtual ~CongestionControlInterface() = default; - - // Updates allocation limits. - virtual void SetAllocatedBitrateLimits( - const MediaTransportAllocatedBitrateLimits& limits) = 0; - - // Sets starting rate. - virtual void SetTargetBitrateLimits( - const MediaTransportTargetRateConstraints& target_rate_constraints) = 0; - - // Intended for receive side. AddRttObserver registers an observer to be - // called for each RTT measurement, typically once per ACK. Before media - // transport is destructed the observer must be unregistered. - // - // TODO(sukhanov): Looks like AddRttObserver and RemoveRttObserver were - // never implemented for media transport, so keeping noop implementation. - virtual void AddRttObserver(MediaTransportRttObserver* observer) {} - virtual void RemoveRttObserver(MediaTransportRttObserver* observer) {} - - // Adds a target bitrate observer. Before media transport is destructed - // the observer must be unregistered (by calling - // RemoveTargetTransferRateObserver). - // A newly registered observer will be called back with the latest recorded - // target rate, if available. - virtual void AddTargetTransferRateObserver( - TargetTransferRateObserver* observer) = 0; - - // Removes an existing |observer| from observers. If observer was never - // registered, an error is logged and method does nothing. - virtual void RemoveTargetTransferRateObserver( - TargetTransferRateObserver* observer) = 0; - - // Returns the last known target transfer rate as reported to the above - // observers. 
- virtual absl::optional GetLatestTargetTransferRate() = 0; -}; - -} // namespace webrtc - -#endif // API_TRANSPORT_CONGESTION_CONTROL_INTERFACE_H_ diff --git a/api/transport/data_channel_transport_interface.h b/api/transport/data_channel_transport_interface.h index 671deffc6e..7b8c653c39 100644 --- a/api/transport/data_channel_transport_interface.h +++ b/api/transport/data_channel_transport_interface.h @@ -35,8 +35,8 @@ enum class DataMessageType { // sent reliably and in-order, even if the data channel is configured for // unreliable delivery. struct SendDataParams { - SendDataParams(); - SendDataParams(const SendDataParams&); + SendDataParams() = default; + SendDataParams(const SendDataParams&) = default; DataMessageType type = DataMessageType::kText; diff --git a/api/transport/datagram_transport_interface.h b/api/transport/datagram_transport_interface.h deleted file mode 100644 index 01736b978d..0000000000 --- a/api/transport/datagram_transport_interface.h +++ /dev/null @@ -1,151 +0,0 @@ -/* Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This is EXPERIMENTAL interface for media and datagram transports. 
- -#ifndef API_TRANSPORT_DATAGRAM_TRANSPORT_INTERFACE_H_ -#define API_TRANSPORT_DATAGRAM_TRANSPORT_INTERFACE_H_ - -#include -#include -#include - -#include "absl/types/optional.h" -#include "api/array_view.h" -#include "api/rtc_error.h" -#include "api/transport/congestion_control_interface.h" -#include "api/transport/data_channel_transport_interface.h" -#include "api/units/data_rate.h" -#include "api/units/timestamp.h" - -namespace rtc { -class PacketTransportInternal; -} // namespace rtc - -namespace webrtc { - -class MediaTransportStateCallback; - -typedef int64_t DatagramId; - -struct DatagramAck { - // |datagram_id| is same as passed in - // DatagramTransportInterface::SendDatagram. - DatagramId datagram_id; - - // The timestamp at which the remote peer received the identified datagram, - // according to that peer's clock. - Timestamp receive_timestamp = Timestamp::MinusInfinity(); -}; - -// All sink methods are called on network thread. -class DatagramSinkInterface { - public: - virtual ~DatagramSinkInterface() {} - - // Called when new packet is received. - virtual void OnDatagramReceived(rtc::ArrayView data) = 0; - - // Called when datagram is actually sent (datragram can be delayed due - // to congestion control or fusing). |datagram_id| is same as passed in - // DatagramTransportInterface::SendDatagram. - virtual void OnDatagramSent(DatagramId datagram_id) = 0; - - // Called when datagram is ACKed. - virtual void OnDatagramAcked(const DatagramAck& datagram_ack) = 0; - - // Called when a datagram is lost. - virtual void OnDatagramLost(DatagramId datagram_id) = 0; -}; - -// Datagram transport allows to send and receive unreliable packets (datagrams) -// and receive feedback from congestion control (via -// CongestionControlInterface). The idea is to send RTP packets as datagrams and -// have underlying implementation of datagram transport to use QUIC datagram -// protocol. 
-class DatagramTransportInterface : public DataChannelTransportInterface { - public: - virtual ~DatagramTransportInterface() = default; - - // Connect the datagram transport to the ICE transport. - // The implementation must be able to ignore incoming packets that don't - // belong to it. - virtual void Connect(rtc::PacketTransportInternal* packet_transport) = 0; - - // Returns congestion control feedback interface or nullptr if datagram - // transport does not implement congestion control. - // - // Note that right now datagram transport is used without congestion control, - // but we plan to use it in the future. - virtual CongestionControlInterface* congestion_control() = 0; - - // Sets a state observer callback. Before datagram transport is destroyed, the - // callback must be unregistered by setting it to nullptr. - // A newly registered callback will be called with the current state. - // Datagram transport does not invoke this callback concurrently. - virtual void SetTransportStateCallback( - MediaTransportStateCallback* callback) = 0; - - // Start asynchronous send of datagram. The status returned by this method - // only pertains to the synchronous operations (e.g. serialization / - // packetization), not to the asynchronous operation. - // - // Datagrams larger than GetLargestDatagramSize() will fail and return error. - // - // Datagrams are sent in FIFO order. - // - // |datagram_id| is only used in ACK/LOST notifications in - // DatagramSinkInterface and does not need to be unique. - virtual RTCError SendDatagram(rtc::ArrayView data, - DatagramId datagram_id) = 0; - - // Returns maximum size of datagram message, does not change. - // TODO(sukhanov): Because value may be undefined before connection setup - // is complete, consider returning error when called before connection is - // established. Currently returns hardcoded const, because integration - // prototype may call before connection is established. 
- virtual size_t GetLargestDatagramSize() const = 0; - - // Sets packet sink. Sink must be unset by calling - // SetDataTransportSink(nullptr) before the data transport is destroyed or - // before new sink is set. - virtual void SetDatagramSink(DatagramSinkInterface* sink) = 0; - - // Retrieves transport parameters for this datagram transport. May be called - // on either client- or server-perspective transports. - // - // For servers, the parameters represent what kind of connections and data the - // server is prepared to accept. This is generally a superset of acceptable - // parameters. - // - // For clients, the parameters echo the server configuration used to create - // the client, possibly removing any fields or parameters which the client - // does not understand. - virtual std::string GetTransportParameters() const = 0; - - // Sets remote transport parameters. |remote_params| is a serialized string - // of opaque parameters, understood by the datagram transport implementation. - // Returns an error if |remote_params| are not compatible with this transport. - // - // TODO(mellem): Make pure virtual. The default implementation maintains - // original negotiation behavior (negotiation falls back to RTP if the - // remote datagram transport fails to echo exactly the local parameters). - virtual RTCError SetRemoteTransportParameters( - absl::string_view remote_params) { - if (remote_params == GetTransportParameters()) { - return RTCError::OK(); - } - return RTCError(RTCErrorType::UNSUPPORTED_PARAMETER, - "Local and remote transport parameters do not match"); - } -}; - -} // namespace webrtc - -#endif // API_TRANSPORT_DATAGRAM_TRANSPORT_INTERFACE_H_ diff --git a/api/transport/media/BUILD.gn b/api/transport/media/BUILD.gn deleted file mode 100644 index 24a364c2e5..0000000000 --- a/api/transport/media/BUILD.gn +++ /dev/null @@ -1,52 +0,0 @@ -# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. 
-# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. - -import("../../../webrtc.gni") - -rtc_library("media_transport_interface") { - visibility = [ "*" ] - sources = [ - "media_transport_config.cc", - "media_transport_config.h", - "media_transport_interface.cc", - "media_transport_interface.h", - ] - deps = [ - ":audio_interfaces", - ":video_interfaces", - "..:datagram_transport_interface", - "..:network_control", - "../..:array_view", - "../..:rtc_error", - "../../..:webrtc_common", - "../../../rtc_base", - "../../../rtc_base:checks", - "../../../rtc_base:rtc_base_approved", - "../../../rtc_base:stringutils", - "../../units:data_rate", - "//third_party/abseil-cpp/absl/types:optional", - ] -} - -rtc_library("audio_interfaces") { - visibility = [ "*" ] - sources = [ - "audio_transport.cc", - "audio_transport.h", - ] - deps = [ "../..:array_view" ] -} - -rtc_library("video_interfaces") { - visibility = [ "*" ] - sources = [ - "video_transport.cc", - "video_transport.h", - ] - deps = [ "../../video:encoded_image" ] -} diff --git a/api/transport/media/audio_transport.cc b/api/transport/media/audio_transport.cc deleted file mode 100644 index 0f5fe8bcf2..0000000000 --- a/api/transport/media/audio_transport.cc +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This is EXPERIMENTAL interface for media transport. 
-// -// The goal is to refactor WebRTC code so that audio and video frames -// are sent / received through the media transport interface. This will -// enable different media transport implementations, including QUIC-based -// media transport. - -#include "api/transport/media/audio_transport.h" - -#include - -namespace webrtc { - -MediaTransportEncodedAudioFrame::~MediaTransportEncodedAudioFrame() {} - -MediaTransportEncodedAudioFrame::MediaTransportEncodedAudioFrame( - int sampling_rate_hz, - int starting_sample_index, - int samples_per_channel, - int sequence_number, - FrameType frame_type, - int payload_type, - std::vector encoded_data) - : sampling_rate_hz_(sampling_rate_hz), - starting_sample_index_(starting_sample_index), - samples_per_channel_(samples_per_channel), - sequence_number_(sequence_number), - frame_type_(frame_type), - payload_type_(payload_type), - encoded_data_(std::move(encoded_data)) {} - -MediaTransportEncodedAudioFrame& MediaTransportEncodedAudioFrame::operator=( - const MediaTransportEncodedAudioFrame&) = default; - -MediaTransportEncodedAudioFrame& MediaTransportEncodedAudioFrame::operator=( - MediaTransportEncodedAudioFrame&&) = default; - -MediaTransportEncodedAudioFrame::MediaTransportEncodedAudioFrame( - const MediaTransportEncodedAudioFrame&) = default; - -MediaTransportEncodedAudioFrame::MediaTransportEncodedAudioFrame( - MediaTransportEncodedAudioFrame&&) = default; - -} // namespace webrtc diff --git a/api/transport/media/audio_transport.h b/api/transport/media/audio_transport.h deleted file mode 100644 index dcbdcd7afe..0000000000 --- a/api/transport/media/audio_transport.h +++ /dev/null @@ -1,120 +0,0 @@ -/* Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This is EXPERIMENTAL interface for media transport. -// -// The goal is to refactor WebRTC code so that audio and video frames -// are sent / received through the media transport interface. This will -// enable different media transport implementations, including QUIC-based -// media transport. - -#ifndef API_TRANSPORT_MEDIA_AUDIO_TRANSPORT_H_ -#define API_TRANSPORT_MEDIA_AUDIO_TRANSPORT_H_ - -#include - -#include "api/array_view.h" - -namespace webrtc { - -// Represents encoded audio frame in any encoding (type of encoding is opaque). -// To avoid copying of encoded data use move semantics when passing by value. -class MediaTransportEncodedAudioFrame final { - public: - enum class FrameType { - // Normal audio frame (equivalent to webrtc::kAudioFrameSpeech). - kSpeech, - - // DTX frame (equivalent to webrtc::kAudioFrameCN). - kDiscontinuousTransmission, - // TODO(nisse): Mis-spelled version, update users, then delete. - kDiscountinuousTransmission = kDiscontinuousTransmission, - }; - - MediaTransportEncodedAudioFrame( - // Audio sampling rate, for example 48000. - int sampling_rate_hz, - - // Starting sample index of the frame, i.e. how many audio samples were - // before this frame since the beginning of the call or beginning of time - // in one channel (the starting point should not matter for NetEq). In - // WebRTC it is used as a timestamp of the frame. - // TODO(sukhanov): Starting_sample_index is currently adjusted on the - // receiver side in RTP path. Non-RTP implementations should preserve it. - // For NetEq initial offset should not matter so we should consider fixing - // RTP path. - int starting_sample_index, - - // Number of audio samples in audio frame in 1 channel. 
- int samples_per_channel, - - // Sequence number of the frame in the order sent, it is currently - // required by NetEq, but we can fix NetEq, because starting_sample_index - // should be enough. - int sequence_number, - - // If audio frame is a speech or discontinued transmission. - FrameType frame_type, - - // Opaque payload type. In RTP codepath payload type is stored in RTP - // header. In other implementations it should be simply passed through the - // wire -- it's needed for decoder. - int payload_type, - - // Vector with opaque encoded data. - std::vector encoded_data); - - ~MediaTransportEncodedAudioFrame(); - MediaTransportEncodedAudioFrame(const MediaTransportEncodedAudioFrame&); - MediaTransportEncodedAudioFrame& operator=( - const MediaTransportEncodedAudioFrame& other); - MediaTransportEncodedAudioFrame& operator=( - MediaTransportEncodedAudioFrame&& other); - MediaTransportEncodedAudioFrame(MediaTransportEncodedAudioFrame&&); - - // Getters. - int sampling_rate_hz() const { return sampling_rate_hz_; } - int starting_sample_index() const { return starting_sample_index_; } - int samples_per_channel() const { return samples_per_channel_; } - int sequence_number() const { return sequence_number_; } - - int payload_type() const { return payload_type_; } - FrameType frame_type() const { return frame_type_; } - - rtc::ArrayView encoded_data() const { return encoded_data_; } - - private: - int sampling_rate_hz_; - int starting_sample_index_; - int samples_per_channel_; - - // TODO(sukhanov): Refactor NetEq so we don't need sequence number. - // Having sample_index and samples_per_channel should be enough. - int sequence_number_; - - FrameType frame_type_; - - int payload_type_; - - std::vector encoded_data_; -}; - -// Interface for receiving encoded audio frames from MediaTransportInterface -// implementations. 
-class MediaTransportAudioSinkInterface { - public: - virtual ~MediaTransportAudioSinkInterface() = default; - - // Called when new encoded audio frame is received. - virtual void OnData(uint64_t channel_id, - MediaTransportEncodedAudioFrame frame) = 0; -}; - -} // namespace webrtc -#endif // API_TRANSPORT_MEDIA_AUDIO_TRANSPORT_H_ diff --git a/api/transport/media/media_transport_config.cc b/api/transport/media/media_transport_config.cc deleted file mode 100644 index b9b19cb6f0..0000000000 --- a/api/transport/media/media_transport_config.cc +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "api/transport/media/media_transport_config.h" - -#include "rtc_base/checks.h" -#include "rtc_base/strings/string_builder.h" - -namespace webrtc { - -MediaTransportConfig::MediaTransportConfig(size_t rtp_max_packet_size) - : rtp_max_packet_size(rtp_max_packet_size) { - RTC_DCHECK_GT(rtp_max_packet_size, 0); -} - -std::string MediaTransportConfig::DebugString() const { - rtc::StringBuilder result; - result << "{rtp_max_packet_size: " << rtp_max_packet_size.value_or(0) << "}"; - return result.Release(); -} - -} // namespace webrtc diff --git a/api/transport/media/media_transport_config.h b/api/transport/media/media_transport_config.h deleted file mode 100644 index 7ef65453ae..0000000000 --- a/api/transport/media/media_transport_config.h +++ /dev/null @@ -1,38 +0,0 @@ -/* Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_CONFIG_H_ -#define API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_CONFIG_H_ - -#include -#include -#include - -#include "absl/types/optional.h" - -namespace webrtc { - -// Media transport config is made available to both transport and audio / video -// layers, but access to individual interfaces should not be open without -// necessity. -struct MediaTransportConfig { - // Default constructor for no-media transport scenarios. - MediaTransportConfig() = default; - - // Constructor for datagram transport scenarios. - explicit MediaTransportConfig(size_t rtp_max_packet_size); - - std::string DebugString() const; - - // If provided, limits RTP packet size (excludes ICE, IP or network overhead). - absl::optional rtp_max_packet_size; -}; - -} // namespace webrtc - -#endif // API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_CONFIG_H_ diff --git a/api/transport/media/media_transport_interface.cc b/api/transport/media/media_transport_interface.cc deleted file mode 100644 index 323ddca689..0000000000 --- a/api/transport/media/media_transport_interface.cc +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This is EXPERIMENTAL interface for media transport. -// -// The goal is to refactor WebRTC code so that audio and video frames -// are sent / received through the media transport interface. 
This will -// enable different media transport implementations, including QUIC-based -// media transport. - -#include "api/transport/media/media_transport_interface.h" - -#include -#include - -#include "api/transport/datagram_transport_interface.h" - -namespace webrtc { - -MediaTransportSettings::MediaTransportSettings() = default; -MediaTransportSettings::MediaTransportSettings(const MediaTransportSettings&) = - default; -MediaTransportSettings& MediaTransportSettings::operator=( - const MediaTransportSettings&) = default; -MediaTransportSettings::~MediaTransportSettings() = default; - -SendDataParams::SendDataParams() = default; -SendDataParams::SendDataParams(const SendDataParams&) = default; - -RTCErrorOr> -MediaTransportFactory::CreateMediaTransport( - rtc::PacketTransportInternal* packet_transport, - rtc::Thread* network_thread, - const MediaTransportSettings& settings) { - return std::unique_ptr(nullptr); -} - -RTCErrorOr> -MediaTransportFactory::CreateMediaTransport( - rtc::Thread* network_thread, - const MediaTransportSettings& settings) { - return std::unique_ptr(nullptr); -} - -RTCErrorOr> -MediaTransportFactory::CreateDatagramTransport( - rtc::Thread* network_thread, - const MediaTransportSettings& settings) { - return std::unique_ptr(nullptr); -} - -std::string MediaTransportFactory::GetTransportName() const { - return ""; -} - -MediaTransportInterface::MediaTransportInterface() = default; -MediaTransportInterface::~MediaTransportInterface() = default; - -absl::optional -MediaTransportInterface::GetTransportParametersOffer() const { - return absl::nullopt; -} - -void MediaTransportInterface::Connect( - rtc::PacketTransportInternal* packet_transport) {} - -void MediaTransportInterface::SetKeyFrameRequestCallback( - MediaTransportKeyFrameRequestCallback* callback) {} - -absl::optional -MediaTransportInterface::GetLatestTargetTransferRate() { - return absl::nullopt; -} - -void MediaTransportInterface::AddNetworkChangeCallback( - 
MediaTransportNetworkChangeCallback* callback) {} - -void MediaTransportInterface::RemoveNetworkChangeCallback( - MediaTransportNetworkChangeCallback* callback) {} - -void MediaTransportInterface::SetFirstAudioPacketReceivedObserver( - AudioPacketReceivedObserver* observer) {} - -void MediaTransportInterface::AddTargetTransferRateObserver( - TargetTransferRateObserver* observer) {} -void MediaTransportInterface::RemoveTargetTransferRateObserver( - TargetTransferRateObserver* observer) {} - -void MediaTransportInterface::AddRttObserver( - MediaTransportRttObserver* observer) {} -void MediaTransportInterface::RemoveRttObserver( - MediaTransportRttObserver* observer) {} - -size_t MediaTransportInterface::GetAudioPacketOverhead() const { - return 0; -} - -void MediaTransportInterface::SetAllocatedBitrateLimits( - const MediaTransportAllocatedBitrateLimits& limits) {} - -} // namespace webrtc diff --git a/api/transport/media/media_transport_interface.h b/api/transport/media/media_transport_interface.h deleted file mode 100644 index dbe68d344b..0000000000 --- a/api/transport/media/media_transport_interface.h +++ /dev/null @@ -1,320 +0,0 @@ -/* Copyright 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This is EXPERIMENTAL interface for media transport. -// -// The goal is to refactor WebRTC code so that audio and video frames -// are sent / received through the media transport interface. This will -// enable different media transport implementations, including QUIC-based -// media transport. 
- -#ifndef API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_INTERFACE_H_ -#define API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_INTERFACE_H_ - -#include -#include -#include - -#include "absl/types/optional.h" -#include "api/array_view.h" -#include "api/rtc_error.h" -#include "api/transport/data_channel_transport_interface.h" -#include "api/transport/media/audio_transport.h" -#include "api/transport/media/video_transport.h" -#include "api/transport/network_control.h" -#include "api/units/data_rate.h" -#include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/network_route.h" - -namespace rtc { -class PacketTransportInternal; -class Thread; -} // namespace rtc - -namespace webrtc { - -class DatagramTransportInterface; -class RtcEventLog; - -class AudioPacketReceivedObserver { - public: - virtual ~AudioPacketReceivedObserver() = default; - - // Invoked for the first received audio packet on a given channel id. - // It will be invoked once for each channel id. - virtual void OnFirstAudioPacketReceived(int64_t channel_id) = 0; -}; - -// Used to configure stream allocations. -struct MediaTransportAllocatedBitrateLimits { - DataRate min_pacing_rate = DataRate::Zero(); - DataRate max_padding_bitrate = DataRate::Zero(); - DataRate max_total_allocated_bitrate = DataRate::Zero(); -}; - -// Used to configure target bitrate constraints. -// If the value is provided, the constraint is updated. -// If the value is omitted, the value is left unchanged. -struct MediaTransportTargetRateConstraints { - absl::optional min_bitrate; - absl::optional max_bitrate; - absl::optional starting_bitrate; -}; - -// A collection of settings for creation of media transport. 
-struct MediaTransportSettings final { - MediaTransportSettings(); - MediaTransportSettings(const MediaTransportSettings&); - MediaTransportSettings& operator=(const MediaTransportSettings&); - ~MediaTransportSettings(); - - // Group calls are not currently supported, in 1:1 call one side must set - // is_caller = true and another is_caller = false. - bool is_caller; - - // Must be set if a pre-shared key is used for the call. - // TODO(bugs.webrtc.org/9944): This should become zero buffer in the distant - // future. - absl::optional pre_shared_key; - - // If present, this is a config passed from the caller to the answerer in the - // offer. Each media transport knows how to understand its own parameters. - absl::optional remote_transport_parameters; - - // If present, provides the event log that media transport should use. - // Media transport does not own it. The lifetime of |event_log| will exceed - // the lifetime of the instance of MediaTransportInterface instance. - RtcEventLog* event_log = nullptr; -}; - -// Callback to notify about network route changes. -class MediaTransportNetworkChangeCallback { - public: - virtual ~MediaTransportNetworkChangeCallback() = default; - - // Called when the network route is changed, with the new network route. - virtual void OnNetworkRouteChanged( - const rtc::NetworkRoute& new_network_route) = 0; -}; - -// State of the media transport. Media transport begins in the pending state. -// It transitions to writable when it is ready to send media. It may transition -// back to pending if the connection is blocked. It may transition to closed at -// any time. Closed is terminal: a transport will never re-open once closed. -enum class MediaTransportState { - kPending, - kWritable, - kClosed, -}; - -// Callback invoked whenever the state of the media transport changes. -class MediaTransportStateCallback { - public: - virtual ~MediaTransportStateCallback() = default; - - // Invoked whenever the state of the media transport changes. 
- virtual void OnStateChanged(MediaTransportState state) = 0; -}; - -// Callback for RTT measurements on the receive side. -// TODO(nisse): Related interfaces: CallStatsObserver and RtcpRttStats. It's -// somewhat unclear what type of measurement is needed. It's used to configure -// NACK generation and playout buffer. Either raw measurement values or recent -// maximum would make sense for this use. Need consolidation of RTT signalling. -class MediaTransportRttObserver { - public: - virtual ~MediaTransportRttObserver() = default; - - // Invoked when a new RTT measurement is available, typically once per ACK. - virtual void OnRttUpdated(int64_t rtt_ms) = 0; -}; - -// Media transport interface for sending / receiving encoded audio/video frames -// and receiving bandwidth estimate update from congestion control. -class MediaTransportInterface : public DataChannelTransportInterface { - public: - MediaTransportInterface(); - virtual ~MediaTransportInterface(); - - // Retrieves callers config (i.e. media transport offer) that should be passed - // to the callee, before the call is connected. Such config is opaque to SDP - // (sdp just passes it through). The config is a binary blob, so SDP may - // choose to use base64 to serialize it (or any other approach that guarantees - // that the binary blob goes through). This should only be called for the - // caller's perspective. - // - // This may return an unset optional, which means that the given media - // transport is not supported / disabled and shouldn't be reported in SDP. - // - // It may also return an empty string, in which case the media transport is - // supported, but without any extra settings. - // TODO(psla): Make abstract. - virtual absl::optional GetTransportParametersOffer() const; - - // Connect the media transport to the ICE transport. - // The implementation must be able to ignore incoming packets that don't - // belong to it. - // TODO(psla): Make abstract. 
- virtual void Connect(rtc::PacketTransportInternal* packet_transport); - - // Start asynchronous send of audio frame. The status returned by this method - // only pertains to the synchronous operations (e.g. - // serialization/packetization), not to the asynchronous operation. - - virtual RTCError SendAudioFrame(uint64_t channel_id, - MediaTransportEncodedAudioFrame frame) = 0; - - // Start asynchronous send of video frame. The status returned by this method - // only pertains to the synchronous operations (e.g. - // serialization/packetization), not to the asynchronous operation. - virtual RTCError SendVideoFrame( - uint64_t channel_id, - const MediaTransportEncodedVideoFrame& frame) = 0; - - // Used by video sender to be notified on key frame requests. - virtual void SetKeyFrameRequestCallback( - MediaTransportKeyFrameRequestCallback* callback); - - // Requests a keyframe for the particular channel (stream). The caller should - // check that the keyframe is not present in a jitter buffer already (i.e. - // don't request a keyframe if there is one that you will get from the jitter - // buffer in a moment). - virtual RTCError RequestKeyFrame(uint64_t channel_id) = 0; - - // Sets audio sink. Sink must be unset by calling SetReceiveAudioSink(nullptr) - // before the media transport is destroyed or before new sink is set. - virtual void SetReceiveAudioSink(MediaTransportAudioSinkInterface* sink) = 0; - - // Registers a video sink. Before destruction of media transport, you must - // pass a nullptr. - virtual void SetReceiveVideoSink(MediaTransportVideoSinkInterface* sink) = 0; - - // Adds a target bitrate observer. Before media transport is destructed - // the observer must be unregistered (by calling - // RemoveTargetTransferRateObserver). - // A newly registered observer will be called back with the latest recorded - // target rate, if available. 
- virtual void AddTargetTransferRateObserver( - TargetTransferRateObserver* observer); - - // Removes an existing |observer| from observers. If observer was never - // registered, an error is logged and method does nothing. - virtual void RemoveTargetTransferRateObserver( - TargetTransferRateObserver* observer); - - // Sets audio packets observer, which gets informed about incoming audio - // packets. Before destruction, the observer must be unregistered by setting - // nullptr. - // - // This method may be temporary, when the multiplexer is implemented (or - // multiplexer may use it to demultiplex channel ids). - virtual void SetFirstAudioPacketReceivedObserver( - AudioPacketReceivedObserver* observer); - - // Intended for receive side. AddRttObserver registers an observer to be - // called for each RTT measurement, typically once per ACK. Before media - // transport is destructed the observer must be unregistered. - virtual void AddRttObserver(MediaTransportRttObserver* observer); - virtual void RemoveRttObserver(MediaTransportRttObserver* observer); - - // Returns the last known target transfer rate as reported to the above - // observers. - virtual absl::optional GetLatestTargetTransferRate(); - - // Gets the audio packet overhead in bytes. Returned overhead does not include - // transport overhead (ipv4/6, turn channeldata, tcp/udp, etc.). - // If the transport is capable of fusing packets together, this overhead - // might not be a very accurate number. - // TODO(nisse): Deprecated. - virtual size_t GetAudioPacketOverhead() const; - - // Corresponding observers for audio and video overhead. Before destruction, - // the observers must be unregistered by setting nullptr. - - // Registers an observer for network change events. If the network route is - // already established when the callback is added, |callback| will be called - // immediately with the current network route. Before media transport is - // destroyed, the callback must be removed. 
- virtual void AddNetworkChangeCallback( - MediaTransportNetworkChangeCallback* callback); - virtual void RemoveNetworkChangeCallback( - MediaTransportNetworkChangeCallback* callback); - - // Sets a state observer callback. Before media transport is destroyed, the - // callback must be unregistered by setting it to nullptr. - // A newly registered callback will be called with the current state. - // Media transport does not invoke this callback concurrently. - virtual void SetMediaTransportStateCallback( - MediaTransportStateCallback* callback) = 0; - - // Updates allocation limits. - // TODO(psla): Make abstract when downstream implementation implement it. - virtual void SetAllocatedBitrateLimits( - const MediaTransportAllocatedBitrateLimits& limits); - - // Sets starting rate. - // TODO(psla): Make abstract when downstream implementation implement it. - virtual void SetTargetBitrateLimits( - const MediaTransportTargetRateConstraints& target_rate_constraints) {} - - // TODO(sukhanov): RtcEventLogs. -}; - -// If media transport factory is set in peer connection factory, it will be -// used to create media transport for sending/receiving encoded frames and -// this transport will be used instead of default RTP/SRTP transport. -// -// Currently Media Transport negotiation is not supported in SDP. -// If application is using media transport, it must negotiate it before -// setting media transport factory in peer connection. -class MediaTransportFactory { - public: - virtual ~MediaTransportFactory() = default; - - // Creates media transport. - // - Does not take ownership of packet_transport or network_thread. - // - Does not support group calls, in 1:1 call one side must set - // is_caller = true and another is_caller = false. - virtual RTCErrorOr> - CreateMediaTransport(rtc::PacketTransportInternal* packet_transport, - rtc::Thread* network_thread, - const MediaTransportSettings& settings); - - // Creates a new Media Transport in a disconnected state. 
If the media - // transport for the caller is created, one can then call - // MediaTransportInterface::GetTransportParametersOffer on that new instance. - // TODO(psla): Make abstract. - virtual RTCErrorOr> - CreateMediaTransport(rtc::Thread* network_thread, - const MediaTransportSettings& settings); - - // Creates a new Datagram Transport in a disconnected state. If the datagram - // transport for the caller is created, one can then call - // DatagramTransportInterface::GetTransportParametersOffer on that new - // instance. - // - // TODO(sukhanov): Consider separating media and datagram transport factories. - // TODO(sukhanov): Move factory to a separate .h file. - virtual RTCErrorOr> - CreateDatagramTransport(rtc::Thread* network_thread, - const MediaTransportSettings& settings); - - // Gets a transport name which is supported by the implementation. - // Different factories should return different transport names, and at runtime - // it will be checked that different names were used. - // For example, "rtp" or "generic" may be returned by two different - // implementations. - // The value returned by this method must never change in the lifetime of the - // factory. - // TODO(psla): Make abstract. - virtual std::string GetTransportName() const; -}; - -} // namespace webrtc -#endif // API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_INTERFACE_H_ diff --git a/api/transport/media/video_transport.cc b/api/transport/media/video_transport.cc deleted file mode 100644 index a6f5304048..0000000000 --- a/api/transport/media/video_transport.cc +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -// This is EXPERIMENTAL interface for media transport. -// -// The goal is to refactor WebRTC code so that audio and video frames -// are sent / received through the media transport interface. This will -// enable different media transport implementations, including QUIC-based -// media transport. - -#include "api/transport/media/video_transport.h" - -#include - -namespace webrtc { - -MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame() = default; - -MediaTransportEncodedVideoFrame::~MediaTransportEncodedVideoFrame() = default; - -MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame( - int64_t frame_id, - std::vector referenced_frame_ids, - int payload_type, - const webrtc::EncodedImage& encoded_image) - : payload_type_(payload_type), - encoded_image_(encoded_image), - frame_id_(frame_id), - referenced_frame_ids_(std::move(referenced_frame_ids)) {} - -MediaTransportEncodedVideoFrame& MediaTransportEncodedVideoFrame::operator=( - const MediaTransportEncodedVideoFrame&) = default; - -MediaTransportEncodedVideoFrame& MediaTransportEncodedVideoFrame::operator=( - MediaTransportEncodedVideoFrame&&) = default; - -MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame( - const MediaTransportEncodedVideoFrame& o) - : MediaTransportEncodedVideoFrame() { - *this = o; -} - -MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame( - MediaTransportEncodedVideoFrame&& o) - : MediaTransportEncodedVideoFrame() { - *this = std::move(o); -} - -} // namespace webrtc diff --git a/api/transport/media/video_transport.h b/api/transport/media/video_transport.h deleted file mode 100644 index affd2e0d38..0000000000 --- a/api/transport/media/video_transport.h +++ /dev/null @@ -1,101 +0,0 @@ -/* Copyright 2019 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// This is EXPERIMENTAL interface for media transport. -// -// The goal is to refactor WebRTC code so that audio and video frames -// are sent / received through the media transport interface. This will -// enable different media transport implementations, including QUIC-based -// media transport. - -#ifndef API_TRANSPORT_MEDIA_VIDEO_TRANSPORT_H_ -#define API_TRANSPORT_MEDIA_VIDEO_TRANSPORT_H_ - -#include - -#include "api/video/encoded_image.h" - -namespace webrtc { - -// Represents encoded video frame, along with the codec information. -class MediaTransportEncodedVideoFrame final { - public: - MediaTransportEncodedVideoFrame(int64_t frame_id, - std::vector referenced_frame_ids, - int payload_type, - const webrtc::EncodedImage& encoded_image); - ~MediaTransportEncodedVideoFrame(); - MediaTransportEncodedVideoFrame(const MediaTransportEncodedVideoFrame&); - MediaTransportEncodedVideoFrame& operator=( - const MediaTransportEncodedVideoFrame& other); - MediaTransportEncodedVideoFrame& operator=( - MediaTransportEncodedVideoFrame&& other); - MediaTransportEncodedVideoFrame(MediaTransportEncodedVideoFrame&&); - - int payload_type() const { return payload_type_; } - const webrtc::EncodedImage& encoded_image() const { return encoded_image_; } - - int64_t frame_id() const { return frame_id_; } - const std::vector& referenced_frame_ids() const { - return referenced_frame_ids_; - } - - // Hack to workaround lack of ownership of the EncodedImage buffer. If we - // don't already own the underlying data, make a copy. - void Retain() { encoded_image_.Retain(); } - - private: - MediaTransportEncodedVideoFrame(); - - int payload_type_; - - // The buffer is not always owned by the encoded image. 
On the sender it means - // that it will need to make a copy using the Retain() method, if it wants to - // deliver it asynchronously. - webrtc::EncodedImage encoded_image_; - - // Frame id uniquely identifies a frame in a stream. It needs to be unique in - // a given time window (i.e. technically unique identifier for the lifetime of - // the connection is not needed, but you need to guarantee that remote side - // got rid of the previous frame_id if you plan to reuse it). - // - // It is required by a remote jitter buffer, and is the same as - // EncodedFrame::id::picture_id. - // - // This data must be opaque to the media transport, and media transport should - // itself not make any assumptions about what it is and its uniqueness. - int64_t frame_id_; - - // A single frame might depend on other frames. This is set of identifiers on - // which the current frame depends. - std::vector referenced_frame_ids_; -}; - -// Interface for receiving encoded video frames from MediaTransportInterface -// implementations. -class MediaTransportVideoSinkInterface { - public: - virtual ~MediaTransportVideoSinkInterface() = default; - - // Called when new encoded video frame is received. - virtual void OnData(uint64_t channel_id, - MediaTransportEncodedVideoFrame frame) = 0; -}; - -// Interface for video sender to be notified of received key frame request. -class MediaTransportKeyFrameRequestCallback { - public: - virtual ~MediaTransportKeyFrameRequestCallback() = default; - - // Called when a key frame request is received on the transport. 
- virtual void OnKeyFrameRequested(uint64_t channel_id) = 0; -}; - -} // namespace webrtc -#endif // API_TRANSPORT_MEDIA_VIDEO_TRANSPORT_H_ diff --git a/api/transport/rtp/BUILD.gn b/api/transport/rtp/BUILD.gn index b0849502c8..7b01169360 100644 --- a/api/transport/rtp/BUILD.gn +++ b/api/transport/rtp/BUILD.gn @@ -14,15 +14,20 @@ rtc_source_set("rtp_source") { deps = [ "../../../api:rtp_headers", "../../../rtc_base:checks", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("dependency_descriptor") { visibility = [ "*" ] - sources = [ "dependency_descriptor.h" ] - deps = [ + sources = [ + "dependency_descriptor.cc", + "dependency_descriptor.h", + ] + deps = [ "../../../rtc_base:checks" ] + absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector", + "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", ] } diff --git a/api/transport/rtp/dependency_descriptor.cc b/api/transport/rtp/dependency_descriptor.cc new file mode 100644 index 0000000000..2a9b6d9a71 --- /dev/null +++ b/api/transport/rtp/dependency_descriptor.cc @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/transport/rtp/dependency_descriptor.h" + +#include "absl/container/inlined_vector.h" +#include "absl/strings/string_view.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +constexpr int DependencyDescriptor::kMaxSpatialIds; +constexpr int DependencyDescriptor::kMaxTemporalIds; +constexpr int DependencyDescriptor::kMaxTemplates; +constexpr int DependencyDescriptor::kMaxDecodeTargets; + +namespace webrtc_impl { + +absl::InlinedVector StringToDecodeTargetIndications( + absl::string_view symbols) { + absl::InlinedVector dtis; + dtis.reserve(symbols.size()); + for (char symbol : symbols) { + DecodeTargetIndication indication; + switch (symbol) { + case '-': + indication = DecodeTargetIndication::kNotPresent; + break; + case 'D': + indication = DecodeTargetIndication::kDiscardable; + break; + case 'R': + indication = DecodeTargetIndication::kRequired; + break; + case 'S': + indication = DecodeTargetIndication::kSwitch; + break; + default: + RTC_NOTREACHED(); + } + dtis.push_back(indication); + } + return dtis; +} + +} // namespace webrtc_impl +} // namespace webrtc diff --git a/api/transport/rtp/dependency_descriptor.h b/api/transport/rtp/dependency_descriptor.h index a488f56dfd..6967c83517 100644 --- a/api/transport/rtp/dependency_descriptor.h +++ b/api/transport/rtp/dependency_descriptor.h @@ -13,10 +13,12 @@ #include +#include #include #include #include "absl/container/inlined_vector.h" +#include "absl/strings/string_view.h" #include "absl/types/optional.h" namespace webrtc { @@ -52,6 +54,13 @@ enum class DecodeTargetIndication { }; struct FrameDependencyTemplate { + // Setters are named briefly to chain them when building the template. 
+ FrameDependencyTemplate& S(int spatial_layer); + FrameDependencyTemplate& T(int temporal_layer); + FrameDependencyTemplate& Dtis(absl::string_view dtis); + FrameDependencyTemplate& FrameDiffs(std::initializer_list diffs); + FrameDependencyTemplate& ChainDiffs(std::initializer_list diffs); + friend bool operator==(const FrameDependencyTemplate& lhs, const FrameDependencyTemplate& rhs) { return lhs.spatial_id == rhs.spatial_id && @@ -82,14 +91,18 @@ struct FrameDependencyStructure { int num_decode_targets = 0; int num_chains = 0; // If chains are used (num_chains > 0), maps decode target index into index of - // the chain protecting that target or |num_chains| value if decode target is - // not protected by a chain. + // the chain protecting that target. absl::InlinedVector decode_target_protected_by_chain; absl::InlinedVector resolutions; std::vector templates; }; struct DependencyDescriptor { + static constexpr int kMaxSpatialIds = 4; + static constexpr int kMaxTemporalIds = 8; + static constexpr int kMaxDecodeTargets = 32; + static constexpr int kMaxTemplates = 64; + bool first_packet_in_frame = true; bool last_packet_in_frame = true; int frame_number = 0; @@ -99,6 +112,37 @@ struct DependencyDescriptor { std::unique_ptr attached_structure; }; +// Below are implementation details. 
+namespace webrtc_impl { +absl::InlinedVector StringToDecodeTargetIndications( + absl::string_view indication_symbols); +} // namespace webrtc_impl + +inline FrameDependencyTemplate& FrameDependencyTemplate::S(int spatial_layer) { + this->spatial_id = spatial_layer; + return *this; +} +inline FrameDependencyTemplate& FrameDependencyTemplate::T(int temporal_layer) { + this->temporal_id = temporal_layer; + return *this; +} +inline FrameDependencyTemplate& FrameDependencyTemplate::Dtis( + absl::string_view dtis) { + this->decode_target_indications = + webrtc_impl::StringToDecodeTargetIndications(dtis); + return *this; +} +inline FrameDependencyTemplate& FrameDependencyTemplate::FrameDiffs( + std::initializer_list diffs) { + this->frame_diffs.assign(diffs.begin(), diffs.end()); + return *this; +} +inline FrameDependencyTemplate& FrameDependencyTemplate::ChainDiffs( + std::initializer_list diffs) { + this->chain_diffs.assign(diffs.begin(), diffs.end()); + return *this; +} + } // namespace webrtc #endif // API_TRANSPORT_RTP_DEPENDENCY_DESCRIPTOR_H_ diff --git a/api/transport/test/mock_network_control.h b/api/transport/test/mock_network_control.h index 54a416cb77..f613004fb7 100644 --- a/api/transport/test/mock_network_control.h +++ b/api/transport/test/mock_network_control.h @@ -18,11 +18,16 @@ namespace webrtc { class MockNetworkStateEstimator : public NetworkStateEstimator { public: - MOCK_METHOD0(GetCurrentEstimate, absl::optional()); - MOCK_METHOD1(OnTransportPacketsFeedback, - void(const TransportPacketsFeedback&)); - MOCK_METHOD1(OnReceivedPacket, void(const PacketResult&)); - MOCK_METHOD1(OnRouteChange, void(const NetworkRouteChange&)); + MOCK_METHOD(absl::optional, + GetCurrentEstimate, + (), + (override)); + MOCK_METHOD(void, + OnTransportPacketsFeedback, + (const TransportPacketsFeedback&), + (override)); + MOCK_METHOD(void, OnReceivedPacket, (const PacketResult&), (override)); + MOCK_METHOD(void, OnRouteChange, (const NetworkRouteChange&), (override)); }; } // 
namespace webrtc diff --git a/api/units/data_rate_unittest.cc b/api/units/data_rate_unittest.cc index 4a6dd21af3..f77b3702d4 100644 --- a/api/units/data_rate_unittest.cc +++ b/api/units/data_rate_unittest.cc @@ -175,7 +175,7 @@ TEST(UnitConversionTest, DataRateAndDataSizeAndFrequency) { EXPECT_EQ((rate_b / freq_a).bytes(), kBitsPerSecond / kHertz / 8); } -TEST(UnitConversionTest, DivisionFailsOnLargeSize) { +TEST(UnitConversionDeathTest, DivisionFailsOnLargeSize) { // Note that the failure is expected since the current implementation is // implementated in a way that does not support division of large sizes. If // the implementation is changed, this test can safely be removed. diff --git a/api/video/BUILD.gn b/api/video/BUILD.gn index 290c2f2abb..ee7e51d4c4 100644 --- a/api/video/BUILD.gn +++ b/api/video/BUILD.gn @@ -21,7 +21,6 @@ rtc_library("video_rtp_headers") { "hdr_metadata.h", "video_content_type.cc", "video_content_type.h", - "video_frame_marking.h", "video_rotation.h", "video_timing.cc", "video_timing.h", @@ -31,8 +30,8 @@ rtc_library("video_rtp_headers") { "..:array_view", "../../rtc_base:rtc_base_approved", "../../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("video_frame") { @@ -56,8 +55,8 @@ rtc_library("video_frame") { "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", "../../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } if (is_android) { @@ -141,8 +140,8 @@ rtc_library("encoded_image") { "../../rtc_base:deprecation", "../../rtc_base:rtc_base_approved", "../../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("encoded_frame") { @@ -173,8 +172,8 @@ rtc_library("video_bitrate_allocation") { "../../rtc_base:safe_conversions", 
"../../rtc_base:stringutils", "../../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("video_bitrate_allocator") { @@ -209,8 +208,8 @@ rtc_source_set("video_stream_decoder") { "../task_queue", "../units:time_delta", "../video_codecs:video_codecs_api", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("video_stream_decoder_create") { @@ -237,7 +236,10 @@ rtc_library("video_adaptation") { "video_adaptation_reason.h", ] - deps = [ "../../rtc_base:checks" ] + deps = [ + "../../rtc_base:checks", + "../../rtc_base:stringutils", + ] } rtc_source_set("video_stream_encoder") { @@ -256,10 +258,29 @@ rtc_source_set("video_stream_encoder") { ":video_codec_constants", ":video_frame", "..:rtp_parameters", + "..:scoped_refptr", "../:fec_controller_api", "../:rtp_parameters", + "../adaptation:resource_adaptation_api", "../units:data_rate", "../video_codecs:video_codecs_api", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_source_set("video_frame_metadata") { + visibility = [ "*" ] + sources = [ + "video_frame_metadata.cc", + "video_frame_metadata.h", + ] + deps = [ + "..:array_view", + "../../modules/rtp_rtcp:rtp_video_header", + "../transport/rtp:dependency_descriptor", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/types:optional", ] } @@ -299,16 +320,21 @@ rtc_library("builtin_video_bitrate_allocator_factory") { "../../modules/video_coding:video_coding_utility", "../../modules/video_coding:webrtc_vp9_helpers", "../video_codecs:video_codecs_api", - "//third_party/abseil-cpp/absl/base:core_headers", ] + absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ] } if (rtc_include_tests) { rtc_library("video_unittests") { testonly = true - sources = [ "video_stream_decoder_create_unittest.cc" 
] + sources = [ + "video_frame_metadata_unittest.cc", + "video_stream_decoder_create_unittest.cc", + ] deps = [ + ":video_frame_metadata", ":video_stream_decoder_create", + "../../modules/rtp_rtcp:rtp_video_header", "../../test:test_support", "../task_queue:default_task_queue_factory", "../video_codecs:builtin_video_decoder_factory", diff --git a/api/video/OWNERS.webrtc b/api/video/OWNERS.webrtc index 315f85e7d0..e4a16c360a 100644 --- a/api/video/OWNERS.webrtc +++ b/api/video/OWNERS.webrtc @@ -1,3 +1,4 @@ +brandtr@webrtc.org magjed@webrtc.org nisse@webrtc.org diff --git a/api/video/encoded_image.h b/api/video/encoded_image.h index d89095f467..35c2584dfa 100644 --- a/api/video/encoded_image.h +++ b/api/video/encoded_image.h @@ -21,7 +21,6 @@ #include "api/scoped_refptr.h" #include "api/video/color_space.h" #include "api/video/video_codec_constants.h" -#include "api/video/video_codec_type.h" #include "api/video/video_content_type.h" #include "api/video/video_frame_type.h" #include "api/video/video_rotation.h" diff --git a/api/video/test/BUILD.gn b/api/video/test/BUILD.gn index 5f697a081c..5633371102 100644 --- a/api/video/test/BUILD.gn +++ b/api/video/test/BUILD.gn @@ -21,8 +21,8 @@ rtc_library("rtc_api_video_unittests") { "..:video_frame", "..:video_rtp_headers", "../../../test:test_support", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("mock_recordable_encoded_frame") { diff --git a/api/video/test/mock_recordable_encoded_frame.h b/api/video/test/mock_recordable_encoded_frame.h index 1788a493c6..2178932d2a 100644 --- a/api/video/test/mock_recordable_encoded_frame.h +++ b/api/video/test/mock_recordable_encoded_frame.h @@ -17,13 +17,18 @@ namespace webrtc { class MockRecordableEncodedFrame : public RecordableEncodedFrame { public: - MOCK_CONST_METHOD0(encoded_buffer, - rtc::scoped_refptr()); - MOCK_CONST_METHOD0(color_space, absl::optional()); - MOCK_CONST_METHOD0(codec, 
VideoCodecType()); - MOCK_CONST_METHOD0(is_key_frame, bool()); - MOCK_CONST_METHOD0(resolution, EncodedResolution()); - MOCK_CONST_METHOD0(render_time, Timestamp()); + MOCK_METHOD(rtc::scoped_refptr, + encoded_buffer, + (), + (const, override)); + MOCK_METHOD(absl::optional, + color_space, + (), + (const, override)); + MOCK_METHOD(VideoCodecType, codec, (), (const, override)); + MOCK_METHOD(bool, is_key_frame, (), (const, override)); + MOCK_METHOD(EncodedResolution, resolution, (), (const, override)); + MOCK_METHOD(Timestamp, render_time, (), (const, override)); }; } // namespace webrtc #endif // API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_ diff --git a/api/video/video_adaptation_counters.cc b/api/video/video_adaptation_counters.cc index 25e0bee1ff..df1769d5d4 100644 --- a/api/video/video_adaptation_counters.cc +++ b/api/video/video_adaptation_counters.cc @@ -10,6 +10,8 @@ #include "api/video/video_adaptation_counters.h" +#include "rtc_base/strings/string_builder.h" + namespace webrtc { bool VideoAdaptationCounters::operator==( @@ -30,4 +32,11 @@ VideoAdaptationCounters VideoAdaptationCounters::operator+( fps_adaptations + other.fps_adaptations); } +std::string VideoAdaptationCounters::ToString() const { + rtc::StringBuilder ss; + ss << "{ res=" << resolution_adaptations << " fps=" << fps_adaptations + << " }"; + return ss.Release(); +} + } // namespace webrtc diff --git a/api/video/video_adaptation_counters.h b/api/video/video_adaptation_counters.h index eff0baaa21..2dea902f2f 100644 --- a/api/video/video_adaptation_counters.h +++ b/api/video/video_adaptation_counters.h @@ -11,6 +11,8 @@ #ifndef API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_ #define API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_ +#include + #include "rtc_base/checks.h" namespace webrtc { @@ -33,6 +35,8 @@ struct VideoAdaptationCounters { VideoAdaptationCounters operator+(const VideoAdaptationCounters& other) const; + std::string ToString() const; + int resolution_adaptations; int fps_adaptations; }; diff --git 
a/api/video/video_frame_marking.h b/api/video/video_frame_marking.h deleted file mode 100644 index 2a34852f1d..0000000000 --- a/api/video/video_frame_marking.h +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_VIDEO_VIDEO_FRAME_MARKING_H_ -#define API_VIDEO_VIDEO_FRAME_MARKING_H_ - -namespace webrtc { - -struct FrameMarking { - bool start_of_frame; - bool end_of_frame; - bool independent_frame; - bool discardable_frame; - bool base_layer_sync; - uint8_t temporal_id; - uint8_t layer_id; - uint8_t tl0_pic_idx; -}; - -} // namespace webrtc - -#endif // API_VIDEO_VIDEO_FRAME_MARKING_H_ diff --git a/api/video/video_frame_metadata.cc b/api/video/video_frame_metadata.cc new file mode 100644 index 0000000000..df82875eb9 --- /dev/null +++ b/api/video/video_frame_metadata.cc @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video/video_frame_metadata.h" + +#include "modules/rtp_rtcp/source/rtp_video_header.h" + +namespace webrtc { + +VideoFrameMetadata::VideoFrameMetadata(const RTPVideoHeader& header) + : width_(header.width), height_(header.height) { + if (header.generic) { + frame_id_ = header.generic->frame_id; + spatial_index_ = header.generic->spatial_index; + temporal_index_ = header.generic->temporal_index; + frame_dependencies_ = header.generic->dependencies; + decode_target_indications_ = header.generic->decode_target_indications; + } +} + +} // namespace webrtc diff --git a/api/video/video_frame_metadata.h b/api/video/video_frame_metadata.h new file mode 100644 index 0000000000..2e9309841b --- /dev/null +++ b/api/video/video_frame_metadata.h @@ -0,0 +1,59 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_FRAME_METADATA_H_ +#define API_VIDEO_VIDEO_FRAME_METADATA_H_ + +#include + +#include "absl/container/inlined_vector.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/transport/rtp/dependency_descriptor.h" + +namespace webrtc { + +struct RTPVideoHeader; + +// A subset of metadata from the RTP video header, exposed in insertable streams +// API. 
+class VideoFrameMetadata { + public: + explicit VideoFrameMetadata(const RTPVideoHeader& header); + VideoFrameMetadata(const VideoFrameMetadata&) = default; + VideoFrameMetadata& operator=(const VideoFrameMetadata&) = default; + + uint16_t GetWidth() const { return width_; } + uint16_t GetHeight() const { return height_; } + absl::optional GetFrameId() const { return frame_id_; } + int GetSpatialIndex() const { return spatial_index_; } + int GetTemporalIndex() const { return temporal_index_; } + + rtc::ArrayView GetFrameDependencies() const { + return frame_dependencies_; + } + + rtc::ArrayView GetDecodeTargetIndications() + const { + return decode_target_indications_; + } + + private: + int16_t width_; + int16_t height_; + absl::optional frame_id_; + int spatial_index_ = 0; + int temporal_index_ = 0; + absl::InlinedVector frame_dependencies_; + absl::InlinedVector decode_target_indications_; +}; +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_FRAME_METADATA_H_ diff --git a/api/video/video_frame_metadata_unittest.cc b/api/video/video_frame_metadata_unittest.cc new file mode 100644 index 0000000000..7a808e1ea9 --- /dev/null +++ b/api/video/video_frame_metadata_unittest.cc @@ -0,0 +1,120 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video/video_frame_metadata.h" + +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::ElementsAre; +using ::testing::IsEmpty; + +TEST(VideoFrameMetadata, GetWidthReturnsCorrectValue) { + RTPVideoHeader video_header; + video_header.width = 1280u; + VideoFrameMetadata metadata(video_header); + EXPECT_EQ(metadata.GetWidth(), video_header.width); +} + +TEST(VideoFrameMetadata, GetHeightReturnsCorrectValue) { + RTPVideoHeader video_header; + video_header.height = 720u; + VideoFrameMetadata metadata(video_header); + EXPECT_EQ(metadata.GetHeight(), video_header.height); +} + +TEST(VideoFrameMetadata, GetFrameIdReturnsCorrectValue) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.frame_id = 10; + VideoFrameMetadata metadata(video_header); + EXPECT_EQ(metadata.GetFrameId().value(), 10); +} + +TEST(VideoFrameMetadata, HasNoFrameIdForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata(video_header); + ASSERT_FALSE(video_header.generic); + EXPECT_EQ(metadata.GetFrameId(), absl::nullopt); +} + +TEST(VideoFrameMetadata, GetSpatialIndexReturnsCorrectValue) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.spatial_index = 2; + VideoFrameMetadata metadata(video_header); + EXPECT_EQ(metadata.GetSpatialIndex(), 2); +} + +TEST(VideoFrameMetadata, SpatialIndexIsZeroForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata(video_header); + ASSERT_FALSE(video_header.generic); + EXPECT_EQ(metadata.GetSpatialIndex(), 0); +} + +TEST(VideoFrameMetadata, GetTemporalIndexReturnsCorrectValue) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.temporal_index = 3; + 
VideoFrameMetadata metadata(video_header); + EXPECT_EQ(metadata.GetTemporalIndex(), 3); +} + +TEST(VideoFrameMetadata, TemporalIndexIsZeroForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata(video_header); + ASSERT_FALSE(video_header.generic); + EXPECT_EQ(metadata.GetTemporalIndex(), 0); +} + +TEST(VideoFrameMetadata, GetFrameDependenciesReturnsCorrectValue) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.dependencies = {5, 6, 7}; + VideoFrameMetadata metadata(video_header); + EXPECT_THAT(metadata.GetFrameDependencies(), ElementsAre(5, 6, 7)); +} + +TEST(VideoFrameMetadata, FrameDependencyVectorIsEmptyForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata(video_header); + ASSERT_FALSE(video_header.generic); + EXPECT_THAT(metadata.GetFrameDependencies(), IsEmpty()); +} + +TEST(VideoFrameMetadata, GetDecodeTargetIndicationsReturnsCorrectValue) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.decode_target_indications = {DecodeTargetIndication::kSwitch}; + VideoFrameMetadata metadata(video_header); + EXPECT_THAT(metadata.GetDecodeTargetIndications(), + ElementsAre(DecodeTargetIndication::kSwitch)); +} + +TEST(VideoFrameMetadata, + DecodeTargetIndicationsVectorIsEmptyForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata(video_header); + ASSERT_FALSE(video_header.generic); + EXPECT_THAT(metadata.GetDecodeTargetIndications(), IsEmpty()); +} + +} // namespace +} // namespace webrtc diff --git a/api/video/video_stream_encoder_interface.h b/api/video/video_stream_encoder_interface.h index 8e1df0f858..d8dd8e1599 100644 --- a/api/video/video_stream_encoder_interface.h +++ b/api/video/video_stream_encoder_interface.h @@ -13,8 +13,10 @@ #include +#include "api/adaptation/resource.h" #include 
"api/fec_controller_override.h" #include "api/rtp_parameters.h" // For DegradationPreference. +#include "api/scoped_refptr.h" #include "api/units/data_rate.h" #include "api/video/video_bitrate_allocator.h" #include "api/video/video_sink_interface.h" @@ -49,6 +51,15 @@ class VideoStreamEncoderInterface : public rtc::VideoSinkInterface { int min_transmit_bitrate_bps) = 0; }; + // If the resource is overusing, the VideoStreamEncoder will try to reduce + // resolution or frame rate until no resource is overusing. + // TODO(https://crbug.com/webrtc/11565): When the ResourceAdaptationProcessor + // is moved to Call this method could be deleted altogether in favor of + // Call-level APIs only. + virtual void AddAdaptationResource(rtc::scoped_refptr resource) = 0; + virtual std::vector> + GetAdaptationResources() = 0; + // Sets the source that will provide video frames to the VideoStreamEncoder's // OnFrame method. |degradation_preference| control whether or not resolution // or frame rate may be reduced. 
The VideoStreamEncoder registers itself with diff --git a/api/video_codecs/BUILD.gn b/api/video_codecs/BUILD.gn index 21a5f6faa0..597478ba0a 100644 --- a/api/video_codecs/BUILD.gn +++ b/api/video_codecs/BUILD.gn @@ -49,6 +49,8 @@ rtc_library("video_codecs_api") { "../video:video_codec_constants", "../video:video_frame", "../video:video_rtp_headers", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/strings", @@ -100,8 +102,8 @@ rtc_library("builtin_video_encoder_factory") { "../../media:rtc_media_base", "../../rtc_base:checks", "../../rtc_base/system:rtc_export", - "//third_party/abseil-cpp/absl/strings", ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] } rtc_library("vp8_temporal_layers_factory") { @@ -148,6 +150,8 @@ rtc_library("rtc_software_fallback_wrappers") { "../video:video_bitrate_allocation", "../video:video_frame", "../video:video_rtp_headers", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers", "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", diff --git a/api/video_codecs/test/BUILD.gn b/api/video_codecs/test/BUILD.gn index 243b78267f..10b18b6e5b 100644 --- a/api/video_codecs/test/BUILD.gn +++ b/api/video_codecs/test/BUILD.gn @@ -40,5 +40,6 @@ if (rtc_include_tests) { "../../video:video_rtp_headers", "//testing/gtest", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } } diff --git a/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc b/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc index ee61893563..30d5287c94 100644 --- a/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc +++ b/api/video_codecs/test/video_decoder_software_fallback_wrapper_unittest.cc @@ -218,6 +218,68 @@ TEST_F(VideoDecoderSoftwareFallbackWrapperTest, fallback_wrapper_->Release(); } 
+TEST_F(VideoDecoderSoftwareFallbackWrapperTest, FallbacksOnTooManyErrors) { + VideoCodec codec = {}; + fallback_wrapper_->InitDecode(&codec, 2); + + fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_ERROR; + EncodedImage encoded_image; + encoded_image._frameType = VideoFrameType::kVideoFrameKey; + // Doesn't fallback from a single error. + fallback_wrapper_->Decode(encoded_image, false, -1); + EXPECT_STREQ("fake-decoder", fallback_wrapper_->ImplementationName()); + + // However, many frames with the same error, fallback should happen. + const int kNumFramesToEncode = 10; + for (int i = 0; i < kNumFramesToEncode; ++i) { + fallback_wrapper_->Decode(encoded_image, false, -1); + } + // Hard coded expected value since libvpx is the software implementation name + // for VP8. Change accordingly if the underlying implementation does. + EXPECT_STREQ("libvpx (fallback from: fake-decoder)", + fallback_wrapper_->ImplementationName()); + fallback_wrapper_->Release(); +} + +TEST_F(VideoDecoderSoftwareFallbackWrapperTest, + DoesNotFallbackOnDeltaFramesErrors) { + VideoCodec codec = {}; + fallback_wrapper_->InitDecode(&codec, 2); + + fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_ERROR; + EncodedImage encoded_image; + encoded_image._frameType = VideoFrameType::kVideoFrameDelta; + + // Many decoded frames with the same error + const int kNumFramesToEncode = 10; + for (int i = 0; i < kNumFramesToEncode; ++i) { + fallback_wrapper_->Decode(encoded_image, false, -1); + } + EXPECT_STREQ("fake-decoder", fallback_wrapper_->ImplementationName()); + + fallback_wrapper_->Release(); +} + +TEST_F(VideoDecoderSoftwareFallbackWrapperTest, + DoesNotFallbacksOnNonConsequtiveErrors) { + VideoCodec codec = {}; + fallback_wrapper_->InitDecode(&codec, 2); + + EncodedImage encoded_image; + encoded_image._frameType = VideoFrameType::kVideoFrameKey; + + const int kNumFramesToEncode = 10; + for (int i = 0; i < kNumFramesToEncode; ++i) { + // Interleaved errors and successful decodes. 
+ fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_ERROR; + fallback_wrapper_->Decode(encoded_image, false, -1); + fake_decoder_->decode_return_code_ = WEBRTC_VIDEO_CODEC_OK; + fallback_wrapper_->Decode(encoded_image, false, -1); + } + EXPECT_STREQ("fake-decoder", fallback_wrapper_->ImplementationName()); + fallback_wrapper_->Release(); +} + class ForcedSoftwareDecoderFallbackTest : public VideoDecoderSoftwareFallbackWrapperTest { public: diff --git a/api/video_codecs/video_codec.h b/api/video_codecs/video_codec.h index 330bbbce19..c07fae9b8b 100644 --- a/api/video_codecs/video_codec.h +++ b/api/video_codecs/video_codec.h @@ -19,7 +19,7 @@ #include "absl/types/optional.h" #include "api/video/video_bitrate_allocation.h" #include "api/video/video_codec_type.h" -#include "common_types.h" // NOLINT(build/include) +#include "common_types.h" // NOLINT(build/include_directory) #include "rtc_base/system/rtc_export.h" namespace webrtc { diff --git a/api/video_codecs/video_decoder_software_fallback_wrapper.cc b/api/video_codecs/video_decoder_software_fallback_wrapper.cc index f78d9b885f..128087f207 100644 --- a/api/video_codecs/video_decoder_software_fallback_wrapper.cc +++ b/api/video_codecs/video_decoder_software_fallback_wrapper.cc @@ -30,6 +30,8 @@ namespace webrtc { namespace { +constexpr size_t kMaxConsequtiveHwErrors = 4; + class VideoDecoderSoftwareFallbackWrapper final : public VideoDecoder { public: VideoDecoderSoftwareFallbackWrapper( @@ -74,6 +76,7 @@ class VideoDecoderSoftwareFallbackWrapper final : public VideoDecoder { const std::string fallback_implementation_name_; DecodedImageCallback* callback_; int32_t hw_decoded_frames_since_last_fallback_; + size_t hw_consequtive_generic_errors_; }; VideoDecoderSoftwareFallbackWrapper::VideoDecoderSoftwareFallbackWrapper( @@ -86,7 +89,8 @@ VideoDecoderSoftwareFallbackWrapper::VideoDecoderSoftwareFallbackWrapper( std::string(fallback_decoder_->ImplementationName()) + " (fallback from: " + 
hw_decoder_->ImplementationName() + ")"), callback_(nullptr), - hw_decoded_frames_since_last_fallback_(0) {} + hw_decoded_frames_since_last_fallback_(0), + hw_consequtive_generic_errors_(0) {} VideoDecoderSoftwareFallbackWrapper::~VideoDecoderSoftwareFallbackWrapper() = default; @@ -196,14 +200,24 @@ int32_t VideoDecoderSoftwareFallbackWrapper::Decode( int32_t ret = WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE; ret = hw_decoder_->Decode(input_image, missing_frames, render_time_ms); if (ret != WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE) { - if (ret == WEBRTC_VIDEO_CODEC_OK) { + if (ret != WEBRTC_VIDEO_CODEC_ERROR) { ++hw_decoded_frames_since_last_fallback_; + hw_consequtive_generic_errors_ = 0; + return ret; + } + if (input_image._frameType == VideoFrameType::kVideoFrameKey) { + // Only count errors on key-frames, since generic errors can happen + // with hw decoder due to many arbitrary reasons. + // However, requesting a key-frame is supposed to fix the issue. + ++hw_consequtive_generic_errors_; + } + if (hw_consequtive_generic_errors_ < kMaxConsequtiveHwErrors) { + return ret; } - return ret; } // HW decoder returned WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE or - // initialization failed, fallback to software. + // too many generic errors on key-frames encountered. if (!InitFallbackDecoder()) { return ret; } diff --git a/api/video_codecs/video_encoder_factory.h b/api/video_codecs/video_encoder_factory.h index 630b7aa70c..c396090ea6 100644 --- a/api/video_codecs/video_encoder_factory.h +++ b/api/video_codecs/video_encoder_factory.h @@ -30,13 +30,13 @@ class VideoEncoderFactory { struct CodecInfo { // |is_hardware_accelerated| is true if the encoders created by this factory // of the given codec will use hardware support. 
- bool is_hardware_accelerated; + bool is_hardware_accelerated = false; // |has_internal_source| is true if encoders created by this factory of the // given codec will use internal camera sources, meaning that they don't // require/expect frames to be delivered via webrtc::VideoEncoder::Encode. // This flag is used as the internal_source parameter to // webrtc::ViEExternalCodec::RegisterExternalSendCodec. - bool has_internal_source; + bool has_internal_source = false; }; // An injectable class that is continuously updated with encoding conditions diff --git a/api/voip/BUILD.gn b/api/voip/BUILD.gn index 2c5f71c988..6f92ed67f4 100644 --- a/api/voip/BUILD.gn +++ b/api/voip/BUILD.gn @@ -19,8 +19,8 @@ rtc_source_set("voip_api") { deps = [ "..:array_view", "../audio_codecs:audio_codecs_api", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("voip_engine_factory") { diff --git a/audio/BUILD.gn b/audio/BUILD.gn index 725128bb1a..78f6affe84 100644 --- a/audio/BUILD.gn +++ b/audio/BUILD.gn @@ -71,6 +71,7 @@ rtc_library("audio") { "../modules/audio_coding:audio_coding_module_typedefs", "../modules/audio_coding:audio_encoder_cng", "../modules/audio_coding:audio_network_adaptor_config", + "../modules/audio_coding:red", "../modules/audio_device", "../modules/audio_processing", "../modules/audio_processing:api", @@ -89,12 +90,15 @@ rtc_library("audio") { "../rtc_base:rtc_task_queue", "../rtc_base:safe_minmax", "../rtc_base/experiments:field_trial_parser", + "../rtc_base/synchronization:mutex", "../rtc_base/synchronization:sequence_checker", "../rtc_base/task_utils:to_queued_task", "../system_wrappers", "../system_wrappers:field_trial", "../system_wrappers:metrics", "utility:audio_frame_operations", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/types:optional", ] @@ -202,6 +206,7 @@ if (rtc_include_tests) { "../api:network_emulation_manager_api", 
"../api:peer_connection_quality_test_fixture_api", "../api:simulated_network_api", + "../api:time_controller", "../call:simulated_network", "../common_audio", "../system_wrappers", diff --git a/audio/audio_level.cc b/audio/audio_level.cc index 06702b4c0d..7874b73f1c 100644 --- a/audio/audio_level.cc +++ b/audio/audio_level.cc @@ -22,7 +22,7 @@ AudioLevel::AudioLevel() AudioLevel::~AudioLevel() {} void AudioLevel::Reset() { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); abs_max_ = 0; count_ = 0; current_level_full_range_ = 0; @@ -31,24 +31,24 @@ void AudioLevel::Reset() { } int16_t AudioLevel::LevelFullRange() const { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); return current_level_full_range_; } void AudioLevel::ResetLevelFullRange() { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); abs_max_ = 0; count_ = 0; current_level_full_range_ = 0; } double AudioLevel::TotalEnergy() const { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); return total_energy_; } double AudioLevel::TotalDuration() const { - rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); return total_duration_; } @@ -63,7 +63,7 @@ void AudioLevel::ComputeLevel(const AudioFrame& audioFrame, double duration) { // Protect member access using a lock since this method is called on a // dedicated audio thread in the RecordedDataIsAvailable() callback. 
- rtc::CritScope cs(&crit_sect_); + MutexLock lock(&mutex_); if (abs_value > abs_max_) abs_max_ = abs_value; diff --git a/audio/audio_level.h b/audio/audio_level.h index 430edb1703..acd1231fe2 100644 --- a/audio/audio_level.h +++ b/audio/audio_level.h @@ -11,7 +11,7 @@ #ifndef AUDIO_AUDIO_LEVEL_H_ #define AUDIO_AUDIO_LEVEL_H_ -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -59,14 +59,14 @@ class AudioLevel { private: enum { kUpdateFrequency = 10 }; - rtc::CriticalSection crit_sect_; + mutable Mutex mutex_; - int16_t abs_max_ RTC_GUARDED_BY(crit_sect_); - int16_t count_ RTC_GUARDED_BY(crit_sect_); - int16_t current_level_full_range_ RTC_GUARDED_BY(crit_sect_); + int16_t abs_max_ RTC_GUARDED_BY(mutex_); + int16_t count_ RTC_GUARDED_BY(mutex_); + int16_t current_level_full_range_ RTC_GUARDED_BY(mutex_); - double total_energy_ RTC_GUARDED_BY(crit_sect_) = 0.0; - double total_duration_ RTC_GUARDED_BY(crit_sect_) = 0.0; + double total_energy_ RTC_GUARDED_BY(mutex_) = 0.0; + double total_duration_ RTC_GUARDED_BY(mutex_) = 0.0; }; } // namespace voe diff --git a/audio/audio_send_stream.cc b/audio/audio_send_stream.cc index 8730c45258..1856902d5e 100644 --- a/audio/audio_send_stream.cc +++ b/audio/audio_send_stream.cc @@ -31,6 +31,7 @@ #include "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h" #include "logging/rtc_event_log/rtc_stream_config.h" #include "modules/audio_coding/codecs/cng/audio_encoder_cng.h" +#include "modules/audio_coding/codecs/red/audio_encoder_copy_red.h" #include "modules/audio_processing/include/audio_processing.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "rtc_base/checks.h" @@ -115,18 +116,20 @@ AudioSendStream::AudioSendStream( bitrate_allocator, event_log, suspended_rtp_state, - voe::CreateChannelSend(clock, - task_queue_factory, - module_process_thread, - config.send_transport, - rtcp_rtt_stats, - 
event_log, - config.frame_encryptor, - config.crypto_options, - config.rtp.extmap_allow_mixed, - config.rtcp_report_interval_ms, - config.rtp.ssrc, - config.frame_transformer)) {} + voe::CreateChannelSend( + clock, + task_queue_factory, + module_process_thread, + config.send_transport, + rtcp_rtt_stats, + event_log, + config.frame_encryptor, + config.crypto_options, + config.rtp.extmap_allow_mixed, + config.rtcp_report_interval_ms, + config.rtp.ssrc, + config.frame_transformer, + rtp_transport->transport_feedback_observer())) {} AudioSendStream::AudioSendStream( Clock* clock, @@ -344,7 +347,7 @@ void AudioSendStream::ConfigureStream( // Set currently known overhead (used in ANA, opus only). { - rtc::CritScope cs(&overhead_per_packet_lock_); + MutexLock lock(&overhead_per_packet_lock_); UpdateOverheadForEncoder(); } @@ -419,7 +422,7 @@ void AudioSendStream::SendAudioData(std::unique_ptr audio_frame) { // TODO(https://crbug.com/webrtc/10771): All "media-source" related stats // should move from send-streams to the local audio sources or tracks; a // send-stream should not be required to read the microphone audio levels. - rtc::CritScope cs(&audio_level_lock_); + MutexLock lock(&audio_level_lock_); audio_level_.ComputeLevel(*audio_frame, duration); } channel_send_->ProcessAndEncodeAudio(std::move(audio_frame)); @@ -485,7 +488,7 @@ webrtc::AudioSendStream::Stats AudioSendStream::GetStats( } { - rtc::CritScope cs(&audio_level_lock_); + MutexLock lock(&audio_level_lock_); stats.audio_level = audio_level_.LevelFullRange(); stats.total_input_energy = audio_level_.TotalEnergy(); stats.total_input_duration = audio_level_.TotalDuration(); @@ -505,15 +508,12 @@ webrtc::AudioSendStream::Stats AudioSendStream::GetStats( } void AudioSendStream::DeliverRtcp(const uint8_t* packet, size_t length) { - // TODO(solenberg): Tests call this function on a network thread, libjingle - // calls on the worker thread. We should move towards always using a network - // thread. 
Then this check can be enabled. - // RTC_DCHECK(!worker_thread_checker_.IsCurrent()); + RTC_DCHECK_RUN_ON(&worker_thread_checker_); channel_send_->ReceivedRTCPPacket(packet, length); worker_queue_->PostTask([&]() { // Poll if overhead has changed, which it can do if ack triggers us to stop // sending mid/rid. - rtc::CritScope cs(&overhead_per_packet_lock_); + MutexLock lock(&overhead_per_packet_lock_); UpdateOverheadForEncoder(); }); } @@ -538,16 +538,18 @@ uint32_t AudioSendStream::OnBitrateUpdated(BitrateAllocationUpdate update) { void AudioSendStream::SetTransportOverhead( int transport_overhead_per_packet_bytes) { RTC_DCHECK(worker_thread_checker_.IsCurrent()); - rtc::CritScope cs(&overhead_per_packet_lock_); + MutexLock lock(&overhead_per_packet_lock_); transport_overhead_per_packet_bytes_ = transport_overhead_per_packet_bytes; UpdateOverheadForEncoder(); } void AudioSendStream::UpdateOverheadForEncoder() { - const size_t overhead_per_packet_bytes = GetPerPacketOverheadBytes(); - if (overhead_per_packet_bytes == 0) { - return; // Overhead is not known yet, do not tell the encoder. + size_t overhead_per_packet_bytes = GetPerPacketOverheadBytes(); + if (overhead_per_packet_ == overhead_per_packet_bytes) { + return; } + overhead_per_packet_ = overhead_per_packet_bytes; + channel_send_->CallEncoder([&](AudioEncoder* encoder) { encoder->OnReceivedOverhead(overhead_per_packet_bytes); }); @@ -568,7 +570,7 @@ void AudioSendStream::UpdateOverheadForEncoder() { } size_t AudioSendStream::TestOnlyGetPerPacketOverheadBytes() const { - rtc::CritScope cs(&overhead_per_packet_lock_); + MutexLock lock(&overhead_per_packet_lock_); return GetPerPacketOverheadBytes(); } @@ -644,7 +646,7 @@ bool AudioSendStream::SetupSendCodec(const Config& new_config) { } } - // Wrap the encoder in a an AudioEncoderCNG, if VAD is enabled. + // Wrap the encoder in an AudioEncoderCNG, if VAD is enabled. 
if (spec.cng_payload_type) { AudioEncoderCngConfig cng_config; cng_config.num_channels = encoder->NumChannels(); @@ -657,10 +659,18 @@ bool AudioSendStream::SetupSendCodec(const Config& new_config) { new_config.send_codec_spec->format.clockrate_hz); } + // Wrap the encoder in a RED encoder, if RED is enabled. + if (spec.red_payload_type) { + AudioEncoderCopyRed::Config red_config; + red_config.payload_type = *spec.red_payload_type; + red_config.speech_encoder = std::move(encoder); + encoder = std::make_unique(std::move(red_config)); + } + // Set currently known overhead (used in ANA, opus only). // If overhead changes later, it will be updated in UpdateOverheadForEncoder. { - rtc::CritScope cs(&overhead_per_packet_lock_); + MutexLock lock(&overhead_per_packet_lock_); size_t overhead = GetPerPacketOverheadBytes(); if (overhead > 0) { encoder->OnReceivedOverhead(overhead); diff --git a/audio/audio_send_stream.h b/audio/audio_send_stream.h index 92e9a7fb16..7bc3183123 100644 --- a/audio/audio_send_stream.h +++ b/audio/audio_send_stream.h @@ -20,10 +20,11 @@ #include "call/audio_send_stream.h" #include "call/audio_state.h" #include "call/bitrate_allocator.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/struct_parameters_parser.h" #include "rtc_base/race_checker.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" #include "rtc_base/thread_checker.h" @@ -166,16 +167,16 @@ class AudioSendStream final : public webrtc::AudioSendStream, int encoder_sample_rate_hz_ = 0; size_t encoder_num_channels_ = 0; bool sending_ = false; - rtc::CriticalSection audio_level_lock_; + mutable Mutex audio_level_lock_; // Keeps track of audio level, total audio energy and total samples duration. 
// https://w3c.github.io/webrtc-stats/#dom-rtcaudiohandlerstats-totalaudioenergy - webrtc::voe::AudioLevel audio_level_; + webrtc::voe::AudioLevel audio_level_ RTC_GUARDED_BY(audio_level_lock_); BitrateAllocatorInterface* const bitrate_allocator_ RTC_GUARDED_BY(worker_queue_); RtpTransportControllerSendInterface* const rtp_transport_; - RtpRtcp* const rtp_rtcp_module_; + RtpRtcpInterface* const rtp_rtcp_module_; absl::optional const suspended_rtp_state_; // RFC 5285: Each distinct extension MUST have a unique ID. The value 0 is @@ -194,7 +195,8 @@ class AudioSendStream final : public webrtc::AudioSendStream, const std::vector& extensions); static int TransportSeqNumId(const Config& config); - rtc::CriticalSection overhead_per_packet_lock_; + mutable Mutex overhead_per_packet_lock_; + size_t overhead_per_packet_ RTC_GUARDED_BY(overhead_per_packet_lock_) = 0; // Current transport overhead (ICE, TURN, etc.) size_t transport_overhead_per_packet_bytes_ diff --git a/audio/audio_send_stream_unittest.cc b/audio/audio_send_stream_unittest.cc index 334fdf50f7..d094198721 100644 --- a/audio/audio_send_stream_unittest.cc +++ b/audio/audio_send_stream_unittest.cc @@ -89,7 +89,10 @@ const DataRate kMaxOverheadRate = kOverheadPerPacket / kMinFrameLength; class MockLimitObserver : public BitrateAllocator::LimitObserver { public: - MOCK_METHOD1(OnAllocationLimitsChanged, void(BitrateAllocationLimits)); + MOCK_METHOD(void, + OnAllocationLimitsChanged, + (BitrateAllocationLimits), + (override)); }; std::unique_ptr SetupAudioEncoderMock( @@ -200,7 +203,7 @@ struct ConfigHelper { return *static_cast( stream_config_.encoder_factory.get()); } - MockRtpRtcp* rtp_rtcp() { return &rtp_rtcp_; } + MockRtpRtcpInterface* rtp_rtcp() { return &rtp_rtcp_; } MockChannelSend* channel_send() { return channel_send_; } RtpTransportControllerSendInterface* transport() { return &rtp_transport_; } @@ -247,12 +250,12 @@ struct ConfigHelper { void SetupMockForSetupSendCodec(bool expect_set_encoder_call) { 
if (expect_set_encoder_call) { - EXPECT_CALL(*channel_send_, SetEncoderForMock(_, _)) - .WillOnce(Invoke( - [this](int payload_type, std::unique_ptr* encoder) { - this->audio_encoder_ = std::move(*encoder); + EXPECT_CALL(*channel_send_, SetEncoder) + .WillOnce( + [this](int payload_type, std::unique_ptr encoder) { + this->audio_encoder_ = std::move(encoder); return true; - })); + }); } } @@ -329,7 +332,7 @@ struct ConfigHelper { ::testing::StrictMock bandwidth_observer_; ::testing::NiceMock event_log_; ::testing::NiceMock rtp_transport_; - ::testing::NiceMock rtp_rtcp_; + ::testing::NiceMock rtp_rtcp_; ::testing::NiceMock limit_observer_; BitrateAllocator bitrate_allocator_; // |worker_queue| is defined last to ensure all pending tasks are cancelled @@ -368,6 +371,7 @@ TEST(AudioSendStreamTest, ConfigToString) { config.send_codec_spec->nack_enabled = true; config.send_codec_spec->transport_cc_enabled = false; config.send_codec_spec->cng_payload_type = 42; + config.send_codec_spec->red_payload_type = 43; config.encoder_factory = MockAudioEncoderFactory::CreateUnusedFactory(); config.rtp.extmap_allow_mixed = true; config.rtp.extensions.push_back( @@ -380,7 +384,7 @@ TEST(AudioSendStreamTest, ConfigToString) { "send_transport: null, " "min_bitrate_bps: 12000, max_bitrate_bps: 34000, " "send_codec_spec: {nack_enabled: true, transport_cc_enabled: false, " - "cng_payload_type: 42, payload_type: 103, " + "cng_payload_type: 42, red_payload_type: 43, payload_type: 103, " "format: {name: isac, clockrate_hz: 16000, num_channels: 1, " "parameters: {}}}}", config.ToString()); @@ -473,7 +477,7 @@ TEST(AudioSendStreamTest, GetStatsAudioLevel) { ConfigHelper helper(false, true, use_null_audio_processing); auto send_stream = helper.CreateAudioSendStream(); helper.SetupMockForGetStats(use_null_audio_processing); - EXPECT_CALL(*helper.channel_send(), ProcessAndEncodeAudioForMock(_)) + EXPECT_CALL(*helper.channel_send(), ProcessAndEncodeAudio) .Times(AnyNumber()); constexpr int 
kSampleRateHz = 48000; @@ -558,15 +562,13 @@ TEST(AudioSendStreamTest, SendCodecCanApplyVad) { helper.config().send_codec_spec = AudioSendStream::Config::SendCodecSpec(9, kG722Format); helper.config().send_codec_spec->cng_payload_type = 105; - using ::testing::Invoke; std::unique_ptr stolen_encoder; - EXPECT_CALL(*helper.channel_send(), SetEncoderForMock(_, _)) - .WillOnce( - Invoke([&stolen_encoder](int payload_type, - std::unique_ptr* encoder) { - stolen_encoder = std::move(*encoder); - return true; - })); + EXPECT_CALL(*helper.channel_send(), SetEncoder) + .WillOnce([&stolen_encoder](int payload_type, + std::unique_ptr encoder) { + stolen_encoder = std::move(encoder); + return true; + }); EXPECT_CALL(*helper.channel_send(), RegisterCngPayloadType(105, 8000)); auto send_stream = helper.CreateAudioSendStream(); @@ -748,8 +750,7 @@ TEST(AudioSendStreamTest, DontRecreateEncoder) { // test to be correct, it's instead set-up manually here. Otherwise a simple // change to ConfigHelper (say to WillRepeatedly) would silently make this // test useless. - EXPECT_CALL(*helper.channel_send(), SetEncoderForMock(_, _)) - .WillOnce(Return()); + EXPECT_CALL(*helper.channel_send(), SetEncoder).WillOnce(Return()); EXPECT_CALL(*helper.channel_send(), RegisterCngPayloadType(105, 8000)); @@ -794,7 +795,7 @@ TEST(AudioSendStreamTest, OnTransportOverheadChanged) { auto new_config = helper.config(); // CallEncoder will be called on overhead change. 
- EXPECT_CALL(*helper.channel_send(), CallEncoder(::testing::_)).Times(1); + EXPECT_CALL(*helper.channel_send(), CallEncoder); const size_t transport_overhead_per_packet_bytes = 333; send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes); @@ -804,6 +805,27 @@ TEST(AudioSendStreamTest, OnTransportOverheadChanged) { } } +TEST(AudioSendStreamTest, DoesntCallEncoderWhenOverheadUnchanged) { + for (bool use_null_audio_processing : {false, true}) { + ConfigHelper helper(false, true, use_null_audio_processing); + auto send_stream = helper.CreateAudioSendStream(); + auto new_config = helper.config(); + + // CallEncoder will be called on overhead change. + EXPECT_CALL(*helper.channel_send(), CallEncoder); + const size_t transport_overhead_per_packet_bytes = 333; + send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes); + + // Set the same overhead again, CallEncoder should not be called again. + EXPECT_CALL(*helper.channel_send(), CallEncoder).Times(0); + send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes); + + // New overhead, call CallEncoder again + EXPECT_CALL(*helper.channel_send(), CallEncoder); + send_stream->SetTransportOverhead(transport_overhead_per_packet_bytes + 1); + } +} + TEST(AudioSendStreamTest, AudioOverheadChanged) { for (bool use_null_audio_processing : {false, true}) { ConfigHelper helper(false, true, use_null_audio_processing); diff --git a/audio/audio_state.h b/audio/audio_state.h index f696d5a8fe..70c7208320 100644 --- a/audio/audio_state.h +++ b/audio/audio_state.h @@ -19,7 +19,6 @@ #include "audio/null_audio_poller.h" #include "call/audio_state.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" #include "rtc_base/ref_count.h" #include "rtc_base/thread_checker.h" diff --git a/audio/audio_state_unittest.cc b/audio/audio_state_unittest.cc index 76e08c549c..2bbe0fb0b7 100644 --- a/audio/audio_state_unittest.cc +++ b/audio/audio_state_unittest.cc @@ -60,8 +60,10 @@ class 
FakeAudioSource : public AudioMixer::Source { int PreferredSampleRate() const /*override*/ { return kSampleRate; } - MOCK_METHOD2(GetAudioFrameWithInfo, - AudioFrameInfo(int sample_rate_hz, AudioFrame* audio_frame)); + MOCK_METHOD(AudioFrameInfo, + GetAudioFrameWithInfo, + (int sample_rate_hz, AudioFrame*), + (override)); }; std::vector Create10msTestData(int sample_rate_hz, diff --git a/audio/audio_transport_impl.cc b/audio/audio_transport_impl.cc index 7648fb948f..11b37ffcf1 100644 --- a/audio/audio_transport_impl.cc +++ b/audio/audio_transport_impl.cc @@ -118,7 +118,7 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable( size_t send_num_channels = 0; bool swap_stereo_channels = false; { - rtc::CritScope lock(&capture_lock_); + MutexLock lock(&capture_lock_); send_sample_rate_hz = send_sample_rate_hz_; send_num_channels = send_num_channels_; swap_stereo_channels = swap_stereo_channels_; @@ -149,7 +149,7 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable( // Copy frame and push to each sending stream. The copy is required since an // encoding task will be posted internally to each stream. 
{ - rtc::CritScope lock(&capture_lock_); + MutexLock lock(&capture_lock_); typing_noise_detected_ = typing_detected; RTC_DCHECK_GT(audio_frame->samples_per_channel_, 0); @@ -237,19 +237,19 @@ void AudioTransportImpl::PullRenderData(int bits_per_sample, void AudioTransportImpl::UpdateAudioSenders(std::vector senders, int send_sample_rate_hz, size_t send_num_channels) { - rtc::CritScope lock(&capture_lock_); + MutexLock lock(&capture_lock_); audio_senders_ = std::move(senders); send_sample_rate_hz_ = send_sample_rate_hz; send_num_channels_ = send_num_channels; } void AudioTransportImpl::SetStereoChannelSwapping(bool enable) { - rtc::CritScope lock(&capture_lock_); + MutexLock lock(&capture_lock_); swap_stereo_channels_ = enable; } bool AudioTransportImpl::typing_noise_detected() const { - rtc::CritScope lock(&capture_lock_); + MutexLock lock(&capture_lock_); return typing_noise_detected_; } } // namespace webrtc diff --git a/audio/audio_transport_impl.h b/audio/audio_transport_impl.h index 2d9b4cf3a1..1643a29970 100644 --- a/audio/audio_transport_impl.h +++ b/audio/audio_transport_impl.h @@ -20,7 +20,7 @@ #include "modules/audio_processing/include/audio_processing.h" #include "modules/audio_processing/typing_detection.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -71,7 +71,7 @@ class AudioTransportImpl : public AudioTransport { AudioProcessing* audio_processing_ = nullptr; // Capture side. 
- rtc::CriticalSection capture_lock_; + mutable Mutex capture_lock_; std::vector audio_senders_ RTC_GUARDED_BY(capture_lock_); int send_sample_rate_hz_ RTC_GUARDED_BY(capture_lock_) = 8000; size_t send_num_channels_ RTC_GUARDED_BY(capture_lock_) = 1; diff --git a/audio/channel_receive.cc b/audio/channel_receive.cc index 66b4bb11f5..9cbaabbbb0 100644 --- a/audio/channel_receive.cc +++ b/audio/channel_receive.cc @@ -33,19 +33,19 @@ #include "modules/pacing/packet_router.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/source/absolute_capture_time_receiver.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" -#include "rtc_base/critical_section.h" #include "rtc_base/format_macros.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_minmax.h" #include "rtc_base/race_checker.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_checker.h" #include "rtc_base/time_utils.h" #include "system_wrappers/include/metrics.h" @@ -188,7 +188,7 @@ class ChannelReceive : public ChannelReceiveInterface { rtc::scoped_refptr frame_transformer); bool Playing() const { - rtc::CritScope lock(&playing_lock_); + MutexLock lock(&playing_lock_); return playing_; } @@ -204,10 +204,10 @@ class ChannelReceive : public ChannelReceiveInterface { // audio thread to another, but access is still sequential. 
rtc::RaceChecker audio_thread_race_checker_; rtc::RaceChecker video_capture_thread_race_checker_; - rtc::CriticalSection _callbackCritSect; - rtc::CriticalSection volume_settings_critsect_; + Mutex callback_mutex_; + Mutex volume_settings_mutex_; - rtc::CriticalSection playing_lock_; + mutable Mutex playing_lock_; bool playing_ RTC_GUARDED_BY(&playing_lock_) = false; RtcEventLog* const event_log_; @@ -216,12 +216,12 @@ class ChannelReceive : public ChannelReceiveInterface { std::map payload_type_frequencies_; std::unique_ptr rtp_receive_statistics_; - std::unique_ptr _rtpRtcpModule; + std::unique_ptr rtp_rtcp_; const uint32_t remote_ssrc_; // Info for GetSyncInfo is updated on network or worker thread, and queried on // the worker thread. - rtc::CriticalSection sync_info_lock_; + mutable Mutex sync_info_lock_; absl::optional last_received_rtp_timestamp_ RTC_GUARDED_BY(&sync_info_lock_); absl::optional last_received_rtp_system_time_ms_ @@ -237,7 +237,7 @@ class ChannelReceive : public ChannelReceiveInterface { // Timestamp of the audio pulled from NetEq. absl::optional jitter_buffer_playout_timestamp_; - rtc::CriticalSection video_sync_lock_; + mutable Mutex video_sync_lock_; uint32_t playout_timestamp_rtp_ RTC_GUARDED_BY(video_sync_lock_); absl::optional playout_timestamp_rtp_time_ms_ RTC_GUARDED_BY(video_sync_lock_); @@ -247,7 +247,7 @@ class ChannelReceive : public ChannelReceiveInterface { absl::optional playout_timestamp_ntp_time_ms_ RTC_GUARDED_BY(video_sync_lock_); - rtc::CriticalSection ts_stats_lock_; + mutable Mutex ts_stats_lock_; std::unique_ptr rtp_ts_wraparound_handler_; // The rtp timestamp of the first played out audio frame. @@ -259,10 +259,10 @@ class ChannelReceive : public ChannelReceiveInterface { // uses ProcessThread* _moduleProcessThreadPtr; AudioDeviceModule* _audioDeviceModulePtr; - float _outputGain RTC_GUARDED_BY(volume_settings_critsect_); + float _outputGain RTC_GUARDED_BY(volume_settings_mutex_); // An associated send channel. 
- rtc::CriticalSection assoc_send_channel_lock_; + mutable Mutex assoc_send_channel_lock_; const ChannelSendInterface* associated_send_channel_ RTC_GUARDED_BY(assoc_send_channel_lock_); @@ -297,7 +297,7 @@ void ChannelReceive::OnReceivedPayloadData( } int64_t round_trip_time = 0; - _rtpRtcpModule->RTT(remote_ssrc_, &round_trip_time, NULL, NULL, NULL); + rtp_rtcp_->RTT(remote_ssrc_, &round_trip_time, NULL, NULL, NULL); std::vector nack_list = acm_receiver_.GetNackList(round_trip_time); if (!nack_list.empty()) { @@ -359,7 +359,7 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( // scaling/panning, as that applies to the mix operation. // External recipients of the audio (e.g. via AudioTrack), will do their // own mixing/dynamic processing. - rtc::CritScope cs(&_callbackCritSect); + MutexLock lock(&callback_mutex_); if (audio_sink_) { AudioSinkInterface::Data data( audio_frame->data(), audio_frame->samples_per_channel_, @@ -371,7 +371,7 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( float output_gain = 1.0f; { - rtc::CritScope cs(&volume_settings_critsect_); + MutexLock lock(&volume_settings_mutex_); output_gain = _outputGain; } @@ -403,7 +403,7 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( (GetRtpTimestampRateHz() / 1000); { - rtc::CritScope lock(&ts_stats_lock_); + MutexLock lock(&ts_stats_lock_); // Compute ntp time. 
audio_frame->ntp_time_ms_ = ntp_estimator_.Estimate(audio_frame->timestamp_); @@ -421,7 +421,7 @@ AudioMixer::Source::AudioFrameInfo ChannelReceive::GetAudioFrameWithInfo( RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.TargetJitterBufferDelayMs", acm_receiver_.TargetDelayMs()); const int jitter_buffer_delay = acm_receiver_.FilteredCurrentDelayMs(); - rtc::CritScope lock(&video_sync_lock_); + MutexLock lock(&video_sync_lock_); RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.ReceiverDelayEstimateMs", jitter_buffer_delay + playout_delay_ms_); RTC_HISTOGRAM_COUNTS_1000("WebRTC.Audio.ReceiverJitterBufferDelayMs", @@ -495,7 +495,7 @@ ChannelReceive::ChannelReceive( _outputAudioLevel.ResetLevelFullRange(); rtp_receive_statistics_->EnableRetransmitDetection(remote_ssrc_, true); - RtpRtcp::Configuration configuration; + RtpRtcpInterface::Configuration configuration; configuration.clock = clock; configuration.audio = true; configuration.receiver_only = true; @@ -507,14 +507,14 @@ ChannelReceive::ChannelReceive( if (frame_transformer) InitFrameTransformerDelegate(std::move(frame_transformer)); - _rtpRtcpModule = RtpRtcp::Create(configuration); - _rtpRtcpModule->SetSendingMediaStatus(false); - _rtpRtcpModule->SetRemoteSSRC(remote_ssrc_); + rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(configuration); + rtp_rtcp_->SetSendingMediaStatus(false); + rtp_rtcp_->SetRemoteSSRC(remote_ssrc_); - _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get(), RTC_FROM_HERE); + _moduleProcessThreadPtr->RegisterModule(rtp_rtcp_.get(), RTC_FROM_HERE); // Ensure that RTCP is enabled for the created channel. 
- _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound); + rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound); } ChannelReceive::~ChannelReceive() { @@ -527,24 +527,24 @@ ChannelReceive::~ChannelReceive() { StopPlayout(); if (_moduleProcessThreadPtr) - _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()); + _moduleProcessThreadPtr->DeRegisterModule(rtp_rtcp_.get()); } void ChannelReceive::SetSink(AudioSinkInterface* sink) { RTC_DCHECK(worker_thread_checker_.IsCurrent()); - rtc::CritScope cs(&_callbackCritSect); + MutexLock lock(&callback_mutex_); audio_sink_ = sink; } void ChannelReceive::StartPlayout() { RTC_DCHECK(worker_thread_checker_.IsCurrent()); - rtc::CritScope lock(&playing_lock_); + MutexLock lock(&playing_lock_); playing_ = true; } void ChannelReceive::StopPlayout() { RTC_DCHECK(worker_thread_checker_.IsCurrent()); - rtc::CritScope lock(&playing_lock_); + MutexLock lock(&playing_lock_); playing_ = false; _outputAudioLevel.ResetLevelFullRange(); } @@ -570,7 +570,7 @@ void ChannelReceive::OnRtpPacket(const RtpPacketReceived& packet) { int64_t now_ms = rtc::TimeMillis(); { - rtc::CritScope cs(&sync_info_lock_); + MutexLock lock(&sync_info_lock_); last_received_rtp_timestamp_ = packet.Timestamp(); last_received_rtp_system_time_ms_ = now_ms; } @@ -659,7 +659,7 @@ void ChannelReceive::ReceivedRTCPPacket(const uint8_t* data, size_t length) { UpdatePlayoutTimestamp(true, rtc::TimeMillis()); // Deliver RTCP packet to RTP/RTCP module for parsing - _rtpRtcpModule->IncomingRtcpPacket(data, length); + rtp_rtcp_->IncomingRtcpPacket(data, length); int64_t rtt = GetRTT(); if (rtt == 0) { @@ -670,14 +670,14 @@ void ChannelReceive::ReceivedRTCPPacket(const uint8_t* data, size_t length) { uint32_t ntp_secs = 0; uint32_t ntp_frac = 0; uint32_t rtp_timestamp = 0; - if (0 != _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL, - &rtp_timestamp)) { + if (0 != + rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL, &rtp_timestamp)) { // Waiting for RTCP. 
return; } { - rtc::CritScope lock(&ts_stats_lock_); + MutexLock lock(&ts_stats_lock_); ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp); } } @@ -699,7 +699,7 @@ double ChannelReceive::GetTotalOutputDuration() const { void ChannelReceive::SetChannelOutputVolumeScaling(float scaling) { RTC_DCHECK(worker_thread_checker_.IsCurrent()); - rtc::CritScope cs(&volume_settings_critsect_); + MutexLock lock(&volume_settings_mutex_); _outputGain = scaling; } @@ -709,14 +709,14 @@ void ChannelReceive::RegisterReceiverCongestionControlObjects( RTC_DCHECK(packet_router); RTC_DCHECK(!packet_router_); constexpr bool remb_candidate = false; - packet_router->AddReceiveRtpModule(_rtpRtcpModule.get(), remb_candidate); + packet_router->AddReceiveRtpModule(rtp_rtcp_.get(), remb_candidate); packet_router_ = packet_router; } void ChannelReceive::ResetReceiverCongestionControlObjects() { RTC_DCHECK(worker_thread_checker_.IsCurrent()); RTC_DCHECK(packet_router_); - packet_router_->RemoveReceiveRtpModule(_rtpRtcpModule.get()); + packet_router_->RemoveReceiveRtpModule(rtp_rtcp_.get()); packet_router_ = nullptr; } @@ -759,7 +759,7 @@ CallReceiveStatistics ChannelReceive::GetRTCPStatistics() const { // --- Timestamps { - rtc::CritScope lock(&ts_stats_lock_); + MutexLock lock(&ts_stats_lock_); stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_; } return stats; @@ -781,13 +781,13 @@ void ChannelReceive::SetNACKStatus(bool enable, int max_packets) { // Called when we are missing one or more packets. 
int ChannelReceive::ResendPackets(const uint16_t* sequence_numbers, int length) { - return _rtpRtcpModule->SendNACK(sequence_numbers, length); + return rtp_rtcp_->SendNACK(sequence_numbers, length); } void ChannelReceive::SetAssociatedSendChannel( const ChannelSendInterface* channel) { RTC_DCHECK(worker_thread_checker_.IsCurrent()); - rtc::CritScope lock(&assoc_send_channel_lock_); + MutexLock lock(&assoc_send_channel_lock_); associated_send_channel_ = channel; } @@ -818,7 +818,7 @@ AudioDecodingCallStats ChannelReceive::GetDecodingCallStatistics() const { uint32_t ChannelReceive::GetDelayEstimate() const { RTC_DCHECK(worker_thread_checker_.IsCurrent() || module_process_thread_checker_.IsCurrent()); - rtc::CritScope lock(&video_sync_lock_); + MutexLock lock(&video_sync_lock_); return acm_receiver_.FilteredCurrentDelayMs() + playout_delay_ms_; } @@ -838,7 +838,7 @@ bool ChannelReceive::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, int64_t* time_ms) const { RTC_DCHECK_RUNS_SERIALIZED(&video_capture_thread_race_checker_); { - rtc::CritScope lock(&video_sync_lock_); + MutexLock lock(&video_sync_lock_); if (!playout_timestamp_rtp_time_ms_) return false; *rtp_timestamp = playout_timestamp_rtp_; @@ -850,7 +850,7 @@ bool ChannelReceive::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, void ChannelReceive::SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, int64_t time_ms) { RTC_DCHECK_RUNS_SERIALIZED(&video_capture_thread_race_checker_); - rtc::CritScope lock(&video_sync_lock_); + MutexLock lock(&video_sync_lock_); playout_timestamp_ntp_ = ntp_timestamp_ms; playout_timestamp_ntp_time_ms_ = time_ms; } @@ -858,7 +858,7 @@ void ChannelReceive::SetEstimatedPlayoutNtpTimestampMs(int64_t ntp_timestamp_ms, absl::optional ChannelReceive::GetCurrentEstimatedPlayoutNtpTimestampMs(int64_t now_ms) const { RTC_DCHECK(worker_thread_checker_.IsCurrent()); - rtc::CritScope lock(&video_sync_lock_); + MutexLock lock(&video_sync_lock_); if (!playout_timestamp_ntp_ || 
!playout_timestamp_ntp_time_ms_) return absl::nullopt; @@ -877,13 +877,13 @@ int ChannelReceive::GetBaseMinimumPlayoutDelayMs() const { absl::optional ChannelReceive::GetSyncInfo() const { RTC_DCHECK(module_process_thread_checker_.IsCurrent()); Syncable::Info info; - if (_rtpRtcpModule->RemoteNTP(&info.capture_time_ntp_secs, - &info.capture_time_ntp_frac, nullptr, nullptr, - &info.capture_time_source_clock) != 0) { + if (rtp_rtcp_->RemoteNTP(&info.capture_time_ntp_secs, + &info.capture_time_ntp_frac, nullptr, nullptr, + &info.capture_time_source_clock) != 0) { return absl::nullopt; } { - rtc::CritScope cs(&sync_info_lock_); + MutexLock lock(&sync_info_lock_); if (!last_received_rtp_timestamp_ || !last_received_rtp_system_time_ms_) { return absl::nullopt; } @@ -917,7 +917,7 @@ void ChannelReceive::UpdatePlayoutTimestamp(bool rtcp, int64_t now_ms) { playout_timestamp -= (delay_ms * (GetRtpTimestampRateHz() / 1000)); { - rtc::CritScope lock(&video_sync_lock_); + MutexLock lock(&video_sync_lock_); if (!rtcp && playout_timestamp != playout_timestamp_rtp_) { playout_timestamp_rtp_ = playout_timestamp; playout_timestamp_rtp_time_ms_ = now_ms; @@ -942,12 +942,12 @@ int ChannelReceive::GetRtpTimestampRateHz() const { int64_t ChannelReceive::GetRTT() const { std::vector report_blocks; - _rtpRtcpModule->RemoteRTCPStat(&report_blocks); + rtp_rtcp_->RemoteRTCPStat(&report_blocks); // TODO(nisse): Could we check the return value from the ->RTT() call below, // instead of checking if we have any report blocks? if (report_blocks.empty()) { - rtc::CritScope lock(&assoc_send_channel_lock_); + MutexLock lock(&assoc_send_channel_lock_); // Tries to get RTT from an associated channel. if (!associated_send_channel_) { return 0; @@ -961,8 +961,7 @@ int64_t ChannelReceive::GetRTT() const { int64_t min_rtt = 0; // TODO(nisse): This method computes RTT based on sender reports, even though // a receive stream is not supposed to do that. 
- if (_rtpRtcpModule->RTT(remote_ssrc_, &rtt, &avg_rtt, &min_rtt, &max_rtt) != - 0) { + if (rtp_rtcp_->RTT(remote_ssrc_, &rtt, &avg_rtt, &min_rtt, &max_rtt) != 0) { return 0; } return rtt; diff --git a/audio/channel_send.cc b/audio/channel_send.cc index 3387f271ba..80e7ab2f47 100644 --- a/audio/channel_send.cc +++ b/audio/channel_send.cc @@ -29,6 +29,7 @@ #include "modules/audio_coding/include/audio_coding_module.h" #include "modules/audio_processing/rms_level.h" #include "modules/pacing/packet_router.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" @@ -38,6 +39,7 @@ #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/race_checker.h" #include "rtc_base/rate_limiter.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" #include "rtc_base/thread_checker.h" #include "rtc_base/time_utils.h" @@ -54,7 +56,6 @@ constexpr int64_t kMaxRetransmissionWindowMs = 1000; constexpr int64_t kMinRetransmissionWindowMs = 30; class RtpPacketSenderProxy; -class TransportFeedbackProxy; class TransportSequenceNumberProxy; class VoERtcpObserver; @@ -77,7 +78,8 @@ class ChannelSend : public ChannelSendInterface, bool extmap_allow_mixed, int rtcp_report_interval_ms, uint32_t ssrc, - rtc::scoped_refptr frame_transformer); + rtc::scoped_refptr frame_transformer, + TransportFeedbackObserver* feedback_observer); ~ChannelSend() override; @@ -106,7 +108,7 @@ class ChannelSend : public ChannelSendInterface, ANAStats GetANAStatistics() const override; // Used by AudioSendStream. - RtpRtcp* GetRtpRtcp() const override; + RtpRtcpInterface* GetRtpRtcp() const override; void RegisterCngPayloadType(int payload_type, int payload_frequency) override; @@ -185,13 +187,13 @@ class ChannelSend : public ChannelSendInterface, // audio thread to another, but access is still sequential. 
rtc::RaceChecker audio_thread_race_checker_; - rtc::CriticalSection volume_settings_critsect_; + mutable Mutex volume_settings_mutex_; bool sending_ RTC_GUARDED_BY(&worker_thread_checker_) = false; RtcEventLog* const event_log_; - std::unique_ptr _rtpRtcpModule; + std::unique_ptr rtp_rtcp_; std::unique_ptr rtp_sender_audio_; std::unique_ptr audio_coding_; @@ -200,7 +202,7 @@ class ChannelSend : public ChannelSendInterface, // uses ProcessThread* const _moduleProcessThreadPtr; RmsLevel rms_level_ RTC_GUARDED_BY(encoder_queue_); - bool input_mute_ RTC_GUARDED_BY(volume_settings_critsect_); + bool input_mute_ RTC_GUARDED_BY(volume_settings_mutex_); bool previous_frame_muted_ RTC_GUARDED_BY(encoder_queue_); // VoeRTP_RTCP // TODO(henrika): can today be accessed on the main thread and on the @@ -212,7 +214,7 @@ class ChannelSend : public ChannelSendInterface, PacketRouter* packet_router_ RTC_GUARDED_BY(&worker_thread_checker_) = nullptr; - const std::unique_ptr feedback_observer_proxy_; + TransportFeedbackObserver* const feedback_observer_; const std::unique_ptr rtp_packet_pacer_proxy_; const std::unique_ptr retransmission_rate_limiter_; @@ -233,8 +235,8 @@ class ChannelSend : public ChannelSendInterface, rtc::scoped_refptr frame_transformer_delegate_ RTC_GUARDED_BY(encoder_queue_); - rtc::CriticalSection bitrate_crit_section_; - int configured_bitrate_bps_ RTC_GUARDED_BY(bitrate_crit_section_) = 0; + mutable Mutex bitrate_mutex_; + int configured_bitrate_bps_ RTC_GUARDED_BY(bitrate_mutex_) = 0; // Defined last to ensure that there are no running tasks when the other // members are destroyed. 
@@ -243,63 +245,26 @@ class ChannelSend : public ChannelSendInterface, const int kTelephoneEventAttenuationdB = 10; -class TransportFeedbackProxy : public TransportFeedbackObserver { - public: - TransportFeedbackProxy() : feedback_observer_(nullptr) { - pacer_thread_.Detach(); - network_thread_.Detach(); - } - - void SetTransportFeedbackObserver( - TransportFeedbackObserver* feedback_observer) { - RTC_DCHECK(thread_checker_.IsCurrent()); - rtc::CritScope lock(&crit_); - feedback_observer_ = feedback_observer; - } - - // Implements TransportFeedbackObserver. - void OnAddPacket(const RtpPacketSendInfo& packet_info) override { - RTC_DCHECK(pacer_thread_.IsCurrent()); - rtc::CritScope lock(&crit_); - if (feedback_observer_) - feedback_observer_->OnAddPacket(packet_info); - } - - void OnTransportFeedback(const rtcp::TransportFeedback& feedback) override { - RTC_DCHECK(network_thread_.IsCurrent()); - rtc::CritScope lock(&crit_); - if (feedback_observer_) - feedback_observer_->OnTransportFeedback(feedback); - } - - private: - rtc::CriticalSection crit_; - rtc::ThreadChecker thread_checker_; - rtc::ThreadChecker pacer_thread_; - rtc::ThreadChecker network_thread_; - TransportFeedbackObserver* feedback_observer_ RTC_GUARDED_BY(&crit_); -}; - class RtpPacketSenderProxy : public RtpPacketSender { public: RtpPacketSenderProxy() : rtp_packet_pacer_(nullptr) {} void SetPacketPacer(RtpPacketSender* rtp_packet_pacer) { RTC_DCHECK(thread_checker_.IsCurrent()); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); rtp_packet_pacer_ = rtp_packet_pacer; } void EnqueuePackets( std::vector> packets) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); rtp_packet_pacer_->EnqueuePackets(std::move(packets)); } private: rtc::ThreadChecker thread_checker_; - rtc::CriticalSection crit_; - RtpPacketSender* rtp_packet_pacer_ RTC_GUARDED_BY(&crit_); + Mutex mutex_; + RtpPacketSender* rtp_packet_pacer_ RTC_GUARDED_BY(&mutex_); }; class VoERtcpObserver : public 
RtcpBandwidthObserver { @@ -309,12 +274,12 @@ class VoERtcpObserver : public RtcpBandwidthObserver { ~VoERtcpObserver() override {} void SetBandwidthObserver(RtcpBandwidthObserver* bandwidth_observer) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); bandwidth_observer_ = bandwidth_observer; } void OnReceivedEstimatedBitrate(uint32_t bitrate) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (bandwidth_observer_) { bandwidth_observer_->OnReceivedEstimatedBitrate(bitrate); } @@ -324,7 +289,7 @@ class VoERtcpObserver : public RtcpBandwidthObserver { int64_t rtt, int64_t now_ms) override { { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (bandwidth_observer_) { bandwidth_observer_->OnReceivedRtcpReceiverReport(report_blocks, rtt, now_ms); @@ -372,8 +337,8 @@ class VoERtcpObserver : public RtcpBandwidthObserver { ChannelSend* owner_; // Maps remote side ssrc to extended highest sequence number received. std::map extended_max_sequence_number_; - rtc::CriticalSection crit_; - RtcpBandwidthObserver* bandwidth_observer_ RTC_GUARDED_BY(crit_); + Mutex mutex_; + RtcpBandwidthObserver* bandwidth_observer_ RTC_GUARDED_BY(mutex_); }; int32_t ChannelSend::SendData(AudioFrameType frameType, @@ -388,9 +353,9 @@ int32_t ChannelSend::SendData(AudioFrameType frameType, // Asynchronously transform the payload before sending it. After the payload // is transformed, the delegate will call SendRtpAudio to send it. frame_transformer_delegate_->Transform( - frameType, payloadType, rtp_timestamp, _rtpRtcpModule->StartTimestamp(), + frameType, payloadType, rtp_timestamp, rtp_rtcp_->StartTimestamp(), payloadData, payloadSize, absolute_capture_timestamp_ms, - _rtpRtcpModule->SSRC()); + rtp_rtcp_->SSRC()); return 0; } return SendRtpAudio(frameType, payloadType, rtp_timestamp, payload, @@ -427,7 +392,7 @@ int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType, // Encrypt the audio payload into the buffer. 
size_t bytes_written = 0; int encrypt_status = frame_encryptor_->Encrypt( - cricket::MEDIA_TYPE_AUDIO, _rtpRtcpModule->SSRC(), + cricket::MEDIA_TYPE_AUDIO, rtp_rtcp_->SSRC(), /*additional_data=*/nullptr, payload, encrypted_audio_payload, &bytes_written); if (encrypt_status != 0) { @@ -449,12 +414,12 @@ int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType, // Push data from ACM to RTP/RTCP-module to deliver audio frame for // packetization. - if (!_rtpRtcpModule->OnSendingRtpFrame(rtp_timestamp, - // Leaving the time when this frame was - // received from the capture device as - // undefined for voice for now. - -1, payloadType, - /*force_sender_report=*/false)) { + if (!rtp_rtcp_->OnSendingRtpFrame(rtp_timestamp, + // Leaving the time when this frame was + // received from the capture device as + // undefined for voice for now. + -1, payloadType, + /*force_sender_report=*/false)) { return -1; } @@ -466,9 +431,8 @@ int32_t ChannelSend::SendRtpAudio(AudioFrameType frameType, // This call will trigger Transport::SendPacket() from the RTP/RTCP module. 
if (!rtp_sender_audio_->SendAudio( - frameType, payloadType, - rtp_timestamp + _rtpRtcpModule->StartTimestamp(), payload.data(), - payload.size(), absolute_capture_timestamp_ms)) { + frameType, payloadType, rtp_timestamp + rtp_rtcp_->StartTimestamp(), + payload.data(), payload.size(), absolute_capture_timestamp_ms)) { RTC_DLOG(LS_ERROR) << "ChannelSend::SendData() failed to send data to RTP/RTCP module"; return -1; @@ -489,7 +453,8 @@ ChannelSend::ChannelSend( bool extmap_allow_mixed, int rtcp_report_interval_ms, uint32_t ssrc, - rtc::scoped_refptr frame_transformer) + rtc::scoped_refptr frame_transformer, + TransportFeedbackObserver* feedback_observer) : event_log_(rtc_event_log), _timeStamp(0), // This is just an offset, RTP module will add it's own // random offset @@ -498,7 +463,7 @@ ChannelSend::ChannelSend( previous_frame_muted_(false), _includeAudioLevelIndication(false), rtcp_observer_(new VoERtcpObserver(this)), - feedback_observer_proxy_(new TransportFeedbackProxy()), + feedback_observer_(feedback_observer), rtp_packet_pacer_proxy_(new RtpPacketSenderProxy()), retransmission_rate_limiter_( new RateLimiter(clock, kMaxRetransmissionWindowMs)), @@ -512,9 +477,9 @@ ChannelSend::ChannelSend( audio_coding_.reset(AudioCodingModule::Create(AudioCodingModule::Config())); - RtpRtcp::Configuration configuration; + RtpRtcpInterface::Configuration configuration; configuration.bandwidth_callback = rtcp_observer_.get(); - configuration.transport_feedback_callback = feedback_observer_proxy_.get(); + configuration.transport_feedback_callback = feedback_observer_; configuration.clock = (clock ? 
clock : Clock::GetRealTimeClock()); configuration.audio = true; configuration.outgoing_transport = rtp_transport; @@ -530,16 +495,16 @@ ChannelSend::ChannelSend( configuration.local_media_ssrc = ssrc; - _rtpRtcpModule = RtpRtcp::Create(configuration); - _rtpRtcpModule->SetSendingMediaStatus(false); + rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(configuration); + rtp_rtcp_->SetSendingMediaStatus(false); - rtp_sender_audio_ = std::make_unique( - configuration.clock, _rtpRtcpModule->RtpSender()); + rtp_sender_audio_ = std::make_unique(configuration.clock, + rtp_rtcp_->RtpSender()); - _moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get(), RTC_FROM_HERE); + _moduleProcessThreadPtr->RegisterModule(rtp_rtcp_.get(), RTC_FROM_HERE); // Ensure that RTCP is enabled by default for the created channel. - _rtpRtcpModule->SetRTCPStatus(RtcpMode::kCompound); + rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound); int error = audio_coding_->RegisterTransportCallback(this); RTC_DCHECK_EQ(0, error); @@ -559,7 +524,7 @@ ChannelSend::~ChannelSend() { RTC_DCHECK_EQ(0, error); if (_moduleProcessThreadPtr) - _moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()); + _moduleProcessThreadPtr->DeRegisterModule(rtp_rtcp_.get()); } void ChannelSend::StartSend() { @@ -567,8 +532,8 @@ void ChannelSend::StartSend() { RTC_DCHECK(!sending_); sending_ = true; - _rtpRtcpModule->SetSendingMediaStatus(true); - int ret = _rtpRtcpModule->SetSendingStatus(true); + rtp_rtcp_->SetSendingMediaStatus(true); + int ret = rtp_rtcp_->SetSendingStatus(true); RTC_DCHECK_EQ(0, ret); // It is now OK to start processing on the encoder task queue. 
encoder_queue_.PostTask([this] { @@ -594,10 +559,10 @@ void ChannelSend::StopSend() { // Reset sending SSRC and sequence number and triggers direct transmission // of RTCP BYE - if (_rtpRtcpModule->SetSendingStatus(false) == -1) { + if (rtp_rtcp_->SetSendingStatus(false) == -1) { RTC_DLOG(LS_ERROR) << "StartSend() RTP/RTCP failed to stop sending"; } - _rtpRtcpModule->SetSendingMediaStatus(false); + rtp_rtcp_->SetSendingMediaStatus(false); } void ChannelSend::SetEncoder(int payload_type, @@ -608,8 +573,8 @@ void ChannelSend::SetEncoder(int payload_type, // The RTP/RTCP module needs to know the RTP timestamp rate (i.e. clockrate) // as well as some other things, so we collect this info and send it along. - _rtpRtcpModule->RegisterSendPayloadFrequency(payload_type, - encoder->RtpTimestampRateHz()); + rtp_rtcp_->RegisterSendPayloadFrequency(payload_type, + encoder->RtpTimestampRateHz()); rtp_sender_audio_->RegisterAudioPayload("audio", payload_type, encoder->RtpTimestampRateHz(), encoder->NumChannels(), 0); @@ -642,7 +607,7 @@ void ChannelSend::OnBitrateAllocation(BitrateAllocationUpdate update) { // rules. 
// RTC_DCHECK(worker_thread_checker_.IsCurrent() || // module_process_thread_checker_.IsCurrent()); - rtc::CritScope lock(&bitrate_crit_section_); + MutexLock lock(&bitrate_mutex_); CallEncoder([&](AudioEncoder* encoder) { encoder->OnReceivedUplinkAllocation(update); @@ -652,7 +617,7 @@ void ChannelSend::OnBitrateAllocation(BitrateAllocationUpdate update) { } int ChannelSend::GetBitrate() const { - rtc::CritScope lock(&bitrate_crit_section_); + MutexLock lock(&bitrate_mutex_); return configured_bitrate_bps_; } @@ -663,8 +628,10 @@ void ChannelSend::OnUplinkPacketLossRate(float packet_loss_rate) { } void ChannelSend::ReceivedRTCPPacket(const uint8_t* data, size_t length) { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + // Deliver RTCP packet to RTP/RTCP module for parsing - _rtpRtcpModule->IncomingRtcpPacket(data, length); + rtp_rtcp_->IncomingRtcpPacket(data, length); int64_t rtt = GetRTT(); if (rtt == 0) { @@ -685,12 +652,12 @@ void ChannelSend::ReceivedRTCPPacket(const uint8_t* data, size_t length) { void ChannelSend::SetInputMute(bool enable) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - rtc::CritScope cs(&volume_settings_critsect_); + MutexLock lock(&volume_settings_mutex_); input_mute_ = enable; } bool ChannelSend::InputMute() const { - rtc::CritScope cs(&volume_settings_critsect_); + MutexLock lock(&volume_settings_mutex_); return input_mute_; } @@ -713,7 +680,7 @@ bool ChannelSend::SendTelephoneEventOutband(int event, int duration_ms) { void ChannelSend::RegisterCngPayloadType(int payload_type, int payload_frequency) { - _rtpRtcpModule->RegisterSendPayloadFrequency(payload_type, payload_frequency); + rtp_rtcp_->RegisterSendPayloadFrequency(payload_type, payload_frequency); rtp_sender_audio_->RegisterAudioPayload("CN", payload_type, payload_frequency, 1, 0); } @@ -723,7 +690,7 @@ void ChannelSend::SetSendTelephoneEventPayloadType(int payload_type, RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK_LE(0, payload_type); RTC_DCHECK_GE(127, payload_type); 
- _rtpRtcpModule->RegisterSendPayloadFrequency(payload_type, payload_frequency); + rtp_rtcp_->RegisterSendPayloadFrequency(payload_type, payload_frequency); rtp_sender_audio_->RegisterAudioPayload("telephone-event", payload_type, payload_frequency, 0, 0); } @@ -732,9 +699,9 @@ void ChannelSend::SetSendAudioLevelIndicationStatus(bool enable, int id) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); _includeAudioLevelIndication = enable; if (enable) { - _rtpRtcpModule->RegisterRtpHeaderExtension(AudioLevel::kUri, id); + rtp_rtcp_->RegisterRtpHeaderExtension(AudioLevel::kUri, id); } else { - _rtpRtcpModule->DeregisterSendRtpHeaderExtension(AudioLevel::kUri); + rtp_rtcp_->DeregisterSendRtpHeaderExtension(AudioLevel::kUri); } } @@ -743,31 +710,25 @@ void ChannelSend::RegisterSenderCongestionControlObjects( RtcpBandwidthObserver* bandwidth_observer) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); RtpPacketSender* rtp_packet_pacer = transport->packet_sender(); - TransportFeedbackObserver* transport_feedback_observer = - transport->transport_feedback_observer(); PacketRouter* packet_router = transport->packet_router(); RTC_DCHECK(rtp_packet_pacer); - RTC_DCHECK(transport_feedback_observer); RTC_DCHECK(packet_router); RTC_DCHECK(!packet_router_); rtcp_observer_->SetBandwidthObserver(bandwidth_observer); - feedback_observer_proxy_->SetTransportFeedbackObserver( - transport_feedback_observer); rtp_packet_pacer_proxy_->SetPacketPacer(rtp_packet_pacer); - _rtpRtcpModule->SetStorePacketsStatus(true, 600); + rtp_rtcp_->SetStorePacketsStatus(true, 600); constexpr bool remb_candidate = false; - packet_router->AddSendRtpModule(_rtpRtcpModule.get(), remb_candidate); + packet_router->AddSendRtpModule(rtp_rtcp_.get(), remb_candidate); packet_router_ = packet_router; } void ChannelSend::ResetSenderCongestionControlObjects() { RTC_DCHECK_RUN_ON(&worker_thread_checker_); RTC_DCHECK(packet_router_); - _rtpRtcpModule->SetStorePacketsStatus(false, 600); + rtp_rtcp_->SetStorePacketsStatus(false, 
600); rtcp_observer_->SetBandwidthObserver(nullptr); - feedback_observer_proxy_->SetTransportFeedbackObserver(nullptr); - packet_router_->RemoveSendRtpModule(_rtpRtcpModule.get()); + packet_router_->RemoveSendRtpModule(rtp_rtcp_.get()); packet_router_ = nullptr; rtp_packet_pacer_proxy_->SetPacketPacer(nullptr); } @@ -776,7 +737,7 @@ void ChannelSend::SetRTCP_CNAME(absl::string_view c_name) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); // Note: SetCNAME() accepts a c string of length at most 255. const std::string c_name_limited(c_name.substr(0, 255)); - int ret = _rtpRtcpModule->SetCNAME(c_name_limited.c_str()) != 0; + int ret = rtp_rtcp_->SetCNAME(c_name_limited.c_str()) != 0; RTC_DCHECK_EQ(0, ret) << "SetRTCP_CNAME() failed to set RTCP CNAME"; } @@ -787,7 +748,7 @@ std::vector ChannelSend::GetRemoteRTCPReportBlocks() const { // report block according to RFC 3550. std::vector rtcp_report_blocks; - int ret = _rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks); + int ret = rtp_rtcp_->RemoteRTCPStat(&rtcp_report_blocks); RTC_DCHECK_EQ(0, ret); std::vector report_blocks; @@ -816,7 +777,7 @@ CallSendStatistics ChannelSend::GetRTCPStatistics() const { StreamDataCounters rtp_stats; StreamDataCounters rtx_stats; - _rtpRtcpModule->GetSendStreamDataCounters(&rtp_stats, &rtx_stats); + rtp_rtcp_->GetSendStreamDataCounters(&rtp_stats, &rtx_stats); stats.payload_bytes_sent = rtp_stats.transmitted.payload_bytes + rtx_stats.transmitted.payload_bytes; stats.header_and_padding_bytes_sent = @@ -829,7 +790,7 @@ CallSendStatistics ChannelSend::GetRTCPStatistics() const { stats.packetsSent = rtp_stats.transmitted.packets + rtx_stats.transmitted.packets; stats.retransmitted_packets_sent = rtp_stats.retransmitted.packets; - stats.report_block_datas = _rtpRtcpModule->GetLatestReportBlockData(); + stats.report_block_datas = rtp_rtcp_->GetLatestReportBlockData(); return stats; } @@ -894,14 +855,14 @@ ANAStats ChannelSend::GetANAStatistics() const { return audio_coding_->GetANAStats(); } 
-RtpRtcp* ChannelSend::GetRtpRtcp() const { +RtpRtcpInterface* ChannelSend::GetRtpRtcp() const { RTC_DCHECK(module_process_thread_checker_.IsCurrent()); - return _rtpRtcpModule.get(); + return rtp_rtcp_.get(); } int64_t ChannelSend::GetRTT() const { std::vector report_blocks; - _rtpRtcpModule->RemoteRTCPStat(&report_blocks); + rtp_rtcp_->RemoteRTCPStat(&report_blocks); if (report_blocks.empty()) { return 0; @@ -913,8 +874,8 @@ int64_t ChannelSend::GetRTT() const { int64_t min_rtt = 0; // We don't know in advance the remote ssrc used by the other end's receiver // reports, so use the SSRC of the first report block for calculating the RTT. - if (_rtpRtcpModule->RTT(report_blocks[0].sender_ssrc, &rtt, &avg_rtt, - &min_rtt, &max_rtt) != 0) { + if (rtp_rtcp_->RTT(report_blocks[0].sender_ssrc, &rtt, &avg_rtt, &min_rtt, + &max_rtt) != 0) { return 0; } return rtt; @@ -985,12 +946,13 @@ std::unique_ptr CreateChannelSend( bool extmap_allow_mixed, int rtcp_report_interval_ms, uint32_t ssrc, - rtc::scoped_refptr frame_transformer) { + rtc::scoped_refptr frame_transformer, + TransportFeedbackObserver* feedback_observer) { return std::make_unique( clock, task_queue_factory, module_process_thread, rtp_transport, rtcp_rtt_stats, rtc_event_log, frame_encryptor, crypto_options, extmap_allow_mixed, rtcp_report_interval_ms, ssrc, - std::move(frame_transformer)); + std::move(frame_transformer), feedback_observer); } } // namespace voe diff --git a/audio/channel_send.h b/audio/channel_send.h index cb3b99287b..2e23ef5d2d 100644 --- a/audio/channel_send.h +++ b/audio/channel_send.h @@ -22,7 +22,7 @@ #include "api/function_view.h" #include "api/task_queue/task_queue_factory.h" #include "modules/rtp_rtcp/include/report_block_data.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "modules/rtp_rtcp/source/rtp_sender_audio.h" namespace webrtc { @@ -30,7 +30,6 @@ namespace webrtc { class FrameEncryptorInterface; class 
ProcessThread; class RtcEventLog; -class RtpRtcp; class RtpTransportControllerSendInterface; struct CallSendStatistics { @@ -97,7 +96,7 @@ class ChannelSendInterface { virtual void ProcessAndEncodeAudio( std::unique_ptr audio_frame) = 0; - virtual RtpRtcp* GetRtpRtcp() const = 0; + virtual RtpRtcpInterface* GetRtpRtcp() const = 0; // In RTP we currently rely on RTCP packets (|ReceivedRTCPPacket|) to inform // about RTT. @@ -136,7 +135,8 @@ std::unique_ptr CreateChannelSend( bool extmap_allow_mixed, int rtcp_report_interval_ms, uint32_t ssrc, - rtc::scoped_refptr frame_transformer); + rtc::scoped_refptr frame_transformer, + TransportFeedbackObserver* feedback_observer); } // namespace voe } // namespace webrtc diff --git a/audio/channel_send_frame_transformer_delegate.cc b/audio/channel_send_frame_transformer_delegate.cc index 53b573eb8b..72a459d897 100644 --- a/audio/channel_send_frame_transformer_delegate.cc +++ b/audio/channel_send_frame_transformer_delegate.cc @@ -77,7 +77,7 @@ void ChannelSendFrameTransformerDelegate::Reset() { frame_transformer_->UnregisterTransformedFrameCallback(); frame_transformer_ = nullptr; - rtc::CritScope lock(&send_lock_); + MutexLock lock(&send_lock_); send_frame_callback_ = SendFrameCallback(); } @@ -97,7 +97,7 @@ void ChannelSendFrameTransformerDelegate::Transform( void ChannelSendFrameTransformerDelegate::OnTransformedFrame( std::unique_ptr frame) { - rtc::CritScope lock(&send_lock_); + MutexLock lock(&send_lock_); if (!send_frame_callback_) return; rtc::scoped_refptr delegate = this; @@ -109,7 +109,7 @@ void ChannelSendFrameTransformerDelegate::OnTransformedFrame( void ChannelSendFrameTransformerDelegate::SendFrame( std::unique_ptr frame) const { - rtc::CritScope lock(&send_lock_); + MutexLock lock(&send_lock_); RTC_DCHECK_RUN_ON(encoder_queue_); if (!send_frame_callback_) return; diff --git a/audio/channel_send_frame_transformer_delegate.h b/audio/channel_send_frame_transformer_delegate.h index 5added7b31..531d1bc110 100644 --- 
a/audio/channel_send_frame_transformer_delegate.h +++ b/audio/channel_send_frame_transformer_delegate.h @@ -16,7 +16,7 @@ #include "api/frame_transformer_interface.h" #include "modules/audio_coding/include/audio_coding_module_typedefs.h" #include "rtc_base/buffer.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/synchronization/sequence_checker.h" #include "rtc_base/task_queue.h" @@ -72,7 +72,7 @@ class ChannelSendFrameTransformerDelegate : public TransformedFrameCallback { ~ChannelSendFrameTransformerDelegate() override = default; private: - rtc::CriticalSection send_lock_; + mutable Mutex send_lock_; SendFrameCallback send_frame_callback_ RTC_GUARDED_BY(send_lock_); rtc::scoped_refptr frame_transformer_; rtc::TaskQueue* encoder_queue_ RTC_GUARDED_BY(send_lock_); diff --git a/audio/mock_voe_channel_proxy.h b/audio/mock_voe_channel_proxy.h index 38ad208e1a..542358f687 100644 --- a/audio/mock_voe_channel_proxy.h +++ b/audio/mock_voe_channel_proxy.h @@ -28,102 +28,144 @@ namespace test { class MockChannelReceive : public voe::ChannelReceiveInterface { public: - MOCK_METHOD2(SetNACKStatus, void(bool enable, int max_packets)); - MOCK_METHOD1(RegisterReceiverCongestionControlObjects, - void(PacketRouter* packet_router)); - MOCK_METHOD0(ResetReceiverCongestionControlObjects, void()); - MOCK_CONST_METHOD0(GetRTCPStatistics, CallReceiveStatistics()); - MOCK_CONST_METHOD0(GetNetworkStatistics, NetworkStatistics()); - MOCK_CONST_METHOD0(GetDecodingCallStatistics, AudioDecodingCallStats()); - MOCK_CONST_METHOD0(GetSpeechOutputLevelFullRange, int()); - MOCK_CONST_METHOD0(GetTotalOutputEnergy, double()); - MOCK_CONST_METHOD0(GetTotalOutputDuration, double()); - MOCK_CONST_METHOD0(GetDelayEstimate, uint32_t()); - MOCK_METHOD1(SetSink, void(AudioSinkInterface* sink)); - MOCK_METHOD1(OnRtpPacket, void(const RtpPacketReceived& packet)); - MOCK_METHOD2(ReceivedRTCPPacket, void(const uint8_t* packet, size_t length)); - 
MOCK_METHOD1(SetChannelOutputVolumeScaling, void(float scaling)); - MOCK_METHOD2(GetAudioFrameWithInfo, - AudioMixer::Source::AudioFrameInfo(int sample_rate_hz, - AudioFrame* audio_frame)); - MOCK_CONST_METHOD0(PreferredSampleRate, int()); - MOCK_METHOD1(SetAssociatedSendChannel, - void(const voe::ChannelSendInterface* send_channel)); - MOCK_CONST_METHOD2(GetPlayoutRtpTimestamp, - bool(uint32_t* rtp_timestamp, int64_t* time_ms)); - MOCK_METHOD2(SetEstimatedPlayoutNtpTimestampMs, - void(int64_t ntp_timestamp_ms, int64_t time_ms)); - MOCK_CONST_METHOD1(GetCurrentEstimatedPlayoutNtpTimestampMs, - absl::optional(int64_t now_ms)); - MOCK_CONST_METHOD0(GetSyncInfo, absl::optional()); - MOCK_METHOD1(SetMinimumPlayoutDelay, void(int delay_ms)); - MOCK_METHOD1(SetBaseMinimumPlayoutDelayMs, bool(int delay_ms)); - MOCK_CONST_METHOD0(GetBaseMinimumPlayoutDelayMs, int()); - MOCK_CONST_METHOD0(GetReceiveCodec, - absl::optional>()); - MOCK_METHOD1(SetReceiveCodecs, - void(const std::map& codecs)); - MOCK_CONST_METHOD0(GetSources, std::vector()); - MOCK_METHOD0(StartPlayout, void()); - MOCK_METHOD0(StopPlayout, void()); - MOCK_METHOD1(SetDepacketizerToDecoderFrameTransformer, - void(rtc::scoped_refptr - frame_transformer)); + MOCK_METHOD(void, SetNACKStatus, (bool enable, int max_packets), (override)); + MOCK_METHOD(void, + RegisterReceiverCongestionControlObjects, + (PacketRouter*), + (override)); + MOCK_METHOD(void, ResetReceiverCongestionControlObjects, (), (override)); + MOCK_METHOD(CallReceiveStatistics, GetRTCPStatistics, (), (const, override)); + MOCK_METHOD(NetworkStatistics, GetNetworkStatistics, (), (const, override)); + MOCK_METHOD(AudioDecodingCallStats, + GetDecodingCallStatistics, + (), + (const, override)); + MOCK_METHOD(int, GetSpeechOutputLevelFullRange, (), (const, override)); + MOCK_METHOD(double, GetTotalOutputEnergy, (), (const, override)); + MOCK_METHOD(double, GetTotalOutputDuration, (), (const, override)); + MOCK_METHOD(uint32_t, GetDelayEstimate, (), 
(const, override)); + MOCK_METHOD(void, SetSink, (AudioSinkInterface*), (override)); + MOCK_METHOD(void, OnRtpPacket, (const RtpPacketReceived& packet), (override)); + MOCK_METHOD(void, + ReceivedRTCPPacket, + (const uint8_t*, size_t length), + (override)); + MOCK_METHOD(void, SetChannelOutputVolumeScaling, (float scaling), (override)); + MOCK_METHOD(AudioMixer::Source::AudioFrameInfo, + GetAudioFrameWithInfo, + (int sample_rate_hz, AudioFrame*), + (override)); + MOCK_METHOD(int, PreferredSampleRate, (), (const, override)); + MOCK_METHOD(void, + SetAssociatedSendChannel, + (const voe::ChannelSendInterface*), + (override)); + MOCK_METHOD(bool, + GetPlayoutRtpTimestamp, + (uint32_t*, int64_t*), + (const, override)); + MOCK_METHOD(void, + SetEstimatedPlayoutNtpTimestampMs, + (int64_t ntp_timestamp_ms, int64_t time_ms), + (override)); + MOCK_METHOD(absl::optional, + GetCurrentEstimatedPlayoutNtpTimestampMs, + (int64_t now_ms), + (const, override)); + MOCK_METHOD(absl::optional, + GetSyncInfo, + (), + (const, override)); + MOCK_METHOD(void, SetMinimumPlayoutDelay, (int delay_ms), (override)); + MOCK_METHOD(bool, SetBaseMinimumPlayoutDelayMs, (int delay_ms), (override)); + MOCK_METHOD(int, GetBaseMinimumPlayoutDelayMs, (), (const, override)); + MOCK_METHOD((absl::optional>), + GetReceiveCodec, + (), + (const, override)); + MOCK_METHOD(void, + SetReceiveCodecs, + ((const std::map& codecs)), + (override)); + MOCK_METHOD(void, StartPlayout, (), (override)); + MOCK_METHOD(void, StopPlayout, (), (override)); + MOCK_METHOD( + void, + SetDepacketizerToDecoderFrameTransformer, + (rtc::scoped_refptr frame_transformer), + (override)); }; class MockChannelSend : public voe::ChannelSendInterface { public: - // GMock doesn't like move-only types, like std::unique_ptr. 
- virtual void SetEncoder(int payload_type, - std::unique_ptr encoder) { - return SetEncoderForMock(payload_type, &encoder); - } - MOCK_METHOD2(SetEncoderForMock, - void(int payload_type, std::unique_ptr* encoder)); - MOCK_METHOD1( + MOCK_METHOD(void, + SetEncoder, + (int payload_type, std::unique_ptr encoder), + (override)); + MOCK_METHOD( + void, ModifyEncoder, - void(rtc::FunctionView*)> modifier)); - MOCK_METHOD1(CallEncoder, - void(rtc::FunctionView modifier)); - MOCK_METHOD1(SetRTCP_CNAME, void(absl::string_view c_name)); - MOCK_METHOD2(SetSendAudioLevelIndicationStatus, void(bool enable, int id)); - MOCK_METHOD2(RegisterSenderCongestionControlObjects, - void(RtpTransportControllerSendInterface* transport, - RtcpBandwidthObserver* bandwidth_observer)); - MOCK_METHOD0(ResetSenderCongestionControlObjects, void()); - MOCK_CONST_METHOD0(GetRTCPStatistics, CallSendStatistics()); - MOCK_CONST_METHOD0(GetRemoteRTCPReportBlocks, std::vector()); - MOCK_CONST_METHOD0(GetANAStatistics, ANAStats()); - MOCK_METHOD2(RegisterCngPayloadType, - void(int payload_type, int payload_frequency)); - MOCK_METHOD2(SetSendTelephoneEventPayloadType, - void(int payload_type, int payload_frequency)); - MOCK_METHOD2(SendTelephoneEventOutband, bool(int event, int duration_ms)); - MOCK_METHOD1(OnBitrateAllocation, void(BitrateAllocationUpdate update)); - MOCK_METHOD1(SetInputMute, void(bool muted)); - MOCK_METHOD2(ReceivedRTCPPacket, void(const uint8_t* packet, size_t length)); - // GMock doesn't like move-only types, like std::unique_ptr. 
- virtual void ProcessAndEncodeAudio(std::unique_ptr audio_frame) { - ProcessAndEncodeAudioForMock(&audio_frame); - } - MOCK_METHOD1(ProcessAndEncodeAudioForMock, - void(std::unique_ptr* audio_frame)); - MOCK_METHOD1(SetTransportOverhead, - void(size_t transport_overhead_per_packet)); - MOCK_CONST_METHOD0(GetRtpRtcp, RtpRtcp*()); - MOCK_CONST_METHOD0(GetBitrate, int()); - MOCK_METHOD1(OnTwccBasedUplinkPacketLossRate, void(float packet_loss_rate)); - MOCK_METHOD1(OnRecoverableUplinkPacketLossRate, - void(float recoverable_packet_loss_rate)); - MOCK_CONST_METHOD0(GetRTT, int64_t()); - MOCK_METHOD0(StartSend, void()); - MOCK_METHOD0(StopSend, void()); - MOCK_METHOD1( - SetFrameEncryptor, - void(rtc::scoped_refptr frame_encryptor)); - MOCK_METHOD1(SetEncoderToPacketizerFrameTransformer, - void(rtc::scoped_refptr - frame_transformer)); + (rtc::FunctionView*)> modifier), + (override)); + MOCK_METHOD(void, + CallEncoder, + (rtc::FunctionView modifier), + (override)); + MOCK_METHOD(void, SetRTCP_CNAME, (absl::string_view c_name), (override)); + MOCK_METHOD(void, + SetSendAudioLevelIndicationStatus, + (bool enable, int id), + (override)); + MOCK_METHOD(void, + RegisterSenderCongestionControlObjects, + (RtpTransportControllerSendInterface*, RtcpBandwidthObserver*), + (override)); + MOCK_METHOD(void, ResetSenderCongestionControlObjects, (), (override)); + MOCK_METHOD(CallSendStatistics, GetRTCPStatistics, (), (const, override)); + MOCK_METHOD(std::vector, + GetRemoteRTCPReportBlocks, + (), + (const, override)); + MOCK_METHOD(ANAStats, GetANAStatistics, (), (const, override)); + MOCK_METHOD(void, + RegisterCngPayloadType, + (int payload_type, int payload_frequency), + (override)); + MOCK_METHOD(void, + SetSendTelephoneEventPayloadType, + (int payload_type, int payload_frequency), + (override)); + MOCK_METHOD(bool, + SendTelephoneEventOutband, + (int event, int duration_ms), + (override)); + MOCK_METHOD(void, + OnBitrateAllocation, + (BitrateAllocationUpdate update), + 
(override)); + MOCK_METHOD(void, SetInputMute, (bool muted), (override)); + MOCK_METHOD(void, + ReceivedRTCPPacket, + (const uint8_t*, size_t length), + (override)); + MOCK_METHOD(void, + ProcessAndEncodeAudio, + (std::unique_ptr), + (override)); + MOCK_METHOD(RtpRtcpInterface*, GetRtpRtcp, (), (const, override)); + MOCK_METHOD(int, GetBitrate, (), (const, override)); + MOCK_METHOD(int64_t, GetRTT, (), (const, override)); + MOCK_METHOD(void, StartSend, (), (override)); + MOCK_METHOD(void, StopSend, (), (override)); + MOCK_METHOD(void, + SetFrameEncryptor, + (rtc::scoped_refptr frame_encryptor), + (override)); + MOCK_METHOD( + void, + SetEncoderToPacketizerFrameTransformer, + (rtc::scoped_refptr frame_transformer), + (override)); }; } // namespace test } // namespace webrtc diff --git a/audio/test/low_bandwidth_audio_test.cc b/audio/test/low_bandwidth_audio_test.cc index 049b5e5150..50cf499920 100644 --- a/audio/test/low_bandwidth_audio_test.cc +++ b/audio/test/low_bandwidth_audio_test.cc @@ -8,6 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#include "absl/flags/declare.h" #include "absl/flags/flag.h" #include "api/test/simulated_network.h" #include "audio/test/audio_end_to_end_test.h" diff --git a/audio/test/pc_low_bandwidth_audio_test.cc b/audio/test/pc_low_bandwidth_audio_test.cc index aafb65f15d..95a32238c5 100644 --- a/audio/test/pc_low_bandwidth_audio_test.cc +++ b/audio/test/pc_low_bandwidth_audio_test.cc @@ -10,12 +10,14 @@ #include +#include "absl/flags/declare.h" #include "absl/flags/flag.h" #include "api/test/create_network_emulation_manager.h" #include "api/test/create_peerconnection_quality_test_fixture.h" #include "api/test/network_emulation_manager.h" #include "api/test/peerconnection_quality_test_fixture.h" #include "api/test/simulated_network.h" +#include "api/test/time_controller.h" #include "call/simulated_network.h" #include "test/gtest.h" #include "test/pc/e2e/network_quality_metrics_reporter.h" @@ -70,12 +72,13 @@ CreateTwoNetworkLinks(NetworkEmulationManager* emulation, std::unique_ptr CreateTestFixture(const std::string& test_case_name, + TimeController& time_controller, std::pair network_links, rtc::FunctionView alice_configurer, rtc::FunctionView bob_configurer) { auto fixture = webrtc_pc_e2e::CreatePeerConnectionE2EQualityTestFixture( - test_case_name, /*audio_quality_analyzer=*/nullptr, + test_case_name, time_controller, /*audio_quality_analyzer=*/nullptr, /*video_quality_analyzer=*/nullptr); fixture->AddPeer(network_links.first->network_thread(), network_links.first->network_manager(), alice_configurer); @@ -127,7 +130,7 @@ TEST(PCLowBandwidthAudioTest, PCGoodNetworkHighBitrate) { std::unique_ptr network_emulation_manager = CreateNetworkEmulationManager(); auto fixture = CreateTestFixture( - GetMetricTestCaseName(), + GetMetricTestCaseName(), *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), BuiltInNetworkBehaviorConfig()), [](PeerConfigurer* alice) { @@ -154,7 +157,7 @@ TEST(PCLowBandwidthAudioTest, PC40kbpsNetwork) { 
config.queue_delay_ms = 400; config.loss_percent = 1; auto fixture = CreateTestFixture( - GetMetricTestCaseName(), + GetMetricTestCaseName(), *network_emulation_manager->time_controller(), CreateTwoNetworkLinks(network_emulation_manager.get(), config), [](PeerConfigurer* alice) { AudioConfig audio; diff --git a/audio/utility/audio_frame_operations_unittest.cc b/audio/utility/audio_frame_operations_unittest.cc index 1d38875add..1a2c16e45f 100644 --- a/audio/utility/audio_frame_operations_unittest.cc +++ b/audio/utility/audio_frame_operations_unittest.cc @@ -27,6 +27,8 @@ class AudioFrameOperationsTest : public ::testing::Test { AudioFrame frame_; }; +class AudioFrameOperationsDeathTest : public AudioFrameOperationsTest {}; + void SetFrameData(int16_t ch1, int16_t ch2, int16_t ch3, @@ -105,7 +107,7 @@ void VerifyFrameDataBounds(const AudioFrame& frame, } #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST_F(AudioFrameOperationsTest, MonoToStereoFailsWithBadParameters) { +TEST_F(AudioFrameOperationsDeathTest, MonoToStereoFailsWithBadParameters) { EXPECT_DEATH(AudioFrameOperations::UpmixChannels(2, &frame_), ""); frame_.samples_per_channel_ = AudioFrame::kMaxDataSizeSamples; frame_.num_channels_ = 1; @@ -136,7 +138,7 @@ TEST_F(AudioFrameOperationsTest, MonoToStereoMuted) { } #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST_F(AudioFrameOperationsTest, StereoToMonoFailsWithBadParameters) { +TEST_F(AudioFrameOperationsDeathTest, StereoToMonoFailsWithBadParameters) { frame_.num_channels_ = 1; EXPECT_DEATH(AudioFrameOperations::DownmixChannels(1, &frame_), ""); } diff --git a/audio/voip/BUILD.gn b/audio/voip/BUILD.gn index 60232d5144..52f9d07f17 100644 --- a/audio/voip/BUILD.gn +++ b/audio/voip/BUILD.gn @@ -26,8 +26,9 @@ rtc_library("voip_core") { "../../modules/utility:utility", "../../rtc_base:criticalsection", "../../rtc_base:logging", - "//third_party/abseil-cpp/absl/types:optional", + 
"../../rtc_base/synchronization:mutex", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("audio_channel") { @@ -74,6 +75,7 @@ rtc_library("audio_ingress") { "../../rtc_base:logging", "../../rtc_base:safe_minmax", "../../rtc_base:timeutils", + "../../rtc_base/synchronization:mutex", "../utility:audio_frame_operations", ] } @@ -95,6 +97,7 @@ rtc_library("audio_egress") { "../../rtc_base:rtc_task_queue", "../../rtc_base:thread_checker", "../../rtc_base:timeutils", + "../../rtc_base/synchronization:mutex", "../utility:audio_frame_operations", ] } diff --git a/audio/voip/audio_channel.cc b/audio/voip/audio_channel.cc index b9ce7accd1..d9c89fcdc4 100644 --- a/audio/voip/audio_channel.cc +++ b/audio/voip/audio_channel.cc @@ -16,7 +16,7 @@ #include "api/audio_codecs/audio_format.h" #include "api/task_queue/task_queue_factory.h" #include "modules/rtp_rtcp/include/receive_statistics.h" -#include "rtc_base/critical_section.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" @@ -43,7 +43,7 @@ AudioChannel::AudioChannel( Clock* clock = Clock::GetRealTimeClock(); receive_statistics_ = ReceiveStatistics::Create(clock); - RtpRtcp::Configuration rtp_config; + RtpRtcpInterface::Configuration rtp_config; rtp_config.clock = clock; rtp_config.audio = true; rtp_config.receive_statistics = receive_statistics_.get(); @@ -51,7 +51,7 @@ AudioChannel::AudioChannel( rtp_config.outgoing_transport = transport; rtp_config.local_media_ssrc = local_ssrc; - rtp_rtcp_ = RtpRtcp::Create(rtp_config); + rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(rtp_config); rtp_rtcp_->SetSendingMediaStatus(false); rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound); diff --git a/audio/voip/audio_channel.h b/audio/voip/audio_channel.h index 8b6f1a8e59..659e990c30 100644 --- a/audio/voip/audio_channel.h +++ b/audio/voip/audio_channel.h @@ -20,9 +20,8 @@ #include "api/voip/voip_base.h" #include "audio/voip/audio_egress.h" #include 
"audio/voip/audio_ingress.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "modules/utility/include/process_thread.h" -#include "rtc_base/critical_section.h" #include "rtc_base/ref_count.h" namespace webrtc { @@ -88,7 +87,7 @@ class AudioChannel : public rtc::RefCountInterface { // Listed in order for safe destruction of AudioChannel object. // Synchronization for these are handled internally. std::unique_ptr receive_statistics_; - std::unique_ptr rtp_rtcp_; + std::unique_ptr rtp_rtcp_; std::unique_ptr ingress_; std::unique_ptr egress_; }; diff --git a/audio/voip/audio_egress.cc b/audio/voip/audio_egress.cc index a7bc202a41..305f712624 100644 --- a/audio/voip/audio_egress.cc +++ b/audio/voip/audio_egress.cc @@ -17,7 +17,7 @@ namespace webrtc { -AudioEgress::AudioEgress(RtpRtcp* rtp_rtcp, +AudioEgress::AudioEgress(RtpRtcpInterface* rtp_rtcp, Clock* clock, TaskQueueFactory* task_queue_factory) : rtp_rtcp_(rtp_rtcp), diff --git a/audio/voip/audio_egress.h b/audio/voip/audio_egress.h index e5632cde32..8ec048f915 100644 --- a/audio/voip/audio_egress.h +++ b/audio/voip/audio_egress.h @@ -20,8 +20,9 @@ #include "call/audio_sender.h" #include "modules/audio_coding/include/audio_coding_module.h" #include "modules/rtp_rtcp/include/report_block_data.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "modules/rtp_rtcp/source/rtp_sender_audio.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" #include "rtc_base/thread_checker.h" #include "rtc_base/time_utils.h" @@ -43,7 +44,7 @@ namespace webrtc { // smaller footprint. 
class AudioEgress : public AudioSender, public AudioPacketizationCallback { public: - AudioEgress(RtpRtcp* rtp_rtcp, + AudioEgress(RtpRtcpInterface* rtp_rtcp, Clock* clock, TaskQueueFactory* task_queue_factory); ~AudioEgress() override; @@ -72,7 +73,7 @@ class AudioEgress : public AudioSender, public AudioPacketizationCallback { // Retrieve current encoder format info. This returns encoder format set // by SetEncoder() and if encoder is not set, this will return nullopt. absl::optional GetEncoderFormat() const { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); return encoder_format_; } @@ -99,17 +100,17 @@ class AudioEgress : public AudioSender, public AudioPacketizationCallback { private: void SetEncoderFormat(const SdpAudioFormat& encoder_format) { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); encoder_format_ = encoder_format; } - rtc::CriticalSection lock_; + mutable Mutex lock_; // Current encoder format selected by caller. absl::optional encoder_format_ RTC_GUARDED_BY(lock_); // Synchronization is handled internally by RtpRtcp. - RtpRtcp* const rtp_rtcp_; + RtpRtcpInterface* const rtp_rtcp_; // Synchronization is handled internally by RTPSenderAudio. 
RTPSenderAudio rtp_sender_audio_; diff --git a/audio/voip/audio_ingress.cc b/audio/voip/audio_ingress.cc index fb43fcd753..560055d4f4 100644 --- a/audio/voip/audio_ingress.cc +++ b/audio/voip/audio_ingress.cc @@ -17,7 +17,6 @@ #include "api/audio_codecs/audio_format.h" #include "audio/utility/audio_frame_operations.h" #include "modules/audio_coding/include/audio_coding_module.h" -#include "rtc_base/critical_section.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_minmax.h" @@ -36,7 +35,7 @@ AudioCodingModule::Config CreateAcmConfig( } // namespace AudioIngress::AudioIngress( - RtpRtcp* rtp_rtcp, + RtpRtcpInterface* rtp_rtcp, Clock* clock, ReceiveStatistics* receive_statistics, rtc::scoped_refptr decoder_factory) @@ -83,7 +82,7 @@ AudioMixer::Source::AudioFrameInfo AudioIngress::GetAudioFrameWithInfo( // Compute elapsed and NTP times. int64_t unwrap_timestamp; { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); unwrap_timestamp = timestamp_wrap_handler_.Unwrap(audio_frame->timestamp_); audio_frame->ntp_time_ms_ = @@ -107,7 +106,7 @@ AudioMixer::Source::AudioFrameInfo AudioIngress::GetAudioFrameWithInfo( void AudioIngress::SetReceiveCodecs( const std::map& codecs) { { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); for (const auto& kv : codecs) { receive_codec_info_[kv.first] = kv.second.clockrate_hz; } @@ -125,7 +124,7 @@ void AudioIngress::ReceivedRTPPacket(rtc::ArrayView rtp_packet) { // Set payload type's sampling rate before we feed it into ReceiveStatistics. 
{ - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); const auto& it = receive_codec_info_.find(rtp_packet_received.PayloadType()); // If sampling rate info is not available in our received codec set, it @@ -185,7 +184,7 @@ void AudioIngress::ReceivedRTCPPacket( } { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp); } } diff --git a/audio/voip/audio_ingress.h b/audio/voip/audio_ingress.h index 99766741d6..5a8df21f7a 100644 --- a/audio/voip/audio_ingress.h +++ b/audio/voip/audio_ingress.h @@ -26,9 +26,9 @@ #include "modules/audio_coding/include/audio_coding_module.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/include/remote_ntp_time_estimator.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "rtc_base/critical_section.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/time_utils.h" namespace webrtc { @@ -44,7 +44,7 @@ namespace webrtc { // smaller footprint. class AudioIngress : public AudioMixer::Source { public: - AudioIngress(RtpRtcp* rtp_rtcp, + AudioIngress(RtpRtcpInterface* rtp_rtcp, Clock* clock, ReceiveStatistics* receive_statistics, rtc::scoped_refptr decoder_factory); @@ -122,8 +122,8 @@ class AudioIngress : public AudioMixer::Source { // Synchronizaton is handled internally by ReceiveStatistics. ReceiveStatistics* const rtp_receive_statistics_; - // Synchronizaton is handled internally by RtpRtcp. - RtpRtcp* const rtp_rtcp_; + // Synchronizaton is handled internally by RtpRtcpInterface. + RtpRtcpInterface* const rtp_rtcp_; // Synchronizaton is handled internally by acm2::AcmReceiver. acm2::AcmReceiver acm_receiver_; @@ -131,7 +131,7 @@ class AudioIngress : public AudioMixer::Source { // Synchronizaton is handled internally by voe::AudioLevel. 
voe::AudioLevel output_audio_level_; - rtc::CriticalSection lock_; + Mutex lock_; RemoteNtpTimeEstimator ntp_estimator_ RTC_GUARDED_BY(lock_); diff --git a/audio/voip/test/BUILD.gn b/audio/voip/test/BUILD.gn index 39f100a3aa..d698b3321d 100644 --- a/audio/voip/test/BUILD.gn +++ b/audio/voip/test/BUILD.gn @@ -36,6 +36,7 @@ if (rtc_include_tests) { "../../../api/task_queue:default_task_queue_factory", "../../../modules/audio_mixer:audio_mixer_impl", "../../../modules/audio_mixer:audio_mixer_test_utils", + "../../../modules/rtp_rtcp:rtp_rtcp", "../../../modules/rtp_rtcp:rtp_rtcp_format", "../../../modules/utility", "../../../rtc_base:logging", @@ -56,6 +57,7 @@ if (rtc_include_tests) { "../../../api/audio_codecs:builtin_audio_encoder_factory", "../../../api/task_queue:default_task_queue_factory", "../../../modules/audio_mixer:audio_mixer_test_utils", + "../../../modules/rtp_rtcp:rtp_rtcp", "../../../rtc_base:logging", "../../../rtc_base:rtc_event", "../../../test:mock_transport", @@ -72,6 +74,7 @@ if (rtc_include_tests) { "../../../api/audio_codecs:builtin_audio_encoder_factory", "../../../api/task_queue:default_task_queue_factory", "../../../modules/audio_mixer:audio_mixer_test_utils", + "../../../modules/rtp_rtcp:rtp_rtcp", "../../../modules/rtp_rtcp:rtp_rtcp_format", "../../../rtc_base:logging", "../../../rtc_base:rtc_event", diff --git a/audio/voip/test/audio_egress_unittest.cc b/audio/voip/test/audio_egress_unittest.cc index 3391265880..70fb6dcf36 100644 --- a/audio/voip/test/audio_egress_unittest.cc +++ b/audio/voip/test/audio_egress_unittest.cc @@ -14,6 +14,7 @@ #include "api/task_queue/default_task_queue_factory.h" #include "modules/audio_mixer/sine_wave_generator.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "rtc_base/event.h" #include "rtc_base/logging.h" #include "test/gmock.h" @@ -27,16 +28,16 @@ using ::testing::Invoke; using ::testing::NiceMock; using ::testing::Unused; 
-std::unique_ptr CreateRtpStack(Clock* clock, - Transport* transport, - uint32_t remote_ssrc) { - RtpRtcp::Configuration rtp_config; +std::unique_ptr CreateRtpStack(Clock* clock, + Transport* transport, + uint32_t remote_ssrc) { + RtpRtcpInterface::Configuration rtp_config; rtp_config.clock = clock; rtp_config.audio = true; rtp_config.rtcp_report_interval_ms = 5000; rtp_config.outgoing_transport = transport; rtp_config.local_media_ssrc = remote_ssrc; - auto rtp_rtcp = RtpRtcp::Create(rtp_config); + auto rtp_rtcp = ModuleRtpRtcpImpl2::Create(rtp_config); rtp_rtcp->SetSendingMediaStatus(false); rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound); return rtp_rtcp; @@ -100,7 +101,7 @@ class AudioEgressTest : public ::testing::Test { SimulatedClock fake_clock_; NiceMock transport_; SineWaveGenerator wave_generator_; - std::unique_ptr rtp_rtcp_; + std::unique_ptr rtp_rtcp_; std::unique_ptr task_queue_factory_; rtc::scoped_refptr encoder_factory_; std::unique_ptr egress_; diff --git a/audio/voip/test/audio_ingress_unittest.cc b/audio/voip/test/audio_ingress_unittest.cc index bedb82e211..3a2a66a325 100644 --- a/audio/voip/test/audio_ingress_unittest.cc +++ b/audio/voip/test/audio_ingress_unittest.cc @@ -15,6 +15,7 @@ #include "api/task_queue/default_task_queue_factory.h" #include "audio/voip/audio_egress.h" #include "modules/audio_mixer/sine_wave_generator.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "rtc_base/event.h" #include "rtc_base/logging.h" #include "test/gmock.h" @@ -38,14 +39,14 @@ class AudioIngressTest : public ::testing::Test { : fake_clock_(123456789), wave_generator_(1000.0, kAudioLevel) { receive_statistics_ = ReceiveStatistics::Create(&fake_clock_); - RtpRtcp::Configuration rtp_config; + RtpRtcpInterface::Configuration rtp_config; rtp_config.clock = &fake_clock_; rtp_config.audio = true; rtp_config.receive_statistics = receive_statistics_.get(); rtp_config.rtcp_report_interval_ms = 5000; rtp_config.outgoing_transport = &transport_; 
rtp_config.local_media_ssrc = 0xdeadc0de; - rtp_rtcp_ = RtpRtcp::Create(rtp_config); + rtp_rtcp_ = ModuleRtpRtcpImpl2::Create(rtp_config); rtp_rtcp_->SetSendingMediaStatus(false); rtp_rtcp_->SetRTCPStatus(RtcpMode::kCompound); @@ -94,7 +95,7 @@ class AudioIngressTest : public ::testing::Test { SineWaveGenerator wave_generator_; NiceMock transport_; std::unique_ptr receive_statistics_; - std::unique_ptr rtp_rtcp_; + std::unique_ptr rtp_rtcp_; rtc::scoped_refptr encoder_factory_; rtc::scoped_refptr decoder_factory_; std::unique_ptr task_queue_factory_; diff --git a/audio/voip/voip_core.cc b/audio/voip/voip_core.cc index 3275f028cd..7292644648 100644 --- a/audio/voip/voip_core.cc +++ b/audio/voip/voip_core.cc @@ -15,7 +15,6 @@ #include #include "api/audio_codecs/audio_format.h" -#include "rtc_base/critical_section.h" #include "rtc_base/logging.h" namespace webrtc { @@ -134,7 +133,7 @@ absl::optional VoipCore::CreateChannel( process_thread_.get(), audio_mixer_.get(), decoder_factory_); { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); channel = static_cast(next_channel_id_); channels_[*channel] = audio_channel; @@ -154,7 +153,7 @@ void VoipCore::ReleaseChannel(ChannelId channel) { // Destroy channel outside of the lock. rtc::scoped_refptr audio_channel; { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); auto iter = channels_.find(channel); if (iter != channels_.end()) { @@ -170,7 +169,7 @@ void VoipCore::ReleaseChannel(ChannelId channel) { rtc::scoped_refptr VoipCore::GetChannel(ChannelId channel) { rtc::scoped_refptr audio_channel; { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); auto iter = channels_.find(channel); if (iter != channels_.end()) { audio_channel = iter->second; @@ -191,7 +190,7 @@ bool VoipCore::UpdateAudioTransportWithSenders() { int max_sampling_rate = 8000; size_t max_num_channels = 1; { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); // Reserve to prevent run time vector re-allocation. 
audio_senders.reserve(channels_.size()); for (auto kv : channels_) { @@ -290,7 +289,7 @@ bool VoipCore::StopPlayout(ChannelId channel) { bool stop_device = true; { - rtc::CritScope lock(&lock_); + MutexLock lock(&lock_); for (auto kv : channels_) { rtc::scoped_refptr& channel = kv.second; if (channel->IsPlaying()) { diff --git a/audio/voip/voip_core.h b/audio/voip/voip_core.h index 08929d3afd..22a6559981 100644 --- a/audio/voip/voip_core.h +++ b/audio/voip/voip_core.h @@ -31,7 +31,7 @@ #include "modules/audio_mixer/audio_mixer_impl.h" #include "modules/audio_processing/include/audio_processing.h" #include "modules/utility/include/process_thread.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -123,7 +123,7 @@ class VoipCore : public VoipEngine, // Must be placed before |channels_| for proper destruction. std::unique_ptr process_thread_; - rtc::CriticalSection lock_; + Mutex lock_; // Member to track a next ChannelId for new AudioChannel. int next_channel_id_ RTC_GUARDED_BY(lock_) = 0; diff --git a/build_overrides/build.gni b/build_overrides/build.gni index 669044db81..8a9dfacd77 100644 --- a/build_overrides/build.gni +++ b/build_overrides/build.gni @@ -16,19 +16,19 @@ linux_use_bundled_binutils_override = true # only needed to support both WebRTC standalone and Chromium builds. build_with_chromium = false +# WebRTC checks out google_benchmark by default since it is always used. +checkout_google_benchmark = true + # Use our own suppressions files. 
asan_suppressions_file = "//build/sanitizers/asan_suppressions.cc" lsan_suppressions_file = "//tools_webrtc/sanitizers/lsan_suppressions_webrtc.cc" tsan_suppressions_file = "//tools_webrtc/sanitizers/tsan_suppressions_webrtc.cc" msan_blacklist_path = - rebase_path("//tools_webrtc/msan/blacklist.txt", root_build_dir) + rebase_path("//tools_webrtc/msan/suppressions.txt", root_build_dir) ubsan_blacklist_path = - rebase_path("//tools_webrtc/ubsan/blacklist.txt", root_build_dir) + rebase_path("//tools_webrtc/ubsan/suppressions.txt", root_build_dir) ubsan_vptr_blacklist_path = - rebase_path("//tools_webrtc/ubsan/vptr_blacklist.txt", root_build_dir) - -# Android lint suppressions file -lint_suppressions_file = "//tools_webrtc/android/suppressions.xml" + rebase_path("//tools_webrtc/ubsan/vptr_suppressions.txt", root_build_dir) # For Chromium, Android 32-bit non-component, non-clang builds hit a 4GiB size # limit, making them requiring symbol_level=2. WebRTC doesn't hit that problem @@ -46,3 +46,14 @@ if (host_os == "mac") { "hermetic toolchain if the minimum OS version is not met.") use_system_xcode = _result == 0 } + +declare_args() { + # WebRTC doesn't depend on //base from production code but only for testing + # purposes. In any case, it doesn't depend on //third_party/perfetto which + # is used for base tracing, so this feature is disabled. + enable_base_tracing = false + + # If true, it assumes that //third_party/abseil-cpp is an available + # dependency for googletest. 
+ gtest_enable_absl_printers = true +} diff --git a/call/BUILD.gn b/call/BUILD.gn index a9037c3819..65b545c11e 100644 --- a/call/BUILD.gn +++ b/call/BUILD.gn @@ -39,6 +39,7 @@ rtc_library("call_interfaces") { "../api:rtp_parameters", "../api:scoped_refptr", "../api:transport_api", + "../api/adaptation:resource_adaptation_api", "../api/audio:audio_mixer_api", "../api/audio_codecs:audio_codecs_api", "../api/crypto:frame_decryptor_interface", @@ -61,8 +62,8 @@ rtc_library("call_interfaces") { "../rtc_base:checks", "../rtc_base:rtc_base_approved", "../rtc_base/network:sent_packet", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("audio_sender_interface") { @@ -80,7 +81,6 @@ rtc_library("rtp_interfaces") { # client code gets updated. visibility = [ "*" ] sources = [ - "rtcp_packet_sink_interface.h", "rtp_config.cc", "rtp_config.h", "rtp_packet_sink_interface.h", @@ -100,6 +100,8 @@ rtc_library("rtp_interfaces") { "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:checks", "../rtc_base:rtc_base_approved", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/types:optional", ] @@ -108,17 +110,12 @@ rtc_library("rtp_interfaces") { rtc_library("rtp_receiver") { visibility = [ "*" ] sources = [ - "rtcp_demuxer.cc", - "rtcp_demuxer.h", "rtp_demuxer.cc", "rtp_demuxer.h", - "rtp_rtcp_demuxer_helper.cc", - "rtp_rtcp_demuxer_helper.h", "rtp_stream_receiver_controller.cc", "rtp_stream_receiver_controller.h", "rtx_receive_stream.cc", "rtx_receive_stream.h", - "ssrc_binding_observer.h", ] deps = [ ":rtp_interfaces", @@ -128,8 +125,8 @@ rtc_library("rtp_receiver") { "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:checks", "../rtc_base:rtc_base_approved", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("rtp_sender") { @@ -171,6 +168,7 @@ rtc_library("rtp_sender") 
{ "../modules/rtp_rtcp:rtp_rtcp_format", "../modules/rtp_rtcp:rtp_video_header", "../modules/utility", + "../modules/video_coding:chain_diff_calculator", "../modules/video_coding:codec_globals_headers", "../modules/video_coding:frame_dependencies_calculator", "../modules/video_coding:video_codec_interface", @@ -179,7 +177,10 @@ rtc_library("rtp_sender") { "../rtc_base:rate_limiter", "../rtc_base:rtc_base_approved", "../rtc_base:rtc_task_queue", + "../rtc_base/synchronization:mutex", "../rtc_base/task_utils:repeating_task", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/strings:strings", @@ -202,8 +203,8 @@ rtc_library("bitrate_configurator") { "../api/units:data_rate", "../rtc_base:checks", "../rtc_base:rtc_base_approved", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("bitrate_allocator") { @@ -223,8 +224,8 @@ rtc_library("bitrate_allocator") { "../system_wrappers", "../system_wrappers:field_trial", "../system_wrappers:metrics", - "//third_party/abseil-cpp/absl/algorithm:container", ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] } rtc_library("call") { @@ -279,14 +280,15 @@ rtc_library("call") { "../rtc_base:safe_minmax", "../rtc_base/experiments:field_trial_parser", "../rtc_base/network:sent_packet", - "../rtc_base/synchronization:rw_lock_wrapper", "../rtc_base/synchronization:sequence_checker", + "../rtc_base/task_utils:pending_task_safety_flag", "../system_wrappers", "../system_wrappers:field_trial", "../system_wrappers:metrics", "../video", - "//third_party/abseil-cpp/absl/types:optional", + "adaptation:resource_adaptation", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("video_stream_api") { @@ -301,7 +303,9 @@ rtc_library("video_stream_api") { "../api:frame_transformer_interface", "../api:rtp_headers", 
"../api:rtp_parameters", + "../api:scoped_refptr", "../api:transport_api", + "../api/adaptation:resource_adaptation_api", "../api/crypto:frame_decryptor_interface", "../api/crypto:frame_encryptor_interface", "../api/crypto:options", @@ -315,8 +319,8 @@ rtc_library("video_stream_api") { "../modules/rtp_rtcp:rtp_rtcp_format", "../rtc_base:checks", "../rtc_base:rtc_base_approved", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_library("simulated_network") { @@ -332,9 +336,10 @@ rtc_library("simulated_network") { "../api/units:timestamp", "../rtc_base:checks", "../rtc_base:rtc_base_approved", + "../rtc_base/synchronization:mutex", "../rtc_base/synchronization:sequence_checker", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("simulated_packet_receiver") { @@ -360,6 +365,7 @@ rtc_library("fake_network") { "../modules/utility", "../rtc_base:checks", "../rtc_base:rtc_base_approved", + "../rtc_base/synchronization:mutex", "../rtc_base/synchronization:sequence_checker", "../system_wrappers", ] @@ -375,11 +381,9 @@ if (rtc_include_tests) { "call_unittest.cc", "flexfec_receive_stream_unittest.cc", "receive_time_calculator_unittest.cc", - "rtcp_demuxer_unittest.cc", "rtp_bitrate_configurator_unittest.cc", "rtp_demuxer_unittest.cc", "rtp_payload_params_unittest.cc", - "rtp_rtcp_demuxer_helper_unittest.cc", "rtp_video_sender_unittest.cc", "rtx_receive_stream_unittest.cc", ] @@ -402,7 +406,9 @@ if (rtc_include_tests) { "../api/audio_codecs:builtin_audio_decoder_factory", "../api/rtc_event_log", "../api/task_queue:default_task_queue_factory", + "../api/test/video:function_video_factory", "../api/transport:field_trial_based_config", + "../api/video:builtin_video_bitrate_allocator_factory", "../api/video:video_frame", "../api/video:video_rtp_headers", "../audio", @@ -423,6 +429,7 @@ if (rtc_include_tests) { 
"../rtc_base:rate_limiter", "../rtc_base:rtc_base_approved", "../rtc_base:task_queue_for_test", + "../rtc_base/synchronization:mutex", "../system_wrappers", "../test:audio_codec_mocks", "../test:direct_transport", @@ -436,12 +443,16 @@ if (rtc_include_tests) { "../test:video_test_common", "../test/time_controller:time_controller", "../video", + "adaptation:resource_adaptation_test_utilities", "//test/scenario:scenario", "//testing/gmock", "//testing/gtest", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/memory", "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/types:variant", ] } @@ -478,6 +489,7 @@ if (rtc_include_tests) { "../rtc_base:rtc_base_approved", "../rtc_base:task_queue_for_test", "../rtc_base:task_queue_for_test", + "../rtc_base/synchronization:mutex", "../rtc_base/task_utils:repeating_task", "../system_wrappers", "../system_wrappers:metrics", @@ -494,8 +506,8 @@ if (rtc_include_tests) { "../test:video_test_common", "../video", "//testing/gtest", - "//third_party/abseil-cpp/absl/flags:flag", ] + absl_deps = [ "//third_party/abseil-cpp/absl/flags:flag" ] } # TODO(eladalon): This should be moved, as with the TODO for |rtp_interfaces|. 
@@ -553,7 +565,7 @@ if (rtc_include_tests) { "../system_wrappers", "../test:test_support", "//testing/gtest", - "//third_party/abseil-cpp/absl/algorithm:container", ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] } } diff --git a/call/adaptation/BUILD.gn b/call/adaptation/BUILD.gn index deac3156d6..94944d6820 100644 --- a/call/adaptation/BUILD.gn +++ b/call/adaptation/BUILD.gn @@ -10,10 +10,16 @@ import("../../webrtc.gni") rtc_library("resource_adaptation") { sources = [ + "adaptation_constraint.cc", + "adaptation_constraint.h", + "adaptation_listener.cc", + "adaptation_listener.h", + "broadcast_resource_listener.cc", + "broadcast_resource_listener.h", + "degradation_preference_provider.cc", + "degradation_preference_provider.h", "encoder_settings.cc", "encoder_settings.h", - "resource.cc", - "resource.h", "resource_adaptation_processor.cc", "resource_adaptation_processor.h", "resource_adaptation_processor_interface.cc", @@ -29,6 +35,9 @@ rtc_library("resource_adaptation") { ] deps = [ "../../api:rtp_parameters", + "../../api:scoped_refptr", + "../../api/adaptation:resource_adaptation_api", + "../../api/task_queue:task_queue", "../../api/video:video_adaptation", "../../api/video:video_frame", "../../api/video:video_stream_encoder", @@ -36,9 +45,16 @@ rtc_library("resource_adaptation") { "../../modules/video_coding:video_coding_utility", "../../rtc_base:checks", "../../rtc_base:rtc_base_approved", + "../../rtc_base:rtc_task_queue", "../../rtc_base/experiments:balanced_degradation_settings", + "../../rtc_base/synchronization:mutex", + "../../rtc_base/synchronization:sequence_checker", + "../../rtc_base/task_utils:to_queued_task", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container", "//third_party/abseil-cpp/absl/types:optional", + "//third_party/abseil-cpp/absl/types:variant", ] } @@ -47,6 +63,7 @@ if (rtc_include_tests) { testonly = true sources = [ + "broadcast_resource_listener_unittest.cc", 
"resource_adaptation_processor_unittest.cc", "resource_unittest.cc", "video_source_restrictions_unittest.cc", @@ -56,31 +73,50 @@ if (rtc_include_tests) { deps = [ ":resource_adaptation", ":resource_adaptation_test_utilities", + "../../api:scoped_refptr", + "../../api/adaptation:resource_adaptation_api", + "../../api/task_queue:default_task_queue_factory", + "../../api/task_queue:task_queue", "../../api/video:video_adaptation", "../../api/video_codecs:video_codecs_api", "../../rtc_base:checks", + "../../rtc_base:gunit_helpers", "../../rtc_base:rtc_base_approved", + "../../rtc_base:rtc_task_queue", + "../../rtc_base:task_queue_for_test", + "../../rtc_base/synchronization:mutex", "../../test:field_trial", "../../test:rtc_expect_death", "../../test:test_support", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } rtc_source_set("resource_adaptation_test_utilities") { testonly = true sources = [ + "test/fake_adaptation_constraint.cc", + "test/fake_adaptation_constraint.h", + "test/fake_adaptation_listener.cc", + "test/fake_adaptation_listener.h", "test/fake_frame_rate_provider.cc", "test/fake_frame_rate_provider.h", "test/fake_resource.cc", "test/fake_resource.h", + "test/mock_resource_listener.h", ] deps = [ ":resource_adaptation", + "../../api:scoped_refptr", + "../../api/adaptation:resource_adaptation_api", + "../../api/task_queue:task_queue", "../../api/video:video_stream_encoder", "../../rtc_base:rtc_base_approved", + "../../rtc_base/synchronization:sequence_checker", + "../../rtc_base/task_utils:to_queued_task", "../../test:test_support", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } } diff --git a/call/adaptation/adaptation_constraint.cc b/call/adaptation/adaptation_constraint.cc new file mode 100644 index 0000000000..d62bb74f87 --- /dev/null +++ b/call/adaptation/adaptation_constraint.cc @@ -0,0 +1,17 @@ +/* + * Copyright 2020 The WebRTC Project Authors. 
All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "call/adaptation/adaptation_constraint.h" + +namespace webrtc { + +AdaptationConstraint::~AdaptationConstraint() {} + +} // namespace webrtc diff --git a/call/adaptation/adaptation_constraint.h b/call/adaptation/adaptation_constraint.h new file mode 100644 index 0000000000..9ff15d6b86 --- /dev/null +++ b/call/adaptation/adaptation_constraint.h @@ -0,0 +1,43 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef CALL_ADAPTATION_ADAPTATION_CONSTRAINT_H_ +#define CALL_ADAPTATION_ADAPTATION_CONSTRAINT_H_ + +#include + +#include "api/adaptation/resource.h" +#include "api/scoped_refptr.h" +#include "call/adaptation/video_source_restrictions.h" +#include "call/adaptation/video_stream_input_state.h" + +namespace webrtc { + +// Adaptation constraints have the ability to prevent applying a proposed +// adaptation (expressed as restrictions before/after adaptation). +class AdaptationConstraint { + public: + virtual ~AdaptationConstraint(); + + virtual std::string Name() const = 0; + + // TODO(https://crbug.com/webrtc/11172): When we have multi-stream adaptation + // support, this interface needs to indicate which stream the adaptation + // applies to. 
+ virtual bool IsAdaptationUpAllowed( + const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after, + rtc::scoped_refptr reason_resource) const = 0; +}; + +} // namespace webrtc + +#endif // CALL_ADAPTATION_ADAPTATION_CONSTRAINT_H_ diff --git a/call/adaptation/adaptation_listener.cc b/call/adaptation/adaptation_listener.cc new file mode 100644 index 0000000000..acc1564f77 --- /dev/null +++ b/call/adaptation/adaptation_listener.cc @@ -0,0 +1,17 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "call/adaptation/adaptation_listener.h" + +namespace webrtc { + +AdaptationListener::~AdaptationListener() {} + +} // namespace webrtc diff --git a/call/adaptation/adaptation_listener.h b/call/adaptation/adaptation_listener.h new file mode 100644 index 0000000000..4a96baef8e --- /dev/null +++ b/call/adaptation/adaptation_listener.h @@ -0,0 +1,41 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef CALL_ADAPTATION_ADAPTATION_LISTENER_H_ +#define CALL_ADAPTATION_ADAPTATION_LISTENER_H_ + +#include "api/adaptation/resource.h" +#include "api/scoped_refptr.h" +#include "call/adaptation/video_source_restrictions.h" +#include "call/adaptation/video_stream_input_state.h" + +namespace webrtc { + +// TODO(hbos): Can this be consolidated with +// ResourceAdaptationProcessorListener::OnVideoSourceRestrictionsUpdated()? Both +// listen to adaptations being applied, but on different layers with different +// arguments. +class AdaptationListener { + public: + virtual ~AdaptationListener(); + + // TODO(https://crbug.com/webrtc/11172): When we have multi-stream adaptation + // support, this interface needs to indicate which stream the adaptation + // applies to. + virtual void OnAdaptationApplied( + const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after, + rtc::scoped_refptr reason_resource) = 0; +}; + +} // namespace webrtc + +#endif // CALL_ADAPTATION_ADAPTATION_LISTENER_H_ diff --git a/call/adaptation/broadcast_resource_listener.cc b/call/adaptation/broadcast_resource_listener.cc new file mode 100644 index 0000000000..59bd1e0c7f --- /dev/null +++ b/call/adaptation/broadcast_resource_listener.cc @@ -0,0 +1,120 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "call/adaptation/broadcast_resource_listener.h" + +#include +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { + +// The AdapterResource redirects resource usage measurements from its parent to +// a single ResourceListener. +class BroadcastResourceListener::AdapterResource : public Resource { + public: + explicit AdapterResource(std::string name) : name_(std::move(name)) {} + ~AdapterResource() override { RTC_DCHECK(!listener_); } + + // The parent is letting us know we have a usage neasurement. + void OnResourceUsageStateMeasured(ResourceUsageState usage_state) { + MutexLock lock(&lock_); + if (!listener_) + return; + listener_->OnResourceUsageStateMeasured(this, usage_state); + } + + // Resource implementation. + std::string Name() const override { return name_; } + void SetResourceListener(ResourceListener* listener) override { + MutexLock lock(&lock_); + RTC_DCHECK(!listener_ || !listener); + listener_ = listener; + } + + private: + const std::string name_; + Mutex lock_; + ResourceListener* listener_ RTC_GUARDED_BY(lock_) = nullptr; +}; + +BroadcastResourceListener::BroadcastResourceListener( + rtc::scoped_refptr source_resource) + : source_resource_(source_resource), is_listening_(false) { + RTC_DCHECK(source_resource_); +} + +BroadcastResourceListener::~BroadcastResourceListener() { + RTC_DCHECK(!is_listening_); +} + +rtc::scoped_refptr BroadcastResourceListener::SourceResource() const { + return source_resource_; +} + +void BroadcastResourceListener::StartListening() { + MutexLock lock(&lock_); + RTC_DCHECK(!is_listening_); + source_resource_->SetResourceListener(this); + is_listening_ = true; +} + +void BroadcastResourceListener::StopListening() { + MutexLock lock(&lock_); + RTC_DCHECK(is_listening_); + RTC_DCHECK(adapters_.empty()); + source_resource_->SetResourceListener(nullptr); + is_listening_ = false; +} + 
+rtc::scoped_refptr +BroadcastResourceListener::CreateAdapterResource() { + MutexLock lock(&lock_); + RTC_DCHECK(is_listening_); + rtc::scoped_refptr adapter = + new rtc::RefCountedObject(source_resource_->Name() + + "Adapter"); + adapters_.push_back(adapter); + return adapter; +} + +void BroadcastResourceListener::RemoveAdapterResource( + rtc::scoped_refptr resource) { + MutexLock lock(&lock_); + auto it = std::find(adapters_.begin(), adapters_.end(), resource); + RTC_DCHECK(it != adapters_.end()); + adapters_.erase(it); +} + +std::vector> +BroadcastResourceListener::GetAdapterResources() { + std::vector> resources; + MutexLock lock(&lock_); + for (const auto& adapter : adapters_) { + resources.push_back(adapter); + } + return resources; +} + +void BroadcastResourceListener::OnResourceUsageStateMeasured( + rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + RTC_DCHECK_EQ(resource, source_resource_); + MutexLock lock(&lock_); + for (const auto& adapter : adapters_) { + adapter->OnResourceUsageStateMeasured(usage_state); + } +} + +} // namespace webrtc diff --git a/call/adaptation/broadcast_resource_listener.h b/call/adaptation/broadcast_resource_listener.h new file mode 100644 index 0000000000..2c5a5c703b --- /dev/null +++ b/call/adaptation/broadcast_resource_listener.h @@ -0,0 +1,75 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef CALL_ADAPTATION_BROADCAST_RESOURCE_LISTENER_H_ +#define CALL_ADAPTATION_BROADCAST_RESOURCE_LISTENER_H_ + +#include + +#include "api/adaptation/resource.h" +#include "api/scoped_refptr.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { + +// Responsible for forwarding 1 resource usage measurement to N listeners by +// creating N "adapter" resources. +// +// Example: +// If we have ResourceA, ResourceListenerX and ResourceListenerY we can create a +// BroadcastResourceListener that listens to ResourceA, use CreateAdapter() to +// spawn adapter resources ResourceX and ResourceY and let ResourceListenerX +// listen to ResourceX and ResourceListenerY listen to ResourceY. When ResourceA +// makes a measurement it will be echoed by both ResourceX and ResourceY. +// +// TODO(https://crbug.com/webrtc/11565): When the ResourceAdaptationProcessor is +// moved to call there will only be one ResourceAdaptationProcessor that needs +// to listen to the injected resources. When this is the case, delete this class +// and DCHECK that a Resource's listener is never overwritten. +class BroadcastResourceListener : public ResourceListener { + public: + explicit BroadcastResourceListener( + rtc::scoped_refptr source_resource); + ~BroadcastResourceListener() override; + + rtc::scoped_refptr SourceResource() const; + void StartListening(); + void StopListening(); + + // Creates a Resource that redirects any resource usage measurements that + // BroadcastResourceListener receives to its listener. + rtc::scoped_refptr CreateAdapterResource(); + + // Unregister the adapter from the BroadcastResourceListener; it will no + // longer receive resource usage measurement and will no longer be referenced. + // Use this to prevent memory leaks of old adapters. + void RemoveAdapterResource(rtc::scoped_refptr resource); + std::vector> GetAdapterResources(); + + // ResourceListener implementation. 
+ void OnResourceUsageStateMeasured(rtc::scoped_refptr resource, + ResourceUsageState usage_state) override; + + private: + class AdapterResource; + friend class AdapterResource; + + const rtc::scoped_refptr source_resource_; + Mutex lock_; + bool is_listening_ RTC_GUARDED_BY(lock_); + // The AdapterResource unregisters itself prior to destruction, guaranteeing + // that these pointers are safe to use. + std::vector> adapters_ + RTC_GUARDED_BY(lock_); +}; + +} // namespace webrtc + +#endif // CALL_ADAPTATION_BROADCAST_RESOURCE_LISTENER_H_ diff --git a/call/adaptation/broadcast_resource_listener_unittest.cc b/call/adaptation/broadcast_resource_listener_unittest.cc new file mode 100644 index 0000000000..9cd80500c2 --- /dev/null +++ b/call/adaptation/broadcast_resource_listener_unittest.cc @@ -0,0 +1,121 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "call/adaptation/broadcast_resource_listener.h" + +#include "call/adaptation/test/fake_resource.h" +#include "call/adaptation/test/mock_resource_listener.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { + +using ::testing::_; +using ::testing::StrictMock; + +TEST(BroadcastResourceListenerTest, CreateAndRemoveAdapterResource) { + rtc::scoped_refptr source_resource = + FakeResource::Create("SourceResource"); + BroadcastResourceListener broadcast_resource_listener(source_resource); + broadcast_resource_listener.StartListening(); + + EXPECT_TRUE(broadcast_resource_listener.GetAdapterResources().empty()); + rtc::scoped_refptr adapter = + broadcast_resource_listener.CreateAdapterResource(); + StrictMock listener; + adapter->SetResourceListener(&listener); + EXPECT_EQ(std::vector>{adapter}, + broadcast_resource_listener.GetAdapterResources()); + + // The removed adapter is not referenced by the broadcaster. + broadcast_resource_listener.RemoveAdapterResource(adapter); + EXPECT_TRUE(broadcast_resource_listener.GetAdapterResources().empty()); + // The removed adapter is not forwarding measurements. + EXPECT_CALL(listener, OnResourceUsageStateMeasured(_, _)).Times(0); + source_resource->SetUsageState(ResourceUsageState::kOveruse); + // Cleanup. 
+ adapter->SetResourceListener(nullptr); + broadcast_resource_listener.StopListening(); +} + +TEST(BroadcastResourceListenerTest, AdapterNameIsBasedOnSourceResourceName) { + rtc::scoped_refptr source_resource = + FakeResource::Create("FooBarResource"); + BroadcastResourceListener broadcast_resource_listener(source_resource); + broadcast_resource_listener.StartListening(); + + rtc::scoped_refptr adapter = + broadcast_resource_listener.CreateAdapterResource(); + EXPECT_EQ("FooBarResourceAdapter", adapter->Name()); + + broadcast_resource_listener.RemoveAdapterResource(adapter); + broadcast_resource_listener.StopListening(); +} + +TEST(BroadcastResourceListenerTest, AdaptersForwardsUsageMeasurements) { + rtc::scoped_refptr source_resource = + FakeResource::Create("SourceResource"); + BroadcastResourceListener broadcast_resource_listener(source_resource); + broadcast_resource_listener.StartListening(); + + StrictMock destination_listener1; + StrictMock destination_listener2; + rtc::scoped_refptr adapter1 = + broadcast_resource_listener.CreateAdapterResource(); + adapter1->SetResourceListener(&destination_listener1); + rtc::scoped_refptr adapter2 = + broadcast_resource_listener.CreateAdapterResource(); + adapter2->SetResourceListener(&destination_listener2); + + // Expect kOveruse to be echoed. + EXPECT_CALL(destination_listener1, OnResourceUsageStateMeasured(_, _)) + .Times(1) + .WillOnce([adapter1](rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + EXPECT_EQ(adapter1, resource); + EXPECT_EQ(ResourceUsageState::kOveruse, usage_state); + }); + EXPECT_CALL(destination_listener2, OnResourceUsageStateMeasured(_, _)) + .Times(1) + .WillOnce([adapter2](rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + EXPECT_EQ(adapter2, resource); + EXPECT_EQ(ResourceUsageState::kOveruse, usage_state); + }); + source_resource->SetUsageState(ResourceUsageState::kOveruse); + + // Expect kUnderuse to be echoed. 
+ EXPECT_CALL(destination_listener1, OnResourceUsageStateMeasured(_, _)) + .Times(1) + .WillOnce([adapter1](rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + EXPECT_EQ(adapter1, resource); + EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state); + }); + EXPECT_CALL(destination_listener2, OnResourceUsageStateMeasured(_, _)) + .Times(1) + .WillOnce([adapter2](rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + EXPECT_EQ(adapter2, resource); + EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state); + }); + source_resource->SetUsageState(ResourceUsageState::kUnderuse); + + // Adapters have to be unregistered before they or the broadcaster is + // destroyed, ensuring safe use of raw pointers. + adapter1->SetResourceListener(nullptr); + adapter2->SetResourceListener(nullptr); + + broadcast_resource_listener.RemoveAdapterResource(adapter1); + broadcast_resource_listener.RemoveAdapterResource(adapter2); + broadcast_resource_listener.StopListening(); +} + +} // namespace webrtc diff --git a/call/adaptation/degradation_preference_provider.cc b/call/adaptation/degradation_preference_provider.cc new file mode 100644 index 0000000000..c87e49f366 --- /dev/null +++ b/call/adaptation/degradation_preference_provider.cc @@ -0,0 +1,14 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "call/adaptation/degradation_preference_provider.h" + +webrtc::DegradationPreferenceProvider::~DegradationPreferenceProvider() = + default; diff --git a/call/adaptation/degradation_preference_provider.h b/call/adaptation/degradation_preference_provider.h new file mode 100644 index 0000000000..035fed1e55 --- /dev/null +++ b/call/adaptation/degradation_preference_provider.h @@ -0,0 +1,28 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef CALL_ADAPTATION_DEGRADATION_PREFERENCE_PROVIDER_H_ +#define CALL_ADAPTATION_DEGRADATION_PREFERENCE_PROVIDER_H_ + +#include "api/rtp_parameters.h" + +namespace webrtc { + +// Thread-safe retrieval of degradation preferences. +class DegradationPreferenceProvider { + public: + virtual ~DegradationPreferenceProvider(); + + virtual DegradationPreference degradation_preference() const = 0; +}; + +} // namespace webrtc + +#endif // CALL_ADAPTATION_DEGRADATION_PREFERENCE_PROVIDER_H_ diff --git a/call/adaptation/resource.cc b/call/adaptation/resource.cc deleted file mode 100644 index 1f90934258..0000000000 --- a/call/adaptation/resource.cc +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "call/adaptation/resource.h" - -#include "absl/algorithm/container.h" -#include "rtc_base/checks.h" - -namespace webrtc { - -ResourceListener::~ResourceListener() {} - -Resource::Resource() : usage_state_(absl::nullopt), listener_(nullptr) {} - -Resource::~Resource() {} - -void Resource::SetResourceListener(ResourceListener* listener) { - // If you want to change listener you need to unregister the old listener by - // setting it to null first. - RTC_DCHECK(!listener_ || !listener) << "A listener is already set"; - listener_ = listener; -} - -absl::optional Resource::usage_state() const { - return usage_state_; -} - -void Resource::ClearUsageState() { - usage_state_ = absl::nullopt; -} - -bool Resource::IsAdaptationUpAllowed( - const VideoStreamInputState& input_state, - const VideoSourceRestrictions& restrictions_before, - const VideoSourceRestrictions& restrictions_after, - const Resource& reason_resource) const { - return true; -} - -void Resource::OnAdaptationApplied( - const VideoStreamInputState& input_state, - const VideoSourceRestrictions& restrictions_before, - const VideoSourceRestrictions& restrictions_after, - const Resource& reason_resource) {} - -void Resource::OnResourceUsageStateMeasured(ResourceUsageState usage_state) { - usage_state_ = usage_state; - if (!listener_) - return; - listener_->OnResourceUsageStateMeasured(*this); -} - -} // namespace webrtc diff --git a/call/adaptation/resource.h b/call/adaptation/resource.h deleted file mode 100644 index 1f58dc127c..0000000000 --- a/call/adaptation/resource.h +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright 2019 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef CALL_ADAPTATION_RESOURCE_H_ -#define CALL_ADAPTATION_RESOURCE_H_ - -#include -#include - -#include "absl/types/optional.h" -#include "call/adaptation/video_source_restrictions.h" -#include "call/adaptation/video_stream_input_state.h" - -namespace webrtc { - -class Resource; - -enum class ResourceUsageState { - // Action is needed to minimze the load on this resource. - kOveruse, - // Increasing the load on this resource is desired, if possible. - kUnderuse, -}; - -class ResourceListener { - public: - virtual ~ResourceListener(); - - // Informs the listener of a new measurement of resource usage. This means - // that |resource.usage_state()| is now up-to-date. - virtual void OnResourceUsageStateMeasured(const Resource& resource) = 0; -}; - -class Resource { - public: - // By default, usage_state() is null until a measurement is made. - Resource(); - virtual ~Resource(); - - void SetResourceListener(ResourceListener* listener); - - absl::optional usage_state() const; - void ClearUsageState(); - - // This method allows the Resource to reject a proposed adaptation in the "up" - // direction if it predicts this would cause overuse of this resource. The - // default implementation unconditionally returns true (= allowed). - virtual bool IsAdaptationUpAllowed( - const VideoStreamInputState& input_state, - const VideoSourceRestrictions& restrictions_before, - const VideoSourceRestrictions& restrictions_after, - const Resource& reason_resource) const; - virtual void OnAdaptationApplied( - const VideoStreamInputState& input_state, - const VideoSourceRestrictions& restrictions_before, - const VideoSourceRestrictions& restrictions_after, - const Resource& reason_resource); - - virtual std::string name() const = 0; - - protected: - // Updates the usage state and informs all registered listeners. 
- void OnResourceUsageStateMeasured(ResourceUsageState usage_state); - - private: - absl::optional usage_state_; - ResourceListener* listener_; -}; - -} // namespace webrtc - -#endif // CALL_ADAPTATION_RESOURCE_H_ diff --git a/call/adaptation/resource_adaptation_processor.cc b/call/adaptation/resource_adaptation_processor.cc index 79fb9daab2..b3095ed857 100644 --- a/call/adaptation/resource_adaptation_processor.cc +++ b/call/adaptation/resource_adaptation_processor.cc @@ -10,204 +10,304 @@ #include "call/adaptation/resource_adaptation_processor.h" +#include +#include #include #include "absl/algorithm/container.h" +#include "api/video/video_adaptation_counters.h" +#include "call/adaptation/video_stream_adapter.h" +#include "rtc_base/logging.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_utils/to_queued_task.h" namespace webrtc { +ResourceAdaptationProcessor::ResourceListenerDelegate::ResourceListenerDelegate( + ResourceAdaptationProcessor* processor) + : resource_adaptation_queue_(nullptr), processor_(processor) {} + +void ResourceAdaptationProcessor::ResourceListenerDelegate:: + SetResourceAdaptationQueue(TaskQueueBase* resource_adaptation_queue) { + RTC_DCHECK(!resource_adaptation_queue_); + RTC_DCHECK(resource_adaptation_queue); + resource_adaptation_queue_ = resource_adaptation_queue; + RTC_DCHECK_RUN_ON(resource_adaptation_queue_); +} + +void ResourceAdaptationProcessor::ResourceListenerDelegate:: + OnProcessorDestroyed() { + RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + processor_ = nullptr; +} + +void ResourceAdaptationProcessor::ResourceListenerDelegate:: + OnResourceUsageStateMeasured(rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + if (!resource_adaptation_queue_->IsCurrent()) { + resource_adaptation_queue_->PostTask(ToQueuedTask( + [this_ref = rtc::scoped_refptr(this), + resource, usage_state] { + 
this_ref->OnResourceUsageStateMeasured(resource, usage_state); + })); + return; + } + RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + if (processor_) { + processor_->OnResourceUsageStateMeasured(resource, usage_state); + } +} + +ResourceAdaptationProcessor::MitigationResultAndLogMessage:: + MitigationResultAndLogMessage() + : result(MitigationResult::kAdaptationApplied), message() {} + +ResourceAdaptationProcessor::MitigationResultAndLogMessage:: + MitigationResultAndLogMessage(MitigationResult result, std::string message) + : result(result), message(std::move(message)) {} + ResourceAdaptationProcessor::ResourceAdaptationProcessor( - VideoStreamInputStateProvider* input_state_provider, - VideoStreamEncoderObserver* encoder_stats_observer) - : input_state_provider_(input_state_provider), + VideoStreamEncoderObserver* encoder_stats_observer, + VideoStreamAdapter* stream_adapter) + : resource_adaptation_queue_(nullptr), + resource_listener_delegate_( + new rtc::RefCountedObject(this)), encoder_stats_observer_(encoder_stats_observer), resources_(), - degradation_preference_(DegradationPreference::DISABLED), - effective_degradation_preference_(DegradationPreference::DISABLED), - is_screenshare_(false), - stream_adapter_(std::make_unique()), + stream_adapter_(stream_adapter), last_reported_source_restrictions_(), - processing_in_progress_(false) {} - -ResourceAdaptationProcessor::~ResourceAdaptationProcessor() = default; - -DegradationPreference ResourceAdaptationProcessor::degradation_preference() - const { - return degradation_preference_; + previous_mitigation_results_(), + processing_in_progress_(false) { + RTC_DCHECK(stream_adapter_); } -DegradationPreference -ResourceAdaptationProcessor::effective_degradation_preference() const { - return effective_degradation_preference_; +ResourceAdaptationProcessor::~ResourceAdaptationProcessor() { + RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + RTC_DCHECK(resources_.empty()) + << "There are resource(s) attached to a 
ResourceAdaptationProcessor " + << "being destroyed."; + stream_adapter_->RemoveRestrictionsListener(this); + resource_listener_delegate_->OnProcessorDestroyed(); } -void ResourceAdaptationProcessor::StartResourceAdaptation() { - for (auto* resource : resources_) { - resource->SetResourceListener(this); +void ResourceAdaptationProcessor::SetResourceAdaptationQueue( + TaskQueueBase* resource_adaptation_queue) { + RTC_DCHECK(!resource_adaptation_queue_); + RTC_DCHECK(resource_adaptation_queue); + resource_adaptation_queue_ = resource_adaptation_queue; + resource_listener_delegate_->SetResourceAdaptationQueue( + resource_adaptation_queue); + RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + // Now that we have the adaptation queue we can attach as adaptation listener. + stream_adapter_->AddRestrictionsListener(this); +} + +void ResourceAdaptationProcessor::AddResourceLimitationsListener( + ResourceLimitationsListener* limitations_listener) { + RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + RTC_DCHECK(std::find(resource_limitations_listeners_.begin(), + resource_limitations_listeners_.end(), + limitations_listener) == + resource_limitations_listeners_.end()); + resource_limitations_listeners_.push_back(limitations_listener); +} +void ResourceAdaptationProcessor::RemoveResourceLimitationsListener( + ResourceLimitationsListener* limitations_listener) { + RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + auto it = + std::find(resource_limitations_listeners_.begin(), + resource_limitations_listeners_.end(), limitations_listener); + RTC_DCHECK(it != resource_limitations_listeners_.end()); + resource_limitations_listeners_.erase(it); +} + +void ResourceAdaptationProcessor::AddResource( + rtc::scoped_refptr resource) { + RTC_DCHECK(resource); + { + MutexLock crit(&resources_lock_); + RTC_DCHECK(absl::c_find(resources_, resource) == resources_.end()) + << "Resource \"" << resource->Name() << "\" was already registered."; + resources_.push_back(resource); } + 
resource->SetResourceListener(resource_listener_delegate_); } -void ResourceAdaptationProcessor::StopResourceAdaptation() { - for (auto* resource : resources_) { - resource->SetResourceListener(nullptr); +std::vector> +ResourceAdaptationProcessor::GetResources() const { + MutexLock crit(&resources_lock_); + return resources_; +} + +void ResourceAdaptationProcessor::RemoveResource( + rtc::scoped_refptr resource) { + RTC_DCHECK(resource); + RTC_LOG(INFO) << "Removing resource \"" << resource->Name() << "\"."; + resource->SetResourceListener(nullptr); + { + MutexLock crit(&resources_lock_); + auto it = absl::c_find(resources_, resource); + RTC_DCHECK(it != resources_.end()) << "Resource \"" << resource->Name() + << "\" was not a registered resource."; + resources_.erase(it); } + RemoveLimitationsImposedByResource(std::move(resource)); } -void ResourceAdaptationProcessor::AddAdaptationListener( - ResourceAdaptationProcessorListener* adaptation_listener) { - adaptation_listeners_.push_back(adaptation_listener); -} - -void ResourceAdaptationProcessor::AddResource(Resource* resource) { - resources_.push_back(resource); -} - -void ResourceAdaptationProcessor::SetDegradationPreference( - DegradationPreference degradation_preference) { - degradation_preference_ = degradation_preference; - MaybeUpdateEffectiveDegradationPreference(); -} - -void ResourceAdaptationProcessor::SetIsScreenshare(bool is_screenshare) { - is_screenshare_ = is_screenshare; - MaybeUpdateEffectiveDegradationPreference(); -} - -void ResourceAdaptationProcessor::MaybeUpdateEffectiveDegradationPreference() { - effective_degradation_preference_ = - (is_screenshare_ && - degradation_preference_ == DegradationPreference::BALANCED) - ? 
DegradationPreference::MAINTAIN_RESOLUTION - : degradation_preference_; - stream_adapter_->SetDegradationPreference(effective_degradation_preference_); - MaybeUpdateVideoSourceRestrictions(nullptr); -} - -void ResourceAdaptationProcessor::ResetVideoSourceRestrictions() { - stream_adapter_->ClearRestrictions(); - adaptations_counts_by_resource_.clear(); - MaybeUpdateVideoSourceRestrictions(nullptr); -} - -void ResourceAdaptationProcessor::MaybeUpdateVideoSourceRestrictions( - const Resource* reason) { - VideoSourceRestrictions new_source_restrictions = - FilterRestrictionsByDegradationPreference( - stream_adapter_->source_restrictions(), - effective_degradation_preference_); - if (last_reported_source_restrictions_ != new_source_restrictions) { - last_reported_source_restrictions_ = std::move(new_source_restrictions); - for (auto* adaptation_listener : adaptation_listeners_) { - adaptation_listener->OnVideoSourceRestrictionsUpdated( - last_reported_source_restrictions_, - stream_adapter_->adaptation_counters(), reason); +void ResourceAdaptationProcessor::RemoveLimitationsImposedByResource( + rtc::scoped_refptr resource) { + if (!resource_adaptation_queue_->IsCurrent()) { + resource_adaptation_queue_->PostTask(ToQueuedTask( + [this, resource]() { RemoveLimitationsImposedByResource(resource); })); + return; + } + RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + auto resource_adaptation_limits = + adaptation_limits_by_resources_.find(resource); + if (resource_adaptation_limits != adaptation_limits_by_resources_.end()) { + VideoStreamAdapter::RestrictionsWithCounters adaptation_limits = + resource_adaptation_limits->second; + adaptation_limits_by_resources_.erase(resource_adaptation_limits); + if (adaptation_limits_by_resources_.empty()) { + // Only the resource being removed was adapted so clear restrictions. 
+ stream_adapter_->ClearRestrictions(); + return; } - if (reason) { - UpdateResourceDegradationCounts(reason); + + VideoStreamAdapter::RestrictionsWithCounters most_limited = + FindMostLimitedResources().second; + + if (adaptation_limits.counters.Total() <= most_limited.counters.Total()) { + // The removed limitations were less limited than the most limited + // resource. Don't change the current restrictions. + return; } + + // Apply the new most limited resource as the next restrictions. + Adaptation adapt_to = stream_adapter_->GetAdaptationTo( + most_limited.counters, most_limited.restrictions); + RTC_DCHECK_EQ(adapt_to.status(), Adaptation::Status::kValid); + stream_adapter_->ApplyAdaptation(adapt_to, nullptr); + + RTC_LOG(INFO) << "Most limited resource removed. Restoring restrictions to " + "next most limited restrictions: " + << most_limited.restrictions.ToString() << " with counters " + << most_limited.counters.ToString(); } } void ResourceAdaptationProcessor::OnResourceUsageStateMeasured( - const Resource& resource) { - RTC_DCHECK(resource.usage_state().has_value()); - switch (resource.usage_state().value()) { + rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + RTC_DCHECK(resource); + // |resource| could have been removed after signalling. + { + MutexLock crit(&resources_lock_); + if (absl::c_find(resources_, resource) == resources_.end()) { + RTC_LOG(INFO) << "Ignoring signal from removed resource \"" + << resource->Name() << "\"."; + return; + } + } + MitigationResultAndLogMessage result_and_message; + switch (usage_state) { case ResourceUsageState::kOveruse: - OnResourceOveruse(resource); + result_and_message = OnResourceOveruse(resource); break; case ResourceUsageState::kUnderuse: - OnResourceUnderuse(resource); + result_and_message = OnResourceUnderuse(resource); break; } + // Maybe log the result of the operation. 
+ auto it = previous_mitigation_results_.find(resource.get()); + if (it != previous_mitigation_results_.end() && + it->second == result_and_message.result) { + // This resource has previously reported the same result and we haven't + // successfully adapted since - don't log to avoid spam. + return; + } + RTC_LOG(INFO) << "Resource \"" << resource->Name() << "\" signalled " + << ResourceUsageStateToString(usage_state) << ". " + << result_and_message.message; + if (result_and_message.result == MitigationResult::kAdaptationApplied) { + previous_mitigation_results_.clear(); + } else { + previous_mitigation_results_.insert( + std::make_pair(resource.get(), result_and_message.result)); + } } -bool ResourceAdaptationProcessor::HasSufficientInputForAdaptation( - const VideoStreamInputState& input_state) const { - return input_state.HasInputFrameSizeAndFramesPerSecond() && - (effective_degradation_preference_ != - DegradationPreference::MAINTAIN_RESOLUTION || - input_state.frames_per_second() >= kMinFrameRateFps); -} - -void ResourceAdaptationProcessor::OnResourceUnderuse( - const Resource& reason_resource) { +ResourceAdaptationProcessor::MitigationResultAndLogMessage +ResourceAdaptationProcessor::OnResourceUnderuse( + rtc::scoped_refptr reason_resource) { + RTC_DCHECK_RUN_ON(resource_adaptation_queue_); RTC_DCHECK(!processing_in_progress_); processing_in_progress_ = true; - // Clear all usage states. In order to re-run adaptation logic, resources need - // to provide new resource usage measurements. - // TODO(hbos): Support not unconditionally clearing usage states by having the - // ResourceAdaptationProcessor check in on its resources at certain intervals. 
- for (Resource* resource : resources_) { - resource->ClearUsageState(); - } - VideoStreamInputState input_state = input_state_provider_->InputState(); - if (effective_degradation_preference_ == DegradationPreference::DISABLED || - !HasSufficientInputForAdaptation(input_state)) { - processing_in_progress_ = false; - return; - } - if (!IsResourceAllowedToAdaptUp(&reason_resource)) { - processing_in_progress_ = false; - return; - } - // Update video input states and encoder settings for accurate adaptation. - stream_adapter_->SetInput(input_state); // How can this stream be adapted up? - Adaptation adaptation = stream_adapter_->GetAdaptationUp(); + Adaptation adaptation = stream_adapter_->GetAdaptationUp(reason_resource); if (adaptation.status() != Adaptation::Status::kValid) { processing_in_progress_ = false; - return; + rtc::StringBuilder message; + message << "Not adapting up because VideoStreamAdapter returned " + << Adaptation::StatusToString(adaptation.status()); + return MitigationResultAndLogMessage(MitigationResult::kRejectedByAdapter, + message.Release()); } - // Are all resources OK with this adaptation being applied? - VideoSourceRestrictions restrictions_before = - stream_adapter_->source_restrictions(); - VideoSourceRestrictions restrictions_after = - stream_adapter_->PeekNextRestrictions(adaptation); - if (!absl::c_all_of(resources_, [&input_state, &restrictions_before, - &restrictions_after, - &reason_resource](const Resource* resource) { - return resource->IsAdaptationUpAllowed(input_state, restrictions_before, - restrictions_after, - reason_resource); - })) { - processing_in_progress_ = false; - return; + // Check that resource is most limited. 
+ std::vector> most_limited_resources; + VideoStreamAdapter::RestrictionsWithCounters most_limited_restrictions; + std::tie(most_limited_resources, most_limited_restrictions) = + FindMostLimitedResources(); + + // If the most restricted resource is less limited than current restrictions + // then proceed with adapting up. + if (!most_limited_resources.empty() && + most_limited_restrictions.counters.Total() >= + stream_adapter_->adaptation_counters().Total()) { + // If |reason_resource| is not one of the most limiting resources then abort + // adaptation. + if (absl::c_find(most_limited_resources, reason_resource) == + most_limited_resources.end()) { + processing_in_progress_ = false; + rtc::StringBuilder message; + message << "Resource \"" << reason_resource->Name() + << "\" was not the most limited resource."; + return MitigationResultAndLogMessage( + MitigationResult::kNotMostLimitedResource, message.Release()); + } + + if (most_limited_resources.size() > 1) { + // If there are multiple most limited resources, all must signal underuse + // before the adaptation is applied. + UpdateResourceLimitations(reason_resource, adaptation.restrictions(), + adaptation.counters()); + processing_in_progress_ = false; + rtc::StringBuilder message; + message << "Resource \"" << reason_resource->Name() + << "\" was not the only most limited resource."; + return MitigationResultAndLogMessage( + MitigationResult::kSharedMostLimitedResource, message.Release()); + } } // Apply adaptation. - stream_adapter_->ApplyAdaptation(adaptation); - for (Resource* resource : resources_) { - resource->OnAdaptationApplied(input_state, restrictions_before, - restrictions_after, reason_resource); - } - // Update VideoSourceRestrictions based on adaptation. This also informs the - // |adaptation_listeners_|. 
- MaybeUpdateVideoSourceRestrictions(&reason_resource); + stream_adapter_->ApplyAdaptation(adaptation, reason_resource); processing_in_progress_ = false; + rtc::StringBuilder message; + message << "Adapted up successfully. Unfiltered adaptations: " + << stream_adapter_->adaptation_counters().ToString(); + return MitigationResultAndLogMessage(MitigationResult::kAdaptationApplied, + message.Release()); } -void ResourceAdaptationProcessor::OnResourceOveruse( - const Resource& reason_resource) { +ResourceAdaptationProcessor::MitigationResultAndLogMessage +ResourceAdaptationProcessor::OnResourceOveruse( + rtc::scoped_refptr reason_resource) { + RTC_DCHECK_RUN_ON(resource_adaptation_queue_); RTC_DCHECK(!processing_in_progress_); processing_in_progress_ = true; - // Clear all usage states. In order to re-run adaptation logic, resources need - // to provide new resource usage measurements. - // TODO(hbos): Support not unconditionally clearing usage states by having the - // ResourceAdaptationProcessor check in on its resources at certain intervals. - for (Resource* resource : resources_) { - resource->ClearUsageState(); - } - VideoStreamInputState input_state = input_state_provider_->InputState(); - if (!input_state.has_input()) { - processing_in_progress_ = false; - return; - } - if (effective_degradation_preference_ == DegradationPreference::DISABLED || - !HasSufficientInputForAdaptation(input_state)) { - processing_in_progress_ = false; - return; - } - // Update video input states and encoder settings for accurate adaptation. - stream_adapter_->SetInput(input_state); // How can this stream be adapted up? 
Adaptation adaptation = stream_adapter_->GetAdaptationDown(); if (adaptation.min_pixel_limit_reached()) { @@ -215,61 +315,87 @@ void ResourceAdaptationProcessor::OnResourceOveruse( } if (adaptation.status() != Adaptation::Status::kValid) { processing_in_progress_ = false; - return; + rtc::StringBuilder message; + message << "Not adapting down because VideoStreamAdapter returned " + << Adaptation::StatusToString(adaptation.status()); + return MitigationResultAndLogMessage(MitigationResult::kRejectedByAdapter, + message.Release()); } // Apply adaptation. - VideoSourceRestrictions restrictions_before = - stream_adapter_->source_restrictions(); - VideoSourceRestrictions restrictions_after = - stream_adapter_->PeekNextRestrictions(adaptation); - stream_adapter_->ApplyAdaptation(adaptation); - for (Resource* resource : resources_) { - resource->OnAdaptationApplied(input_state, restrictions_before, - restrictions_after, reason_resource); - } - // Update VideoSourceRestrictions based on adaptation. This also informs the - // |adaptation_listeners_|. - MaybeUpdateVideoSourceRestrictions(&reason_resource); + UpdateResourceLimitations(reason_resource, adaptation.restrictions(), + adaptation.counters()); + stream_adapter_->ApplyAdaptation(adaptation, reason_resource); processing_in_progress_ = false; + rtc::StringBuilder message; + message << "Adapted down successfully. 
Unfiltered adaptations: " + << stream_adapter_->adaptation_counters().ToString(); + return MitigationResultAndLogMessage(MitigationResult::kAdaptationApplied, + message.Release()); } -void ResourceAdaptationProcessor::TriggerAdaptationDueToFrameDroppedDueToSize( - const Resource& reason_resource) { - VideoAdaptationCounters counters_before = - stream_adapter_->adaptation_counters(); - OnResourceOveruse(reason_resource); - if (degradation_preference_ == DegradationPreference::BALANCED && - stream_adapter_->adaptation_counters().fps_adaptations > - counters_before.fps_adaptations) { - // Oops, we adapted frame rate. Adapt again, maybe it will adapt resolution! - // Though this is not guaranteed... - OnResourceOveruse(reason_resource); +std::pair>, + VideoStreamAdapter::RestrictionsWithCounters> +ResourceAdaptationProcessor::FindMostLimitedResources() const { + std::vector> most_limited_resources; + VideoStreamAdapter::RestrictionsWithCounters most_limited_restrictions{ + VideoSourceRestrictions(), VideoAdaptationCounters()}; + + for (const auto& resource_and_adaptation_limit_ : + adaptation_limits_by_resources_) { + const auto& restrictions_with_counters = + resource_and_adaptation_limit_.second; + if (restrictions_with_counters.counters.Total() > + most_limited_restrictions.counters.Total()) { + most_limited_restrictions = restrictions_with_counters; + most_limited_resources.clear(); + most_limited_resources.push_back(resource_and_adaptation_limit_.first); + } else if (most_limited_restrictions.counters == + restrictions_with_counters.counters) { + most_limited_resources.push_back(resource_and_adaptation_limit_.first); + } } - if (stream_adapter_->adaptation_counters().resolution_adaptations > - counters_before.resolution_adaptations) { - encoder_stats_observer_->OnInitialQualityResolutionAdaptDown(); + return std::make_pair(std::move(most_limited_resources), + most_limited_restrictions); +} + +void ResourceAdaptationProcessor::UpdateResourceLimitations( + 
rtc::scoped_refptr reason_resource, + const VideoSourceRestrictions& restrictions, + const VideoAdaptationCounters& counters) { + auto& adaptation_limits = adaptation_limits_by_resources_[reason_resource]; + if (adaptation_limits.restrictions == restrictions && + adaptation_limits.counters == counters) { + return; + } + adaptation_limits = {restrictions, counters}; + + std::map, VideoAdaptationCounters> limitations; + for (const auto& p : adaptation_limits_by_resources_) { + limitations.insert(std::make_pair(p.first, p.second.counters)); + } + for (auto limitations_listener : resource_limitations_listeners_) { + limitations_listener->OnResourceLimitationChanged(reason_resource, + limitations); } } -void ResourceAdaptationProcessor::UpdateResourceDegradationCounts( - const Resource* resource) { - RTC_DCHECK(resource); - int delta = stream_adapter_->adaptation_counters().Total(); - for (const auto& adaptations : adaptations_counts_by_resource_) { - delta -= adaptations.second; +void ResourceAdaptationProcessor::OnVideoSourceRestrictionsUpdated( + VideoSourceRestrictions restrictions, + const VideoAdaptationCounters& adaptation_counters, + rtc::scoped_refptr reason, + const VideoSourceRestrictions& unfiltered_restrictions) { + RTC_DCHECK_RUN_ON(resource_adaptation_queue_); + if (reason) { + UpdateResourceLimitations(reason, unfiltered_restrictions, + adaptation_counters); + } else if (adaptation_counters.Total() == 0) { + // Adaptations are cleared. + adaptation_limits_by_resources_.clear(); + previous_mitigation_results_.clear(); + for (auto limitations_listener : resource_limitations_listeners_) { + limitations_listener->OnResourceLimitationChanged(nullptr, {}); + } } - - // Default value is 0, inserts the value if missing. 
- adaptations_counts_by_resource_[resource] += delta; - RTC_DCHECK_GE(adaptations_counts_by_resource_[resource], 0); -} - -bool ResourceAdaptationProcessor::IsResourceAllowedToAdaptUp( - const Resource* resource) const { - RTC_DCHECK(resource); - const auto& adaptations = adaptations_counts_by_resource_.find(resource); - return adaptations != adaptations_counts_by_resource_.end() && - adaptations->second > 0; } } // namespace webrtc diff --git a/call/adaptation/resource_adaptation_processor.h b/call/adaptation/resource_adaptation_processor.h index de38751399..7ba871e104 100644 --- a/call/adaptation/resource_adaptation_processor.h +++ b/call/adaptation/resource_adaptation_processor.h @@ -13,13 +13,18 @@ #include #include +#include +#include #include #include "absl/types/optional.h" +#include "api/adaptation/resource.h" #include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" +#include "api/video/video_adaptation_counters.h" #include "api/video/video_frame.h" #include "api/video/video_stream_encoder_observer.h" -#include "call/adaptation/resource.h" #include "call/adaptation/resource_adaptation_processor_interface.h" #include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_adapter.h" @@ -28,80 +33,141 @@ namespace webrtc { +// The Resource Adaptation Processor is responsible for reacting to resource +// usage measurements (e.g. overusing or underusing CPU). When a resource is +// overused the Processor is responsible for performing mitigations in order to +// consume less resources. +// +// Today we have one Processor per VideoStreamEncoder and the Processor is only +// capable of restricting resolution or frame rate of the encoded stream. In the +// future we should have a single Processor responsible for all encoded streams, +// and it should be capable of reconfiguring other things than just +// VideoSourceRestrictions (e.g. reduce render frame rate). 
+// See Resource-Adaptation hotlist: +// https://bugs.chromium.org/u/590058293/hotlists/Resource-Adaptation +// +// The ResourceAdaptationProcessor is single-threaded. It may be constructed on +// any thread but MUST subsequently be used and destroyed on a single sequence, +// i.e. the "resource adaptation task queue". Resources can be added and removed +// from any thread. class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface, + public VideoSourceRestrictionsListener, public ResourceListener { public: ResourceAdaptationProcessor( - VideoStreamInputStateProvider* input_state_provider, - VideoStreamEncoderObserver* encoder_stats_observer); + VideoStreamEncoderObserver* encoder_stats_observer, + VideoStreamAdapter* video_stream_adapter); ~ResourceAdaptationProcessor() override; + void SetResourceAdaptationQueue( + TaskQueueBase* resource_adaptation_queue) override; + // ResourceAdaptationProcessorInterface implementation. - DegradationPreference degradation_preference() const override; - DegradationPreference effective_degradation_preference() const override; - - void StartResourceAdaptation() override; - void StopResourceAdaptation() override; - void AddAdaptationListener( - ResourceAdaptationProcessorListener* adaptation_listener) override; - void AddResource(Resource* resource) override; - - void SetDegradationPreference( - DegradationPreference degradation_preference) override; - void SetIsScreenshare(bool is_screenshare) override; - void ResetVideoSourceRestrictions() override; + void AddResourceLimitationsListener( + ResourceLimitationsListener* limitations_listener) override; + void RemoveResourceLimitationsListener( + ResourceLimitationsListener* limitations_listener) override; + void AddResource(rtc::scoped_refptr resource) override; + std::vector> GetResources() const override; + void RemoveResource(rtc::scoped_refptr resource) override; // ResourceListener implementation. // Triggers OnResourceUnderuse() or OnResourceOveruse(). 
- void OnResourceUsageStateMeasured(const Resource& resource) override; + void OnResourceUsageStateMeasured(rtc::scoped_refptr resource, + ResourceUsageState usage_state) override; - // May trigger 1-2 adaptations. It is meant to reduce resolution but this is - // not guaranteed. It may adapt frame rate, which does not address the issue. - // TODO(hbos): Can we get rid of this? - void TriggerAdaptationDueToFrameDroppedDueToSize( - const Resource& reason_resource) override; + // VideoSourceRestrictionsListener implementation. + void OnVideoSourceRestrictionsUpdated( + VideoSourceRestrictions restrictions, + const VideoAdaptationCounters& adaptation_counters, + rtc::scoped_refptr reason, + const VideoSourceRestrictions& unfiltered_restrictions) override; private: - bool HasSufficientInputForAdaptation( - const VideoStreamInputState& input_state) const; + // If resource usage measurements happens off the adaptation task queue, this + // class takes care of posting the measurement for the processor to handle it + // on the adaptation task queue. + class ResourceListenerDelegate : public rtc::RefCountInterface, + public ResourceListener { + public: + explicit ResourceListenerDelegate(ResourceAdaptationProcessor* processor); + + void SetResourceAdaptationQueue(TaskQueueBase* resource_adaptation_queue); + void OnProcessorDestroyed(); + + // ResourceListener implementation. 
+ void OnResourceUsageStateMeasured(rtc::scoped_refptr resource, + ResourceUsageState usage_state) override; + + private: + TaskQueueBase* resource_adaptation_queue_; + ResourceAdaptationProcessor* processor_ + RTC_GUARDED_BY(resource_adaptation_queue_); + }; + + enum class MitigationResult { + kNotMostLimitedResource, + kSharedMostLimitedResource, + kRejectedByAdapter, + kAdaptationApplied, + }; + + struct MitigationResultAndLogMessage { + MitigationResultAndLogMessage(); + MitigationResultAndLogMessage(MitigationResult result, std::string message); + MitigationResult result; + std::string message; + }; // Performs the adaptation by getting the next target, applying it and // informing listeners of the new VideoSourceRestriction and adaptation // counters. - void OnResourceUnderuse(const Resource& reason_resource); - void OnResourceOveruse(const Resource& reason_resource); + MitigationResultAndLogMessage OnResourceUnderuse( + rtc::scoped_refptr reason_resource); + MitigationResultAndLogMessage OnResourceOveruse( + rtc::scoped_refptr reason_resource); - // Needs to be invoked any time |degradation_preference_| or |is_screenshare_| - // changes to ensure |effective_degradation_preference_| is up-to-date. - void MaybeUpdateEffectiveDegradationPreference(); - // If the filtered source restrictions are different than - // |last_reported_source_restrictions_|, inform the listeners. - void MaybeUpdateVideoSourceRestrictions(const Resource* reason); - // Updates the number of times the resource has degraded based on the latest - // degradation applied. - void UpdateResourceDegradationCounts(const Resource* resource); - // Returns true if a Resource has been overused in the pass and is responsible - // for creating a VideoSourceRestriction. The current algorithm counts the - // number of times the resource caused an adaptation and allows adapting up - // if that number is non-zero. This is consistent with how adaptation has - // traditionally been handled. 
- // TODO(crbug.com/webrtc/11553) Change this algorithm to look at the resources - // restrictions rather than just the counters. - bool IsResourceAllowedToAdaptUp(const Resource* resource) const; + void UpdateResourceLimitations(rtc::scoped_refptr reason_resource, + const VideoSourceRestrictions& restrictions, + const VideoAdaptationCounters& counters) + RTC_RUN_ON(resource_adaptation_queue_); + // Searches |adaptation_limits_by_resources_| for each resource with the + // highest total adaptation counts. Adaptation up may only occur if the + // resource performing the adaptation is the only most limited resource. This + // function returns the list of all most limited resources as well as the + // corresponding adaptation of that resource. + std::pair>, + VideoStreamAdapter::RestrictionsWithCounters> + FindMostLimitedResources() const RTC_RUN_ON(resource_adaptation_queue_); + + void RemoveLimitationsImposedByResource( + rtc::scoped_refptr resource); + + TaskQueueBase* resource_adaptation_queue_; + rtc::scoped_refptr resource_listener_delegate_; // Input and output. - VideoStreamInputStateProvider* const input_state_provider_; - VideoStreamEncoderObserver* const encoder_stats_observer_; - std::vector adaptation_listeners_; - std::vector resources_; - std::map adaptations_counts_by_resource_; - // Adaptation strategy settings. - DegradationPreference degradation_preference_; - DegradationPreference effective_degradation_preference_; - bool is_screenshare_; + VideoStreamEncoderObserver* const encoder_stats_observer_ + RTC_GUARDED_BY(resource_adaptation_queue_); + mutable Mutex resources_lock_; + std::vector> resources_ + RTC_GUARDED_BY(resources_lock_); + std::vector resource_limitations_listeners_ + RTC_GUARDED_BY(resource_adaptation_queue_); + // Purely used for statistics, does not ensure mapped resources stay alive. 
+ std::map, + VideoStreamAdapter::RestrictionsWithCounters> + adaptation_limits_by_resources_ + RTC_GUARDED_BY(resource_adaptation_queue_); // Responsible for generating and applying possible adaptations. - const std::unique_ptr stream_adapter_; - VideoSourceRestrictions last_reported_source_restrictions_; + VideoStreamAdapter* const stream_adapter_ + RTC_GUARDED_BY(resource_adaptation_queue_); + VideoSourceRestrictions last_reported_source_restrictions_ + RTC_GUARDED_BY(resource_adaptation_queue_); + // Keeps track of previous mitigation results per resource since the last + // successful adaptation. Used to avoid RTC_LOG spam. + std::map previous_mitigation_results_ + RTC_GUARDED_BY(resource_adaptation_queue_); // Prevents recursion. // // This is used to prevent triggering resource adaptation in the process of @@ -113,7 +179,7 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface, // Resource::OnAdaptationApplied() -> // Resource::OnResourceUsageStateMeasured() -> // ResourceAdaptationProcessor::OnResourceOveruse() // Boom, not allowed. 
- bool processing_in_progress_; + bool processing_in_progress_ RTC_GUARDED_BY(resource_adaptation_queue_); }; } // namespace webrtc diff --git a/call/adaptation/resource_adaptation_processor_interface.cc b/call/adaptation/resource_adaptation_processor_interface.cc index 4e5251ce90..79f099b267 100644 --- a/call/adaptation/resource_adaptation_processor_interface.cc +++ b/call/adaptation/resource_adaptation_processor_interface.cc @@ -12,8 +12,9 @@ namespace webrtc { -ResourceAdaptationProcessorListener::~ResourceAdaptationProcessorListener() {} +ResourceAdaptationProcessorInterface::~ResourceAdaptationProcessorInterface() = + default; -ResourceAdaptationProcessorInterface::~ResourceAdaptationProcessorInterface() {} +ResourceLimitationsListener::~ResourceLimitationsListener() = default; } // namespace webrtc diff --git a/call/adaptation/resource_adaptation_processor_interface.h b/call/adaptation/resource_adaptation_processor_interface.h index 6984273a29..6b9afccf3f 100644 --- a/call/adaptation/resource_adaptation_processor_interface.h +++ b/call/adaptation/resource_adaptation_processor_interface.h @@ -11,70 +11,59 @@ #ifndef CALL_ADAPTATION_RESOURCE_ADAPTATION_PROCESSOR_INTERFACE_H_ #define CALL_ADAPTATION_RESOURCE_ADAPTATION_PROCESSOR_INTERFACE_H_ +#include +#include + #include "absl/types/optional.h" +#include "api/adaptation/resource.h" #include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/task_queue_base.h" #include "api/video/video_adaptation_counters.h" #include "api/video/video_frame.h" +#include "call/adaptation/adaptation_constraint.h" +#include "call/adaptation/adaptation_listener.h" #include "call/adaptation/encoder_settings.h" -#include "call/adaptation/resource.h" #include "call/adaptation/video_source_restrictions.h" namespace webrtc { -// The listener is responsible for carrying out the reconfiguration of the video -// source such that the VideoSourceRestrictions are fulfilled. 
-class ResourceAdaptationProcessorListener { +class ResourceLimitationsListener { public: - virtual ~ResourceAdaptationProcessorListener(); + virtual ~ResourceLimitationsListener(); - // The |restrictions| are filtered by degradation preference but not the - // |adaptation_counters|, which are currently only reported for legacy stats - // calculation purposes. - virtual void OnVideoSourceRestrictionsUpdated( - VideoSourceRestrictions restrictions, - const VideoAdaptationCounters& adaptation_counters, - const Resource* reason) = 0; + // The limitations on a resource were changed. This does not mean the current + // video restrictions have changed. + virtual void OnResourceLimitationChanged( + rtc::scoped_refptr resource, + const std::map, VideoAdaptationCounters>& + resource_limitations) = 0; }; -// Responsible for reconfiguring encoded streams based on resource consumption, -// such as scaling down resolution or frame rate when CPU is overused. This -// interface is meant to be injectable into VideoStreamEncoder. +// The Resource Adaptation Processor is responsible for reacting to resource +// usage measurements (e.g. overusing or underusing CPU). When a resource is +// overused the Processor is responsible for performing mitigations in order to +// consume less resources. class ResourceAdaptationProcessorInterface { public: virtual ~ResourceAdaptationProcessorInterface(); - virtual DegradationPreference degradation_preference() const = 0; - // Reinterprets "balanced + screenshare" as "maintain-resolution". - // TODO(hbos): Don't do this. This is not what "balanced" means. If the - // application wants to maintain resolution it should set that degradation - // preference rather than depend on non-standard behaviors. 
- virtual DegradationPreference effective_degradation_preference() const = 0; + virtual void SetResourceAdaptationQueue( + TaskQueueBase* resource_adaptation_queue) = 0; + virtual void AddResourceLimitationsListener( + ResourceLimitationsListener* limitations_listener) = 0; + virtual void RemoveResourceLimitationsListener( + ResourceLimitationsListener* limitations_listener) = 0; // Starts or stops listening to resources, effectively enabling or disabling - // processing. + // processing. May be called from anywhere. // TODO(https://crbug.com/webrtc/11172): Automatically register and unregister // with AddResource() and RemoveResource() instead. When the processor is // multi-stream aware, stream-specific resouces will get added and removed // over time. - virtual void StartResourceAdaptation() = 0; - virtual void StopResourceAdaptation() = 0; - virtual void AddAdaptationListener( - ResourceAdaptationProcessorListener* adaptation_listener) = 0; - virtual void AddResource(Resource* resource) = 0; - - virtual void SetDegradationPreference( - DegradationPreference degradation_preference) = 0; - virtual void SetIsScreenshare(bool is_screenshare) = 0; - virtual void ResetVideoSourceRestrictions() = 0; - - // May trigger one or more adaptations. It is meant to reduce resolution - - // useful if a frame was dropped due to its size - however, the implementation - // may not guarantee this (see resource_adaptation_processor.h). - // TODO(hbos): This is only part of the interface for backwards-compatiblity - // reasons. Can we replace this by something which actually satisfies the - // resolution constraints or get rid of it altogether? 
- virtual void TriggerAdaptationDueToFrameDroppedDueToSize( - const Resource& reason_resource) = 0; + virtual void AddResource(rtc::scoped_refptr resource) = 0; + virtual std::vector> GetResources() const = 0; + virtual void RemoveResource(rtc::scoped_refptr resource) = 0; }; } // namespace webrtc diff --git a/call/adaptation/resource_adaptation_processor_unittest.cc b/call/adaptation/resource_adaptation_processor_unittest.cc index 7e7fe590dc..69b224e711 100644 --- a/call/adaptation/resource_adaptation_processor_unittest.cc +++ b/call/adaptation/resource_adaptation_processor_unittest.cc @@ -10,13 +10,19 @@ #include "call/adaptation/resource_adaptation_processor.h" +#include "api/adaptation/resource.h" +#include "api/scoped_refptr.h" #include "api/video/video_adaptation_counters.h" -#include "call/adaptation/resource.h" #include "call/adaptation/resource_adaptation_processor_interface.h" +#include "call/adaptation/test/fake_adaptation_listener.h" #include "call/adaptation/test/fake_frame_rate_provider.h" #include "call/adaptation/test/fake_resource.h" #include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_input_state_provider.h" +#include "rtc_base/event.h" +#include "rtc_base/gunit.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/task_queue_for_test.h" #include "test/gtest.h" namespace webrtc { @@ -25,31 +31,42 @@ namespace { const int kDefaultFrameRate = 30; const int kDefaultFrameSize = 1280 * 720; +const int kDefaultTimeoutMs = 5000; -class ResourceAdaptationProcessorListenerForTesting - : public ResourceAdaptationProcessorListener { +class VideoSourceRestrictionsListenerForTesting + : public VideoSourceRestrictionsListener { public: - ResourceAdaptationProcessorListenerForTesting() + VideoSourceRestrictionsListenerForTesting() : restrictions_updated_count_(0), restrictions_(), adaptation_counters_(), reason_(nullptr) {} - ~ResourceAdaptationProcessorListenerForTesting() override {} + 
~VideoSourceRestrictionsListenerForTesting() override {} size_t restrictions_updated_count() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); return restrictions_updated_count_; } - const VideoSourceRestrictions& restrictions() const { return restrictions_; } - const VideoAdaptationCounters& adaptation_counters() const { + VideoSourceRestrictions restrictions() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return restrictions_; + } + VideoAdaptationCounters adaptation_counters() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); return adaptation_counters_; } - const Resource* reason() const { return reason_; } + rtc::scoped_refptr reason() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return reason_; + } - // ResourceAdaptationProcessorListener implementation. + // VideoSourceRestrictionsListener implementation. void OnVideoSourceRestrictionsUpdated( VideoSourceRestrictions restrictions, const VideoAdaptationCounters& adaptation_counters, - const Resource* reason) override { + rtc::scoped_refptr reason, + const VideoSourceRestrictions& unfiltered_restrictions) override { + RTC_DCHECK_RUN_ON(&sequence_checker_); ++restrictions_updated_count_; restrictions_ = restrictions; adaptation_counters_ = adaptation_counters; @@ -57,10 +74,12 @@ class ResourceAdaptationProcessorListenerForTesting } private: - size_t restrictions_updated_count_; - VideoSourceRestrictions restrictions_; - VideoAdaptationCounters adaptation_counters_; - const Resource* reason_; + SequenceChecker sequence_checker_; + size_t restrictions_updated_count_ RTC_GUARDED_BY(&sequence_checker_); + VideoSourceRestrictions restrictions_ RTC_GUARDED_BY(&sequence_checker_); + VideoAdaptationCounters adaptation_counters_ + RTC_GUARDED_BY(&sequence_checker_); + rtc::scoped_refptr reason_ RTC_GUARDED_BY(&sequence_checker_); }; class ResourceAdaptationProcessorTest : public ::testing::Test { @@ -68,16 +87,24 @@ class ResourceAdaptationProcessorTest : public ::testing::Test { 
ResourceAdaptationProcessorTest() : frame_rate_provider_(), input_state_provider_(&frame_rate_provider_), - resource_("FakeResource"), - other_resource_("OtherFakeResource"), - processor_(&input_state_provider_, - /*encoder_stats_observer=*/&frame_rate_provider_) { - processor_.AddAdaptationListener(&processor_listener_); - processor_.AddResource(&resource_); - processor_.AddResource(&other_resource_); + resource_(FakeResource::Create("FakeResource")), + other_resource_(FakeResource::Create("OtherFakeResource")), + adaptation_listener_(), + video_stream_adapter_( + std::make_unique(&input_state_provider_)), + processor_(std::make_unique( + /*encoder_stats_observer=*/&frame_rate_provider_, + video_stream_adapter_.get())) { + processor_->SetResourceAdaptationQueue(TaskQueueBase::Current()); + video_stream_adapter_->AddRestrictionsListener(&restrictions_listener_); + processor_->AddResource(resource_); + processor_->AddResource(other_resource_); + video_stream_adapter_->AddAdaptationListener(&adaptation_listener_); } ~ResourceAdaptationProcessorTest() override { - processor_.StopResourceAdaptation(); + if (processor_) { + DestroyProcessor(); + } } void SetInputStates(bool has_input, int fps, int frame_size) { @@ -94,42 +121,54 @@ class ResourceAdaptationProcessorTest : public ::testing::Test { : restrictions.max_pixels_per_frame().value_or(kDefaultFrameSize)); } + void DestroyProcessor() { + if (resource_) { + processor_->RemoveResource(resource_); + } + if (other_resource_) { + processor_->RemoveResource(other_resource_); + } + video_stream_adapter_->RemoveAdaptationListener(&adaptation_listener_); + video_stream_adapter_->RemoveRestrictionsListener(&restrictions_listener_); + processor_.reset(); + } + + static void WaitUntilTaskQueueIdle() { + ASSERT_TRUE(rtc::Thread::Current()->ProcessMessages(0)); + } + protected: FakeFrameRateProvider frame_rate_provider_; VideoStreamInputStateProvider input_state_provider_; - FakeResource resource_; - FakeResource 
other_resource_; - ResourceAdaptationProcessor processor_; - ResourceAdaptationProcessorListenerForTesting processor_listener_; + rtc::scoped_refptr resource_; + rtc::scoped_refptr other_resource_; + FakeAdaptationListener adaptation_listener_; + std::unique_ptr video_stream_adapter_; + std::unique_ptr processor_; + VideoSourceRestrictionsListenerForTesting restrictions_listener_; }; } // namespace TEST_F(ResourceAdaptationProcessorTest, DisabledByDefault) { - EXPECT_EQ(DegradationPreference::DISABLED, - processor_.degradation_preference()); - EXPECT_EQ(DegradationPreference::DISABLED, - processor_.effective_degradation_preference()); SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); - processor_.StartResourceAdaptation(); // Adaptation does not happen when disabled. - resource_.set_usage_state(ResourceUsageState::kOveruse); - EXPECT_EQ(0u, processor_listener_.restrictions_updated_count()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count()); } TEST_F(ResourceAdaptationProcessorTest, InsufficientInput) { - processor_.SetDegradationPreference( + video_stream_adapter_->SetDegradationPreference( DegradationPreference::MAINTAIN_FRAMERATE); - processor_.StartResourceAdaptation(); // Adaptation does not happen if input is insufficient. // When frame size is missing (OnFrameSizeObserved not called yet). input_state_provider_.OnHasInputChanged(true); - resource_.set_usage_state(ResourceUsageState::kOveruse); - EXPECT_EQ(0u, processor_listener_.restrictions_updated_count()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count()); // When "has input" is missing. 
SetInputStates(false, kDefaultFrameRate, kDefaultFrameSize); - resource_.set_usage_state(ResourceUsageState::kOveruse); - EXPECT_EQ(0u, processor_listener_.restrictions_updated_count()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count()); // Note: frame rate cannot be missing, if unset it is 0. } @@ -139,212 +178,548 @@ TEST_F(ResourceAdaptationProcessorTest, InsufficientInput) { // restrictions. For that, see video_stream_adapter_unittest.cc. TEST_F(ResourceAdaptationProcessorTest, OveruseTriggersRestrictingResolutionInMaintainFrameRate) { - processor_.SetDegradationPreference( + video_stream_adapter_->SetDegradationPreference( DegradationPreference::MAINTAIN_FRAMERATE); - processor_.StartResourceAdaptation(); SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); - resource_.set_usage_state(ResourceUsageState::kOveruse); - EXPECT_EQ(1u, processor_listener_.restrictions_updated_count()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count()); EXPECT_TRUE( - processor_listener_.restrictions().max_pixels_per_frame().has_value()); + restrictions_listener_.restrictions().max_pixels_per_frame().has_value()); } TEST_F(ResourceAdaptationProcessorTest, OveruseTriggersRestrictingFrameRateInMaintainResolution) { - processor_.SetDegradationPreference( + video_stream_adapter_->SetDegradationPreference( DegradationPreference::MAINTAIN_RESOLUTION); - processor_.StartResourceAdaptation(); SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); - resource_.set_usage_state(ResourceUsageState::kOveruse); - EXPECT_EQ(1u, processor_listener_.restrictions_updated_count()); - EXPECT_TRUE(processor_listener_.restrictions().max_frame_rate().has_value()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count()); + EXPECT_TRUE( + 
restrictions_listener_.restrictions().max_frame_rate().has_value()); } TEST_F(ResourceAdaptationProcessorTest, OveruseTriggersRestrictingFrameRateAndResolutionInBalanced) { - processor_.SetDegradationPreference(DegradationPreference::BALANCED); - processor_.StartResourceAdaptation(); + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::BALANCED); SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); - // Adapting multiple times eventually resticts both frame rate and resolution. - // Exactly many times we need to adapt depends on BalancedDegradationSettings, - // VideoStreamAdapter and default input states. This test requires it to be - // achieved within 4 adaptations. + // Adapting multiple times eventually resticts both frame rate and + // resolution. Exactly many times we need to adapt depends on + // BalancedDegradationSettings, VideoStreamAdapter and default input + // states. This test requires it to be achieved within 4 adaptations. for (size_t i = 0; i < 4; ++i) { - resource_.set_usage_state(ResourceUsageState::kOveruse); - EXPECT_EQ(i + 1, processor_listener_.restrictions_updated_count()); - RestrictSource(processor_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(i + 1, restrictions_listener_.restrictions_updated_count()); + RestrictSource(restrictions_listener_.restrictions()); } EXPECT_TRUE( - processor_listener_.restrictions().max_pixels_per_frame().has_value()); - EXPECT_TRUE(processor_listener_.restrictions().max_frame_rate().has_value()); + restrictions_listener_.restrictions().max_pixels_per_frame().has_value()); + EXPECT_TRUE( + restrictions_listener_.restrictions().max_frame_rate().has_value()); } TEST_F(ResourceAdaptationProcessorTest, AwaitingPreviousAdaptation) { - processor_.SetDegradationPreference( + video_stream_adapter_->SetDegradationPreference( DegradationPreference::MAINTAIN_FRAMERATE); - processor_.StartResourceAdaptation(); SetInputStates(true, 
kDefaultFrameRate, kDefaultFrameSize); - resource_.set_usage_state(ResourceUsageState::kOveruse); - EXPECT_EQ(1u, processor_listener_.restrictions_updated_count()); - // If we don't restrict the source then adaptation will not happen again due - // to "awaiting previous adaptation". This prevents "double-adapt". - resource_.set_usage_state(ResourceUsageState::kOveruse); - EXPECT_EQ(1u, processor_listener_.restrictions_updated_count()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count()); + // If we don't restrict the source then adaptation will not happen again + // due to "awaiting previous adaptation". This prevents "double-adapt". + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count()); } TEST_F(ResourceAdaptationProcessorTest, CannotAdaptUpWhenUnrestricted) { - processor_.SetDegradationPreference( + video_stream_adapter_->SetDegradationPreference( DegradationPreference::MAINTAIN_FRAMERATE); - processor_.StartResourceAdaptation(); SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); - resource_.set_usage_state(ResourceUsageState::kUnderuse); - EXPECT_EQ(0u, processor_listener_.restrictions_updated_count()); + resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count()); } TEST_F(ResourceAdaptationProcessorTest, UnderuseTakesUsBackToUnrestricted) { - processor_.SetDegradationPreference( + video_stream_adapter_->SetDegradationPreference( DegradationPreference::MAINTAIN_FRAMERATE); - processor_.StartResourceAdaptation(); SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); - resource_.set_usage_state(ResourceUsageState::kOveruse); - EXPECT_EQ(1u, processor_listener_.restrictions_updated_count()); - RestrictSource(processor_listener_.restrictions()); - resource_.set_usage_state(ResourceUsageState::kUnderuse); - EXPECT_EQ(2u, 
processor_listener_.restrictions_updated_count()); - EXPECT_EQ(VideoSourceRestrictions(), processor_listener_.restrictions()); -} - -TEST_F(ResourceAdaptationProcessorTest, ResourcesCanPreventAdaptingUp) { - processor_.SetDegradationPreference( - DegradationPreference::MAINTAIN_FRAMERATE); - processor_.StartResourceAdaptation(); - SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); - // Adapt down so that we can adapt up. - resource_.set_usage_state(ResourceUsageState::kOveruse); - EXPECT_EQ(1u, processor_listener_.restrictions_updated_count()); - RestrictSource(processor_listener_.restrictions()); - // Adapting up is prevented. - resource_.set_is_adaptation_up_allowed(false); - resource_.set_usage_state(ResourceUsageState::kUnderuse); - EXPECT_EQ(1u, processor_listener_.restrictions_updated_count()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count()); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(2u, restrictions_listener_.restrictions_updated_count()); + EXPECT_EQ(VideoSourceRestrictions(), restrictions_listener_.restrictions()); } TEST_F(ResourceAdaptationProcessorTest, ResourcesCanNotAdaptUpIfNeverAdaptedDown) { - processor_.SetDegradationPreference( + video_stream_adapter_->SetDegradationPreference( DegradationPreference::MAINTAIN_FRAMERATE); - processor_.StartResourceAdaptation(); SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); - resource_.set_usage_state(ResourceUsageState::kOveruse); - EXPECT_EQ(1u, processor_listener_.restrictions_updated_count()); - RestrictSource(processor_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count()); + RestrictSource(restrictions_listener_.restrictions()); // Other resource signals under-use - 
other_resource_.set_usage_state(ResourceUsageState::kUnderuse); - EXPECT_EQ(1u, processor_listener_.restrictions_updated_count()); + other_resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count()); } TEST_F(ResourceAdaptationProcessorTest, ResourcesCanNotAdaptUpIfNotAdaptedDownAfterReset) { - processor_.SetDegradationPreference( + video_stream_adapter_->SetDegradationPreference( DegradationPreference::MAINTAIN_FRAMERATE); - processor_.StartResourceAdaptation(); SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); - resource_.set_usage_state(ResourceUsageState::kOveruse); - EXPECT_EQ(1u, processor_listener_.restrictions_updated_count()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1u, restrictions_listener_.restrictions_updated_count()); - processor_.ResetVideoSourceRestrictions(); - EXPECT_EQ(0, processor_listener_.adaptation_counters().Total()); - other_resource_.set_usage_state(ResourceUsageState::kOveruse); - EXPECT_EQ(1, processor_listener_.adaptation_counters().Total()); - RestrictSource(processor_listener_.restrictions()); + video_stream_adapter_->ClearRestrictions(); + EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total()); + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); - // resource_ did not overuse after we reset the restrictions, so adapt up - // should be disallowed. - resource_.set_usage_state(ResourceUsageState::kUnderuse); - EXPECT_EQ(1, processor_listener_.adaptation_counters().Total()); + // resource_ did not overuse after we reset the restrictions, so adapt + // up should be disallowed. 
+ resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); +} + +TEST_F(ResourceAdaptationProcessorTest, OnlyMostLimitedResourceMayAdaptUp) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + + // |other_resource_| is most limited, resource_ can't adapt up. + resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + other_resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + + // |resource_| and |other_resource_| are now most limited, so both must + // signal underuse to adapt up. 
+ other_resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); } TEST_F(ResourceAdaptationProcessorTest, MultipleResourcesCanTriggerMultipleAdaptations) { - processor_.SetDegradationPreference( + video_stream_adapter_->SetDegradationPreference( DegradationPreference::MAINTAIN_FRAMERATE); - processor_.StartResourceAdaptation(); SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); - resource_.set_usage_state(ResourceUsageState::kOveruse); - EXPECT_EQ(1, processor_listener_.adaptation_counters().Total()); - RestrictSource(processor_listener_.restrictions()); - other_resource_.set_usage_state(ResourceUsageState::kOveruse); - EXPECT_EQ(2, processor_listener_.adaptation_counters().Total()); - RestrictSource(processor_listener_.restrictions()); - other_resource_.set_usage_state(ResourceUsageState::kOveruse); - EXPECT_EQ(3, processor_listener_.adaptation_counters().Total()); - RestrictSource(processor_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(3, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); - resource_.set_usage_state(ResourceUsageState::kUnderuse); - EXPECT_EQ(2, processor_listener_.adaptation_counters().Total()); - 
RestrictSource(processor_listener_.restrictions()); - // Does not trigger adaptation since resource has no adaptations left. - resource_.set_usage_state(ResourceUsageState::kUnderuse); - EXPECT_EQ(2, processor_listener_.adaptation_counters().Total()); - RestrictSource(processor_listener_.restrictions()); + // resource_ is not most limited so can't adapt from underuse. + resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(3, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + other_resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + // resource_ is still not most limited so can't adapt from underuse. + resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); - other_resource_.set_usage_state(ResourceUsageState::kUnderuse); - EXPECT_EQ(1, processor_listener_.adaptation_counters().Total()); - RestrictSource(processor_listener_.restrictions()); - other_resource_.set_usage_state(ResourceUsageState::kUnderuse); - EXPECT_EQ(0, processor_listener_.adaptation_counters().Total()); - RestrictSource(processor_listener_.restrictions()); -} + // However it will be after overuse + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(3, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); -TEST_F(ResourceAdaptationProcessorTest, AdaptingTriggersOnAdaptationApplied) { - processor_.SetDegradationPreference( - DegradationPreference::MAINTAIN_FRAMERATE); - processor_.StartResourceAdaptation(); - SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); - resource_.set_usage_state(ResourceUsageState::kOveruse); - EXPECT_EQ(1u, resource_.num_adaptations_applied()); -} + // Now 
other_resource_ can't adapt up as it is not most restricted. + other_resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(3, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); -TEST_F(ResourceAdaptationProcessorTest, AdaptingClearsResourceUsageState) { - processor_.SetDegradationPreference( - DegradationPreference::MAINTAIN_FRAMERATE); - processor_.StartResourceAdaptation(); - SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); - resource_.set_usage_state(ResourceUsageState::kOveruse); - EXPECT_EQ(1u, processor_listener_.restrictions_updated_count()); - EXPECT_FALSE(resource_.usage_state().has_value()); + // resource_ is limited at 3 adaptations and other_resource_ 2. + // With the most limited resource signalling underuse in the following + // order we get back to unrestricted video. + resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + // Both resource_ and other_resource_ are most limited. + other_resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + // Again both are most limited. 
+ resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + other_resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total()); } TEST_F(ResourceAdaptationProcessorTest, - FailingAdaptingAlsoClearsResourceUsageState) { - processor_.SetDegradationPreference(DegradationPreference::DISABLED); - processor_.StartResourceAdaptation(); - resource_.set_usage_state(ResourceUsageState::kOveruse); - EXPECT_EQ(0u, processor_listener_.restrictions_updated_count()); - EXPECT_FALSE(resource_.usage_state().has_value()); + MostLimitedResourceAdaptationWorksAfterChangingDegradataionPreference) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + // Adapt down until we can't anymore. + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + int last_total = restrictions_listener_.adaptation_counters().Total(); + + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_RESOLUTION); + // resource_ can not adapt up since we have never reduced FPS. 
+ resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(last_total, restrictions_listener_.adaptation_counters().Total()); + + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(last_total + 1, + restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + // other_resource_ is most limited so should be able to adapt up. + other_resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(last_total, restrictions_listener_.adaptation_counters().Total()); +} + +TEST_F(ResourceAdaptationProcessorTest, AdaptingTriggersOnAdaptationApplied) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1u, adaptation_listener_.num_adaptations_applied()); } TEST_F(ResourceAdaptationProcessorTest, AdaptsDownWhenOtherResourceIsAlwaysUnderused) { - processor_.SetDegradationPreference( + video_stream_adapter_->SetDegradationPreference( DegradationPreference::MAINTAIN_FRAMERATE); - processor_.StartResourceAdaptation(); SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); - other_resource_.set_usage_state(ResourceUsageState::kUnderuse); + other_resource_->SetUsageState(ResourceUsageState::kUnderuse); // Does not trigger adapataion because there's no restriction. - EXPECT_EQ(0, processor_listener_.adaptation_counters().Total()); + EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total()); - RestrictSource(processor_listener_.restrictions()); - resource_.set_usage_state(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kOveruse); // Adapts down even if other resource asked for adapting up. 
- EXPECT_EQ(1, processor_listener_.adaptation_counters().Total()); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); - RestrictSource(processor_listener_.restrictions()); - other_resource_.set_usage_state(ResourceUsageState::kUnderuse); + RestrictSource(restrictions_listener_.restrictions()); + other_resource_->SetUsageState(ResourceUsageState::kUnderuse); // Doesn't adapt up because adaptation is due to another resource. - EXPECT_EQ(1, processor_listener_.adaptation_counters().Total()); - RestrictSource(processor_listener_.restrictions()); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); +} + +TEST_F(ResourceAdaptationProcessorTest, + TriggerOveruseNotOnAdaptationTaskQueue) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + TaskQueueForTest resource_task_queue("ResourceTaskQueue"); + resource_task_queue.PostTask(ToQueuedTask( + [&]() { resource_->SetUsageState(ResourceUsageState::kOveruse); })); + + EXPECT_EQ_WAIT(1u, restrictions_listener_.restrictions_updated_count(), + kDefaultTimeoutMs); +} + +TEST_F(ResourceAdaptationProcessorTest, + DestroyProcessorWhileResourceListenerDelegateHasTaskInFlight) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + // Wait for |resource_| to signal oversue first so we know that the delegate + // has passed it on to the processor's task queue. + rtc::Event resource_event; + TaskQueueForTest resource_task_queue("ResourceTaskQueue"); + resource_task_queue.PostTask(ToQueuedTask([&]() { + resource_->SetUsageState(ResourceUsageState::kOveruse); + resource_event.Set(); + })); + + EXPECT_TRUE(resource_event.Wait(kDefaultTimeoutMs)); + // Now destroy the processor while handling the overuse is in flight. 
+ DestroyProcessor(); + + // Because the processor was destroyed by the time the delegate's task ran, + // the overuse signal must not have been handled. + EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count()); +} + +TEST_F(ResourceAdaptationProcessorTest, + ResourceOveruseIgnoredWhenSignalledDuringRemoval) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + rtc::Event overuse_event; + TaskQueueForTest resource_task_queue("ResourceTaskQueue"); + // Queues task for |resource_| overuse while |processor_| is still listening. + resource_task_queue.PostTask(ToQueuedTask([&]() { + resource_->SetUsageState(ResourceUsageState::kOveruse); + overuse_event.Set(); + })); + EXPECT_TRUE(overuse_event.Wait(kDefaultTimeoutMs)); + // Once we know the overuse task is queued, remove |resource_| so that + // |processor_| is not listening to it. + processor_->RemoveResource(resource_); + + // Runs the queued task so |processor_| gets signalled kOveruse from + // |resource_| even though |processor_| was not listening. + WaitUntilTaskQueueIdle(); + + // No restrictions should change even though |resource_| signaled |kOveruse|. + EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count()); + + // Delete |resource_| for cleanup. + resource_ = nullptr; +} + +TEST_F(ResourceAdaptationProcessorTest, + RemovingOnlyAdaptedResourceResetsAdaptation) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + RestrictSource(restrictions_listener_.restrictions()); + + processor_->RemoveResource(resource_); + EXPECT_EQ(0, restrictions_listener_.adaptation_counters().Total()); + + // Delete |resource_| for cleanup. 
+ resource_ = nullptr; +} + +TEST_F(ResourceAdaptationProcessorTest, + RemovingMostLimitedResourceSetsAdaptationToNextLimitedLevel) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::BALANCED); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + VideoSourceRestrictions next_limited_restrictions = + restrictions_listener_.restrictions(); + VideoAdaptationCounters next_limited_counters = + restrictions_listener_.adaptation_counters(); + + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + + // Removing most limited |resource_| should revert us back to + processor_->RemoveResource(resource_); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + EXPECT_EQ(next_limited_restrictions, restrictions_listener_.restrictions()); + EXPECT_EQ(next_limited_counters, + restrictions_listener_.adaptation_counters()); + + // Delete |resource_| for cleanup. + resource_ = nullptr; +} + +TEST_F(ResourceAdaptationProcessorTest, + RemovingMostLimitedResourceSetsAdaptationIfInputStateUnchanged) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + VideoSourceRestrictions next_limited_restrictions = + restrictions_listener_.restrictions(); + VideoAdaptationCounters next_limited_counters = + restrictions_listener_.adaptation_counters(); + + // Overuse twice and underuse once. 
After the underuse we don't restrict the + // source. Normally this would block future underuses. + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kUnderuse); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + + // Removing most limited |resource_| should revert us back to, even though we + // did not call RestrictSource() after |resource_| was overused. Normally + // adaptation for MAINTAIN_FRAMERATE would be blocked here but for removal we + // allow this anyways. + processor_->RemoveResource(resource_); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + EXPECT_EQ(next_limited_restrictions, restrictions_listener_.restrictions()); + EXPECT_EQ(next_limited_counters, + restrictions_listener_.adaptation_counters()); + + // Delete |resource_| for cleanup. 
+ resource_ = nullptr; +} + +TEST_F(ResourceAdaptationProcessorTest, + RemovingResourceNotMostLimitedHasNoEffectOnLimitations) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::BALANCED); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + VideoSourceRestrictions current_restrictions = + restrictions_listener_.restrictions(); + VideoAdaptationCounters current_counters = + restrictions_listener_.adaptation_counters(); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + + // Removing most limited |resource_| should revert us back to + processor_->RemoveResource(other_resource_); + EXPECT_EQ(current_restrictions, restrictions_listener_.restrictions()); + EXPECT_EQ(current_counters, restrictions_listener_.adaptation_counters()); + + // Delete |other_resource_| for cleanup. 
+ other_resource_ = nullptr; +} + +TEST_F(ResourceAdaptationProcessorTest, + RemovingMostLimitedResourceAfterSwitchingDegradationPreferences) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + VideoSourceRestrictions next_limited_restrictions = + restrictions_listener_.restrictions(); + VideoAdaptationCounters next_limited_counters = + restrictions_listener_.adaptation_counters(); + + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_RESOLUTION); + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + + // Revert to |other_resource_| when removing |resource_| even though the + // degradation preference was different when it was overused. + processor_->RemoveResource(resource_); + EXPECT_EQ(next_limited_counters, + restrictions_listener_.adaptation_counters()); + + // After switching back to MAINTAIN_FRAMERATE, the next most limited settings + // are restored. + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + EXPECT_EQ(next_limited_restrictions, restrictions_listener_.restrictions()); + + // Delete |resource_| for cleanup. 
+ resource_ = nullptr; +} + +TEST_F(ResourceAdaptationProcessorTest, + RemovingMostLimitedResourceSetsNextLimitationsInDisabled) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + VideoSourceRestrictions next_limited_restrictions = + restrictions_listener_.restrictions(); + VideoAdaptationCounters next_limited_counters = + restrictions_listener_.adaptation_counters(); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + EXPECT_EQ(2, restrictions_listener_.adaptation_counters().Total()); + + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::DISABLED); + + // Revert to |other_resource_| when removing |resource_| even though the + // current degradataion preference is disabled. + processor_->RemoveResource(resource_); + + // After switching back to MAINTAIN_FRAMERATE, the next most limited settings + // are restored. + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + EXPECT_EQ(next_limited_restrictions, restrictions_listener_.restrictions()); + EXPECT_EQ(next_limited_counters, + restrictions_listener_.adaptation_counters()); + + // Delete |resource_| for cleanup. 
+ resource_ = nullptr; +} + +TEST_F(ResourceAdaptationProcessorTest, + RemovedResourceSignalsIgnoredByProcessor) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + processor_->RemoveResource(resource_); + resource_->SetUsageState(ResourceUsageState::kOveruse); + EXPECT_EQ(0u, restrictions_listener_.restrictions_updated_count()); + + // Delete |resource_| for cleanup. + resource_ = nullptr; +} + +TEST_F(ResourceAdaptationProcessorTest, + RemovingResourceWhenMultipleMostLimtedHasNoEffect) { + video_stream_adapter_->SetDegradationPreference( + DegradationPreference::MAINTAIN_FRAMERATE); + SetInputStates(true, kDefaultFrameRate, kDefaultFrameSize); + + other_resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + // Adapt |resource_| up and then down so that both resource's are most + // limited at 1 adaptation. + resource_->SetUsageState(ResourceUsageState::kOveruse); + RestrictSource(restrictions_listener_.restrictions()); + resource_->SetUsageState(ResourceUsageState::kUnderuse); + RestrictSource(restrictions_listener_.restrictions()); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + + // Removing |resource_| has no effect since both |resource_| and + // |other_resource_| are most limited. + processor_->RemoveResource(resource_); + EXPECT_EQ(1, restrictions_listener_.adaptation_counters().Total()); + + // Delete |resource_| for cleanup. + resource_ = nullptr; } } // namespace webrtc diff --git a/call/adaptation/resource_unittest.cc b/call/adaptation/resource_unittest.cc index d864005a72..a2291dfdce 100644 --- a/call/adaptation/resource_unittest.cc +++ b/call/adaptation/resource_unittest.cc @@ -8,9 +8,13 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#include "call/adaptation/resource.h" +#include "api/adaptation/resource.h" +#include + +#include "api/scoped_refptr.h" #include "call/adaptation/test/fake_resource.h" +#include "call/adaptation/test/mock_resource_listener.h" #include "test/gmock.h" #include "test/gtest.h" @@ -19,31 +23,33 @@ namespace webrtc { using ::testing::_; using ::testing::StrictMock; -class MockResourceListener : public ResourceListener { +class ResourceTest : public ::testing::Test { public: - MOCK_METHOD(void, OnResourceUsageStateMeasured, (const Resource& resource)); + ResourceTest() : fake_resource_(FakeResource::Create("FakeResource")) {} + + protected: + rtc::scoped_refptr fake_resource_; }; -TEST(ResourceTest, RegisteringListenerReceivesCallbacks) { +TEST_F(ResourceTest, RegisteringListenerReceivesCallbacks) { StrictMock resource_listener; - FakeResource fake_resource("FakeResource"); - fake_resource.SetResourceListener(&resource_listener); - EXPECT_CALL(resource_listener, OnResourceUsageStateMeasured(_)) + fake_resource_->SetResourceListener(&resource_listener); + EXPECT_CALL(resource_listener, OnResourceUsageStateMeasured(_, _)) .Times(1) - .WillOnce([](const Resource& resource) { - EXPECT_EQ(ResourceUsageState::kOveruse, resource.usage_state()); + .WillOnce([](rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + EXPECT_EQ(ResourceUsageState::kOveruse, usage_state); }); - fake_resource.set_usage_state(ResourceUsageState::kOveruse); - fake_resource.SetResourceListener(nullptr); + fake_resource_->SetUsageState(ResourceUsageState::kOveruse); + fake_resource_->SetResourceListener(nullptr); } -TEST(ResourceTest, UnregisteringListenerStopsCallbacks) { +TEST_F(ResourceTest, UnregisteringListenerStopsCallbacks) { StrictMock resource_listener; - FakeResource fake_resource("FakeResource"); - fake_resource.SetResourceListener(&resource_listener); - fake_resource.SetResourceListener(nullptr); - EXPECT_CALL(resource_listener, OnResourceUsageStateMeasured(_)).Times(0); - 
fake_resource.set_usage_state(ResourceUsageState::kOveruse); + fake_resource_->SetResourceListener(&resource_listener); + fake_resource_->SetResourceListener(nullptr); + EXPECT_CALL(resource_listener, OnResourceUsageStateMeasured(_, _)).Times(0); + fake_resource_->SetUsageState(ResourceUsageState::kOveruse); } } // namespace webrtc diff --git a/call/adaptation/test/fake_adaptation_constraint.cc b/call/adaptation/test/fake_adaptation_constraint.cc new file mode 100644 index 0000000000..983885e58a --- /dev/null +++ b/call/adaptation/test/fake_adaptation_constraint.cc @@ -0,0 +1,39 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "call/adaptation/test/fake_adaptation_constraint.h" + +#include + +namespace webrtc { + +FakeAdaptationConstraint::FakeAdaptationConstraint(std::string name) + : name_(std::move(name)), is_adaptation_up_allowed_(true) {} + +FakeAdaptationConstraint::~FakeAdaptationConstraint() {} + +void FakeAdaptationConstraint::set_is_adaptation_up_allowed( + bool is_adaptation_up_allowed) { + is_adaptation_up_allowed_ = is_adaptation_up_allowed; +} + +std::string FakeAdaptationConstraint::Name() const { + return name_; +} + +bool FakeAdaptationConstraint::IsAdaptationUpAllowed( + const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after, + rtc::scoped_refptr reason_resource) const { + return is_adaptation_up_allowed_; +} + +} // namespace webrtc diff --git a/call/adaptation/test/fake_adaptation_constraint.h b/call/adaptation/test/fake_adaptation_constraint.h new file mode 100644 index 
0000000000..74637f48fd --- /dev/null +++ b/call/adaptation/test/fake_adaptation_constraint.h @@ -0,0 +1,42 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef CALL_ADAPTATION_TEST_FAKE_ADAPTATION_CONSTRAINT_H_ +#define CALL_ADAPTATION_TEST_FAKE_ADAPTATION_CONSTRAINT_H_ + +#include + +#include "call/adaptation/adaptation_constraint.h" + +namespace webrtc { + +class FakeAdaptationConstraint : public AdaptationConstraint { + public: + explicit FakeAdaptationConstraint(std::string name); + ~FakeAdaptationConstraint() override; + + void set_is_adaptation_up_allowed(bool is_adaptation_up_allowed); + + // AdaptationConstraint implementation. + std::string Name() const override; + bool IsAdaptationUpAllowed( + const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after, + rtc::scoped_refptr reason_resource) const override; + + private: + const std::string name_; + bool is_adaptation_up_allowed_; +}; + +} // namespace webrtc + +#endif // CALL_ADAPTATION_TEST_FAKE_ADAPTATION_CONSTRAINT_H_ diff --git a/call/adaptation/test/fake_adaptation_listener.cc b/call/adaptation/test/fake_adaptation_listener.cc new file mode 100644 index 0000000000..7feecd6367 --- /dev/null +++ b/call/adaptation/test/fake_adaptation_listener.cc @@ -0,0 +1,32 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "call/adaptation/test/fake_adaptation_listener.h" + +namespace webrtc { + +FakeAdaptationListener::FakeAdaptationListener() + : num_adaptations_applied_(0) {} + +FakeAdaptationListener::~FakeAdaptationListener() {} + +size_t FakeAdaptationListener::num_adaptations_applied() const { + return num_adaptations_applied_; +} + +void FakeAdaptationListener::OnAdaptationApplied( + const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after, + rtc::scoped_refptr reason_resource) { + ++num_adaptations_applied_; +} + +} // namespace webrtc diff --git a/call/adaptation/test/fake_adaptation_listener.h b/call/adaptation/test/fake_adaptation_listener.h new file mode 100644 index 0000000000..c60ba3089b --- /dev/null +++ b/call/adaptation/test/fake_adaptation_listener.h @@ -0,0 +1,38 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef CALL_ADAPTATION_TEST_FAKE_ADAPTATION_LISTENER_H_ +#define CALL_ADAPTATION_TEST_FAKE_ADAPTATION_LISTENER_H_ + +#include "call/adaptation/adaptation_listener.h" + +namespace webrtc { + +class FakeAdaptationListener : public AdaptationListener { + public: + FakeAdaptationListener(); + ~FakeAdaptationListener() override; + + size_t num_adaptations_applied() const; + + // AdaptationListener implementation. 
+ void OnAdaptationApplied( + const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after, + rtc::scoped_refptr reason_resource) override; + + private: + size_t num_adaptations_applied_; +}; + +} // namespace webrtc + +#endif // CALL_ADAPTATION_TEST_FAKE_ADAPTATION_LISTENER_H_ diff --git a/call/adaptation/test/fake_frame_rate_provider.h b/call/adaptation/test/fake_frame_rate_provider.h index a08e162321..3638f478f3 100644 --- a/call/adaptation/test/fake_frame_rate_provider.h +++ b/call/adaptation/test/fake_frame_rate_provider.h @@ -21,29 +21,41 @@ namespace webrtc { class MockVideoStreamEncoderObserver : public VideoStreamEncoderObserver { public: - MOCK_METHOD2(OnEncodedFrameTimeMeasured, void(int, int)); - MOCK_METHOD2(OnIncomingFrame, void(int, int)); - MOCK_METHOD2(OnSendEncodedImage, - void(const EncodedImage&, const CodecSpecificInfo*)); - MOCK_METHOD1(OnEncoderImplementationChanged, void(const std::string&)); - MOCK_METHOD1(OnFrameDropped, void(DropReason)); - MOCK_METHOD2(OnEncoderReconfigured, - void(const VideoEncoderConfig&, - const std::vector&)); - MOCK_METHOD3(OnAdaptationChanged, - void(VideoAdaptationReason, - const VideoAdaptationCounters&, - const VideoAdaptationCounters&)); - MOCK_METHOD0(ClearAdaptationStats, void()); - MOCK_METHOD2(UpdateAdaptationSettings, - void(AdaptationSettings, AdaptationSettings)); - MOCK_METHOD0(OnMinPixelLimitReached, void()); - MOCK_METHOD0(OnInitialQualityResolutionAdaptDown, void()); - MOCK_METHOD1(OnSuspendChange, void(bool)); - MOCK_METHOD2(OnBitrateAllocationUpdated, - void(const VideoCodec&, const VideoBitrateAllocation&)); - MOCK_METHOD1(OnEncoderInternalScalerUpdate, void(bool)); - MOCK_CONST_METHOD0(GetInputFrameRate, int()); + MOCK_METHOD(void, OnEncodedFrameTimeMeasured, (int, int), (override)); + MOCK_METHOD(void, OnIncomingFrame, (int, int), (override)); + MOCK_METHOD(void, + OnSendEncodedImage, + (const EncodedImage&, 
const CodecSpecificInfo*), + (override)); + MOCK_METHOD(void, + OnEncoderImplementationChanged, + (const std::string&), + (override)); + MOCK_METHOD(void, OnFrameDropped, (DropReason), (override)); + MOCK_METHOD(void, + OnEncoderReconfigured, + (const VideoEncoderConfig&, const std::vector&), + (override)); + MOCK_METHOD(void, + OnAdaptationChanged, + (VideoAdaptationReason, + const VideoAdaptationCounters&, + const VideoAdaptationCounters&), + (override)); + MOCK_METHOD(void, ClearAdaptationStats, (), (override)); + MOCK_METHOD(void, + UpdateAdaptationSettings, + (AdaptationSettings, AdaptationSettings), + (override)); + MOCK_METHOD(void, OnMinPixelLimitReached, (), (override)); + MOCK_METHOD(void, OnInitialQualityResolutionAdaptDown, (), (override)); + MOCK_METHOD(void, OnSuspendChange, (bool), (override)); + MOCK_METHOD(void, + OnBitrateAllocationUpdated, + (const VideoCodec&, const VideoBitrateAllocation&), + (override)); + MOCK_METHOD(void, OnEncoderInternalScalerUpdate, (bool), (override)); + MOCK_METHOD(int, GetInputFrameRate, (), (const, override)); }; class FakeFrameRateProvider : public MockVideoStreamEncoderObserver { diff --git a/call/adaptation/test/fake_resource.cc b/call/adaptation/test/fake_resource.cc index bd7ad5431f..fa69e886bf 100644 --- a/call/adaptation/test/fake_resource.cc +++ b/call/adaptation/test/fake_resource.cc @@ -10,44 +10,35 @@ #include "call/adaptation/test/fake_resource.h" +#include #include +#include "rtc_base/ref_counted_object.h" + namespace webrtc { +// static +rtc::scoped_refptr FakeResource::Create(std::string name) { + return new rtc::RefCountedObject(name); +} + FakeResource::FakeResource(std::string name) - : Resource(), - name_(std::move(name)), - is_adaptation_up_allowed_(true), - num_adaptations_applied_(0) {} + : Resource(), name_(std::move(name)), listener_(nullptr) {} FakeResource::~FakeResource() {} -void FakeResource::set_usage_state(ResourceUsageState usage_state) { - OnResourceUsageStateMeasured(usage_state); 
+void FakeResource::SetUsageState(ResourceUsageState usage_state) { + if (listener_) { + listener_->OnResourceUsageStateMeasured(this, usage_state); + } } -void FakeResource::set_is_adaptation_up_allowed(bool is_adaptation_up_allowed) { - is_adaptation_up_allowed_ = is_adaptation_up_allowed; +std::string FakeResource::Name() const { + return name_; } -size_t FakeResource::num_adaptations_applied() const { - return num_adaptations_applied_; -} - -bool FakeResource::IsAdaptationUpAllowed( - const VideoStreamInputState& input_state, - const VideoSourceRestrictions& restrictions_before, - const VideoSourceRestrictions& restrictions_after, - const Resource& reason_resource) const { - return is_adaptation_up_allowed_; -} - -void FakeResource::OnAdaptationApplied( - const VideoStreamInputState& input_state, - const VideoSourceRestrictions& restrictions_before, - const VideoSourceRestrictions& restrictions_after, - const Resource& reason_resource) { - ++num_adaptations_applied_; +void FakeResource::SetResourceListener(ResourceListener* listener) { + listener_ = listener; } } // namespace webrtc diff --git a/call/adaptation/test/fake_resource.h b/call/adaptation/test/fake_resource.h index 0d9b1f46bb..e88d97db7a 100644 --- a/call/adaptation/test/fake_resource.h +++ b/call/adaptation/test/fake_resource.h @@ -12,36 +12,31 @@ #define CALL_ADAPTATION_TEST_FAKE_RESOURCE_H_ #include +#include -#include "call/adaptation/resource.h" +#include "absl/types/optional.h" +#include "api/adaptation/resource.h" +#include "api/scoped_refptr.h" namespace webrtc { // Fake resource used for testing. 
class FakeResource : public Resource { public: + static rtc::scoped_refptr Create(std::string name); + explicit FakeResource(std::string name); ~FakeResource() override; - void set_usage_state(ResourceUsageState usage_state); - void set_is_adaptation_up_allowed(bool is_adaptation_up_allowed); - size_t num_adaptations_applied() const; + void SetUsageState(ResourceUsageState usage_state); // Resource implementation. - std::string name() const override { return name_; } - bool IsAdaptationUpAllowed(const VideoStreamInputState& input_state, - const VideoSourceRestrictions& restrictions_before, - const VideoSourceRestrictions& restrictions_after, - const Resource& reason_resource) const override; - void OnAdaptationApplied(const VideoStreamInputState& input_state, - const VideoSourceRestrictions& restrictions_before, - const VideoSourceRestrictions& restrictions_after, - const Resource& reason_resource) override; + std::string Name() const override; + void SetResourceListener(ResourceListener* listener) override; private: const std::string name_; - bool is_adaptation_up_allowed_; - size_t num_adaptations_applied_; + ResourceListener* listener_; }; } // namespace webrtc diff --git a/call/adaptation/test/mock_resource_listener.h b/call/adaptation/test/mock_resource_listener.h new file mode 100644 index 0000000000..f0f998f2e3 --- /dev/null +++ b/call/adaptation/test/mock_resource_listener.h @@ -0,0 +1,31 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef CALL_ADAPTATION_TEST_MOCK_RESOURCE_LISTENER_H_ +#define CALL_ADAPTATION_TEST_MOCK_RESOURCE_LISTENER_H_ + +#include "api/adaptation/resource.h" + +#include "test/gmock.h" + +namespace webrtc { + +class MockResourceListener : public ResourceListener { + public: + MOCK_METHOD(void, + OnResourceUsageStateMeasured, + (rtc::scoped_refptr resource, + ResourceUsageState usage_state), + (override)); +}; + +} // namespace webrtc + +#endif // CALL_ADAPTATION_TEST_MOCK_RESOURCE_LISTENER_H_ diff --git a/call/adaptation/video_source_restrictions.cc b/call/adaptation/video_source_restrictions.cc index 6fbdcb42a6..e9d6c26137 100644 --- a/call/adaptation/video_source_restrictions.cc +++ b/call/adaptation/video_source_restrictions.cc @@ -13,6 +13,7 @@ #include #include "rtc_base/checks.h" +#include "rtc_base/strings/string_builder.h" namespace webrtc { @@ -36,6 +37,19 @@ VideoSourceRestrictions::VideoSourceRestrictions( RTC_DCHECK(!max_frame_rate_.has_value() || max_frame_rate_.value() > 0.0); } +std::string VideoSourceRestrictions::ToString() const { + rtc::StringBuilder ss; + ss << "{"; + if (max_frame_rate_) + ss << " max_fps=" << max_frame_rate_.value(); + if (max_pixels_per_frame_) + ss << " max_pixels_per_frame=" << max_pixels_per_frame_.value(); + if (target_pixels_per_frame_) + ss << " target_pixels_per_frame=" << target_pixels_per_frame_.value(); + ss << " }"; + return ss.Release(); +} + const absl::optional& VideoSourceRestrictions::max_pixels_per_frame() const { return max_pixels_per_frame_; diff --git a/call/adaptation/video_source_restrictions.h b/call/adaptation/video_source_restrictions.h index 506bae6133..7f79a48e5d 100644 --- a/call/adaptation/video_source_restrictions.h +++ b/call/adaptation/video_source_restrictions.h @@ -11,6 +11,7 @@ #ifndef CALL_ADAPTATION_VIDEO_SOURCE_RESTRICTIONS_H_ #define CALL_ADAPTATION_VIDEO_SOURCE_RESTRICTIONS_H_ +#include #include #include "absl/types/optional.h" @@ -38,6 +39,8 @@ class VideoSourceRestrictions { return 
!(*this == rhs); } + std::string ToString() const; + // The source must produce a resolution less than or equal to // max_pixels_per_frame(). const absl::optional& max_pixels_per_frame() const; diff --git a/call/adaptation/video_stream_adapter.cc b/call/adaptation/video_stream_adapter.cc index b224e3e4d2..ec80a13a08 100644 --- a/call/adaptation/video_stream_adapter.cc +++ b/call/adaptation/video_stream_adapter.cc @@ -15,11 +15,17 @@ #include #include "absl/types/optional.h" +#include "absl/types/variant.h" +#include "api/video/video_adaptation_counters.h" #include "api/video/video_adaptation_reason.h" #include "api/video_codecs/video_encoder.h" +#include "call/adaptation/video_source_restrictions.h" +#include "call/adaptation/video_stream_input_state.h" +#include "rtc_base/checks.h" #include "rtc_base/constructor_magic.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/synchronization/sequence_checker.h" namespace webrtc { @@ -27,13 +33,6 @@ const int kMinFrameRateFps = 2; namespace { -// Generate suggested higher and lower frame rates and resolutions, to be -// applied to the VideoSourceRestrictor. These are used in "maintain-resolution" -// and "maintain-framerate". The "balanced" degradation preference also makes -// use of BalancedDegradationPreference when generating suggestions. The -// VideoSourceRestrictor decidedes whether or not a proposed adaptation is -// valid. - // For frame rate, the steps we take are 2/3 (down) and 3/2 (up). int GetLowerFrameRateThan(int fps) { RTC_DCHECK(fps != std::numeric_limits::max()); @@ -59,8 +58,60 @@ int GetLowerResolutionThan(int pixel_count) { return (pixel_count * 3) / 5; } +int GetIncreasedMaxPixelsWanted(int target_pixels) { + if (target_pixels == std::numeric_limits::max()) + return std::numeric_limits::max(); + // When we decrease resolution, we go down to at most 3/5 of current pixels. + // Thus to increase resolution, we need 3/5 to get back to where we started. 
+ // When going up, the desired max_pixels_per_frame() has to be significantly + // higher than the target because the source's native resolutions might not + // match the target. We pick 12/5 of the target. + // + // (This value was historically 4 times the old target, which is (3/5)*4 of + // the new target - or 12/5 - assuming the target is adjusted according to + // the above steps.) + RTC_DCHECK(target_pixels != std::numeric_limits::max()); + return (target_pixels * 12) / 5; +} + +bool CanDecreaseResolutionTo(int target_pixels, + const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions) { + int max_pixels_per_frame = + rtc::dchecked_cast(restrictions.max_pixels_per_frame().value_or( + std::numeric_limits::max())); + return target_pixels < max_pixels_per_frame && + target_pixels >= input_state.min_pixels_per_frame(); +} + +bool CanIncreaseResolutionTo(int target_pixels, + const VideoSourceRestrictions& restrictions) { + int max_pixels_wanted = GetIncreasedMaxPixelsWanted(target_pixels); + int max_pixels_per_frame = + rtc::dchecked_cast(restrictions.max_pixels_per_frame().value_or( + std::numeric_limits::max())); + return max_pixels_wanted > max_pixels_per_frame; +} + +bool CanDecreaseFrameRateTo(int max_frame_rate, + const VideoSourceRestrictions& restrictions) { + const int fps_wanted = std::max(kMinFrameRateFps, max_frame_rate); + return fps_wanted < + rtc::dchecked_cast(restrictions.max_frame_rate().value_or( + std::numeric_limits::max())); +} + +bool CanIncreaseFrameRateTo(int max_frame_rate, + const VideoSourceRestrictions& restrictions) { + return max_frame_rate > + rtc::dchecked_cast(restrictions.max_frame_rate().value_or( + std::numeric_limits::max())); +} + } // namespace +VideoSourceRestrictionsListener::~VideoSourceRestrictionsListener() = default; + VideoSourceRestrictions FilterRestrictionsByDegradationPreference( VideoSourceRestrictions source_restrictions, DegradationPreference degradation_preference) { @@ -82,28 
+133,6 @@ VideoSourceRestrictions FilterRestrictionsByDegradationPreference( return source_restrictions; } -VideoAdaptationCounters FilterVideoAdaptationCountersByDegradationPreference( - VideoAdaptationCounters counters, - DegradationPreference degradation_preference) { - switch (degradation_preference) { - case DegradationPreference::BALANCED: - break; - case DegradationPreference::MAINTAIN_FRAMERATE: - counters.fps_adaptations = 0; - break; - case DegradationPreference::MAINTAIN_RESOLUTION: - counters.resolution_adaptations = 0; - break; - case DegradationPreference::DISABLED: - counters.resolution_adaptations = 0; - counters.fps_adaptations = 0; - break; - default: - RTC_NOTREACHED(); - } - return counters; -} - // TODO(hbos): Use absl::optional<> instead? int GetHigherResolutionThan(int pixel_count) { return pixel_count != std::numeric_limits::max() @@ -111,38 +140,44 @@ int GetHigherResolutionThan(int pixel_count) { : std::numeric_limits::max(); } -Adaptation::Step::Step(StepType type, int target) - : type(type), target(target) {} - -Adaptation::Adaptation(int validation_id, Step step) - : validation_id_(validation_id), - status_(Status::kValid), - step_(std::move(step)), - min_pixel_limit_reached_(false) {} - -Adaptation::Adaptation(int validation_id, - Step step, - bool min_pixel_limit_reached) - : validation_id_(validation_id), - status_(Status::kValid), - step_(std::move(step)), - min_pixel_limit_reached_(min_pixel_limit_reached) {} - -Adaptation::Adaptation(int validation_id, Status invalid_status) - : validation_id_(validation_id), - status_(invalid_status), - step_(absl::nullopt), - min_pixel_limit_reached_(false) { - RTC_DCHECK_NE(status_, Status::kValid); +// static +const char* Adaptation::StatusToString(Adaptation::Status status) { + switch (status) { + case Adaptation::Status::kValid: + return "kValid"; + case Adaptation::Status::kLimitReached: + return "kLimitReached"; + case Adaptation::Status::kAwaitingPreviousAdaptation: + return 
"kAwaitingPreviousAdaptation"; + case Status::kInsufficientInput: + return "kInsufficientInput"; + case Status::kAdaptationDisabled: + return "kAdaptationDisabled"; + case Status::kRejectedByConstraint: + return "kRejectedByConstraint"; + } } +Adaptation::Adaptation(int validation_id, + VideoSourceRestrictions restrictions, + VideoAdaptationCounters counters, + VideoStreamInputState input_state, + bool min_pixel_limit_reached) + : validation_id_(validation_id), + status_(Status::kValid), + min_pixel_limit_reached_(min_pixel_limit_reached), + input_state_(std::move(input_state)), + restrictions_(std::move(restrictions)), + counters_(std::move(counters)) {} + Adaptation::Adaptation(int validation_id, Status invalid_status, + VideoStreamInputState input_state, bool min_pixel_limit_reached) : validation_id_(validation_id), status_(invalid_status), - step_(absl::nullopt), - min_pixel_limit_reached_(min_pixel_limit_reached) { + min_pixel_limit_reached_(min_pixel_limit_reached), + input_state_(std::move(input_state)) { RTC_DCHECK_NE(status_, Status::kValid); } @@ -154,398 +189,513 @@ bool Adaptation::min_pixel_limit_reached() const { return min_pixel_limit_reached_; } -const Adaptation::Step& Adaptation::step() const { - RTC_DCHECK_EQ(status_, Status::kValid); - return step_.value(); +const VideoStreamInputState& Adaptation::input_state() const { + return input_state_; } -// VideoSourceRestrictor is responsible for keeping track of current -// VideoSourceRestrictions. 
-class VideoStreamAdapter::VideoSourceRestrictor { - public: - VideoSourceRestrictor() {} - - VideoSourceRestrictions source_restrictions() const { - return source_restrictions_; - } - const VideoAdaptationCounters& adaptation_counters() const { - return adaptations_; - } - void ClearRestrictions() { - source_restrictions_ = VideoSourceRestrictions(); - adaptations_ = VideoAdaptationCounters(); - } - - void set_min_pixels_per_frame(int min_pixels_per_frame) { - min_pixels_per_frame_ = min_pixels_per_frame; - } - - int min_pixels_per_frame() const { return min_pixels_per_frame_; } - - bool CanDecreaseResolutionTo(int target_pixels) { - int max_pixels_per_frame = rtc::dchecked_cast( - source_restrictions_.max_pixels_per_frame().value_or( - std::numeric_limits::max())); - return target_pixels < max_pixels_per_frame && - target_pixels >= min_pixels_per_frame_; - } - - bool CanIncreaseResolutionTo(int target_pixels) { - int max_pixels_wanted = GetIncreasedMaxPixelsWanted(target_pixels); - int max_pixels_per_frame = rtc::dchecked_cast( - source_restrictions_.max_pixels_per_frame().value_or( - std::numeric_limits::max())); - return max_pixels_wanted > max_pixels_per_frame; - } - - bool CanDecreaseFrameRateTo(int max_frame_rate) { - const int fps_wanted = std::max(kMinFrameRateFps, max_frame_rate); - return fps_wanted < rtc::dchecked_cast( - source_restrictions_.max_frame_rate().value_or( - std::numeric_limits::max())); - } - - bool CanIncreaseFrameRateTo(int max_frame_rate) { - return max_frame_rate > rtc::dchecked_cast( - source_restrictions_.max_frame_rate().value_or( - std::numeric_limits::max())); - } - - void ApplyAdaptationStep(const Adaptation::Step& step, - DegradationPreference degradation_preference) { - switch (step.type) { - case Adaptation::StepType::kIncreaseResolution: - IncreaseResolutionTo(step.target); - break; - case Adaptation::StepType::kDecreaseResolution: - DecreaseResolutionTo(step.target); - break; - case Adaptation::StepType::kIncreaseFrameRate: 
- IncreaseFrameRateTo(step.target); - // TODO(https://crbug.com/webrtc/11222): Don't adapt in two steps. - // GetAdaptationUp() should tell us the correct value, but BALANCED - // logic in DecrementFramerate() makes it hard to predict whether this - // will be the last step. Remove the dependency on - // adaptation_counters(). - if (degradation_preference == DegradationPreference::BALANCED && - adaptation_counters().fps_adaptations == 0 && - step.target != std::numeric_limits::max()) { - RTC_LOG(LS_INFO) << "Removing framerate down-scaling setting."; - IncreaseFrameRateTo(std::numeric_limits::max()); - } - break; - case Adaptation::StepType::kDecreaseFrameRate: - DecreaseFrameRateTo(step.target); - break; - } - } - - private: - static int GetIncreasedMaxPixelsWanted(int target_pixels) { - if (target_pixels == std::numeric_limits::max()) - return std::numeric_limits::max(); - // When we decrease resolution, we go down to at most 3/5 of current pixels. - // Thus to increase resolution, we need 3/5 to get back to where we started. - // When going up, the desired max_pixels_per_frame() has to be significantly - // higher than the target because the source's native resolutions might not - // match the target. We pick 12/5 of the target. - // - // (This value was historically 4 times the old target, which is (3/5)*4 of - // the new target - or 12/5 - assuming the target is adjusted according to - // the above steps.) - RTC_DCHECK(target_pixels != std::numeric_limits::max()); - return (target_pixels * 12) / 5; - } - - void DecreaseResolutionTo(int target_pixels) { - RTC_DCHECK(CanDecreaseResolutionTo(target_pixels)); - RTC_LOG(LS_INFO) << "Scaling down resolution, max pixels: " - << target_pixels; - source_restrictions_.set_max_pixels_per_frame( - target_pixels != std::numeric_limits::max() - ? 
absl::optional(target_pixels) - : absl::nullopt); - source_restrictions_.set_target_pixels_per_frame(absl::nullopt); - ++adaptations_.resolution_adaptations; - } - - void IncreaseResolutionTo(int target_pixels) { - RTC_DCHECK(CanIncreaseResolutionTo(target_pixels)); - int max_pixels_wanted = GetIncreasedMaxPixelsWanted(target_pixels); - RTC_LOG(LS_INFO) << "Scaling up resolution, max pixels: " - << max_pixels_wanted; - source_restrictions_.set_max_pixels_per_frame( - max_pixels_wanted != std::numeric_limits::max() - ? absl::optional(max_pixels_wanted) - : absl::nullopt); - source_restrictions_.set_target_pixels_per_frame( - max_pixels_wanted != std::numeric_limits::max() - ? absl::optional(target_pixels) - : absl::nullopt); - --adaptations_.resolution_adaptations; - RTC_DCHECK_GE(adaptations_.resolution_adaptations, 0); - } - - void DecreaseFrameRateTo(int max_frame_rate) { - RTC_DCHECK(CanDecreaseFrameRateTo(max_frame_rate)); - max_frame_rate = std::max(kMinFrameRateFps, max_frame_rate); - RTC_LOG(LS_INFO) << "Scaling down framerate: " << max_frame_rate; - source_restrictions_.set_max_frame_rate( - max_frame_rate != std::numeric_limits::max() - ? absl::optional(max_frame_rate) - : absl::nullopt); - ++adaptations_.fps_adaptations; - } - - void IncreaseFrameRateTo(int max_frame_rate) { - RTC_DCHECK(CanIncreaseFrameRateTo(max_frame_rate)); - RTC_LOG(LS_INFO) << "Scaling up framerate: " << max_frame_rate; - source_restrictions_.set_max_frame_rate( - max_frame_rate != std::numeric_limits::max() - ? absl::optional(max_frame_rate) - : absl::nullopt); - --adaptations_.fps_adaptations; - RTC_DCHECK_GE(adaptations_.fps_adaptations, 0); - } - - // Needed by CanDecreaseResolutionTo(). - int min_pixels_per_frame_ = 0; - // Current State. 
- VideoSourceRestrictions source_restrictions_; - VideoAdaptationCounters adaptations_; -}; - -// static -VideoStreamAdapter::AdaptationRequest::Mode -VideoStreamAdapter::AdaptationRequest::GetModeFromAdaptationAction( - Adaptation::StepType step_type) { - switch (step_type) { - case Adaptation::StepType::kIncreaseResolution: - return AdaptationRequest::Mode::kAdaptUp; - case Adaptation::StepType::kDecreaseResolution: - return AdaptationRequest::Mode::kAdaptDown; - case Adaptation::StepType::kIncreaseFrameRate: - return AdaptationRequest::Mode::kAdaptUp; - case Adaptation::StepType::kDecreaseFrameRate: - return AdaptationRequest::Mode::kAdaptDown; - } +const VideoSourceRestrictions& Adaptation::restrictions() const { + return restrictions_; } -VideoStreamAdapter::VideoStreamAdapter() - : source_restrictor_(std::make_unique()), +const VideoAdaptationCounters& Adaptation::counters() const { + return counters_; +} + +VideoStreamAdapter::VideoStreamAdapter( + VideoStreamInputStateProvider* input_state_provider) + : input_state_provider_(input_state_provider), balanced_settings_(), adaptation_validation_id_(0), degradation_preference_(DegradationPreference::DISABLED), - input_state_(), - last_adaptation_request_(absl::nullopt) {} + awaiting_frame_size_change_(absl::nullopt), + last_video_source_restrictions_() { + sequence_checker_.Detach(); +} -VideoStreamAdapter::~VideoStreamAdapter() {} +VideoStreamAdapter::~VideoStreamAdapter() { + RTC_DCHECK(adaptation_listeners_.empty()) + << "There are listener(s) attached to a VideoStreamAdapter being " + "destroyed."; + RTC_DCHECK(adaptation_constraints_.empty()) + << "There are constaint(s) attached to a VideoStreamAdapter being " + "destroyed."; +} VideoSourceRestrictions VideoStreamAdapter::source_restrictions() const { - return source_restrictor_->source_restrictions(); + RTC_DCHECK_RUN_ON(&sequence_checker_); + return current_restrictions_.restrictions; } const VideoAdaptationCounters& 
VideoStreamAdapter::adaptation_counters() const { - return source_restrictor_->adaptation_counters(); -} - -const BalancedDegradationSettings& VideoStreamAdapter::balanced_settings() - const { - return balanced_settings_; + RTC_DCHECK_RUN_ON(&sequence_checker_); + return current_restrictions_.counters; } void VideoStreamAdapter::ClearRestrictions() { + RTC_DCHECK_RUN_ON(&sequence_checker_); // Invalidate any previously returned Adaptation. + RTC_LOG(INFO) << "Resetting restrictions"; ++adaptation_validation_id_; - source_restrictor_->ClearRestrictions(); - last_adaptation_request_.reset(); + current_restrictions_ = {VideoSourceRestrictions(), + VideoAdaptationCounters()}; + awaiting_frame_size_change_ = absl::nullopt; + BroadcastVideoRestrictionsUpdate(input_state_provider_->InputState(), + nullptr); +} + +void VideoStreamAdapter::AddRestrictionsListener( + VideoSourceRestrictionsListener* restrictions_listener) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(std::find(restrictions_listeners_.begin(), + restrictions_listeners_.end(), + restrictions_listener) == restrictions_listeners_.end()); + restrictions_listeners_.push_back(restrictions_listener); +} + +void VideoStreamAdapter::RemoveRestrictionsListener( + VideoSourceRestrictionsListener* restrictions_listener) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + auto it = std::find(restrictions_listeners_.begin(), + restrictions_listeners_.end(), restrictions_listener); + RTC_DCHECK(it != restrictions_listeners_.end()); + restrictions_listeners_.erase(it); +} + +void VideoStreamAdapter::AddAdaptationListener( + AdaptationListener* adaptation_listener) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(std::find(adaptation_listeners_.begin(), + adaptation_listeners_.end(), + adaptation_listener) == adaptation_listeners_.end()); + adaptation_listeners_.push_back(adaptation_listener); +} + +void VideoStreamAdapter::RemoveAdaptationListener( + AdaptationListener* adaptation_listener) { + 
RTC_DCHECK_RUN_ON(&sequence_checker_); + auto it = std::find(adaptation_listeners_.begin(), + adaptation_listeners_.end(), adaptation_listener); + RTC_DCHECK(it != adaptation_listeners_.end()); + adaptation_listeners_.erase(it); +} + +void VideoStreamAdapter::AddAdaptationConstraint( + AdaptationConstraint* adaptation_constraint) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(std::find(adaptation_constraints_.begin(), + adaptation_constraints_.end(), + adaptation_constraint) == adaptation_constraints_.end()); + adaptation_constraints_.push_back(adaptation_constraint); +} + +void VideoStreamAdapter::RemoveAdaptationConstraint( + AdaptationConstraint* adaptation_constraint) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + auto it = std::find(adaptation_constraints_.begin(), + adaptation_constraints_.end(), adaptation_constraint); + RTC_DCHECK(it != adaptation_constraints_.end()); + adaptation_constraints_.erase(it); } void VideoStreamAdapter::SetDegradationPreference( DegradationPreference degradation_preference) { + RTC_DCHECK_RUN_ON(&sequence_checker_); if (degradation_preference_ == degradation_preference) return; // Invalidate any previously returned Adaptation. ++adaptation_validation_id_; - if (degradation_preference == DegradationPreference::BALANCED || - degradation_preference_ == DegradationPreference::BALANCED) { - ClearRestrictions(); - } + bool balanced_switch = + degradation_preference == DegradationPreference::BALANCED || + degradation_preference_ == DegradationPreference::BALANCED; degradation_preference_ = degradation_preference; + if (balanced_switch) { + // ClearRestrictions() calls BroadcastVideoRestrictionsUpdate(nullptr). + ClearRestrictions(); + } else { + BroadcastVideoRestrictionsUpdate(input_state_provider_->InputState(), + nullptr); + } } -void VideoStreamAdapter::SetInput(VideoStreamInputState input_state) { - // Invalidate any previously returned Adaptation. 
+struct VideoStreamAdapter::RestrictionsOrStateVisitor { + Adaptation operator()(const RestrictionsWithCounters& r) const { + return Adaptation(adaptation_validation_id, r.restrictions, r.counters, + input_state, min_pixel_limit_reached()); + } + Adaptation operator()(const Adaptation::Status& status) const { + RTC_DCHECK_NE(status, Adaptation::Status::kValid); + return Adaptation(adaptation_validation_id, status, input_state, + min_pixel_limit_reached()); + } + bool min_pixel_limit_reached() const { + return input_state.frame_size_pixels().has_value() && + GetLowerResolutionThan(input_state.frame_size_pixels().value()) < + input_state.min_pixels_per_frame(); + } + + const int adaptation_validation_id; + const VideoStreamInputState& input_state; +}; + +Adaptation VideoStreamAdapter::RestrictionsOrStateToAdaptation( + VideoStreamAdapter::RestrictionsOrState step_or_state, + const VideoStreamInputState& input_state) const { + RTC_DCHECK(!step_or_state.valueless_by_exception()); + return absl::visit( + RestrictionsOrStateVisitor{adaptation_validation_id_, input_state}, + step_or_state); +} + +Adaptation VideoStreamAdapter::GetAdaptationUp( + const VideoStreamInputState& input_state, + rtc::scoped_refptr resource) const { + RestrictionsOrState step = GetAdaptationUpStep(input_state); + // If an adaptation proposed, check with the constraints that it is ok. 
+ if (absl::holds_alternative(step)) { + RestrictionsWithCounters restrictions = + absl::get(step); + for (const auto* constraint : adaptation_constraints_) { + if (!constraint->IsAdaptationUpAllowed( + input_state, current_restrictions_.restrictions, + restrictions.restrictions, resource)) { + RTC_LOG(INFO) << "Not adapting up because constraint \"" + << constraint->Name() << "\" disallowed it"; + step = Adaptation::Status::kRejectedByConstraint; + } + } + } + return RestrictionsOrStateToAdaptation(step, input_state); +} + +Adaptation VideoStreamAdapter::GetAdaptationUp( + rtc::scoped_refptr resource) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + RTC_DCHECK(resource); + VideoStreamInputState input_state = input_state_provider_->InputState(); ++adaptation_validation_id_; - input_state_ = input_state; - source_restrictor_->set_min_pixels_per_frame( - input_state_.min_pixels_per_frame()); + Adaptation adaptation = GetAdaptationUp(input_state, resource); + return adaptation; } -Adaptation VideoStreamAdapter::GetAdaptationUp() const { - RTC_DCHECK_NE(degradation_preference_, DegradationPreference::DISABLED); - RTC_DCHECK(input_state_.HasInputFrameSizeAndFramesPerSecond()); +VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::GetAdaptationUpStep( + const VideoStreamInputState& input_state) const { + if (!HasSufficientInputForAdaptation(input_state)) { + return Adaptation::Status::kInsufficientInput; + } // Don't adapt if we're awaiting a previous adaptation to have an effect. 
- bool last_adaptation_was_up = - last_adaptation_request_ && - last_adaptation_request_->mode_ == AdaptationRequest::Mode::kAdaptUp; - if (last_adaptation_was_up && + if (awaiting_frame_size_change_ && + awaiting_frame_size_change_->pixels_increased && degradation_preference_ == DegradationPreference::MAINTAIN_FRAMERATE && - input_state_.frame_size_pixels().value() <= - last_adaptation_request_->input_pixel_count_) { - return Adaptation(adaptation_validation_id_, - Adaptation::Status::kAwaitingPreviousAdaptation); + input_state.frame_size_pixels().value() <= + awaiting_frame_size_change_->frame_size_pixels) { + return Adaptation::Status::kAwaitingPreviousAdaptation; } // Maybe propose targets based on degradation preference. switch (degradation_preference_) { case DegradationPreference::BALANCED: { // Attempt to increase target frame rate. - int target_fps = - balanced_settings_.MaxFps(input_state_.video_codec_type(), - input_state_.frame_size_pixels().value()); - if (source_restrictor_->CanIncreaseFrameRateTo(target_fps)) { - return Adaptation( - adaptation_validation_id_, - Adaptation::Step(Adaptation::StepType::kIncreaseFrameRate, - target_fps)); + RestrictionsOrState increase_frame_rate = + IncreaseFramerate(input_state, current_restrictions_); + if (absl::holds_alternative( + increase_frame_rate)) { + return increase_frame_rate; } - // Scale up resolution. + // else, increase resolution. ABSL_FALLTHROUGH_INTENDED; } case DegradationPreference::MAINTAIN_FRAMERATE: { // Attempt to increase pixel count. 
- int target_pixels = input_state_.frame_size_pixels().value(); - if (source_restrictor_->adaptation_counters().resolution_adaptations == - 1) { - RTC_LOG(LS_INFO) << "Removing resolution down-scaling setting."; - target_pixels = std::numeric_limits::max(); - } - target_pixels = GetHigherResolutionThan(target_pixels); - if (!source_restrictor_->CanIncreaseResolutionTo(target_pixels)) { - return Adaptation(adaptation_validation_id_, - Adaptation::Status::kLimitReached); - } - return Adaptation( - adaptation_validation_id_, - Adaptation::Step(Adaptation::StepType::kIncreaseResolution, - target_pixels)); + return IncreaseResolution(input_state, current_restrictions_); } case DegradationPreference::MAINTAIN_RESOLUTION: { // Scale up framerate. - int target_fps = input_state_.frames_per_second(); - if (source_restrictor_->adaptation_counters().fps_adaptations == 1) { - RTC_LOG(LS_INFO) << "Removing framerate down-scaling setting."; - target_fps = std::numeric_limits::max(); - } - target_fps = GetHigherFrameRateThan(target_fps); - if (!source_restrictor_->CanIncreaseFrameRateTo(target_fps)) { - return Adaptation(adaptation_validation_id_, - Adaptation::Status::kLimitReached); - } - return Adaptation( - adaptation_validation_id_, - Adaptation::Step(Adaptation::StepType::kIncreaseFrameRate, - target_fps)); + return IncreaseFramerate(input_state, current_restrictions_); } case DegradationPreference::DISABLED: - RTC_NOTREACHED(); - return Adaptation(adaptation_validation_id_, - Adaptation::Status::kLimitReached); + return Adaptation::Status::kAdaptationDisabled; } } -Adaptation VideoStreamAdapter::GetAdaptationDown() const { - RTC_DCHECK_NE(degradation_preference_, DegradationPreference::DISABLED); - RTC_DCHECK(input_state_.HasInputFrameSizeAndFramesPerSecond()); - // Don't adapt adaptation is disabled. 
- bool last_adaptation_was_down = - last_adaptation_request_ && - last_adaptation_request_->mode_ == AdaptationRequest::Mode::kAdaptDown; - // Don't adapt if we're awaiting a previous adaptation to have an effect. - if (last_adaptation_was_down && - degradation_preference_ == DegradationPreference::MAINTAIN_FRAMERATE && - input_state_.frame_size_pixels().value() >= - last_adaptation_request_->input_pixel_count_) { - return Adaptation(adaptation_validation_id_, - Adaptation::Status::kAwaitingPreviousAdaptation); - } +Adaptation VideoStreamAdapter::GetAdaptationDown() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + VideoStreamInputState input_state = input_state_provider_->InputState(); + ++adaptation_validation_id_; + return RestrictionsOrStateToAdaptation(GetAdaptationDownStep(input_state), + input_state); +} +VideoStreamAdapter::RestrictionsOrState +VideoStreamAdapter::GetAdaptationDownStep( + const VideoStreamInputState& input_state) const { + if (!HasSufficientInputForAdaptation(input_state)) { + return Adaptation::Status::kInsufficientInput; + } + // Don't adapt if we're awaiting a previous adaptation to have an effect or + // if we switched degradation preference. + if (awaiting_frame_size_change_ && + !awaiting_frame_size_change_->pixels_increased && + degradation_preference_ == DegradationPreference::MAINTAIN_FRAMERATE && + input_state.frame_size_pixels().value() >= + awaiting_frame_size_change_->frame_size_pixels) { + return Adaptation::Status::kAwaitingPreviousAdaptation; + } // Maybe propose targets based on degradation preference. switch (degradation_preference_) { case DegradationPreference::BALANCED: { // Try scale down framerate, if lower. 
- int target_fps = - balanced_settings_.MinFps(input_state_.video_codec_type(), - input_state_.frame_size_pixels().value()); - if (source_restrictor_->CanDecreaseFrameRateTo(target_fps)) { - return Adaptation( - adaptation_validation_id_, - Adaptation::Step(Adaptation::StepType::kDecreaseFrameRate, - target_fps)); + RestrictionsOrState decrease_frame_rate = + DecreaseFramerate(input_state, current_restrictions_); + if (absl::holds_alternative( + decrease_frame_rate)) { + return decrease_frame_rate; } - // Scale down resolution. + // else, decrease resolution. ABSL_FALLTHROUGH_INTENDED; } case DegradationPreference::MAINTAIN_FRAMERATE: { - // Scale down resolution. - int target_pixels = - GetLowerResolutionThan(input_state_.frame_size_pixels().value()); - bool min_pixel_limit_reached = - target_pixels < source_restrictor_->min_pixels_per_frame(); - if (!source_restrictor_->CanDecreaseResolutionTo(target_pixels)) { - return Adaptation(adaptation_validation_id_, - Adaptation::Status::kLimitReached, - min_pixel_limit_reached); - } - return Adaptation( - adaptation_validation_id_, - Adaptation::Step(Adaptation::StepType::kDecreaseResolution, - target_pixels), - min_pixel_limit_reached); + return DecreaseResolution(input_state, current_restrictions_); } case DegradationPreference::MAINTAIN_RESOLUTION: { - int target_fps = GetLowerFrameRateThan(input_state_.frames_per_second()); - if (!source_restrictor_->CanDecreaseFrameRateTo(target_fps)) { - return Adaptation(adaptation_validation_id_, - Adaptation::Status::kLimitReached); - } - return Adaptation( - adaptation_validation_id_, - Adaptation::Step(Adaptation::StepType::kDecreaseFrameRate, - target_fps)); + return DecreaseFramerate(input_state, current_restrictions_); } case DegradationPreference::DISABLED: - RTC_NOTREACHED(); - return Adaptation(adaptation_validation_id_, - Adaptation::Status::kLimitReached); + return Adaptation::Status::kAdaptationDisabled; } } -VideoSourceRestrictions 
VideoStreamAdapter::PeekNextRestrictions( - const Adaptation& adaptation) const { - RTC_DCHECK_EQ(adaptation.validation_id_, adaptation_validation_id_); - if (adaptation.status() != Adaptation::Status::kValid) - return source_restrictor_->source_restrictions(); - VideoSourceRestrictor restrictor_copy = *source_restrictor_; - restrictor_copy.ApplyAdaptationStep(adaptation.step(), - degradation_preference_); - return restrictor_copy.source_restrictions(); +VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::DecreaseResolution( + const VideoStreamInputState& input_state, + const RestrictionsWithCounters& current_restrictions) { + int target_pixels = + GetLowerResolutionThan(input_state.frame_size_pixels().value()); + if (!CanDecreaseResolutionTo(target_pixels, input_state, + current_restrictions.restrictions)) { + return Adaptation::Status::kLimitReached; + } + RestrictionsWithCounters new_restrictions = current_restrictions; + RTC_LOG(LS_INFO) << "Scaling down resolution, max pixels: " << target_pixels; + new_restrictions.restrictions.set_max_pixels_per_frame( + target_pixels != std::numeric_limits::max() + ? 
absl::optional(target_pixels) + : absl::nullopt); + new_restrictions.restrictions.set_target_pixels_per_frame(absl::nullopt); + ++new_restrictions.counters.resolution_adaptations; + return new_restrictions; } -void VideoStreamAdapter::ApplyAdaptation(const Adaptation& adaptation) { +VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::DecreaseFramerate( + const VideoStreamInputState& input_state, + const RestrictionsWithCounters& current_restrictions) const { + int max_frame_rate; + if (degradation_preference_ == DegradationPreference::MAINTAIN_RESOLUTION) { + max_frame_rate = GetLowerFrameRateThan(input_state.frames_per_second()); + } else if (degradation_preference_ == DegradationPreference::BALANCED) { + max_frame_rate = + balanced_settings_.MinFps(input_state.video_codec_type(), + input_state.frame_size_pixels().value()); + } else { + RTC_NOTREACHED(); + max_frame_rate = GetLowerFrameRateThan(input_state.frames_per_second()); + } + if (!CanDecreaseFrameRateTo(max_frame_rate, + current_restrictions.restrictions)) { + return Adaptation::Status::kLimitReached; + } + RestrictionsWithCounters new_restrictions = current_restrictions; + max_frame_rate = std::max(kMinFrameRateFps, max_frame_rate); + RTC_LOG(LS_INFO) << "Scaling down framerate: " << max_frame_rate; + new_restrictions.restrictions.set_max_frame_rate( + max_frame_rate != std::numeric_limits::max() + ? 
absl::optional(max_frame_rate) + : absl::nullopt); + ++new_restrictions.counters.fps_adaptations; + return new_restrictions; +} + +VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::IncreaseResolution( + const VideoStreamInputState& input_state, + const RestrictionsWithCounters& current_restrictions) { + int target_pixels = input_state.frame_size_pixels().value(); + if (current_restrictions.counters.resolution_adaptations == 1) { + RTC_LOG(LS_INFO) << "Removing resolution down-scaling setting."; + target_pixels = std::numeric_limits::max(); + } + target_pixels = GetHigherResolutionThan(target_pixels); + if (!CanIncreaseResolutionTo(target_pixels, + current_restrictions.restrictions)) { + return Adaptation::Status::kLimitReached; + } + int max_pixels_wanted = GetIncreasedMaxPixelsWanted(target_pixels); + RestrictionsWithCounters new_restrictions = current_restrictions; + RTC_LOG(LS_INFO) << "Scaling up resolution, max pixels: " + << max_pixels_wanted; + new_restrictions.restrictions.set_max_pixels_per_frame( + max_pixels_wanted != std::numeric_limits::max() + ? absl::optional(max_pixels_wanted) + : absl::nullopt); + new_restrictions.restrictions.set_target_pixels_per_frame( + max_pixels_wanted != std::numeric_limits::max() + ? 
absl::optional(target_pixels) + : absl::nullopt); + --new_restrictions.counters.resolution_adaptations; + RTC_DCHECK_GE(new_restrictions.counters.resolution_adaptations, 0); + return new_restrictions; +} + +VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::IncreaseFramerate( + const VideoStreamInputState& input_state, + const RestrictionsWithCounters& current_restrictions) const { + int max_frame_rate; + if (degradation_preference_ == DegradationPreference::MAINTAIN_RESOLUTION) { + max_frame_rate = GetHigherFrameRateThan(input_state.frames_per_second()); + } else if (degradation_preference_ == DegradationPreference::BALANCED) { + max_frame_rate = + balanced_settings_.MaxFps(input_state.video_codec_type(), + input_state.frame_size_pixels().value()); + // In BALANCED, the max_frame_rate must be checked before proceeding. This + // is because the MaxFps might be the current Fps and so the balanced + // settings may want to scale up the resolution. + if (!CanIncreaseFrameRateTo(max_frame_rate, + current_restrictions.restrictions)) { + return Adaptation::Status::kLimitReached; + } + } else { + RTC_NOTREACHED(); + max_frame_rate = GetHigherFrameRateThan(input_state.frames_per_second()); + } + if (current_restrictions.counters.fps_adaptations == 1) { + RTC_LOG(LS_INFO) << "Removing framerate down-scaling setting."; + max_frame_rate = std::numeric_limits::max(); + } + if (!CanIncreaseFrameRateTo(max_frame_rate, + current_restrictions.restrictions)) { + return Adaptation::Status::kLimitReached; + } + RTC_LOG(LS_INFO) << "Scaling up framerate: " << max_frame_rate; + RestrictionsWithCounters new_restrictions = current_restrictions; + new_restrictions.restrictions.set_max_frame_rate( + max_frame_rate != std::numeric_limits::max() + ?
absl::optional(max_frame_rate) + : absl::nullopt); + --new_restrictions.counters.fps_adaptations; + RTC_DCHECK_GE(new_restrictions.counters.fps_adaptations, 0); + return new_restrictions; +} + +Adaptation VideoStreamAdapter::GetAdaptDownResolution() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + VideoStreamInputState input_state = input_state_provider_->InputState(); + switch (degradation_preference_) { + case DegradationPreference::DISABLED: + return RestrictionsOrStateToAdaptation( + Adaptation::Status::kAdaptationDisabled, input_state); + case DegradationPreference::MAINTAIN_RESOLUTION: + return RestrictionsOrStateToAdaptation(Adaptation::Status::kLimitReached, + input_state); + case DegradationPreference::MAINTAIN_FRAMERATE: + return GetAdaptationDown(); + case DegradationPreference::BALANCED: { + return RestrictionsOrStateToAdaptation( + GetAdaptDownResolutionStepForBalanced(input_state), input_state); + } + default: + RTC_NOTREACHED(); + } +} + +VideoStreamAdapter::RestrictionsOrState +VideoStreamAdapter::GetAdaptDownResolutionStepForBalanced( + const VideoStreamInputState& input_state) const { + // Adapt twice if the first adaptation did not decrease resolution. + auto first_step = GetAdaptationDownStep(input_state); + if (!absl::holds_alternative(first_step)) { + return first_step; + } + auto first_restrictions = absl::get(first_step); + if (first_restrictions.counters.resolution_adaptations > + current_restrictions_.counters.resolution_adaptations) { + return first_step; + } + // We didn't decrease resolution so force it; amend a resolution reduction + // to the existing framerate reduction in |first_restrictions|. + auto second_step = DecreaseResolution(input_state, first_restrictions); + if (absl::holds_alternative(second_step)) { + return second_step; + } + // If the second step was not successful then settle for the first one.
+ return first_step; +} + +void VideoStreamAdapter::ApplyAdaptation( + const Adaptation& adaptation, + rtc::scoped_refptr resource) { + RTC_DCHECK_RUN_ON(&sequence_checker_); RTC_DCHECK_EQ(adaptation.validation_id_, adaptation_validation_id_); if (adaptation.status() != Adaptation::Status::kValid) return; // Remember the input pixels and fps of this adaptation. Used to avoid // adapting again before this adaptation has had an effect. - last_adaptation_request_.emplace(AdaptationRequest{ - input_state_.frame_size_pixels().value(), - input_state_.frames_per_second(), - AdaptationRequest::GetModeFromAdaptationAction(adaptation.step().type)}); - // Adapt! - source_restrictor_->ApplyAdaptationStep(adaptation.step(), - degradation_preference_); + if (DidIncreaseResolution(current_restrictions_.restrictions, + adaptation.restrictions())) { + awaiting_frame_size_change_.emplace( + true, adaptation.input_state().frame_size_pixels().value()); + } else if (DidDecreaseResolution(current_restrictions_.restrictions, + adaptation.restrictions())) { + awaiting_frame_size_change_.emplace( + false, adaptation.input_state().frame_size_pixels().value()); + } else { + awaiting_frame_size_change_ = absl::nullopt; + } + current_restrictions_ = {adaptation.restrictions(), adaptation.counters()}; + BroadcastVideoRestrictionsUpdate(adaptation.input_state(), resource); } +Adaptation VideoStreamAdapter::GetAdaptationTo( + const VideoAdaptationCounters& counters, + const VideoSourceRestrictions& restrictions) { + // Adapts up/down from the current levels so counters are equal. 
+ RTC_DCHECK_RUN_ON(&sequence_checker_); + VideoStreamInputState input_state = input_state_provider_->InputState(); + return Adaptation(adaptation_validation_id_, restrictions, counters, + input_state, false); +} + +void VideoStreamAdapter::BroadcastVideoRestrictionsUpdate( + const VideoStreamInputState& input_state, + const rtc::scoped_refptr& resource) { + RTC_DCHECK_RUN_ON(&sequence_checker_); + VideoSourceRestrictions filtered = FilterRestrictionsByDegradationPreference( + source_restrictions(), degradation_preference_); + if (last_filtered_restrictions_ == filtered) { + return; + } + for (auto* restrictions_listener : restrictions_listeners_) { + restrictions_listener->OnVideoSourceRestrictionsUpdated( + filtered, current_restrictions_.counters, resource, + source_restrictions()); + } + for (auto* adaptation_listener : adaptation_listeners_) { + adaptation_listener->OnAdaptationApplied( + input_state, last_video_source_restrictions_, + current_restrictions_.restrictions, resource); + } + last_video_source_restrictions_ = current_restrictions_.restrictions; + last_filtered_restrictions_ = filtered; +} + +bool VideoStreamAdapter::HasSufficientInputForAdaptation( + const VideoStreamInputState& input_state) const { + return input_state.HasInputFrameSizeAndFramesPerSecond() && + (degradation_preference_ != + DegradationPreference::MAINTAIN_RESOLUTION || + input_state.frames_per_second() >= kMinFrameRateFps); +} + +VideoStreamAdapter::AwaitingFrameSizeChange::AwaitingFrameSizeChange( + bool pixels_increased, + int frame_size_pixels) + : pixels_increased(pixels_increased), + frame_size_pixels(frame_size_pixels) {} + } // namespace webrtc diff --git a/call/adaptation/video_stream_adapter.h b/call/adaptation/video_stream_adapter.h index a2dea157bb..27699e6aa8 100644 --- a/call/adaptation/video_stream_adapter.h +++ b/call/adaptation/video_stream_adapter.h @@ -12,18 +12,42 @@ #define CALL_ADAPTATION_VIDEO_STREAM_ADAPTER_H_ #include +#include +#include #include 
"absl/types/optional.h" +#include "absl/types/variant.h" +#include "api/adaptation/resource.h" #include "api/rtp_parameters.h" #include "api/video/video_adaptation_counters.h" -#include "call/adaptation/resource.h" +#include "call/adaptation/adaptation_constraint.h" +#include "call/adaptation/adaptation_listener.h" +#include "call/adaptation/degradation_preference_provider.h" #include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_input_state.h" +#include "call/adaptation/video_stream_input_state_provider.h" #include "modules/video_coding/utility/quality_scaler.h" #include "rtc_base/experiments/balanced_degradation_settings.h" +#include "rtc_base/thread_annotations.h" namespace webrtc { +// The listener is responsible for carrying out the reconfiguration of the video +// source such that the VideoSourceRestrictions are fulfilled. +class VideoSourceRestrictionsListener { + public: + virtual ~VideoSourceRestrictionsListener(); + + // The |restrictions| are filtered by degradation preference but not the + // |adaptation_counters|, which are currently only reported for legacy stats + // calculation purposes. + virtual void OnVideoSourceRestrictionsUpdated( + VideoSourceRestrictions restrictions, + const VideoAdaptationCounters& adaptation_counters, + rtc::scoped_refptr reason, + const VideoSourceRestrictions& unfiltered_restrictions) = 0; +}; + class VideoStreamAdapter; extern const int kMinFrameRateFps; @@ -32,15 +56,11 @@ VideoSourceRestrictions FilterRestrictionsByDegradationPreference( VideoSourceRestrictions source_restrictions, DegradationPreference degradation_preference); -VideoAdaptationCounters FilterVideoAdaptationCountersByDegradationPreference( - VideoAdaptationCounters counters, - DegradationPreference degradation_preference); - int GetHigherResolutionThan(int pixel_count); -// Represents one step that the VideoStreamAdapter can take when adapting the -// VideoSourceRestrictions up or down. 
Or, if adaptation is not valid, provides -// a Status code indicating the reason for not adapting. +// Either represents the next VideoSourceRestrictions the VideoStreamAdapter +// will take, or provides a Status code indicating the reason for not adapting +// if the adaptation is not valid. class Adaptation final { public: enum class Status { @@ -54,51 +74,49 @@ class Adaptation final { // adaptation has not yet been reflected in the input resolution or frame // rate; adaptation is refused to avoid "double-adapting". kAwaitingPreviousAdaptation, + // Not enough input. + kInsufficientInput, + // Adaptation disabled via degradation preference. + kAdaptationDisabled, + // Adaptation up was rejected by a VideoAdaptationConstraint. + kRejectedByConstraint, }; - // The status of this Adaptation. To find out how this Adaptation affects - // VideoSourceRestrictions, see VideoStreamAdapter::PeekNextRestrictions(). + static const char* StatusToString(Status status); + Status status() const; + const VideoStreamInputState& input_state() const; + const VideoSourceRestrictions& restrictions() const; + const VideoAdaptationCounters& counters() const; // Used for stats reporting. bool min_pixel_limit_reached() const; private: - // The adapter needs to know about step type and step target in order to - // construct and perform an Adaptation, which is a detail we do not want to - // expose to the public interface. friend class VideoStreamAdapter; - enum class StepType { - kIncreaseResolution, - kDecreaseResolution, - kIncreaseFrameRate, - kDecreaseFrameRate, - }; - - struct Step { - Step(StepType type, int target); - const StepType type; - const int target; // Pixel or frame rate depending on |type|. - }; - - // Constructs with a valid adaptation Step. Status is kValid. - Adaptation(int validation_id, Step step); - Adaptation(int validation_id, Step step, bool min_pixel_limit_reached); + // Constructs with a valid adaptation. Status is kValid. 
+ Adaptation(int validation_id, + VideoSourceRestrictions restrictions, + VideoAdaptationCounters counters, + VideoStreamInputState input_state, + bool min_pixel_limit_reached); // Constructor when adaptation is not valid. Status MUST NOT be kValid. - Adaptation(int validation_id, Status invalid_status); Adaptation(int validation_id, Status invalid_status, + VideoStreamInputState input_state, bool min_pixel_limit_reached); - const Step& step() const; // Only callable if |status_| is kValid. - // An Adaptation can become invalidated if the state of VideoStreamAdapter is // modified before the Adaptation is applied. To guard against this, this ID // has to match VideoStreamAdapter::adaptation_validation_id_ when applied. + // TODO(https://crbug.com/webrtc/11700): Remove the validation_id_. const int validation_id_; const Status status_; - const absl::optional step_; // Only present if |status_| is kValid. const bool min_pixel_limit_reached_; + // Input state when adaptation was made. + const VideoStreamInputState input_state_; + const VideoSourceRestrictions restrictions_; + const VideoAdaptationCounters counters_; }; // Owns the VideoSourceRestriction for a single stream and is responsible for @@ -109,77 +127,144 @@ class Adaptation final { // 3. Modify the stream's restrictions in one of the valid ways. class VideoStreamAdapter { public: - VideoStreamAdapter(); + explicit VideoStreamAdapter( + VideoStreamInputStateProvider* input_state_provider); ~VideoStreamAdapter(); VideoSourceRestrictions source_restrictions() const; const VideoAdaptationCounters& adaptation_counters() const; - // TODO(hbos): Can we get rid of any external dependencies on - // BalancedDegradationPreference? How the adaptor generates possible next - // steps for adaptation should be an implementation detail. Can the relevant - // information be inferred from AdaptationTargetOrReason? 
- const BalancedDegradationSettings& balanced_settings() const; void ClearRestrictions(); + void AddRestrictionsListener( + VideoSourceRestrictionsListener* restrictions_listener); + void RemoveRestrictionsListener( + VideoSourceRestrictionsListener* restrictions_listener); + void AddAdaptationListener(AdaptationListener* adaptation_listener); + void RemoveAdaptationListener(AdaptationListener* adaptation_listener); + void AddAdaptationConstraint(AdaptationConstraint* adaptation_constraint); + void RemoveAdaptationConstraint(AdaptationConstraint* adaptation_constraint); + // TODO(hbos): Setting the degradation preference should not clear // restrictions! This is not defined in the spec and is unexpected, there is a // tiny risk that people would discover and rely on this behavior. void SetDegradationPreference(DegradationPreference degradation_preference); - // The adaptaiton logic depends on these inputs. - void SetInput(VideoStreamInputState input_state); // Returns an adaptation that we are guaranteed to be able to apply, or a // status code indicating the reason why we cannot adapt. - Adaptation GetAdaptationUp() const; - Adaptation GetAdaptationDown() const; - // Returns the restrictions that result from applying the adaptation, without - // actually applying it. If the adaptation is not valid, current restrictions - // are returned. - VideoSourceRestrictions PeekNextRestrictions( - const Adaptation& adaptation) const; - // Updates source_restrictions() based according to the Adaptation. - void ApplyAdaptation(const Adaptation& adaptation); + // TODO(https://crbug.com/webrtc/11771) |resource| is needed by the + // AdaptationConstraint resources. Remove this parameter when it's removed. + Adaptation GetAdaptationUp(rtc::scoped_refptr resource); + Adaptation GetAdaptationDown(); + Adaptation GetAdaptationTo(const VideoAdaptationCounters& counters, + const VideoSourceRestrictions& restrictions); + // Tries to adapt the resolution one step. 
This is used for initial frame + // dropping. Does nothing if the degradation preference is not BALANCED or + // MAINTAIN_FRAMERATE. In the case of BALANCED, it will try twice to reduce + // the resolution. If it fails twice it gives up. + Adaptation GetAdaptDownResolution(); - private: - class VideoSourceRestrictor; + // Updates source_restrictions() the Adaptation. + void ApplyAdaptation(const Adaptation& adaptation, + rtc::scoped_refptr resource); - // The input frame rate and resolution at the time of an adaptation in the - // direction described by |mode_| (up or down). - // TODO(https://crbug.com/webrtc/11393): Can this be renamed? Can this be - // merged with AdaptationTarget? - struct AdaptationRequest { - // The pixel count produced by the source at the time of the adaptation. - int input_pixel_count_; - // Framerate received from the source at the time of the adaptation. - int framerate_fps_; - // Indicates if request was to adapt up or down. - enum class Mode { kAdaptUp, kAdaptDown } mode_; - - // This is a static method rather than an anonymous namespace function due - // to namespace visiblity. - static Mode GetModeFromAdaptationAction(Adaptation::StepType step_type); + struct RestrictionsWithCounters { + VideoSourceRestrictions restrictions; + VideoAdaptationCounters counters; }; - // Owner and modifier of the VideoSourceRestriction of this stream adaptor. 
- const std::unique_ptr source_restrictor_; + private: + void BroadcastVideoRestrictionsUpdate( + const VideoStreamInputState& input_state, + const rtc::scoped_refptr& resource); + + bool HasSufficientInputForAdaptation(const VideoStreamInputState& input_state) + const RTC_RUN_ON(&sequence_checker_); + + using RestrictionsOrState = + absl::variant; + RestrictionsOrState GetAdaptationUpStep( + const VideoStreamInputState& input_state) const + RTC_RUN_ON(&sequence_checker_); + RestrictionsOrState GetAdaptationDownStep( + const VideoStreamInputState& input_state) const + RTC_RUN_ON(&sequence_checker_); + RestrictionsOrState GetAdaptDownResolutionStepForBalanced( + const VideoStreamInputState& input_state) const + RTC_RUN_ON(&sequence_checker_); + + // TODO(https://crbug.com/webrtc/11771) |resource| is needed by the + // AdaptationConstraint resources. Remove this parameter when it's removed. + Adaptation GetAdaptationUp(const VideoStreamInputState& input_state, + rtc::scoped_refptr resource) const + RTC_RUN_ON(&sequence_checker_); + Adaptation GetAdaptationDown(const VideoStreamInputState& input_state) const + RTC_RUN_ON(&sequence_checker_); + + static RestrictionsOrState DecreaseResolution( + const VideoStreamInputState& input_state, + const RestrictionsWithCounters& current_restrictions); + static RestrictionsOrState IncreaseResolution( + const VideoStreamInputState& input_state, + const RestrictionsWithCounters& current_restrictions); + // Framerate methods are member functions because they need internal state + // if the degradation preference is BALANCED. 
+ RestrictionsOrState DecreaseFramerate( + const VideoStreamInputState& input_state, + const RestrictionsWithCounters& current_restrictions) const + RTC_RUN_ON(&sequence_checker_); + RestrictionsOrState IncreaseFramerate( + const VideoStreamInputState& input_state, + const RestrictionsWithCounters& current_restrictions) const + RTC_RUN_ON(&sequence_checker_); + + struct RestrictionsOrStateVisitor; + Adaptation RestrictionsOrStateToAdaptation( + RestrictionsOrState step_or_state, + const VideoStreamInputState& input_state) const + RTC_RUN_ON(&sequence_checker_); + + SequenceChecker sequence_checker_ RTC_GUARDED_BY(&sequence_checker_); + // Gets the input state which is the basis of all adaptations. + // Thread safe. + VideoStreamInputStateProvider* input_state_provider_; // Decides the next adaptation target in DegradationPreference::BALANCED. const BalancedDegradationSettings balanced_settings_; // To guard against applying adaptations that have become invalidated, an // Adaptation that is applied has to have a matching validation ID. - int adaptation_validation_id_; + int adaptation_validation_id_ RTC_GUARDED_BY(&sequence_checker_); // When deciding the next target up or down, different strategies are used // depending on the DegradationPreference. // https://w3c.github.io/mst-content-hint/#dom-rtcdegradationpreference - DegradationPreference degradation_preference_; - VideoStreamInputState input_state_; - // The input frame rate, resolution and adaptation direction of the last - // ApplyAdaptationTarget(). Used to avoid adapting twice if a recent - // adaptation has not had an effect on the input frame rate or resolution yet. + DegradationPreference degradation_preference_ + RTC_GUARDED_BY(&sequence_checker_); + // Used to avoid adapting twice. Stores the resolution at the time of the last + // adaptation. // TODO(hbos): Can we implement a more general "cooldown" mechanism of // resources intead? 
If we already have adapted it seems like we should wait // a while before adapting again, so that we are not acting on usage // measurements that are made obsolete/unreliable by an "ongoing" adaptation. - absl::optional last_adaptation_request_; + struct AwaitingFrameSizeChange { + AwaitingFrameSizeChange(bool pixels_increased, int frame_size); + const bool pixels_increased; + const int frame_size_pixels; + }; + absl::optional awaiting_frame_size_change_ + RTC_GUARDED_BY(&sequence_checker_); + // The previous restrictions value. Starts as unrestricted. + VideoSourceRestrictions last_video_source_restrictions_ + RTC_GUARDED_BY(&sequence_checker_); + VideoSourceRestrictions last_filtered_restrictions_ + RTC_GUARDED_BY(&sequence_checker_); + + std::vector restrictions_listeners_ + RTC_GUARDED_BY(&sequence_checker_); + std::vector adaptation_listeners_ + RTC_GUARDED_BY(&sequence_checker_); + std::vector adaptation_constraints_ + RTC_GUARDED_BY(&sequence_checker_); + + RestrictionsWithCounters current_restrictions_ + RTC_GUARDED_BY(&sequence_checker_); }; } // namespace webrtc diff --git a/call/adaptation/video_stream_adapter_unittest.cc b/call/adaptation/video_stream_adapter_unittest.cc index 55d604e917..a6c8f6ece3 100644 --- a/call/adaptation/video_stream_adapter_unittest.cc +++ b/call/adaptation/video_stream_adapter_unittest.cc @@ -14,12 +14,18 @@ #include #include "absl/types/optional.h" +#include "api/scoped_refptr.h" #include "api/video/video_adaptation_reason.h" #include "api/video_codecs/video_codec.h" #include "api/video_codecs/video_encoder.h" #include "api/video_codecs/video_encoder_config.h" +#include "call/adaptation/adaptation_constraint.h" +#include "call/adaptation/adaptation_listener.h" #include "call/adaptation/encoder_settings.h" +#include "call/adaptation/test/fake_adaptation_listener.h" +#include "call/adaptation/test/fake_resource.h" #include "call/adaptation/video_source_restrictions.h" +#include "call/adaptation/video_stream_input_state.h" 
#include "rtc_base/string_encode.h" #include "test/field_trial.h" #include "test/gmock.h" @@ -28,6 +34,11 @@ namespace webrtc { +using ::testing::_; +using ::testing::DoAll; +using ::testing::Return; +using ::testing::SaveArg; + namespace { const int kBalancedHighResolutionPixels = 1280 * 720; @@ -49,16 +60,27 @@ std::string BalancedFieldTrialConfig() { rtc::ToString(kBalancedHighFrameRateFps) + "/"; } -VideoStreamInputState InputState(int input_pixels, - int input_fps, - int min_pixels_per_frame) { - VideoStreamInputState input_state; - input_state.set_has_input(true); - input_state.set_frame_size_pixels(input_pixels); - input_state.set_frames_per_second(input_fps); - input_state.set_min_pixels_per_frame(min_pixels_per_frame); - return input_state; -} +class FakeVideoStreamInputStateProvider : public VideoStreamInputStateProvider { + public: + FakeVideoStreamInputStateProvider() + : VideoStreamInputStateProvider(nullptr) {} + virtual ~FakeVideoStreamInputStateProvider() = default; + + void SetInputState(int input_pixels, + int input_fps, + int min_pixels_per_frame) { + VideoStreamInputState input_state; + input_state.set_has_input(true); + input_state.set_frame_size_pixels(input_pixels); + input_state.set_frames_per_second(input_fps); + input_state.set_min_pixels_per_frame(min_pixels_per_frame); + fake_input_state_ = input_state; + } + VideoStreamInputState InputState() override { return fake_input_state_; } + + private: + VideoStreamInputState fake_input_state_; +}; // Responsible for adjusting the inputs to VideoStreamAdapter (SetInput), such // as pixels and frame rate, according to the most recent source restrictions. 
@@ -68,15 +90,16 @@ VideoStreamInputState InputState(int input_pixels, class FakeVideoStream { public: FakeVideoStream(VideoStreamAdapter* adapter, + FakeVideoStreamInputStateProvider* provider, int input_pixels, int input_fps, int min_pixels_per_frame) : adapter_(adapter), + provider_(provider), input_pixels_(input_pixels), input_fps_(input_fps), min_pixels_per_frame_(min_pixels_per_frame) { - adapter_->SetInput( - InputState(input_pixels_, input_fps_, min_pixels_per_frame_)); + provider_->SetInputState(input_pixels_, input_fps_, min_pixels_per_frame_); } int input_pixels() const { return input_pixels_; } @@ -85,7 +108,7 @@ class FakeVideoStream { // Performs ApplyAdaptation() followed by SetInput() with input pixels and // frame rate adjusted according to the resulting restrictions. void ApplyAdaptation(Adaptation adaptation) { - adapter_->ApplyAdaptation(adaptation); + adapter_->ApplyAdaptation(adaptation, nullptr); // Update input pixels and fps according to the resulting restrictions. 
auto restrictions = adapter_->source_restrictions(); if (restrictions.target_pixels_per_frame().has_value()) { @@ -99,201 +122,253 @@ class FakeVideoStream { if (restrictions.max_frame_rate().has_value()) { input_fps_ = restrictions.max_frame_rate().value(); } - adapter_->SetInput( - InputState(input_pixels_, input_fps_, min_pixels_per_frame_)); + provider_->SetInputState(input_pixels_, input_fps_, min_pixels_per_frame_); } private: VideoStreamAdapter* adapter_; + FakeVideoStreamInputStateProvider* provider_; int input_pixels_; int input_fps_; int min_pixels_per_frame_; }; +class FakeVideoStreamAdapterListner : public VideoSourceRestrictionsListener { + public: + void OnVideoSourceRestrictionsUpdated( + VideoSourceRestrictions restrictions, + const VideoAdaptationCounters& adaptation_counters, + rtc::scoped_refptr reason, + const VideoSourceRestrictions& unfiltered_restrictions) { + calls_++; + last_restrictions_ = unfiltered_restrictions; + } + + int calls() const { return calls_; } + + VideoSourceRestrictions last_restrictions() const { + return last_restrictions_; + } + + private: + int calls_ = 0; + VideoSourceRestrictions last_restrictions_; +}; + +class MockAdaptationListener : public AdaptationListener { + public: + MOCK_METHOD(void, + OnAdaptationApplied, + (const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after, + rtc::scoped_refptr reason_resource), + (override)); +}; + +class MockAdaptationConstraint : public AdaptationConstraint { + public: + MOCK_METHOD(bool, + IsAdaptationUpAllowed, + (const VideoStreamInputState& input_state, + const VideoSourceRestrictions& restrictions_before, + const VideoSourceRestrictions& restrictions_after, + rtc::scoped_refptr reason_resource), + (const, override)); + + // MOCK_METHOD(std::string, Name, (), (const, override)); + std::string Name() const override { return "MockAdaptationConstraint"; } +}; + } // namespace 
-TEST(VideoStreamAdapterTest, NoRestrictionsByDefault) { - VideoStreamAdapter adapter; - EXPECT_EQ(VideoSourceRestrictions(), adapter.source_restrictions()); - EXPECT_EQ(0, adapter.adaptation_counters().Total()); +class VideoStreamAdapterTest : public ::testing::Test { + public: + VideoStreamAdapterTest() + : field_trials_(BalancedFieldTrialConfig()), + input_state_provider_(), + resource_(FakeResource::Create("FakeResource")), + adapter_(&input_state_provider_) {} + + protected: + webrtc::test::ScopedFieldTrials field_trials_; + FakeVideoStreamInputStateProvider input_state_provider_; + rtc::scoped_refptr resource_; + VideoStreamAdapter adapter_; +}; + +TEST_F(VideoStreamAdapterTest, NoRestrictionsByDefault) { + EXPECT_EQ(VideoSourceRestrictions(), adapter_.source_restrictions()); + EXPECT_EQ(0, adapter_.adaptation_counters().Total()); } -TEST(VideoStreamAdapterTest, MaintainFramerate_DecreasesPixelsToThreeFifths) { +TEST_F(VideoStreamAdapterTest, MaintainFramerate_DecreasesPixelsToThreeFifths) { const int kInputPixels = 1280 * 720; - VideoStreamAdapter adapter; - adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); - adapter.SetInput(InputState(kInputPixels, 30, kDefaultMinPixelsPerFrame)); - Adaptation adaptation = adapter.GetAdaptationDown(); + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + input_state_provider_.SetInputState(kInputPixels, 30, + kDefaultMinPixelsPerFrame); + Adaptation adaptation = adapter_.GetAdaptationDown(); EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); EXPECT_FALSE(adaptation.min_pixel_limit_reached()); - adapter.ApplyAdaptation(adaptation); + adapter_.ApplyAdaptation(adaptation, nullptr); EXPECT_EQ(static_cast((kInputPixels * 3) / 5), - adapter.source_restrictions().max_pixels_per_frame()); + adapter_.source_restrictions().max_pixels_per_frame()); EXPECT_EQ(absl::nullopt, - adapter.source_restrictions().target_pixels_per_frame()); - EXPECT_EQ(absl::nullopt, 
adapter.source_restrictions().max_frame_rate()); - EXPECT_EQ(1, adapter.adaptation_counters().resolution_adaptations); + adapter_.source_restrictions().target_pixels_per_frame()); + EXPECT_EQ(absl::nullopt, adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); } -TEST(VideoStreamAdapterTest, MaintainFramerate_DecreasesPixelsToLimitReached) { +TEST_F(VideoStreamAdapterTest, + MaintainFramerate_DecreasesPixelsToLimitReached) { const int kMinPixelsPerFrame = 640 * 480; - VideoStreamAdapter adapter; - adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); - adapter.SetInput(InputState(kMinPixelsPerFrame + 1, 30, kMinPixelsPerFrame)); + + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + input_state_provider_.SetInputState(kMinPixelsPerFrame + 1, 30, + kMinPixelsPerFrame); // Even though we are above kMinPixelsPerFrame, because adapting down would // have exceeded the limit, we are said to have reached the limit already. // This differs from the frame rate adaptation logic, which would have clamped // to the limit in the first step and reported kLimitReached in the second // step. 
- Adaptation adaptation = adapter.GetAdaptationDown(); + Adaptation adaptation = adapter_.GetAdaptationDown(); EXPECT_EQ(Adaptation::Status::kLimitReached, adaptation.status()); EXPECT_TRUE(adaptation.min_pixel_limit_reached()); } -TEST(VideoStreamAdapterTest, MaintainFramerate_IncreasePixelsToFiveThirds) { - VideoStreamAdapter adapter; - adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); - FakeVideoStream fake_stream(&adapter, 1280 * 720, 30, +TEST_F(VideoStreamAdapterTest, MaintainFramerate_IncreasePixelsToFiveThirds) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, kDefaultMinPixelsPerFrame); // Go down twice, ensuring going back up is still a restricted resolution. - fake_stream.ApplyAdaptation(adapter.GetAdaptationDown()); - fake_stream.ApplyAdaptation(adapter.GetAdaptationDown()); - EXPECT_EQ(2, adapter.adaptation_counters().resolution_adaptations); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(2, adapter_.adaptation_counters().resolution_adaptations); int input_pixels = fake_stream.input_pixels(); // Go up once. The target is 5/3 and the max is 12/5 of the target. 
const int target = (input_pixels * 5) / 3; - fake_stream.ApplyAdaptation(adapter.GetAdaptationUp()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp(resource_)); EXPECT_EQ(static_cast((target * 12) / 5), - adapter.source_restrictions().max_pixels_per_frame()); + adapter_.source_restrictions().max_pixels_per_frame()); EXPECT_EQ(static_cast(target), - adapter.source_restrictions().target_pixels_per_frame()); - EXPECT_EQ(absl::nullopt, adapter.source_restrictions().max_frame_rate()); - EXPECT_EQ(1, adapter.adaptation_counters().resolution_adaptations); + adapter_.source_restrictions().target_pixels_per_frame()); + EXPECT_EQ(absl::nullopt, adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); } -TEST(VideoStreamAdapterTest, MaintainFramerate_IncreasePixelsToUnrestricted) { - VideoStreamAdapter adapter; - adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); - FakeVideoStream fake_stream(&adapter, 1280 * 720, 30, +TEST_F(VideoStreamAdapterTest, MaintainFramerate_IncreasePixelsToUnrestricted) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, kDefaultMinPixelsPerFrame); // We are unrestricted by default and should not be able to adapt up. EXPECT_EQ(Adaptation::Status::kLimitReached, - adapter.GetAdaptationUp().status()); + adapter_.GetAdaptationUp(resource_).status()); // If we go down once and then back up we should not have any restrictions. 
- fake_stream.ApplyAdaptation(adapter.GetAdaptationDown()); - EXPECT_EQ(1, adapter.adaptation_counters().resolution_adaptations); - fake_stream.ApplyAdaptation(adapter.GetAdaptationUp()); - EXPECT_EQ(VideoSourceRestrictions(), adapter.source_restrictions()); - EXPECT_EQ(0, adapter.adaptation_counters().Total()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp(resource_)); + EXPECT_EQ(VideoSourceRestrictions(), adapter_.source_restrictions()); + EXPECT_EQ(0, adapter_.adaptation_counters().Total()); } -TEST(VideoStreamAdapterTest, MaintainResolution_DecreasesFpsToTwoThirds) { +TEST_F(VideoStreamAdapterTest, MaintainResolution_DecreasesFpsToTwoThirds) { const int kInputFps = 30; - VideoStreamAdapter adapter; - adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); - adapter.SetInput( - InputState(1280 * 720, kInputFps, kDefaultMinPixelsPerFrame)); - Adaptation adaptation = adapter.GetAdaptationDown(); + + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + input_state_provider_.SetInputState(1280 * 720, kInputFps, + kDefaultMinPixelsPerFrame); + Adaptation adaptation = adapter_.GetAdaptationDown(); EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); - adapter.ApplyAdaptation(adaptation); + adapter_.ApplyAdaptation(adaptation, nullptr); EXPECT_EQ(absl::nullopt, - adapter.source_restrictions().max_pixels_per_frame()); + adapter_.source_restrictions().max_pixels_per_frame()); EXPECT_EQ(absl::nullopt, - adapter.source_restrictions().target_pixels_per_frame()); + adapter_.source_restrictions().target_pixels_per_frame()); EXPECT_EQ(static_cast((kInputFps * 2) / 3), - adapter.source_restrictions().max_frame_rate()); - EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations); + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(1, 
adapter_.adaptation_counters().fps_adaptations); } -TEST(VideoStreamAdapterTest, MaintainResolution_DecreasesFpsToLimitReached) { - VideoStreamAdapter adapter; - adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); - FakeVideoStream fake_stream(&adapter, 1280 * 720, kMinFrameRateFps + 1, - kDefaultMinPixelsPerFrame); +TEST_F(VideoStreamAdapterTest, MaintainResolution_DecreasesFpsToLimitReached) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, + kMinFrameRateFps + 1, kDefaultMinPixelsPerFrame); // If we are not yet at the limit and the next step would exceed it, the step // is clamped such that we end up exactly on the limit. - Adaptation adaptation = adapter.GetAdaptationDown(); + Adaptation adaptation = adapter_.GetAdaptationDown(); EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); fake_stream.ApplyAdaptation(adaptation); EXPECT_EQ(static_cast(kMinFrameRateFps), - adapter.source_restrictions().max_frame_rate()); - EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations); + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); // Having reached the limit, the next adaptation down is not valid. 
EXPECT_EQ(Adaptation::Status::kLimitReached, - adapter.GetAdaptationDown().status()); + adapter_.GetAdaptationDown().status()); } -TEST(VideoStreamAdapterTest, MaintainResolution_IncreaseFpsToThreeHalves) { - VideoStreamAdapter adapter; - adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); - FakeVideoStream fake_stream(&adapter, 1280 * 720, 30, +TEST_F(VideoStreamAdapterTest, MaintainResolution_IncreaseFpsToThreeHalves) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, kDefaultMinPixelsPerFrame); // Go down twice, ensuring going back up is still a restricted frame rate. - fake_stream.ApplyAdaptation(adapter.GetAdaptationDown()); - fake_stream.ApplyAdaptation(adapter.GetAdaptationDown()); - EXPECT_EQ(2, adapter.adaptation_counters().fps_adaptations); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(2, adapter_.adaptation_counters().fps_adaptations); int input_fps = fake_stream.input_fps(); // Go up once. The target is 3/2 of the input. 
- Adaptation adaptation = adapter.GetAdaptationUp(); + Adaptation adaptation = adapter_.GetAdaptationUp(resource_); EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); fake_stream.ApplyAdaptation(adaptation); EXPECT_EQ(absl::nullopt, - adapter.source_restrictions().max_pixels_per_frame()); + adapter_.source_restrictions().max_pixels_per_frame()); EXPECT_EQ(absl::nullopt, - adapter.source_restrictions().target_pixels_per_frame()); + adapter_.source_restrictions().target_pixels_per_frame()); EXPECT_EQ(static_cast((input_fps * 3) / 2), - adapter.source_restrictions().max_frame_rate()); - EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations); + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); } -TEST(VideoStreamAdapterTest, MaintainResolution_IncreaseFpsToUnrestricted) { - VideoStreamAdapter adapter; - adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); - FakeVideoStream fake_stream(&adapter, 1280 * 720, 30, +TEST_F(VideoStreamAdapterTest, MaintainResolution_IncreaseFpsToUnrestricted) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, kDefaultMinPixelsPerFrame); // We are unrestricted by default and should not be able to adapt up. EXPECT_EQ(Adaptation::Status::kLimitReached, - adapter.GetAdaptationUp().status()); + adapter_.GetAdaptationUp(resource_).status()); // If we go down once and then back up we should not have any restrictions. 
- fake_stream.ApplyAdaptation(adapter.GetAdaptationDown()); - EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations); - fake_stream.ApplyAdaptation(adapter.GetAdaptationUp()); - EXPECT_EQ(VideoSourceRestrictions(), adapter.source_restrictions()); - EXPECT_EQ(0, adapter.adaptation_counters().Total()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp(resource_)); + EXPECT_EQ(VideoSourceRestrictions(), adapter_.source_restrictions()); + EXPECT_EQ(0, adapter_.adaptation_counters().Total()); } -TEST(VideoStreamAdapterTest, Balanced_DecreaseFrameRate) { - webrtc::test::ScopedFieldTrials balanced_field_trials( - BalancedFieldTrialConfig()); - VideoStreamAdapter adapter; - adapter.SetDegradationPreference(DegradationPreference::BALANCED); - adapter.SetInput(InputState(kBalancedMediumResolutionPixels, - kBalancedHighFrameRateFps, - kDefaultMinPixelsPerFrame)); +TEST_F(VideoStreamAdapterTest, Balanced_DecreaseFrameRate) { + adapter_.SetDegradationPreference(DegradationPreference::BALANCED); + input_state_provider_.SetInputState(kBalancedMediumResolutionPixels, + kBalancedHighFrameRateFps, + kDefaultMinPixelsPerFrame); // If our frame rate is higher than the frame rate associated with our // resolution we should try to adapt to the frame rate associated with our // resolution: kBalancedMediumFrameRateFps. 
- Adaptation adaptation = adapter.GetAdaptationDown(); + Adaptation adaptation = adapter_.GetAdaptationDown(); EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); - adapter.ApplyAdaptation(adaptation); + adapter_.ApplyAdaptation(adaptation, nullptr); EXPECT_EQ(absl::nullopt, - adapter.source_restrictions().max_pixels_per_frame()); + adapter_.source_restrictions().max_pixels_per_frame()); EXPECT_EQ(absl::nullopt, - adapter.source_restrictions().target_pixels_per_frame()); + adapter_.source_restrictions().target_pixels_per_frame()); EXPECT_EQ(static_cast(kBalancedMediumFrameRateFps), - adapter.source_restrictions().max_frame_rate()); - EXPECT_EQ(0, adapter.adaptation_counters().resolution_adaptations); - EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations); + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(0, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); } -TEST(VideoStreamAdapterTest, Balanced_DecreaseResolution) { - webrtc::test::ScopedFieldTrials balanced_field_trials( - BalancedFieldTrialConfig()); - VideoStreamAdapter adapter; - adapter.SetDegradationPreference(DegradationPreference::BALANCED); - FakeVideoStream fake_stream(&adapter, kBalancedHighResolutionPixels, - kBalancedHighFrameRateFps, - kDefaultMinPixelsPerFrame); +TEST_F(VideoStreamAdapterTest, Balanced_DecreaseResolution) { + adapter_.SetDegradationPreference(DegradationPreference::BALANCED); + FakeVideoStream fake_stream( + &adapter_, &input_state_provider_, kBalancedHighResolutionPixels, + kBalancedHighFrameRateFps, kDefaultMinPixelsPerFrame); // If we are not below the current resolution's frame rate limit, we should // adapt resolution according to "maintain-framerate" logic (three fifths). // @@ -303,35 +378,35 @@ TEST(VideoStreamAdapterTest, Balanced_DecreaseResolution) { // does prevent the source from going higher, though, so it's technically not // a NO-OP. 
{ - Adaptation adaptation = adapter.GetAdaptationDown(); + Adaptation adaptation = adapter_.GetAdaptationDown(); EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); fake_stream.ApplyAdaptation(adaptation); } EXPECT_EQ(absl::nullopt, - adapter.source_restrictions().max_pixels_per_frame()); + adapter_.source_restrictions().max_pixels_per_frame()); EXPECT_EQ(absl::nullopt, - adapter.source_restrictions().target_pixels_per_frame()); + adapter_.source_restrictions().target_pixels_per_frame()); EXPECT_EQ(static_cast(kBalancedHighFrameRateFps), - adapter.source_restrictions().max_frame_rate()); - EXPECT_EQ(0, adapter.adaptation_counters().resolution_adaptations); - EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations); + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(0, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); // Verify "maintain-framerate" logic the second time we adapt: Frame rate // restrictions remains the same and resolution goes down. 
{ - Adaptation adaptation = adapter.GetAdaptationDown(); + Adaptation adaptation = adapter_.GetAdaptationDown(); EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); fake_stream.ApplyAdaptation(adaptation); } constexpr size_t kReducedPixelsFirstStep = static_cast((kBalancedHighResolutionPixels * 3) / 5); EXPECT_EQ(kReducedPixelsFirstStep, - adapter.source_restrictions().max_pixels_per_frame()); + adapter_.source_restrictions().max_pixels_per_frame()); EXPECT_EQ(absl::nullopt, - adapter.source_restrictions().target_pixels_per_frame()); + adapter_.source_restrictions().target_pixels_per_frame()); EXPECT_EQ(static_cast(kBalancedHighFrameRateFps), - adapter.source_restrictions().max_frame_rate()); - EXPECT_EQ(1, adapter.adaptation_counters().resolution_adaptations); - EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations); + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); // If we adapt again, because the balanced settings' proposed frame rate is // still kBalancedHighFrameRateFps, "maintain-framerate" will trigger again. 
static_assert(kReducedPixelsFirstStep > kBalancedMediumResolutionPixels, @@ -339,18 +414,18 @@ TEST(VideoStreamAdapterTest, Balanced_DecreaseResolution) { "balanced setting resolution"); constexpr size_t kReducedPixelsSecondStep = (kReducedPixelsFirstStep * 3) / 5; { - Adaptation adaptation = adapter.GetAdaptationDown(); + Adaptation adaptation = adapter_.GetAdaptationDown(); EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); fake_stream.ApplyAdaptation(adaptation); } EXPECT_EQ(kReducedPixelsSecondStep, - adapter.source_restrictions().max_pixels_per_frame()); + adapter_.source_restrictions().max_pixels_per_frame()); EXPECT_EQ(absl::nullopt, - adapter.source_restrictions().target_pixels_per_frame()); + adapter_.source_restrictions().target_pixels_per_frame()); EXPECT_EQ(static_cast(kBalancedHighFrameRateFps), - adapter.source_restrictions().max_frame_rate()); - EXPECT_EQ(2, adapter.adaptation_counters().resolution_adaptations); - EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations); + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(2, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); } // Testing when to adapt frame rate and when to adapt resolution is quite @@ -360,14 +435,11 @@ TEST(VideoStreamAdapterTest, Balanced_DecreaseResolution) { // adapt up we don't do it in the reverse order. Instead we always try to adapt // frame rate first according to balanced settings' configs and only when the // frame rate is already achieved do we adjust the resolution. 
-TEST(VideoStreamAdapterTest, Balanced_IncreaseFrameRateAndResolution) { - webrtc::test::ScopedFieldTrials balanced_field_trials( - BalancedFieldTrialConfig()); - VideoStreamAdapter adapter; - adapter.SetDegradationPreference(DegradationPreference::BALANCED); - FakeVideoStream fake_stream(&adapter, kBalancedHighResolutionPixels, - kBalancedHighFrameRateFps, - kDefaultMinPixelsPerFrame); +TEST_F(VideoStreamAdapterTest, Balanced_IncreaseFrameRateAndResolution) { + adapter_.SetDegradationPreference(DegradationPreference::BALANCED); + FakeVideoStream fake_stream( + &adapter_, &input_state_provider_, kBalancedHighResolutionPixels, + kBalancedHighFrameRateFps, kDefaultMinPixelsPerFrame); // The desired starting point of this test is having adapted frame rate twice. // This requires performing a number of adaptations. constexpr size_t kReducedPixelsFirstStep = @@ -385,41 +457,41 @@ TEST(VideoStreamAdapterTest, Balanced_IncreaseFrameRateAndResolution) { "settings' medium pixel configuration"); // The first adaptation should affect the frame rate: See // Balanced_DecreaseResolution for explanation why. - fake_stream.ApplyAdaptation(adapter.GetAdaptationDown()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); EXPECT_EQ(static_cast(kBalancedHighFrameRateFps), - adapter.source_restrictions().max_frame_rate()); + adapter_.source_restrictions().max_frame_rate()); // The next three adaptations affects the resolution, because we have to reach // kBalancedMediumResolutionPixels before a lower frame rate is considered by // BalancedDegradationSettings. The number three is derived from the // static_asserts above. 
- fake_stream.ApplyAdaptation(adapter.GetAdaptationDown()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); EXPECT_EQ(kReducedPixelsFirstStep, - adapter.source_restrictions().max_pixels_per_frame()); - fake_stream.ApplyAdaptation(adapter.GetAdaptationDown()); + adapter_.source_restrictions().max_pixels_per_frame()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); EXPECT_EQ(kReducedPixelsSecondStep, - adapter.source_restrictions().max_pixels_per_frame()); - fake_stream.ApplyAdaptation(adapter.GetAdaptationDown()); + adapter_.source_restrictions().max_pixels_per_frame()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); EXPECT_EQ(kReducedPixelsThirdStep, - adapter.source_restrictions().max_pixels_per_frame()); + adapter_.source_restrictions().max_pixels_per_frame()); // Thus, the next adaptation will reduce frame rate to // kBalancedMediumFrameRateFps. - fake_stream.ApplyAdaptation(adapter.GetAdaptationDown()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); EXPECT_EQ(static_cast(kBalancedMediumFrameRateFps), - adapter.source_restrictions().max_frame_rate()); - EXPECT_EQ(3, adapter.adaptation_counters().resolution_adaptations); - EXPECT_EQ(2, adapter.adaptation_counters().fps_adaptations); + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(3, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(2, adapter_.adaptation_counters().fps_adaptations); // Adapt up! // While our resolution is in the medium-range, the frame rate associated with // the next resolution configuration up ("high") is kBalancedHighFrameRateFps // and "balanced" prefers adapting frame rate if not already applied. 
{ - Adaptation adaptation = adapter.GetAdaptationUp(); + Adaptation adaptation = adapter_.GetAdaptationUp(resource_); EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); fake_stream.ApplyAdaptation(adaptation); EXPECT_EQ(static_cast(kBalancedHighFrameRateFps), - adapter.source_restrictions().max_frame_rate()); - EXPECT_EQ(3, adapter.adaptation_counters().resolution_adaptations); - EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations); + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(3, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); } // Now that we have already achieved the next frame rate up, we act according // to "maintain-framerate". We go back up in resolution. Due to rounding @@ -429,63 +501,60 @@ TEST(VideoStreamAdapterTest, Balanced_IncreaseFrameRateAndResolution) { constexpr size_t kReducedPixelsSecondStepUp = (kReducedPixelsThirdStep * 5) / 3; { - Adaptation adaptation = adapter.GetAdaptationUp(); + Adaptation adaptation = adapter_.GetAdaptationUp(resource_); EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); fake_stream.ApplyAdaptation(adaptation); EXPECT_EQ(kReducedPixelsSecondStepUp, - adapter.source_restrictions().target_pixels_per_frame()); - EXPECT_EQ(2, adapter.adaptation_counters().resolution_adaptations); - EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations); + adapter_.source_restrictions().target_pixels_per_frame()); + EXPECT_EQ(2, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); } // Now that our resolution is back in the high-range, the next frame rate to // try out is "unlimited". 
{ - Adaptation adaptation = adapter.GetAdaptationUp(); + Adaptation adaptation = adapter_.GetAdaptationUp(resource_); EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); fake_stream.ApplyAdaptation(adaptation); - EXPECT_EQ(absl::nullopt, adapter.source_restrictions().max_frame_rate()); - EXPECT_EQ(2, adapter.adaptation_counters().resolution_adaptations); - EXPECT_EQ(0, adapter.adaptation_counters().fps_adaptations); + EXPECT_EQ(absl::nullopt, adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(2, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(0, adapter_.adaptation_counters().fps_adaptations); } // Now only adapting resolution remains. constexpr size_t kReducedPixelsFirstStepUp = (kReducedPixelsSecondStepUp * 5) / 3; { - Adaptation adaptation = adapter.GetAdaptationUp(); + Adaptation adaptation = adapter_.GetAdaptationUp(resource_); EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); fake_stream.ApplyAdaptation(adaptation); EXPECT_EQ(kReducedPixelsFirstStepUp, - adapter.source_restrictions().target_pixels_per_frame()); - EXPECT_EQ(1, adapter.adaptation_counters().resolution_adaptations); - EXPECT_EQ(0, adapter.adaptation_counters().fps_adaptations); + adapter_.source_restrictions().target_pixels_per_frame()); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(0, adapter_.adaptation_counters().fps_adaptations); } // The last step up should make us entirely unrestricted. 
{ - Adaptation adaptation = adapter.GetAdaptationUp(); + Adaptation adaptation = adapter_.GetAdaptationUp(resource_); EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); fake_stream.ApplyAdaptation(adaptation); - EXPECT_EQ(VideoSourceRestrictions(), adapter.source_restrictions()); - EXPECT_EQ(0, adapter.adaptation_counters().Total()); + EXPECT_EQ(VideoSourceRestrictions(), adapter_.source_restrictions()); + EXPECT_EQ(0, adapter_.adaptation_counters().Total()); } } -TEST(VideoStreamAdapterTest, Balanced_LimitReached) { - webrtc::test::ScopedFieldTrials balanced_field_trials( - BalancedFieldTrialConfig()); - VideoStreamAdapter adapter; - adapter.SetDegradationPreference(DegradationPreference::BALANCED); - FakeVideoStream fake_stream(&adapter, kBalancedLowResolutionPixels, - kBalancedLowFrameRateFps, - kDefaultMinPixelsPerFrame); +TEST_F(VideoStreamAdapterTest, Balanced_LimitReached) { + adapter_.SetDegradationPreference(DegradationPreference::BALANCED); + FakeVideoStream fake_stream( + &adapter_, &input_state_provider_, kBalancedLowResolutionPixels, + kBalancedLowFrameRateFps, kDefaultMinPixelsPerFrame); // Attempting to adapt up while unrestricted should result in kLimitReached. EXPECT_EQ(Adaptation::Status::kLimitReached, - adapter.GetAdaptationUp().status()); + adapter_.GetAdaptationUp(resource_).status()); // Adapting down once result in restricted frame rate, in this case we reach // the lowest possible frame rate immediately: kBalancedLowFrameRateFps. 
- fake_stream.ApplyAdaptation(adapter.GetAdaptationDown()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); EXPECT_EQ(static_cast(kBalancedLowFrameRateFps), - adapter.source_restrictions().max_frame_rate()); - EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations); + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); // Any further adaptation must follow "maintain-framerate" rules (these are // covered in more depth by the MaintainFramerate tests). This test does not // assert exactly how resolution is adjusted, only that resolution always @@ -494,117 +563,410 @@ TEST(VideoStreamAdapterTest, Balanced_LimitReached) { bool did_reach_limit = false; // If we have not reached the limit within 5 adaptations something is wrong... for (int i = 0; i < 5; i++) { - Adaptation adaptation = adapter.GetAdaptationDown(); + Adaptation adaptation = adapter_.GetAdaptationDown(); if (adaptation.status() == Adaptation::Status::kLimitReached) { did_reach_limit = true; break; } EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); fake_stream.ApplyAdaptation(adaptation); - EXPECT_LT(adapter.source_restrictions().max_pixels_per_frame().value(), + EXPECT_LT(adapter_.source_restrictions().max_pixels_per_frame().value(), previous_resolution); previous_resolution = - adapter.source_restrictions().max_pixels_per_frame().value(); + adapter_.source_restrictions().max_pixels_per_frame().value(); } EXPECT_TRUE(did_reach_limit); // Frame rate restrictions are the same as before. EXPECT_EQ(static_cast(kBalancedLowFrameRateFps), - adapter.source_restrictions().max_frame_rate()); - EXPECT_EQ(1, adapter.adaptation_counters().fps_adaptations); + adapter_.source_restrictions().max_frame_rate()); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); } // kAwaitingPreviousAdaptation is only supported in "maintain-framerate". 
-TEST(VideoStreamAdapterTest, MaintainFramerate_AwaitingPreviousAdaptationDown) { - VideoStreamAdapter adapter; - adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); - adapter.SetInput(InputState(1280 * 720, 30, kDefaultMinPixelsPerFrame)); +TEST_F(VideoStreamAdapterTest, + MaintainFramerate_AwaitingPreviousAdaptationDown) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + input_state_provider_.SetInputState(1280 * 720, 30, + kDefaultMinPixelsPerFrame); // Adapt down once, but don't update the input. - adapter.ApplyAdaptation(adapter.GetAdaptationDown()); - EXPECT_EQ(1, adapter.adaptation_counters().resolution_adaptations); + adapter_.ApplyAdaptation(adapter_.GetAdaptationDown(), nullptr); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); { // Having performed the adaptation, but not updated the input based on the // new restrictions, adapting again in the same direction will not work. - Adaptation adaptation = adapter.GetAdaptationDown(); + Adaptation adaptation = adapter_.GetAdaptationDown(); EXPECT_EQ(Adaptation::Status::kAwaitingPreviousAdaptation, adaptation.status()); } } // kAwaitingPreviousAdaptation is only supported in "maintain-framerate". -TEST(VideoStreamAdapterTest, MaintainFramerate_AwaitingPreviousAdaptationUp) { - VideoStreamAdapter adapter; - adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); - FakeVideoStream fake_stream(&adapter, 1280 * 720, 30, +TEST_F(VideoStreamAdapterTest, MaintainFramerate_AwaitingPreviousAdaptationUp) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, kDefaultMinPixelsPerFrame); // Perform two adaptation down so that adapting up twice is possible. 
- fake_stream.ApplyAdaptation(adapter.GetAdaptationDown()); - fake_stream.ApplyAdaptation(adapter.GetAdaptationDown()); - EXPECT_EQ(2, adapter.adaptation_counters().resolution_adaptations); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(2, adapter_.adaptation_counters().resolution_adaptations); // Adapt up once, but don't update the input. - adapter.ApplyAdaptation(adapter.GetAdaptationUp()); - EXPECT_EQ(1, adapter.adaptation_counters().resolution_adaptations); + adapter_.ApplyAdaptation(adapter_.GetAdaptationUp(resource_), nullptr); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); { // Having performed the adaptation, but not updated the input based on the // new restrictions, adapting again in the same direction will not work. - Adaptation adaptation = adapter.GetAdaptationUp(); + Adaptation adaptation = adapter_.GetAdaptationUp(resource_); EXPECT_EQ(Adaptation::Status::kAwaitingPreviousAdaptation, adaptation.status()); } } -TEST(VideoStreamAdapterTest, PeekNextRestrictions) { - VideoStreamAdapter adapter; +TEST_F(VideoStreamAdapterTest, + MaintainResolution_AdaptsUpAfterSwitchingDegradationPreference) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Adapt down in fps for later. 
+ fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); + + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp(resource_)); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); + EXPECT_EQ(0, adapter_.adaptation_counters().resolution_adaptations); + + // We should be able to adapt in framerate one last time after the change of + // degradation preference. + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + Adaptation adaptation = adapter_.GetAdaptationUp(resource_); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp(resource_)); + EXPECT_EQ(0, adapter_.adaptation_counters().fps_adaptations); +} + +TEST_F(VideoStreamAdapterTest, + MaintainFramerate_AdaptsUpAfterSwitchingDegradationPreference) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Adapt down in resolution for later. + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); + + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp(resource_)); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); + EXPECT_EQ(0, adapter_.adaptation_counters().fps_adaptations); + + // We should be able to adapt in framerate one last time after the change of + // degradation preference. 
+ adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + Adaptation adaptation = adapter_.GetAdaptationUp(resource_); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationUp(resource_)); + EXPECT_EQ(0, adapter_.adaptation_counters().resolution_adaptations); +} + +TEST_F(VideoStreamAdapterTest, + PendingResolutionIncreaseAllowsAdaptUpAfterSwitchToMaintainResolution) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Adapt fps down so we can adapt up later in the test. + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + // Apply adaptation up but don't update input. + adapter_.ApplyAdaptation(adapter_.GetAdaptationUp(resource_), nullptr); + EXPECT_EQ(Adaptation::Status::kAwaitingPreviousAdaptation, + adapter_.GetAdaptationUp(resource_).status()); + + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + Adaptation adaptation = adapter_.GetAdaptationUp(resource_); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); +} + +TEST_F(VideoStreamAdapterTest, + MaintainFramerate_AdaptsDownAfterSwitchingDegradationPreference) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Adapt down once, should change FPS. + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); + + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + // Adaptation down should apply after the degradation prefs change. 
+ Adaptation adaptation = adapter_.GetAdaptationDown(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adaptation); + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); +} + +TEST_F(VideoStreamAdapterTest, + MaintainResolution_AdaptsDownAfterSwitchingDegradationPreference) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Adapt down once, should change FPS. + fake_stream.ApplyAdaptation(adapter_.GetAdaptationDown()); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); + + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + Adaptation adaptation = adapter_.GetAdaptationDown(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + fake_stream.ApplyAdaptation(adaptation); + + EXPECT_EQ(1, adapter_.adaptation_counters().fps_adaptations); + EXPECT_EQ(1, adapter_.adaptation_counters().resolution_adaptations); +} + +TEST_F( + VideoStreamAdapterTest, + PendingResolutionDecreaseAllowsAdaptDownAfterSwitchToMaintainResolution) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Apply adaptation but don't update the input. 
+ adapter_.ApplyAdaptation(adapter_.GetAdaptationDown(), nullptr); + EXPECT_EQ(Adaptation::Status::kAwaitingPreviousAdaptation, + adapter_.GetAdaptationDown().status()); + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + Adaptation adaptation = adapter_.GetAdaptationDown(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); +} + +TEST_F(VideoStreamAdapterTest, RestrictionBroadcasted) { + FakeVideoStreamAdapterListner listener; + adapter_.AddRestrictionsListener(&listener); + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Not broadcast on invalid ApplyAdaptation. + { + Adaptation adaptation = adapter_.GetAdaptationUp(resource_); + adapter_.ApplyAdaptation(adaptation, nullptr); + EXPECT_EQ(0, listener.calls()); + } + + // Broadcast on ApplyAdaptation. + { + Adaptation adaptation = adapter_.GetAdaptationDown(); + fake_stream.ApplyAdaptation(adaptation); + EXPECT_EQ(1, listener.calls()); + EXPECT_EQ(adaptation.restrictions(), listener.last_restrictions()); + } + + // Broadcast on ClearRestrictions(). + adapter_.ClearRestrictions(); + EXPECT_EQ(2, listener.calls()); + EXPECT_EQ(VideoSourceRestrictions(), listener.last_restrictions()); +} + +TEST_F(VideoStreamAdapterTest, AdaptationHasNextRestrcitions) { // Any non-disabled DegradationPreference will do. - adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); - FakeVideoStream fake_stream(&adapter, 1280 * 720, 30, + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, kDefaultMinPixelsPerFrame); // When adaptation is not possible. 
{ - Adaptation adaptation = adapter.GetAdaptationUp(); + Adaptation adaptation = adapter_.GetAdaptationUp(resource_); EXPECT_EQ(Adaptation::Status::kLimitReached, adaptation.status()); - EXPECT_EQ(adapter.PeekNextRestrictions(adaptation), - adapter.source_restrictions()); + EXPECT_EQ(adaptation.restrictions(), adapter_.source_restrictions()); + EXPECT_EQ(0, adaptation.counters().Total()); } // When we adapt down. { - Adaptation adaptation = adapter.GetAdaptationDown(); + Adaptation adaptation = adapter_.GetAdaptationDown(); EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); - VideoSourceRestrictions next_restrictions = - adapter.PeekNextRestrictions(adaptation); fake_stream.ApplyAdaptation(adaptation); - EXPECT_EQ(next_restrictions, adapter.source_restrictions()); + EXPECT_EQ(adaptation.restrictions(), adapter_.source_restrictions()); + EXPECT_EQ(adaptation.counters(), adapter_.adaptation_counters()); } // When we adapt up. { - Adaptation adaptation = adapter.GetAdaptationUp(); + Adaptation adaptation = adapter_.GetAdaptationUp(resource_); EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); - VideoSourceRestrictions next_restrictions = - adapter.PeekNextRestrictions(adaptation); fake_stream.ApplyAdaptation(adaptation); - EXPECT_EQ(next_restrictions, adapter.source_restrictions()); + EXPECT_EQ(adaptation.restrictions(), adapter_.source_restrictions()); + EXPECT_EQ(adaptation.counters(), adapter_.adaptation_counters()); } } -TEST(VideoStreamAdapterTest, - SetDegradationPreferenceToOrFromBalancedClearsRestrictions) { - VideoStreamAdapter adapter; - adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); - adapter.SetInput(InputState(1280 * 720, 30, kDefaultMinPixelsPerFrame)); - adapter.ApplyAdaptation(adapter.GetAdaptationDown()); - EXPECT_NE(VideoSourceRestrictions(), adapter.source_restrictions()); - EXPECT_NE(0, adapter.adaptation_counters().Total()); +TEST_F(VideoStreamAdapterTest, + 
SetDegradationPreferenceToOrFromBalancedClearsRestrictions) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + input_state_provider_.SetInputState(1280 * 720, 30, + kDefaultMinPixelsPerFrame); + adapter_.ApplyAdaptation(adapter_.GetAdaptationDown(), nullptr); + EXPECT_NE(VideoSourceRestrictions(), adapter_.source_restrictions()); + EXPECT_NE(0, adapter_.adaptation_counters().Total()); // Changing from non-balanced to balanced clears the restrictions. - adapter.SetDegradationPreference(DegradationPreference::BALANCED); - EXPECT_EQ(VideoSourceRestrictions(), adapter.source_restrictions()); - EXPECT_EQ(0, adapter.adaptation_counters().Total()); + adapter_.SetDegradationPreference(DegradationPreference::BALANCED); + EXPECT_EQ(VideoSourceRestrictions(), adapter_.source_restrictions()); + EXPECT_EQ(0, adapter_.adaptation_counters().Total()); // Apply adaptation again. - adapter.ApplyAdaptation(adapter.GetAdaptationDown()); - EXPECT_NE(VideoSourceRestrictions(), adapter.source_restrictions()); - EXPECT_NE(0, adapter.adaptation_counters().Total()); + adapter_.ApplyAdaptation(adapter_.GetAdaptationDown(), nullptr); + EXPECT_NE(VideoSourceRestrictions(), adapter_.source_restrictions()); + EXPECT_NE(0, adapter_.adaptation_counters().Total()); // Changing from balanced to non-balanced clears the restrictions. 
- adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); - EXPECT_EQ(VideoSourceRestrictions(), adapter.source_restrictions()); - EXPECT_EQ(0, adapter.adaptation_counters().Total()); + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + EXPECT_EQ(VideoSourceRestrictions(), adapter_.source_restrictions()); + EXPECT_EQ(0, adapter_.adaptation_counters().Total()); +} + +TEST_F(VideoStreamAdapterTest, + GetAdaptDownResolutionAdaptsResolutionInMaintainFramerate) { + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + input_state_provider_.SetInputState(1280 * 720, 30, + kDefaultMinPixelsPerFrame); + + auto adaptation = adapter_.GetAdaptDownResolution(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + EXPECT_EQ(1, adaptation.counters().resolution_adaptations); + EXPECT_EQ(0, adaptation.counters().fps_adaptations); +} + +TEST_F(VideoStreamAdapterTest, + GetAdaptDownResolutionReturnsWithStatusInDisabledAndMaintainResolution) { + adapter_.SetDegradationPreference(DegradationPreference::DISABLED); + input_state_provider_.SetInputState(1280 * 720, 30, + kDefaultMinPixelsPerFrame); + EXPECT_EQ(Adaptation::Status::kAdaptationDisabled, + adapter_.GetAdaptDownResolution().status()); + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); + EXPECT_EQ(Adaptation::Status::kLimitReached, + adapter_.GetAdaptDownResolution().status()); +} + +TEST_F(VideoStreamAdapterTest, + GetAdaptDownResolutionAdaptsFpsAndResolutionInBalanced) { + // Note: This test depends on BALANCED implementation, but with current + // implementation and input state settings, BALANCED will adapt resolution and + // frame rate once. 
+ adapter_.SetDegradationPreference(DegradationPreference::BALANCED); + input_state_provider_.SetInputState(1280 * 720, 30, + kDefaultMinPixelsPerFrame); + + auto adaptation = adapter_.GetAdaptDownResolution(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + EXPECT_EQ(1, adaptation.counters().resolution_adaptations); + EXPECT_EQ(1, adaptation.counters().fps_adaptations); +} + +TEST_F( + VideoStreamAdapterTest, + GetAdaptDownResolutionAdaptsOnlyResolutionIfFpsAlreadyAdapterInBalanced) { + // Note: This test depends on BALANCED implementation, but with current + // implementation and input state settings, BALANCED will adapt resolution + // only. + adapter_.SetDegradationPreference(DegradationPreference::BALANCED); + input_state_provider_.SetInputState(1280 * 720, 5, kDefaultMinPixelsPerFrame); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + + auto first_adaptation = adapter_.GetAdaptationDown(); + fake_stream.ApplyAdaptation(first_adaptation); + + auto adaptation = adapter_.GetAdaptDownResolution(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + EXPECT_EQ(1, adaptation.counters().resolution_adaptations); + EXPECT_EQ(first_adaptation.counters().fps_adaptations, + adaptation.counters().fps_adaptations); +} + +TEST_F(VideoStreamAdapterTest, + GetAdaptDownResolutionAdaptsOnlyFpsIfResolutionLowInBalanced) { + // Note: This test depends on BALANCED implementation, but with current + // implementation and input state settings, BALANCED will adapt resolution + // only. 
+ adapter_.SetDegradationPreference(DegradationPreference::BALANCED); + input_state_provider_.SetInputState(kDefaultMinPixelsPerFrame, 30, + kDefaultMinPixelsPerFrame); + + auto adaptation = adapter_.GetAdaptDownResolution(); + EXPECT_EQ(Adaptation::Status::kValid, adaptation.status()); + EXPECT_EQ(0, adaptation.counters().resolution_adaptations); + EXPECT_EQ(1, adaptation.counters().fps_adaptations); +} + +TEST_F(VideoStreamAdapterTest, + AdaptationDisabledStatusAlwaysWhenDegradationPreferenceDisabled) { + adapter_.SetDegradationPreference(DegradationPreference::DISABLED); + input_state_provider_.SetInputState(1280 * 720, 30, + kDefaultMinPixelsPerFrame); + EXPECT_EQ(Adaptation::Status::kAdaptationDisabled, + adapter_.GetAdaptationDown().status()); + EXPECT_EQ(Adaptation::Status::kAdaptationDisabled, + adapter_.GetAdaptationUp(resource_).status()); + EXPECT_EQ(Adaptation::Status::kAdaptationDisabled, + adapter_.GetAdaptDownResolution().status()); +} + +TEST_F(VideoStreamAdapterTest, AdaptationListenerReceivesSignalOnAdaptation) { + testing::StrictMock adaptation_listener; + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + adapter_.AddAdaptationListener(&adaptation_listener); + input_state_provider_.SetInputState(1280 * 720, 30, + kDefaultMinPixelsPerFrame); + VideoSourceRestrictions restrictions_before; + VideoSourceRestrictions restrictions_after; + EXPECT_CALL(adaptation_listener, OnAdaptationApplied) + .WillOnce(DoAll(SaveArg<1>(&restrictions_before), + SaveArg<2>(&restrictions_after))); + auto adaptation = adapter_.GetAdaptationDown(); + adapter_.ApplyAdaptation(adaptation, nullptr); + EXPECT_EQ(VideoSourceRestrictions(), restrictions_before); + EXPECT_EQ(adaptation.restrictions(), restrictions_after); + + // Clean up. 
+ adapter_.RemoveAdaptationListener(&adaptation_listener); +} + +TEST_F(VideoStreamAdapterTest, AdaptationConstraintAllowsAdaptationsUp) { + testing::StrictMock adaptation_constraint; + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + adapter_.AddAdaptationConstraint(&adaptation_constraint); + input_state_provider_.SetInputState(1280 * 720, 30, + kDefaultMinPixelsPerFrame); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Adapt down once so we can adapt up later. + auto first_adaptation = adapter_.GetAdaptationDown(); + fake_stream.ApplyAdaptation(first_adaptation); + + EXPECT_CALL( + adaptation_constraint, + IsAdaptationUpAllowed(_, first_adaptation.restrictions(), _, resource_)) + .WillOnce(Return(true)); + EXPECT_EQ(Adaptation::Status::kValid, + adapter_.GetAdaptationUp(resource_).status()); + adapter_.RemoveAdaptationConstraint(&adaptation_constraint); +} + +TEST_F(VideoStreamAdapterTest, AdaptationConstraintDisallowsAdaptationsUp) { + testing::StrictMock adaptation_constraint; + adapter_.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); + adapter_.AddAdaptationConstraint(&adaptation_constraint); + input_state_provider_.SetInputState(1280 * 720, 30, + kDefaultMinPixelsPerFrame); + FakeVideoStream fake_stream(&adapter_, &input_state_provider_, 1280 * 720, 30, + kDefaultMinPixelsPerFrame); + // Adapt down once so we can adapt up later. + auto first_adaptation = adapter_.GetAdaptationDown(); + fake_stream.ApplyAdaptation(first_adaptation); + + EXPECT_CALL( + adaptation_constraint, + IsAdaptationUpAllowed(_, first_adaptation.restrictions(), _, resource_)) + .WillOnce(Return(false)); + EXPECT_EQ(Adaptation::Status::kRejectedByConstraint, + adapter_.GetAdaptationUp(resource_).status()); + adapter_.RemoveAdaptationConstraint(&adaptation_constraint); } // Death tests. 
@@ -614,21 +976,23 @@ TEST(VideoStreamAdapterTest, TEST(VideoStreamAdapterDeathTest, SetDegradationPreferenceInvalidatesAdaptations) { - VideoStreamAdapter adapter; + FakeVideoStreamInputStateProvider input_state_provider; + VideoStreamAdapter adapter(&input_state_provider); adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_FRAMERATE); - adapter.SetInput(InputState(1280 * 720, 30, kDefaultMinPixelsPerFrame)); + input_state_provider.SetInputState(1280 * 720, 30, kDefaultMinPixelsPerFrame); Adaptation adaptation = adapter.GetAdaptationDown(); adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); - EXPECT_DEATH(adapter.ApplyAdaptation(adaptation), ""); + EXPECT_DEATH(adapter.ApplyAdaptation(adaptation, nullptr), ""); } -TEST(VideoStreamAdapterDeathTest, SetInputInvalidatesAdaptations) { - VideoStreamAdapter adapter; +TEST(VideoStreamAdapterDeathTest, AdaptDownInvalidatesAdaptations) { + FakeVideoStreamInputStateProvider input_state_provider; + VideoStreamAdapter adapter(&input_state_provider); adapter.SetDegradationPreference(DegradationPreference::MAINTAIN_RESOLUTION); - adapter.SetInput(InputState(1280 * 720, 30, kDefaultMinPixelsPerFrame)); + input_state_provider.SetInputState(1280 * 720, 30, kDefaultMinPixelsPerFrame); Adaptation adaptation = adapter.GetAdaptationDown(); - adapter.SetInput(InputState(1280 * 720, 31, kDefaultMinPixelsPerFrame)); - EXPECT_DEATH(adapter.PeekNextRestrictions(adaptation), ""); + adapter.GetAdaptationDown(); + EXPECT_DEATH(adapter.ApplyAdaptation(adaptation, nullptr), ""); } #endif // RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) diff --git a/call/adaptation/video_stream_input_state_provider.cc b/call/adaptation/video_stream_input_state_provider.cc index eac30bbfac..3c0a7e3fa2 100644 --- a/call/adaptation/video_stream_input_state_provider.cc +++ b/call/adaptation/video_stream_input_state_provider.cc @@ -16,20 +16,22 @@ 
VideoStreamInputStateProvider::VideoStreamInputStateProvider( VideoStreamEncoderObserver* frame_rate_provider) : frame_rate_provider_(frame_rate_provider) {} +VideoStreamInputStateProvider::~VideoStreamInputStateProvider() {} + void VideoStreamInputStateProvider::OnHasInputChanged(bool has_input) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); input_state_.set_has_input(has_input); } void VideoStreamInputStateProvider::OnFrameSizeObserved(int frame_size_pixels) { RTC_DCHECK_GT(frame_size_pixels, 0); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); input_state_.set_frame_size_pixels(frame_size_pixels); } void VideoStreamInputStateProvider::OnEncoderSettingsChanged( EncoderSettings encoder_settings) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); input_state_.set_video_codec_type( encoder_settings.encoder_config().codec_type); input_state_.set_min_pixels_per_frame( @@ -39,7 +41,7 @@ void VideoStreamInputStateProvider::OnEncoderSettingsChanged( VideoStreamInputState VideoStreamInputStateProvider::InputState() { // GetInputFrameRate() is thread-safe. 
int input_fps = frame_rate_provider_->GetInputFrameRate(); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); input_state_.set_frames_per_second(input_fps); return input_state_; } diff --git a/call/adaptation/video_stream_input_state_provider.h b/call/adaptation/video_stream_input_state_provider.h index 7093e97fdd..f4a3e0bfa0 100644 --- a/call/adaptation/video_stream_input_state_provider.h +++ b/call/adaptation/video_stream_input_state_provider.h @@ -14,7 +14,7 @@ #include "api/video/video_stream_encoder_observer.h" #include "call/adaptation/encoder_settings.h" #include "call/adaptation/video_stream_input_state.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { @@ -22,17 +22,18 @@ class VideoStreamInputStateProvider { public: VideoStreamInputStateProvider( VideoStreamEncoderObserver* frame_rate_provider); + virtual ~VideoStreamInputStateProvider(); void OnHasInputChanged(bool has_input); void OnFrameSizeObserved(int frame_size_pixels); void OnEncoderSettingsChanged(EncoderSettings encoder_settings); - VideoStreamInputState InputState(); + virtual VideoStreamInputState InputState(); private: - mutable rtc::CriticalSection crit_; + Mutex mutex_; VideoStreamEncoderObserver* const frame_rate_provider_; - VideoStreamInputState input_state_ RTC_GUARDED_BY(crit_); + VideoStreamInputState input_state_ RTC_GUARDED_BY(mutex_); }; } // namespace webrtc diff --git a/call/audio_send_stream.cc b/call/audio_send_stream.cc index ddcba031a7..765ece7eb9 100644 --- a/call/audio_send_stream.cc +++ b/call/audio_send_stream.cc @@ -75,6 +75,8 @@ std::string AudioSendStream::Config::SendCodecSpec::ToString() const { ss << ", transport_cc_enabled: " << (transport_cc_enabled ? "true" : "false"); ss << ", cng_payload_type: " << (cng_payload_type ? rtc::ToString(*cng_payload_type) : ""); + ss << ", red_payload_type: " + << (red_payload_type ? 
rtc::ToString(*red_payload_type) : ""); ss << ", payload_type: " << payload_type; ss << ", format: " << rtc::ToString(format); ss << '}'; diff --git a/call/audio_send_stream.h b/call/audio_send_stream.h index 86cea38938..d21dff4889 100644 --- a/call/audio_send_stream.h +++ b/call/audio_send_stream.h @@ -140,6 +140,7 @@ class AudioSendStream : public AudioSender { bool nack_enabled = false; bool transport_cc_enabled = false; absl::optional cng_payload_type; + absl::optional red_payload_type; // If unset, use the encoder's default target bitrate. absl::optional target_bitrate_bps; }; diff --git a/call/bitrate_allocator_unittest.cc b/call/bitrate_allocator_unittest.cc index 1479a4714a..00fb236948 100644 --- a/call/bitrate_allocator_unittest.cc +++ b/call/bitrate_allocator_unittest.cc @@ -47,7 +47,10 @@ auto AllocationLimitsEq(uint32_t min_allocatable_rate_bps, class MockLimitObserver : public BitrateAllocator::LimitObserver { public: - MOCK_METHOD1(OnAllocationLimitsChanged, void(BitrateAllocationLimits)); + MOCK_METHOD(void, + OnAllocationLimitsChanged, + (BitrateAllocationLimits), + (override)); }; class TestBitrateObserver : public BitrateAllocatorObserver { diff --git a/call/bitrate_estimator_tests.cc b/call/bitrate_estimator_tests.cc index 50da12bbdf..cd052dc331 100644 --- a/call/bitrate_estimator_tests.cc +++ b/call/bitrate_estimator_tests.cc @@ -19,6 +19,7 @@ #include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/logging.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/thread_annotations.h" #include "test/call_test.h" @@ -49,7 +50,7 @@ class LogObserver { class Callback : public rtc::LogSink { public: void OnLogMessage(const std::string& message) override { - rtc::CritScope lock(&crit_sect_); + MutexLock lock(&mutex_); // Ignore log lines that are due to missing AST extensions, these are // logged when we switch back from AST to TOF until the wrapping bitrate // estimator gives up 
on using AST. @@ -78,15 +79,15 @@ class LogObserver { bool Wait() { return done_.Wait(test::CallTest::kDefaultTimeoutMs); } void PushExpectedLogLine(const std::string& expected_log_line) { - rtc::CritScope lock(&crit_sect_); + MutexLock lock(&mutex_); expected_log_lines_.push_back(expected_log_line); } private: typedef std::list Strings; - rtc::CriticalSection crit_sect_; - Strings received_log_lines_ RTC_GUARDED_BY(crit_sect_); - Strings expected_log_lines_ RTC_GUARDED_BY(crit_sect_); + Mutex mutex_; + Strings received_log_lines_ RTC_GUARDED_BY(mutex_); + Strings expected_log_lines_ RTC_GUARDED_BY(mutex_); rtc::Event done_; }; diff --git a/call/call.cc b/call/call.cc index 4068db9f00..ace83bee9f 100644 --- a/call/call.cc +++ b/call/call.cc @@ -25,6 +25,7 @@ #include "audio/audio_receive_stream.h" #include "audio/audio_send_stream.h" #include "audio/audio_state.h" +#include "call/adaptation/broadcast_resource_listener.h" #include "call/bitrate_allocator.h" #include "call/flexfec_receive_stream_impl.h" #include "call/receive_time_calculator.h" @@ -49,8 +50,8 @@ #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/synchronization/rw_lock_wrapper.h" #include "rtc_base/synchronization/sequence_checker.h" +#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" @@ -168,6 +169,47 @@ TaskQueueBase* GetCurrentTaskQueueOrThread() { namespace internal { +// Wraps an injected resource in a BroadcastResourceListener and handles adding +// and removing adapter resources to individual VideoSendStreams. 
+class ResourceVideoSendStreamForwarder { + public: + ResourceVideoSendStreamForwarder( + rtc::scoped_refptr resource) + : broadcast_resource_listener_(resource) { + broadcast_resource_listener_.StartListening(); + } + ~ResourceVideoSendStreamForwarder() { + RTC_DCHECK(adapter_resources_.empty()); + broadcast_resource_listener_.StopListening(); + } + + rtc::scoped_refptr Resource() const { + return broadcast_resource_listener_.SourceResource(); + } + + void OnCreateVideoSendStream(VideoSendStream* video_send_stream) { + RTC_DCHECK(adapter_resources_.find(video_send_stream) == + adapter_resources_.end()); + auto adapter_resource = + broadcast_resource_listener_.CreateAdapterResource(); + video_send_stream->AddAdaptationResource(adapter_resource); + adapter_resources_.insert( + std::make_pair(video_send_stream, adapter_resource)); + } + + void OnDestroyVideoSendStream(VideoSendStream* video_send_stream) { + auto it = adapter_resources_.find(video_send_stream); + RTC_DCHECK(it != adapter_resources_.end()); + broadcast_resource_listener_.RemoveAdapterResource(it->second); + adapter_resources_.erase(it); + } + + private: + BroadcastResourceListener broadcast_resource_listener_; + std::map> + adapter_resources_; +}; + class Call final : public webrtc::Call, public PacketReceiver, public RecoveredPacketReceiver, @@ -177,7 +219,7 @@ class Call final : public webrtc::Call, Call(Clock* clock, const Call::Config& config, std::unique_ptr transport_send, - std::unique_ptr module_process_thread, + rtc::scoped_refptr module_process_thread, TaskQueueFactory* task_queue_factory); ~Call() override; @@ -212,6 +254,8 @@ class Call final : public webrtc::Call, void DestroyFlexfecReceiveStream( FlexfecReceiveStream* receive_stream) override; + void AddAdaptationResource(rtc::scoped_refptr resource) override; + RtpTransportControllerSendInterface* GetTransportControllerSend() override; Stats GetStats() const override; @@ -243,54 +287,54 @@ class Call final : public webrtc::Call, private: 
DeliveryStatus DeliverRtcp(MediaType media_type, const uint8_t* packet, - size_t length); + size_t length) + RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); DeliveryStatus DeliverRtp(MediaType media_type, rtc::CopyOnWriteBuffer packet, - int64_t packet_time_us); + int64_t packet_time_us) + RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); void ConfigureSync(const std::string& sync_group) - RTC_EXCLUSIVE_LOCKS_REQUIRED(receive_crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); void NotifyBweOfReceivedPacket(const RtpPacketReceived& packet, MediaType media_type) - RTC_SHARED_LOCKS_REQUIRED(receive_crit_); + RTC_SHARED_LOCKS_REQUIRED(worker_thread_); void UpdateSendHistograms(Timestamp first_sent_packet) - RTC_EXCLUSIVE_LOCKS_REQUIRED(&bitrate_crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(worker_thread_); void UpdateReceiveHistograms(); void UpdateHistograms(); void UpdateAggregateNetworkState(); void RegisterRateObserver(); - rtc::TaskQueue* network_queue() const { + rtc::TaskQueue* send_transport_queue() const { return transport_send_ptr_->GetWorkerQueue(); } Clock* const clock_; TaskQueueFactory* const task_queue_factory_; + TaskQueueBase* const worker_thread_; const int num_cpu_cores_; - const std::unique_ptr module_process_thread_; + const rtc::scoped_refptr module_process_thread_; const std::unique_ptr call_stats_; const std::unique_ptr bitrate_allocator_; Call::Config config_; - SequenceChecker configuration_sequence_checker_; - SequenceChecker worker_sequence_checker_; NetworkState audio_network_state_; NetworkState video_network_state_; - bool aggregate_network_up_ RTC_GUARDED_BY(configuration_sequence_checker_); + bool aggregate_network_up_ RTC_GUARDED_BY(worker_thread_); - std::unique_ptr receive_crit_; // Audio, Video, and FlexFEC receive streams are owned by the client that // creates them. 
std::set audio_receive_streams_ - RTC_GUARDED_BY(receive_crit_); + RTC_GUARDED_BY(worker_thread_); std::set video_receive_streams_ - RTC_GUARDED_BY(receive_crit_); + RTC_GUARDED_BY(worker_thread_); std::map sync_stream_mapping_ - RTC_GUARDED_BY(receive_crit_); + RTC_GUARDED_BY(worker_thread_); // TODO(nisse): Should eventually be injected at creation, // with a single object in the bundled case. @@ -324,25 +368,26 @@ class Call final : public webrtc::Call, const bool use_send_side_bwe; }; std::map receive_rtp_config_ - RTC_GUARDED_BY(receive_crit_); + RTC_GUARDED_BY(worker_thread_); - std::unique_ptr send_crit_; // Audio and Video send streams are owned by the client that creates them. std::map audio_send_ssrcs_ - RTC_GUARDED_BY(send_crit_); + RTC_GUARDED_BY(worker_thread_); std::map video_send_ssrcs_ - RTC_GUARDED_BY(send_crit_); - std::set video_send_streams_ RTC_GUARDED_BY(send_crit_); + RTC_GUARDED_BY(worker_thread_); + std::set video_send_streams_ RTC_GUARDED_BY(worker_thread_); + + // Each forwarder wraps an adaptation resource that was added to the call. 
+ std::vector> + adaptation_resource_forwarders_ RTC_GUARDED_BY(worker_thread_); using RtpStateMap = std::map; - RtpStateMap suspended_audio_send_ssrcs_ - RTC_GUARDED_BY(configuration_sequence_checker_); - RtpStateMap suspended_video_send_ssrcs_ - RTC_GUARDED_BY(configuration_sequence_checker_); + RtpStateMap suspended_audio_send_ssrcs_ RTC_GUARDED_BY(worker_thread_); + RtpStateMap suspended_video_send_ssrcs_ RTC_GUARDED_BY(worker_thread_); using RtpPayloadStateMap = std::map; RtpPayloadStateMap suspended_video_payload_states_ - RTC_GUARDED_BY(configuration_sequence_checker_); + RTC_GUARDED_BY(worker_thread_); webrtc::RtcEventLog* event_log_; @@ -358,17 +403,14 @@ class Call final : public webrtc::Call, absl::optional first_received_rtp_video_ms_; absl::optional last_received_rtp_video_ms_; - rtc::CriticalSection last_bandwidth_bps_crit_; - uint32_t last_bandwidth_bps_ RTC_GUARDED_BY(&last_bandwidth_bps_crit_); + uint32_t last_bandwidth_bps_ RTC_GUARDED_BY(worker_thread_); // TODO(holmer): Remove this lock once BitrateController no longer calls // OnNetworkChanged from multiple threads. 
- rtc::CriticalSection bitrate_crit_; - uint32_t min_allocated_send_bitrate_bps_ - RTC_GUARDED_BY(&worker_sequence_checker_); - uint32_t configured_max_padding_bitrate_bps_ RTC_GUARDED_BY(&bitrate_crit_); + uint32_t min_allocated_send_bitrate_bps_ RTC_GUARDED_BY(worker_thread_); + uint32_t configured_max_padding_bitrate_bps_ RTC_GUARDED_BY(worker_thread_); AvgCounter estimated_send_bitrate_kbps_counter_ - RTC_GUARDED_BY(&bitrate_crit_); - AvgCounter pacer_bitrate_kbps_counter_ RTC_GUARDED_BY(&bitrate_crit_); + RTC_GUARDED_BY(worker_thread_); + AvgCounter pacer_bitrate_kbps_counter_ RTC_GUARDED_BY(worker_thread_); ReceiveSideCongestionController receive_side_cc_; @@ -377,6 +419,11 @@ class Call final : public webrtc::Call, const std::unique_ptr video_send_delay_stats_; const int64_t start_ms_; + // Note that |task_safety_| needs to be at a greater scope than the task queue + // owned by |transport_send_| since calls might arrive on the network thread + // while Call is being deleted and the task queue is being torn down. + ScopedTaskSafety task_safety_; + // Caches transport_send_.get(), to avoid racing with destructor. // Note that this is declared before transport_send_ to ensure that it is not // invalidated until no more tasks can be running on the transport_send_ task @@ -386,8 +433,8 @@ class Call final : public webrtc::Call, // last ensures that it is destroyed first and any running tasks are finished. 
std::unique_ptr transport_send_; - bool is_target_rate_observer_registered_ - RTC_GUARDED_BY(&configuration_sequence_checker_) = false; + bool is_target_rate_observer_registered_ RTC_GUARDED_BY(worker_thread_) = + false; RTC_DISALLOW_COPY_AND_ASSIGN(Call); }; @@ -407,14 +454,21 @@ std::string Call::Stats::ToString(int64_t time_ms) const { } Call* Call::Create(const Call::Config& config) { - return Create(config, Clock::GetRealTimeClock(), - ProcessThread::Create("ModuleProcessThread"), + rtc::scoped_refptr call_thread = + SharedModuleThread::Create(ProcessThread::Create("ModuleProcessThread"), + nullptr); + return Create(config, std::move(call_thread)); +} + +Call* Call::Create(const Call::Config& config, + rtc::scoped_refptr call_thread) { + return Create(config, Clock::GetRealTimeClock(), std::move(call_thread), ProcessThread::Create("PacerThread")); } Call* Call::Create(const Call::Config& config, Clock* clock, - std::unique_ptr call_thread, + rtc::scoped_refptr call_thread, std::unique_ptr pacer_thread) { RTC_DCHECK(config.task_queue_factory); return new internal::Call( @@ -426,6 +480,98 @@ Call* Call::Create(const Call::Config& config, std::move(call_thread), config.task_queue_factory); } +class SharedModuleThread::Impl { + public: + Impl(std::unique_ptr process_thread, + std::function on_one_ref_remaining) + : module_thread_(std::move(process_thread)), + on_one_ref_remaining_(std::move(on_one_ref_remaining)) {} + + void EnsureStarted() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + if (started_) + return; + started_ = true; + module_thread_->Start(); + } + + ProcessThread* process_thread() { + RTC_DCHECK_RUN_ON(&sequence_checker_); + return module_thread_.get(); + } + + void AddRef() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + ++ref_count_; + } + + rtc::RefCountReleaseStatus Release() const { + RTC_DCHECK_RUN_ON(&sequence_checker_); + --ref_count_; + + if (ref_count_ == 0) { + module_thread_->Stop(); + return rtc::RefCountReleaseStatus::kDroppedLastRef; 
+ } + + if (ref_count_ == 1 && on_one_ref_remaining_) { + auto moved_fn = std::move(on_one_ref_remaining_); + // NOTE: after this function returns, chances are that |this| has been + // deleted - do not touch any member variables. + // If the owner of the last reference implements a lambda that releases + // that last reference inside of the callback (which is legal according + // to this implementation), we will recursively enter Release() above, + // call Stop() and release the last reference. + moved_fn(); + } + + return rtc::RefCountReleaseStatus::kOtherRefsRemained; + } + + private: + SequenceChecker sequence_checker_; + mutable int ref_count_ RTC_GUARDED_BY(sequence_checker_) = 0; + std::unique_ptr const module_thread_; + std::function const on_one_ref_remaining_; + bool started_ = false; +}; + +SharedModuleThread::SharedModuleThread( + std::unique_ptr process_thread, + std::function on_one_ref_remaining) + : impl_(std::make_unique(std::move(process_thread), + std::move(on_one_ref_remaining))) {} + +SharedModuleThread::~SharedModuleThread() = default; + +// static + +rtc::scoped_refptr SharedModuleThread::Create( + std::unique_ptr process_thread, + std::function on_one_ref_remaining) { + return new SharedModuleThread(std::move(process_thread), + std::move(on_one_ref_remaining)); +} + +void SharedModuleThread::EnsureStarted() { + impl_->EnsureStarted(); +} + +ProcessThread* SharedModuleThread::process_thread() { + return impl_->process_thread(); +} + +void SharedModuleThread::AddRef() const { + impl_->AddRef(); +} + +rtc::RefCountReleaseStatus SharedModuleThread::Release() const { + auto ret = impl_->Release(); + if (ret == rtc::RefCountReleaseStatus::kDroppedLastRef) + delete this; + return ret; +} + // This method here to avoid subclasses has to implement this method. // Call perf test will use Internal::Call::CreateVideoSendStream() to inject // FecController. 
@@ -441,20 +587,19 @@ namespace internal { Call::Call(Clock* clock, const Call::Config& config, std::unique_ptr transport_send, - std::unique_ptr module_process_thread, + rtc::scoped_refptr module_process_thread, TaskQueueFactory* task_queue_factory) : clock_(clock), task_queue_factory_(task_queue_factory), + worker_thread_(GetCurrentTaskQueueOrThread()), num_cpu_cores_(CpuInfo::DetectNumberOfCores()), module_process_thread_(std::move(module_process_thread)), - call_stats_(new CallStats(clock_, GetCurrentTaskQueueOrThread())), + call_stats_(new CallStats(clock_, worker_thread_)), bitrate_allocator_(new BitrateAllocator(this)), config_(config), audio_network_state_(kNetworkDown), video_network_state_(kNetworkDown), aggregate_network_up_(false), - receive_crit_(RWLockWrapper::CreateRWLock()), - send_crit_(RWLockWrapper::CreateRWLock()), event_log_(config.event_log), received_bytes_per_second_counter_(clock_, nullptr, true), received_audio_bytes_per_second_counter_(clock_, nullptr, true), @@ -473,17 +618,18 @@ Call::Call(Clock* clock, transport_send_(std::move(transport_send)) { RTC_DCHECK(config.event_log != nullptr); RTC_DCHECK(config.trials != nullptr); - worker_sequence_checker_.Detach(); + RTC_DCHECK(worker_thread_->IsCurrent()); call_stats_->RegisterStatsObserver(&receive_side_cc_); - module_process_thread_->RegisterModule( + module_process_thread_->process_thread()->RegisterModule( receive_side_cc_.GetRemoteBitrateEstimator(true), RTC_FROM_HERE); - module_process_thread_->RegisterModule(&receive_side_cc_, RTC_FROM_HERE); + module_process_thread_->process_thread()->RegisterModule(&receive_side_cc_, + RTC_FROM_HERE); } Call::~Call() { - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_CHECK(audio_send_ssrcs_.empty()); RTC_CHECK(video_send_ssrcs_.empty()); @@ -491,10 +637,9 @@ Call::~Call() { RTC_CHECK(audio_receive_streams_.empty()); RTC_CHECK(video_receive_streams_.empty()); - module_process_thread_->Stop(); - 
module_process_thread_->DeRegisterModule( + module_process_thread_->process_thread()->DeRegisterModule( receive_side_cc_.GetRemoteBitrateEstimator(true)); - module_process_thread_->DeRegisterModule(&receive_side_cc_); + module_process_thread_->process_thread()->DeRegisterModule(&receive_side_cc_); call_stats_->DeregisterStatsObserver(&receive_side_cc_); absl::optional first_sent_packet_ms = @@ -503,7 +648,6 @@ Call::~Call() { // Only update histograms after process threads have been shut down, so that // they won't try to concurrently update stats. if (first_sent_packet_ms) { - rtc::CritScope lock(&bitrate_crit_); UpdateSendHistograms(*first_sent_packet_ms); } @@ -512,7 +656,7 @@ Call::~Call() { } void Call::RegisterRateObserver() { - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); if (is_target_rate_observer_registered_) return; @@ -523,11 +667,11 @@ void Call::RegisterRateObserver() { // off being kicked off on request rather than in the ctor. 
transport_send_ptr_->RegisterTargetTransferRateObserver(this); - module_process_thread_->Start(); + module_process_thread_->EnsureStarted(); } void Call::SetClientBitratePreferences(const BitrateSettings& preferences) { - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); GetTransportControllerSend()->SetClientBitratePreferences(preferences); } @@ -609,14 +753,14 @@ void Call::UpdateReceiveHistograms() { } PacketReceiver* Call::Receiver() { - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); return this; } webrtc::AudioSendStream* Call::CreateAudioSendStream( const webrtc::AudioSendStream::Config& config) { TRACE_EVENT0("webrtc", "Call::CreateAudioSendStream"); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); RegisterRateObserver(); @@ -632,30 +776,26 @@ webrtc::AudioSendStream* Call::CreateAudioSendStream( AudioSendStream* send_stream = new AudioSendStream( clock_, config, config_.audio_state, task_queue_factory_, - module_process_thread_.get(), transport_send_ptr_, + module_process_thread_->process_thread(), transport_send_ptr_, bitrate_allocator_.get(), event_log_, call_stats_->AsRtcpRttStats(), suspended_rtp_state); - { - WriteLockScoped write_lock(*send_crit_); - RTC_DCHECK(audio_send_ssrcs_.find(config.rtp.ssrc) == - audio_send_ssrcs_.end()); - audio_send_ssrcs_[config.rtp.ssrc] = send_stream; - } - { - ReadLockScoped read_lock(*receive_crit_); - for (AudioReceiveStream* stream : audio_receive_streams_) { - if (stream->config().rtp.local_ssrc == config.rtp.ssrc) { - stream->AssociateSendStream(send_stream); - } + RTC_DCHECK(audio_send_ssrcs_.find(config.rtp.ssrc) == + audio_send_ssrcs_.end()); + audio_send_ssrcs_[config.rtp.ssrc] = send_stream; + + for (AudioReceiveStream* stream : audio_receive_streams_) { + if (stream->config().rtp.local_ssrc == config.rtp.ssrc) { + stream->AssociateSendStream(send_stream); } } + 
UpdateAggregateNetworkState(); return send_stream; } void Call::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) { TRACE_EVENT0("webrtc", "Call::DestroyAudioSendStream"); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(send_stream != nullptr); send_stream->Stop(); @@ -664,19 +804,16 @@ void Call::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) { webrtc::internal::AudioSendStream* audio_send_stream = static_cast(send_stream); suspended_audio_send_ssrcs_[ssrc] = audio_send_stream->GetRtpState(); - { - WriteLockScoped write_lock(*send_crit_); - size_t num_deleted = audio_send_ssrcs_.erase(ssrc); - RTC_DCHECK_EQ(1, num_deleted); - } - { - ReadLockScoped read_lock(*receive_crit_); - for (AudioReceiveStream* stream : audio_receive_streams_) { - if (stream->config().rtp.local_ssrc == ssrc) { - stream->AssociateSendStream(nullptr); - } + + size_t num_deleted = audio_send_ssrcs_.erase(ssrc); + RTC_DCHECK_EQ(1, num_deleted); + + for (AudioReceiveStream* stream : audio_receive_streams_) { + if (stream->config().rtp.local_ssrc == ssrc) { + stream->AssociateSendStream(nullptr); } } + UpdateAggregateNetworkState(); delete send_stream; } @@ -684,29 +821,25 @@ void Call::DestroyAudioSendStream(webrtc::AudioSendStream* send_stream) { webrtc::AudioReceiveStream* Call::CreateAudioReceiveStream( const webrtc::AudioReceiveStream::Config& config) { TRACE_EVENT0("webrtc", "Call::CreateAudioReceiveStream"); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); RegisterRateObserver(); event_log_->Log(std::make_unique( CreateRtcLogStreamConfig(config))); AudioReceiveStream* receive_stream = new AudioReceiveStream( clock_, &audio_receiver_controller_, transport_send_ptr_->packet_router(), - module_process_thread_.get(), config_.neteq_factory, config, + module_process_thread_->process_thread(), config_.neteq_factory, config, config_.audio_state, event_log_); - { - 
WriteLockScoped write_lock(*receive_crit_); - receive_rtp_config_.emplace(config.rtp.remote_ssrc, - ReceiveRtpConfig(config)); - audio_receive_streams_.insert(receive_stream); - ConfigureSync(config.sync_group); - } - { - ReadLockScoped read_lock(*send_crit_); - auto it = audio_send_ssrcs_.find(config.rtp.local_ssrc); - if (it != audio_send_ssrcs_.end()) { - receive_stream->AssociateSendStream(it->second); - } + receive_rtp_config_.emplace(config.rtp.remote_ssrc, ReceiveRtpConfig(config)); + audio_receive_streams_.insert(receive_stream); + + ConfigureSync(config.sync_group); + + auto it = audio_send_ssrcs_.find(config.rtp.local_ssrc); + if (it != audio_send_ssrcs_.end()) { + receive_stream->AssociateSendStream(it->second); } + UpdateAggregateNetworkState(); return receive_stream; } @@ -714,26 +847,24 @@ webrtc::AudioReceiveStream* Call::CreateAudioReceiveStream( void Call::DestroyAudioReceiveStream( webrtc::AudioReceiveStream* receive_stream) { TRACE_EVENT0("webrtc", "Call::DestroyAudioReceiveStream"); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(receive_stream != nullptr); webrtc::internal::AudioReceiveStream* audio_receive_stream = static_cast(receive_stream); - { - WriteLockScoped write_lock(*receive_crit_); - const AudioReceiveStream::Config& config = audio_receive_stream->config(); - uint32_t ssrc = config.rtp.remote_ssrc; - receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config)) - ->RemoveStream(ssrc); - audio_receive_streams_.erase(audio_receive_stream); - const std::string& sync_group = audio_receive_stream->config().sync_group; - const auto it = sync_stream_mapping_.find(sync_group); - if (it != sync_stream_mapping_.end() && - it->second == audio_receive_stream) { - sync_stream_mapping_.erase(it); - ConfigureSync(sync_group); - } - receive_rtp_config_.erase(ssrc); + + const AudioReceiveStream::Config& config = audio_receive_stream->config(); + uint32_t ssrc = config.rtp.remote_ssrc; + 
receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config)) + ->RemoveStream(ssrc); + audio_receive_streams_.erase(audio_receive_stream); + const std::string& sync_group = audio_receive_stream->config().sync_group; + const auto it = sync_stream_mapping_.find(sync_group); + if (it != sync_stream_mapping_.end() && it->second == audio_receive_stream) { + sync_stream_mapping_.erase(it); + ConfigureSync(sync_group); } + receive_rtp_config_.erase(ssrc); + UpdateAggregateNetworkState(); delete audio_receive_stream; } @@ -744,7 +875,7 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream( VideoEncoderConfig encoder_config, std::unique_ptr fec_controller) { TRACE_EVENT0("webrtc", "Call::CreateVideoSendStream"); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); RegisterRateObserver(); @@ -761,20 +892,22 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream( std::vector ssrcs = config.rtp.ssrcs; VideoSendStream* send_stream = new VideoSendStream( - clock_, num_cpu_cores_, module_process_thread_.get(), task_queue_factory_, - call_stats_->AsRtcpRttStats(), transport_send_ptr_, + clock_, num_cpu_cores_, module_process_thread_->process_thread(), + task_queue_factory_, call_stats_->AsRtcpRttStats(), transport_send_ptr_, bitrate_allocator_.get(), video_send_delay_stats_.get(), event_log_, std::move(config), std::move(encoder_config), suspended_video_send_ssrcs_, suspended_video_payload_states_, std::move(fec_controller)); - { - WriteLockScoped write_lock(*send_crit_); - for (uint32_t ssrc : ssrcs) { - RTC_DCHECK(video_send_ssrcs_.find(ssrc) == video_send_ssrcs_.end()); - video_send_ssrcs_[ssrc] = send_stream; - } - video_send_streams_.insert(send_stream); + for (uint32_t ssrc : ssrcs) { + RTC_DCHECK(video_send_ssrcs_.find(ssrc) == video_send_ssrcs_.end()); + video_send_ssrcs_[ssrc] = send_stream; } + video_send_streams_.insert(send_stream); + // Forward resources that were previously added to the call to the new stream. 
+ for (const auto& resource_forwarder : adaptation_resource_forwarders_) { + resource_forwarder->OnCreateVideoSendStream(send_stream); + } + UpdateAggregateNetworkState(); return send_stream; @@ -797,24 +930,27 @@ webrtc::VideoSendStream* Call::CreateVideoSendStream( void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) { TRACE_EVENT0("webrtc", "Call::DestroyVideoSendStream"); RTC_DCHECK(send_stream != nullptr); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); send_stream->Stop(); VideoSendStream* send_stream_impl = nullptr; - { - WriteLockScoped write_lock(*send_crit_); - auto it = video_send_ssrcs_.begin(); - while (it != video_send_ssrcs_.end()) { - if (it->second == static_cast(send_stream)) { - send_stream_impl = it->second; - video_send_ssrcs_.erase(it++); - } else { - ++it; - } + + auto it = video_send_ssrcs_.begin(); + while (it != video_send_ssrcs_.end()) { + if (it->second == static_cast(send_stream)) { + send_stream_impl = it->second; + video_send_ssrcs_.erase(it++); + } else { + ++it; } - video_send_streams_.erase(send_stream_impl); } + // Stop forwarding resources to the stream being destroyed. 
+ for (const auto& resource_forwarder : adaptation_resource_forwarders_) { + resource_forwarder->OnDestroyVideoSendStream(send_stream_impl); + } + video_send_streams_.erase(send_stream_impl); + RTC_CHECK(send_stream_impl != nullptr); VideoSendStream::RtpStateMap rtp_states; @@ -835,7 +971,7 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) { webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream( webrtc::VideoReceiveStream::Config configuration) { TRACE_EVENT0("webrtc", "Call::CreateVideoReceiveStream"); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); receive_side_cc_.SetSendPeriodicFeedback( SendPeriodicFeedback(configuration.rtp.extensions)); @@ -847,25 +983,21 @@ webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream( VideoReceiveStream2* receive_stream = new VideoReceiveStream2( task_queue_factory_, current, &video_receiver_controller_, num_cpu_cores_, transport_send_ptr_->packet_router(), std::move(configuration), - module_process_thread_.get(), call_stats_.get(), clock_, + module_process_thread_->process_thread(), call_stats_.get(), clock_, new VCMTiming(clock_)); const webrtc::VideoReceiveStream::Config& config = receive_stream->config(); - { - WriteLockScoped write_lock(*receive_crit_); - if (config.rtp.rtx_ssrc) { - // We record identical config for the rtx stream as for the main - // stream. Since the transport_send_cc negotiation is per payload - // type, we may get an incorrect value for the rtx stream, but - // that is unlikely to matter in practice. - receive_rtp_config_.emplace(config.rtp.rtx_ssrc, - ReceiveRtpConfig(config)); - } - receive_rtp_config_.emplace(config.rtp.remote_ssrc, - ReceiveRtpConfig(config)); - video_receive_streams_.insert(receive_stream); - ConfigureSync(config.sync_group); + if (config.rtp.rtx_ssrc) { + // We record identical config for the rtx stream as for the main + // stream. 
Since the transport_send_cc negotiation is per payload + // type, we may get an incorrect value for the rtx stream, but + // that is unlikely to matter in practice. + receive_rtp_config_.emplace(config.rtp.rtx_ssrc, ReceiveRtpConfig(config)); } + receive_rtp_config_.emplace(config.rtp.remote_ssrc, ReceiveRtpConfig(config)); + video_receive_streams_.insert(receive_stream); + ConfigureSync(config.sync_group); + receive_stream->SignalNetworkState(video_network_state_); UpdateAggregateNetworkState(); event_log_->Log(std::make_unique( @@ -876,22 +1008,20 @@ webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream( void Call::DestroyVideoReceiveStream( webrtc::VideoReceiveStream* receive_stream) { TRACE_EVENT0("webrtc", "Call::DestroyVideoReceiveStream"); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(receive_stream != nullptr); VideoReceiveStream2* receive_stream_impl = static_cast(receive_stream); const VideoReceiveStream::Config& config = receive_stream_impl->config(); - { - WriteLockScoped write_lock(*receive_crit_); - // Remove all ssrcs pointing to a receive stream. As RTX retransmits on a - // separate SSRC there can be either one or two. - receive_rtp_config_.erase(config.rtp.remote_ssrc); - if (config.rtp.rtx_ssrc) { - receive_rtp_config_.erase(config.rtp.rtx_ssrc); - } - video_receive_streams_.erase(receive_stream_impl); - ConfigureSync(config.sync_group); + + // Remove all ssrcs pointing to a receive stream. As RTX retransmits on a + // separate SSRC there can be either one or two. 
+ receive_rtp_config_.erase(config.rtp.remote_ssrc); + if (config.rtp.rtx_ssrc) { + receive_rtp_config_.erase(config.rtp.rtx_ssrc); } + video_receive_streams_.erase(receive_stream_impl); + ConfigureSync(config.sync_group); receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config)) ->RemoveStream(config.rtp.remote_ssrc); @@ -903,30 +1033,25 @@ void Call::DestroyVideoReceiveStream( FlexfecReceiveStream* Call::CreateFlexfecReceiveStream( const FlexfecReceiveStream::Config& config) { TRACE_EVENT0("webrtc", "Call::CreateFlexfecReceiveStream"); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); RecoveredPacketReceiver* recovered_packet_receiver = this; FlexfecReceiveStreamImpl* receive_stream; - { - WriteLockScoped write_lock(*receive_crit_); - // Unlike the video and audio receive streams, - // FlexfecReceiveStream implements RtpPacketSinkInterface itself, - // and hence its constructor passes its |this| pointer to - // video_receiver_controller_->CreateStream(). Calling the - // constructor while holding |receive_crit_| ensures that we don't - // call OnRtpPacket until the constructor is finished and the - // object is in a valid state. - // TODO(nisse): Fix constructor so that it can be moved outside of - // this locked scope. - receive_stream = new FlexfecReceiveStreamImpl( - clock_, &video_receiver_controller_, config, recovered_packet_receiver, - call_stats_->AsRtcpRttStats(), module_process_thread_.get()); - RTC_DCHECK(receive_rtp_config_.find(config.remote_ssrc) == - receive_rtp_config_.end()); - receive_rtp_config_.emplace(config.remote_ssrc, ReceiveRtpConfig(config)); - } + // Unlike the video and audio receive streams, FlexfecReceiveStream implements + // RtpPacketSinkInterface itself, and hence its constructor passes its |this| + // pointer to video_receiver_controller_->CreateStream(). 
Calling the + // constructor while on the worker thread ensures that we don't call + // OnRtpPacket until the constructor is finished and the object is + // in a valid state, since OnRtpPacket runs on the same thread. + receive_stream = new FlexfecReceiveStreamImpl( + clock_, &video_receiver_controller_, config, recovered_packet_receiver, + call_stats_->AsRtcpRttStats(), module_process_thread_->process_thread()); + + RTC_DCHECK(receive_rtp_config_.find(config.remote_ssrc) == + receive_rtp_config_.end()); + receive_rtp_config_.emplace(config.remote_ssrc, ReceiveRtpConfig(config)); // TODO(brandtr): Store config in RtcEventLog here. @@ -935,39 +1060,37 @@ FlexfecReceiveStream* Call::CreateFlexfecReceiveStream( void Call::DestroyFlexfecReceiveStream(FlexfecReceiveStream* receive_stream) { TRACE_EVENT0("webrtc", "Call::DestroyFlexfecReceiveStream"); - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); RTC_DCHECK(receive_stream != nullptr); - { - WriteLockScoped write_lock(*receive_crit_); + const FlexfecReceiveStream::Config& config = receive_stream->GetConfig(); + uint32_t ssrc = config.remote_ssrc; + receive_rtp_config_.erase(ssrc); - const FlexfecReceiveStream::Config& config = receive_stream->GetConfig(); - uint32_t ssrc = config.remote_ssrc; - receive_rtp_config_.erase(ssrc); - - // Remove all SSRCs pointing to the FlexfecReceiveStreamImpl to be - // destroyed. - receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config)) - ->RemoveStream(ssrc); - } + // Remove all SSRCs pointing to the FlexfecReceiveStreamImpl to be + // destroyed. 
+ receive_side_cc_.GetRemoteBitrateEstimator(UseSendSideBwe(config)) + ->RemoveStream(ssrc); delete receive_stream; } +void Call::AddAdaptationResource(rtc::scoped_refptr resource) { + RTC_DCHECK_RUN_ON(worker_thread_); + adaptation_resource_forwarders_.push_back( + std::make_unique(resource)); + const auto& resource_forwarder = adaptation_resource_forwarders_.back(); + for (VideoSendStream* send_stream : video_send_streams_) { + resource_forwarder->OnCreateVideoSendStream(send_stream); + } +} + RtpTransportControllerSendInterface* Call::GetTransportControllerSend() { return transport_send_ptr_; } Call::Stats Call::GetStats() const { - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); - - // TODO(tommi): The following stats are managed on the process thread: - // - pacer_delay_ms (PacedSender::Process) - // - rtt_ms - // - recv_bandwidth_bps - // These are delivered on the network TQ: - // - send_bandwidth_bps (see OnTargetTransferRate) - // - max_padding_bitrate_bps (see OnAllocationLimitsChanged) + RTC_DCHECK_RUN_ON(worker_thread_); Stats stats; // TODO(srte): It is unclear if we only want to report queues if network is @@ -983,22 +1106,14 @@ Call::Stats Call::GetStats() const { receive_side_cc_.GetRemoteBitrateEstimator(false)->LatestEstimate( &ssrcs, &recv_bandwidth); stats.recv_bandwidth_bps = recv_bandwidth; - - { - rtc::CritScope cs(&last_bandwidth_bps_crit_); - stats.send_bandwidth_bps = last_bandwidth_bps_; - } - - { - rtc::CritScope cs(&bitrate_crit_); - stats.max_padding_bitrate_bps = configured_max_padding_bitrate_bps_; - } + stats.send_bandwidth_bps = last_bandwidth_bps_; + stats.max_padding_bitrate_bps = configured_max_padding_bitrate_bps_; return stats; } void Call::SignalChannelNetworkState(MediaType media, NetworkState state) { - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); switch (media) { case MediaType::AUDIO: audio_network_state_ = state; @@ -1013,40 +1128,25 @@ void 
Call::SignalChannelNetworkState(MediaType media, NetworkState state) { } UpdateAggregateNetworkState(); - { - ReadLockScoped read_lock(*receive_crit_); - for (VideoReceiveStream2* video_receive_stream : video_receive_streams_) { - video_receive_stream->SignalNetworkState(video_network_state_); - } + for (VideoReceiveStream2* video_receive_stream : video_receive_streams_) { + video_receive_stream->SignalNetworkState(video_network_state_); } } void Call::OnAudioTransportOverheadChanged(int transport_overhead_per_packet) { - ReadLockScoped read_lock(*send_crit_); + RTC_DCHECK_RUN_ON(worker_thread_); for (auto& kv : audio_send_ssrcs_) { kv.second->SetTransportOverhead(transport_overhead_per_packet); } } void Call::UpdateAggregateNetworkState() { - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); - bool have_audio = false; - bool have_video = false; - { - ReadLockScoped read_lock(*send_crit_); - if (!audio_send_ssrcs_.empty()) - have_audio = true; - if (!video_send_ssrcs_.empty()) - have_video = true; - } - { - ReadLockScoped read_lock(*receive_crit_); - if (!audio_receive_streams_.empty()) - have_audio = true; - if (!video_receive_streams_.empty()) - have_video = true; - } + bool have_audio = + !audio_send_ssrcs_.empty() || !audio_receive_streams_.empty(); + bool have_video = + !video_send_ssrcs_.empty() || !video_receive_streams_.empty(); bool aggregate_network_up = ((have_video && video_network_state_ == kNetworkUp) || @@ -1073,61 +1173,50 @@ void Call::OnSentPacket(const rtc::SentPacket& sent_packet) { } void Call::OnStartRateUpdate(DataRate start_rate) { - RTC_DCHECK(network_queue()->IsCurrent()); + RTC_DCHECK_RUN_ON(send_transport_queue()); bitrate_allocator_->UpdateStartRate(start_rate.bps()); } void Call::OnTargetTransferRate(TargetTransferRate msg) { - RTC_DCHECK(network_queue()->IsCurrent()); - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - { - rtc::CritScope cs(&last_bandwidth_bps_crit_); - last_bandwidth_bps_ = 
msg.target_rate.bps(); - } + RTC_DCHECK_RUN_ON(send_transport_queue()); uint32_t target_bitrate_bps = msg.target_rate.bps(); // For controlling the rate of feedback messages. receive_side_cc_.OnBitrateChanged(target_bitrate_bps); bitrate_allocator_->OnNetworkEstimateChanged(msg); - // Ignore updates if bitrate is zero (the aggregate network state is down). - if (target_bitrate_bps == 0) { - rtc::CritScope lock(&bitrate_crit_); - estimated_send_bitrate_kbps_counter_.ProcessAndPause(); - pacer_bitrate_kbps_counter_.ProcessAndPause(); - return; - } + worker_thread_->PostTask( + ToQueuedTask(task_safety_, [this, target_bitrate_bps]() { + RTC_DCHECK_RUN_ON(worker_thread_); + last_bandwidth_bps_ = target_bitrate_bps; - bool sending_video; - { - ReadLockScoped read_lock(*send_crit_); - sending_video = !video_send_streams_.empty(); - } + // Ignore updates if bitrate is zero (the aggregate network state is + // down) or if we're not sending video. + if (target_bitrate_bps == 0 || video_send_streams_.empty()) { + estimated_send_bitrate_kbps_counter_.ProcessAndPause(); + pacer_bitrate_kbps_counter_.ProcessAndPause(); + return; + } - rtc::CritScope lock(&bitrate_crit_); - if (!sending_video) { - // Do not update the stats if we are not sending video. - estimated_send_bitrate_kbps_counter_.ProcessAndPause(); - pacer_bitrate_kbps_counter_.ProcessAndPause(); - return; - } - estimated_send_bitrate_kbps_counter_.Add(target_bitrate_bps / 1000); - // Pacer bitrate may be higher than bitrate estimate if enforcing min bitrate. - uint32_t pacer_bitrate_bps = - std::max(target_bitrate_bps, min_allocated_send_bitrate_bps_); - pacer_bitrate_kbps_counter_.Add(pacer_bitrate_bps / 1000); + estimated_send_bitrate_kbps_counter_.Add(target_bitrate_bps / 1000); + // Pacer bitrate may be higher than bitrate estimate if enforcing min + // bitrate. 
+ uint32_t pacer_bitrate_bps = + std::max(target_bitrate_bps, min_allocated_send_bitrate_bps_); + pacer_bitrate_kbps_counter_.Add(pacer_bitrate_bps / 1000); + })); } void Call::OnAllocationLimitsChanged(BitrateAllocationLimits limits) { - RTC_DCHECK(network_queue()->IsCurrent()); - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + RTC_DCHECK_RUN_ON(send_transport_queue()); transport_send_ptr_->SetAllocatedSendBitrateLimits(limits); - min_allocated_send_bitrate_bps_ = limits.min_allocatable_rate.bps(); - - rtc::CritScope lock(&bitrate_crit_); - configured_max_padding_bitrate_bps_ = limits.max_padding_rate.bps(); + worker_thread_->PostTask(ToQueuedTask(task_safety_, [this, limits]() { + RTC_DCHECK_RUN_ON(worker_thread_); + min_allocated_send_bitrate_bps_ = limits.min_allocatable_rate.bps(); + configured_max_padding_bitrate_bps_ = limits.max_padding_rate.bps(); + })); } void Call::ConfigureSync(const std::string& sync_group) { @@ -1194,28 +1283,24 @@ PacketReceiver::DeliveryStatus Call::DeliverRtcp(MediaType media_type, } bool rtcp_delivered = false; if (media_type == MediaType::ANY || media_type == MediaType::VIDEO) { - ReadLockScoped read_lock(*receive_crit_); for (VideoReceiveStream2* stream : video_receive_streams_) { if (stream->DeliverRtcp(packet, length)) rtcp_delivered = true; } } if (media_type == MediaType::ANY || media_type == MediaType::AUDIO) { - ReadLockScoped read_lock(*receive_crit_); for (AudioReceiveStream* stream : audio_receive_streams_) { stream->DeliverRtcp(packet, length); rtcp_delivered = true; } } if (media_type == MediaType::ANY || media_type == MediaType::VIDEO) { - ReadLockScoped read_lock(*send_crit_); for (VideoSendStream* stream : video_send_streams_) { stream->DeliverRtcp(packet, length); rtcp_delivered = true; } } if (media_type == MediaType::ANY || media_type == MediaType::AUDIO) { - ReadLockScoped read_lock(*send_crit_); for (auto& kv : audio_send_ssrcs_) { kv.second->DeliverRtcp(packet, length); rtcp_delivered = true; @@ -1259,17 
+1344,15 @@ PacketReceiver::DeliveryStatus Call::DeliverRtp(MediaType media_type, RTC_DCHECK(media_type == MediaType::AUDIO || media_type == MediaType::VIDEO || is_keep_alive_packet); - ReadLockScoped read_lock(*receive_crit_); auto it = receive_rtp_config_.find(parsed_packet.Ssrc()); if (it == receive_rtp_config_.end()) { RTC_LOG(LS_ERROR) << "receive_rtp_config_ lookup failed for ssrc " << parsed_packet.Ssrc(); // Destruction of the receive stream, including deregistering from the - // RtpDemuxer, is not protected by the |receive_crit_| lock. But - // deregistering in the |receive_rtp_config_| map is protected by that lock. - // So by not passing the packet on to demuxing in this case, we prevent - // incoming packets to be passed on via the demuxer to a receive stream - // which is being torned down. + // RtpDemuxer, is not protected by the |worker_thread_|. + // But deregistering in the |receive_rtp_config_| map is. So by not passing + // the packet on to demuxing in this case, we prevent incoming packets to be + // passed on via the demuxer to a receive stream which is being torned down. 
return DELIVERY_UNKNOWN_SSRC; } @@ -1315,7 +1398,8 @@ PacketReceiver::DeliveryStatus Call::DeliverPacket( MediaType media_type, rtc::CopyOnWriteBuffer packet, int64_t packet_time_us) { - RTC_DCHECK_RUN_ON(&configuration_sequence_checker_); + RTC_DCHECK_RUN_ON(worker_thread_); + if (IsRtcp(packet.cdata(), packet.size())) return DeliverRtcp(media_type, packet.cdata(), packet.size()); @@ -1323,20 +1407,20 @@ PacketReceiver::DeliveryStatus Call::DeliverPacket( } void Call::OnRecoveredPacket(const uint8_t* packet, size_t length) { + RTC_DCHECK_RUN_ON(worker_thread_); RtpPacketReceived parsed_packet; if (!parsed_packet.Parse(packet, length)) return; parsed_packet.set_recovered(true); - ReadLockScoped read_lock(*receive_crit_); auto it = receive_rtp_config_.find(parsed_packet.Ssrc()); if (it == receive_rtp_config_.end()) { RTC_LOG(LS_ERROR) << "receive_rtp_config_ lookup failed for ssrc " << parsed_packet.Ssrc(); // Destruction of the receive stream, including deregistering from the - // RtpDemuxer, is not protected by the |receive_crit_| lock. But - // deregistering in the |receive_rtp_config_| map is protected by that lock. + // RtpDemuxer, is not protected by the |worker_thread_|. + // But deregistering in the |receive_rtp_config_| map is. // So by not passing the packet on to demuxing in this case, we prevent // incoming packets to be passed on via the demuxer to a receive stream // which is being torn down. 
diff --git a/call/call.h b/call/call.h index 77cd3d2690..75272248c4 100644 --- a/call/call.h +++ b/call/call.h @@ -15,6 +15,7 @@ #include #include +#include "api/adaptation/resource.h" #include "api/media_types.h" #include "call/audio_receive_stream.h" #include "call/audio_send_stream.h" @@ -28,9 +29,41 @@ #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/network_route.h" +#include "rtc_base/ref_count.h" namespace webrtc { +// A restricted way to share the module process thread across multiple instances +// of Call that are constructed on the same worker thread (which is what the +// peer connection factory guarantees). +// SharedModuleThread supports a callback that is issued when only one reference +// remains, which is used to indicate to the original owner that the thread may +// be discarded. +class SharedModuleThread : public rtc::RefCountInterface { + protected: + SharedModuleThread(std::unique_ptr process_thread, + std::function on_one_ref_remaining); + friend class rtc::scoped_refptr; + ~SharedModuleThread() override; + + public: + // Allows injection of an externally created process thread. + static rtc::scoped_refptr Create( + std::unique_ptr process_thread, + std::function on_one_ref_remaining); + + void EnsureStarted(); + + ProcessThread* process_thread(); + + private: + void AddRef() const override; + rtc::RefCountReleaseStatus Release() const override; + + class Impl; + mutable std::unique_ptr impl_; +}; + // A Call instance can contain several send and/or receive streams. All streams // are assumed to have the same remote endpoint and will share bitrate estimates // etc. 
@@ -49,9 +82,11 @@ class Call { }; static Call* Create(const Call::Config& config); + static Call* Create(const Call::Config& config, + rtc::scoped_refptr call_thread); static Call* Create(const Call::Config& config, Clock* clock, - std::unique_ptr call_thread, + rtc::scoped_refptr call_thread, std::unique_ptr pacer_thread); virtual AudioSendStream* CreateAudioSendStream( @@ -86,6 +121,11 @@ class Call { virtual void DestroyFlexfecReceiveStream( FlexfecReceiveStream* receive_stream) = 0; + // When a resource is overused, the Call will try to reduce the load on the + // sysem, for example by reducing the resolution or frame rate of encoded + // streams. + virtual void AddAdaptationResource(rtc::scoped_refptr resource) = 0; + // All received RTP and RTCP packets for the call should be inserted to this // PacketReceiver. The PacketReceiver pointer is valid as long as the // Call instance exists. diff --git a/call/call_factory.cc b/call/call_factory.cc index 6b4f419742..cc02c02835 100644 --- a/call/call_factory.cc +++ b/call/call_factory.cc @@ -70,7 +70,12 @@ absl::optional ParseDegradationConfig( } } // namespace +CallFactory::CallFactory() { + call_thread_.Detach(); +} + Call* CallFactory::CreateCall(const Call::Config& config) { + RTC_DCHECK_RUN_ON(&call_thread_); absl::optional send_degradation_config = ParseDegradationConfig(true); absl::optional @@ -82,7 +87,15 @@ Call* CallFactory::CreateCall(const Call::Config& config) { config.task_queue_factory); } - return Call::Create(config); + if (!module_thread_) { + module_thread_ = SharedModuleThread::Create( + ProcessThread::Create("SharedModThread"), [this]() { + RTC_DCHECK_RUN_ON(&call_thread_); + module_thread_ = nullptr; + }); + } + + return Call::Create(config, module_thread_); } std::unique_ptr CreateCallFactory() { diff --git a/call/call_factory.h b/call/call_factory.h index f0d695c915..65c0b6532a 100644 --- a/call/call_factory.h +++ b/call/call_factory.h @@ -14,13 +14,22 @@ #include 
"api/call/call_factory_interface.h" #include "call/call.h" #include "call/call_config.h" +#include "rtc_base/synchronization/sequence_checker.h" namespace webrtc { class CallFactory : public CallFactoryInterface { + public: + CallFactory(); + + private: ~CallFactory() override {} Call* CreateCall(const CallConfig& config) override; + + SequenceChecker call_thread_; + rtc::scoped_refptr module_thread_ + RTC_GUARDED_BY(call_thread_); }; } // namespace webrtc diff --git a/call/call_perf_tests.cc b/call/call_perf_tests.cc index 123be7da4c..9214ae5d14 100644 --- a/call/call_perf_tests.cc +++ b/call/call_perf_tests.cc @@ -29,6 +29,7 @@ #include "modules/audio_mixer/audio_mixer_impl.h" #include "modules/rtp_rtcp/source/rtp_packet.h" #include "rtc_base/checks.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue_for_test.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" @@ -409,7 +410,7 @@ void CallPerfTest::TestCaptureNtpTime( } void OnFrame(const VideoFrame& video_frame) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (video_frame.ntp_time_ms() <= 0) { // Haven't got enough RTCP SR in order to calculate the capture ntp // time. 
@@ -445,7 +446,7 @@ void CallPerfTest::TestCaptureNtpTime( } Action OnSendRtp(const uint8_t* packet, size_t length) override { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); RtpPacket rtp_packet; EXPECT_TRUE(rtp_packet.Parse(packet, length)); @@ -488,7 +489,7 @@ void CallPerfTest::TestCaptureNtpTime( time_offset_ms_list_, "ms", true); } - rtc::CriticalSection crit_; + Mutex mutex_; const BuiltInNetworkBehaviorConfig net_config_; Clock* const clock_; int threshold_ms_; @@ -499,7 +500,7 @@ void CallPerfTest::TestCaptureNtpTime( bool rtp_start_timestamp_set_; uint32_t rtp_start_timestamp_; typedef std::map FrameCaptureTimeList; - FrameCaptureTimeList capture_time_list_ RTC_GUARDED_BY(&crit_); + FrameCaptureTimeList capture_time_list_ RTC_GUARDED_BY(&mutex_); std::vector time_offset_ms_list_; } test(net_config, threshold_ms, start_time_ms, run_time_ms); diff --git a/call/call_unittest.cc b/call/call_unittest.cc index 8afcf25121..e165107d98 100644 --- a/call/call_unittest.cc +++ b/call/call_unittest.cc @@ -20,13 +20,17 @@ #include "api/rtc_event_log/rtc_event_log.h" #include "api/task_queue/default_task_queue_factory.h" #include "api/test/mock_audio_mixer.h" +#include "api/test/video/function_video_encoder_factory.h" #include "api/transport/field_trial_based_config.h" +#include "api/video/builtin_video_bitrate_allocator_factory.h" #include "audio/audio_receive_stream.h" #include "audio/audio_send_stream.h" +#include "call/adaptation/test/fake_resource.h" +#include "call/adaptation/test/mock_resource_listener.h" #include "call/audio_state.h" #include "modules/audio_device/include/mock_audio_device.h" #include "modules/audio_processing/include/mock_audio_processing.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" #include "test/fake_encoder.h" #include "test/gtest.h" #include "test/mock_audio_decoder_factory.h" @@ -35,6 +39,10 @@ namespace { +using ::testing::_; +using ::testing::Contains; +using 
::testing::StrictMock; + struct CallHelper { explicit CallHelper(bool use_null_audio_processing) { task_queue_factory_ = webrtc::CreateDefaultTaskQueueFactory(); @@ -67,6 +75,20 @@ struct CallHelper { namespace webrtc { +namespace { + +rtc::scoped_refptr FindResourceWhoseNameContains( + const std::vector>& resources, + const std::string& name_contains) { + for (const auto& resource : resources) { + if (resource->Name().find(name_contains) != std::string::npos) + return resource; + } + return nullptr; +} + +} // namespace + TEST(CallTest, ConstructDestruct) { for (bool use_null_audio_processing : {false, true}) { CallHelper call(use_null_audio_processing); @@ -321,8 +343,186 @@ TEST(CallTest, RecreatingAudioStreamWithSameSsrcReusesRtpState) { EXPECT_EQ(rtp_state1.capture_time_ms, rtp_state2.capture_time_ms); EXPECT_EQ(rtp_state1.last_timestamp_time_ms, rtp_state2.last_timestamp_time_ms); - EXPECT_EQ(rtp_state1.media_has_been_sent, rtp_state2.media_has_been_sent); } } +TEST(CallTest, AddAdaptationResourceAfterCreatingVideoSendStream) { + CallHelper call(true); + // Create a VideoSendStream. + test::FunctionVideoEncoderFactory fake_encoder_factory([]() { + return std::make_unique(Clock::GetRealTimeClock()); + }); + auto bitrate_allocator_factory = CreateBuiltinVideoBitrateAllocatorFactory(); + MockTransport send_transport; + VideoSendStream::Config config(&send_transport); + config.rtp.payload_type = 110; + config.rtp.ssrcs = {42}; + config.encoder_settings.encoder_factory = &fake_encoder_factory; + config.encoder_settings.bitrate_allocator_factory = + bitrate_allocator_factory.get(); + VideoEncoderConfig encoder_config; + encoder_config.max_bitrate_bps = 1337; + VideoSendStream* stream1 = + call->CreateVideoSendStream(config.Copy(), encoder_config.Copy()); + EXPECT_NE(stream1, nullptr); + config.rtp.ssrcs = {43}; + VideoSendStream* stream2 = + call->CreateVideoSendStream(config.Copy(), encoder_config.Copy()); + EXPECT_NE(stream2, nullptr); + // Add a fake resource. 
+ auto fake_resource = FakeResource::Create("FakeResource"); + call->AddAdaptationResource(fake_resource); + // An adapter resource mirroring the |fake_resource| should now be present on + // both streams. + auto injected_resource1 = FindResourceWhoseNameContains( + stream1->GetAdaptationResources(), fake_resource->Name()); + EXPECT_TRUE(injected_resource1); + auto injected_resource2 = FindResourceWhoseNameContains( + stream2->GetAdaptationResources(), fake_resource->Name()); + EXPECT_TRUE(injected_resource2); + // Overwrite the real resource listeners with mock ones to verify the signal + // gets through. + injected_resource1->SetResourceListener(nullptr); + StrictMock resource_listener1; + EXPECT_CALL(resource_listener1, OnResourceUsageStateMeasured(_, _)) + .Times(1) + .WillOnce([injected_resource1](rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + EXPECT_EQ(injected_resource1, resource); + EXPECT_EQ(ResourceUsageState::kOveruse, usage_state); + }); + injected_resource1->SetResourceListener(&resource_listener1); + injected_resource2->SetResourceListener(nullptr); + StrictMock resource_listener2; + EXPECT_CALL(resource_listener2, OnResourceUsageStateMeasured(_, _)) + .Times(1) + .WillOnce([injected_resource2](rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + EXPECT_EQ(injected_resource2, resource); + EXPECT_EQ(ResourceUsageState::kOveruse, usage_state); + }); + injected_resource2->SetResourceListener(&resource_listener2); + // The kOveruse signal should get to our resource listeners. + fake_resource->SetUsageState(ResourceUsageState::kOveruse); + call->DestroyVideoSendStream(stream1); + call->DestroyVideoSendStream(stream2); +} + +TEST(CallTest, AddAdaptationResourceBeforeCreatingVideoSendStream) { + CallHelper call(true); + // Add a fake resource. + auto fake_resource = FakeResource::Create("FakeResource"); + call->AddAdaptationResource(fake_resource); + // Create a VideoSendStream. 
+ test::FunctionVideoEncoderFactory fake_encoder_factory([]() { + return std::make_unique(Clock::GetRealTimeClock()); + }); + auto bitrate_allocator_factory = CreateBuiltinVideoBitrateAllocatorFactory(); + MockTransport send_transport; + VideoSendStream::Config config(&send_transport); + config.rtp.payload_type = 110; + config.rtp.ssrcs = {42}; + config.encoder_settings.encoder_factory = &fake_encoder_factory; + config.encoder_settings.bitrate_allocator_factory = + bitrate_allocator_factory.get(); + VideoEncoderConfig encoder_config; + encoder_config.max_bitrate_bps = 1337; + VideoSendStream* stream1 = + call->CreateVideoSendStream(config.Copy(), encoder_config.Copy()); + EXPECT_NE(stream1, nullptr); + config.rtp.ssrcs = {43}; + VideoSendStream* stream2 = + call->CreateVideoSendStream(config.Copy(), encoder_config.Copy()); + EXPECT_NE(stream2, nullptr); + // An adapter resource mirroring the |fake_resource| should be present on both + // streams. + auto injected_resource1 = FindResourceWhoseNameContains( + stream1->GetAdaptationResources(), fake_resource->Name()); + EXPECT_TRUE(injected_resource1); + auto injected_resource2 = FindResourceWhoseNameContains( + stream2->GetAdaptationResources(), fake_resource->Name()); + EXPECT_TRUE(injected_resource2); + // Overwrite the real resource listeners with mock ones to verify the signal + // gets through. 
+ injected_resource1->SetResourceListener(nullptr); + StrictMock resource_listener1; + EXPECT_CALL(resource_listener1, OnResourceUsageStateMeasured(_, _)) + .Times(1) + .WillOnce([injected_resource1](rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + EXPECT_EQ(injected_resource1, resource); + EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state); + }); + injected_resource1->SetResourceListener(&resource_listener1); + injected_resource2->SetResourceListener(nullptr); + StrictMock resource_listener2; + EXPECT_CALL(resource_listener2, OnResourceUsageStateMeasured(_, _)) + .Times(1) + .WillOnce([injected_resource2](rtc::scoped_refptr resource, + ResourceUsageState usage_state) { + EXPECT_EQ(injected_resource2, resource); + EXPECT_EQ(ResourceUsageState::kUnderuse, usage_state); + }); + injected_resource2->SetResourceListener(&resource_listener2); + // The kUnderuse signal should get to our resource listeners. + fake_resource->SetUsageState(ResourceUsageState::kUnderuse); + call->DestroyVideoSendStream(stream1); + call->DestroyVideoSendStream(stream2); +} + +TEST(CallTest, SharedModuleThread) { + class SharedModuleThreadUser : public Module { + public: + SharedModuleThreadUser(ProcessThread* expected_thread, + rtc::scoped_refptr thread) + : expected_thread_(expected_thread), thread_(std::move(thread)) { + thread_->EnsureStarted(); + thread_->process_thread()->RegisterModule(this, RTC_FROM_HERE); + } + + ~SharedModuleThreadUser() override { + thread_->process_thread()->DeRegisterModule(this); + EXPECT_TRUE(thread_was_checked_); + } + + private: + int64_t TimeUntilNextProcess() override { return 1000; } + void Process() override {} + void ProcessThreadAttached(ProcessThread* process_thread) override { + if (!process_thread) { + // Being detached. 
+ return; + } + EXPECT_EQ(process_thread, expected_thread_); + thread_was_checked_ = true; + } + + bool thread_was_checked_ = false; + ProcessThread* const expected_thread_; + rtc::scoped_refptr thread_; + }; + + // Create our test instance and pass a lambda to it that gets executed when + // the reference count goes back to 1 - meaning |shared| again is the only + // reference, which means we can free the variable and deallocate the thread. + rtc::scoped_refptr shared; + shared = + SharedModuleThread::Create(ProcessThread::Create("MySharedProcessThread"), + [&shared]() { shared = nullptr; }); + ProcessThread* process_thread = shared->process_thread(); + + ASSERT_TRUE(shared.get()); + + { + // Create a couple of users of the thread. + // These instances are in a separate scope to trigger the callback to our + // lambda, which will run when these go out of scope. + SharedModuleThreadUser user1(process_thread, shared); + SharedModuleThreadUser user2(process_thread, shared); + } + + // The thread should now have been stopped and freed. 
+ EXPECT_FALSE(shared); +} + } // namespace webrtc diff --git a/call/degraded_call.cc b/call/degraded_call.cc index 9c8d2be508..007e0af360 100644 --- a/call/degraded_call.cc +++ b/call/degraded_call.cc @@ -245,6 +245,11 @@ void DegradedCall::DestroyFlexfecReceiveStream( call_->DestroyFlexfecReceiveStream(receive_stream); } +void DegradedCall::AddAdaptationResource( + rtc::scoped_refptr resource) { + call_->AddAdaptationResource(std::move(resource)); +} + PacketReceiver* DegradedCall::Receiver() { if (receive_config_) { return this; diff --git a/call/degraded_call.h b/call/degraded_call.h index 49230ca1ed..ac072b7159 100644 --- a/call/degraded_call.h +++ b/call/degraded_call.h @@ -77,6 +77,8 @@ class DegradedCall : public Call, private PacketReceiver { void DestroyFlexfecReceiveStream( FlexfecReceiveStream* receive_stream) override; + void AddAdaptationResource(rtc::scoped_refptr resource) override; + PacketReceiver* Receiver() override; RtpTransportControllerSendInterface* GetTransportControllerSend() override; diff --git a/call/fake_network_pipe.cc b/call/fake_network_pipe.cc index 8844700e67..324a7bd793 100644 --- a/call/fake_network_pipe.cc +++ b/call/fake_network_pipe.cc @@ -122,17 +122,17 @@ FakeNetworkPipe::~FakeNetworkPipe() { } void FakeNetworkPipe::SetReceiver(PacketReceiver* receiver) { - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); receiver_ = receiver; } void FakeNetworkPipe::AddActiveTransport(Transport* transport) { - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); active_transports_[transport]++; } void FakeNetworkPipe::RemoveActiveTransport(Transport* transport) { - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); auto it = active_transports_.find(transport); RTC_CHECK(it != active_transports_.end()); if (--(it->second) == 0) { @@ -186,7 +186,7 @@ PacketReceiver::DeliveryStatus FakeNetworkPipe::DeliverPacket( } void FakeNetworkPipe::SetClockOffset(int64_t offset_ms) { - rtc::CritScope 
crit(&config_lock_); + MutexLock lock(&config_lock_); clock_offset_ms_ = offset_ms; } @@ -198,7 +198,7 @@ bool FakeNetworkPipe::EnqueuePacket(rtc::CopyOnWriteBuffer packet, bool is_rtcp, MediaType media_type, absl::optional packet_time_us) { - rtc::CritScope crit(&process_lock_); + MutexLock lock(&process_lock_); int64_t time_now_us = clock_->TimeInMicroseconds(); return EnqueuePacket(NetworkPacket(std::move(packet), time_now_us, time_now_us, options, is_rtcp, media_type, @@ -209,7 +209,7 @@ bool FakeNetworkPipe::EnqueuePacket(rtc::CopyOnWriteBuffer packet, absl::optional options, bool is_rtcp, Transport* transport) { - rtc::CritScope crit(&process_lock_); + MutexLock lock(&process_lock_); int64_t time_now_us = clock_->TimeInMicroseconds(); return EnqueuePacket(NetworkPacket(std::move(packet), time_now_us, time_now_us, options, is_rtcp, @@ -233,7 +233,7 @@ bool FakeNetworkPipe::EnqueuePacket(NetworkPacket&& net_packet) { } float FakeNetworkPipe::PercentageLoss() { - rtc::CritScope crit(&process_lock_); + MutexLock lock(&process_lock_); if (sent_packets_ == 0) return 0; @@ -242,7 +242,7 @@ float FakeNetworkPipe::PercentageLoss() { } int FakeNetworkPipe::AverageDelay() { - rtc::CritScope crit(&process_lock_); + MutexLock lock(&process_lock_); if (sent_packets_ == 0) return 0; @@ -251,12 +251,12 @@ int FakeNetworkPipe::AverageDelay() { } size_t FakeNetworkPipe::DroppedPackets() { - rtc::CritScope crit(&process_lock_); + MutexLock lock(&process_lock_); return dropped_packets_; } size_t FakeNetworkPipe::SentPackets() { - rtc::CritScope crit(&process_lock_); + MutexLock lock(&process_lock_); return sent_packets_; } @@ -264,7 +264,7 @@ void FakeNetworkPipe::Process() { int64_t time_now_us; std::queue packets_to_deliver; { - rtc::CritScope crit(&process_lock_); + MutexLock lock(&process_lock_); time_now_us = clock_->TimeInMicroseconds(); if (time_now_us - last_log_time_us_ > kLogIntervalMs * 1000) { int64_t queueing_delay_us = 0; @@ -318,7 +318,7 @@ void 
FakeNetworkPipe::Process() { } } - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); while (!packets_to_deliver.empty()) { NetworkPacket packet = std::move(packets_to_deliver.front()); packets_to_deliver.pop(); @@ -354,7 +354,7 @@ void FakeNetworkPipe::DeliverNetworkPacket(NetworkPacket* packet) { } absl::optional FakeNetworkPipe::TimeUntilNextProcess() { - rtc::CritScope crit(&process_lock_); + MutexLock lock(&process_lock_); absl::optional delivery_us = network_behavior_->NextDeliveryTimeUs(); if (delivery_us) { int64_t delay_us = *delivery_us - clock_->TimeInMicroseconds(); @@ -364,17 +364,17 @@ absl::optional FakeNetworkPipe::TimeUntilNextProcess() { } bool FakeNetworkPipe::HasReceiver() const { - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); return receiver_ != nullptr; } void FakeNetworkPipe::DeliverPacketWithLock(NetworkPacket* packet) { - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); DeliverNetworkPacket(packet); } void FakeNetworkPipe::ResetStats() { - rtc::CritScope crit(&process_lock_); + MutexLock lock(&process_lock_); dropped_packets_ = 0; sent_packets_ = 0; total_packet_delay_us_ = 0; diff --git a/call/fake_network_pipe.h b/call/fake_network_pipe.h index 24340a2f29..1e5bb513bf 100644 --- a/call/fake_network_pipe.h +++ b/call/fake_network_pipe.h @@ -24,7 +24,7 @@ #include "call/call.h" #include "call/simulated_packet_receiver.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" namespace webrtc { @@ -204,14 +204,14 @@ class FakeNetworkPipe : public SimulatedPacketReceiverInterface { Clock* const clock_; // |config_lock| guards the mostly constant things like the callbacks. 
- rtc::CriticalSection config_lock_; + mutable Mutex config_lock_; const std::unique_ptr network_behavior_; PacketReceiver* receiver_ RTC_GUARDED_BY(config_lock_); Transport* const global_transport_; // |process_lock| guards the data structures involved in delay and loss // processes, such as the packet queues. - rtc::CriticalSection process_lock_; + Mutex process_lock_; // Packets are added at the back of the deque, this makes the deque ordered // by increasing send time. The common case when removing packets from the // deque is removing early packets, which will be close to the front of the diff --git a/call/fake_network_pipe_unittest.cc b/call/fake_network_pipe_unittest.cc index 9c4a3bf755..852a427222 100644 --- a/call/fake_network_pipe_unittest.cc +++ b/call/fake_network_pipe_unittest.cc @@ -24,8 +24,10 @@ namespace webrtc { class MockReceiver : public PacketReceiver { public: - MOCK_METHOD3(DeliverPacket, - DeliveryStatus(MediaType, rtc::CopyOnWriteBuffer, int64_t)); + MOCK_METHOD(DeliveryStatus, + DeliverPacket, + (MediaType, rtc::CopyOnWriteBuffer, int64_t), + (override)); virtual ~MockReceiver() = default; }; diff --git a/call/flexfec_receive_stream_impl.cc b/call/flexfec_receive_stream_impl.cc index 40005efe83..e629bca347 100644 --- a/call/flexfec_receive_stream_impl.cc +++ b/call/flexfec_receive_stream_impl.cc @@ -22,7 +22,6 @@ #include "call/rtp_stream_receiver_controller_interface.h" #include "modules/rtp_rtcp/include/flexfec_receiver.h" #include "modules/rtp_rtcp/include/receive_statistics.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" @@ -119,12 +118,12 @@ std::unique_ptr MaybeCreateFlexfecReceiver( recovered_packet_receiver)); } -std::unique_ptr CreateRtpRtcpModule( +std::unique_ptr CreateRtpRtcpModule( Clock* clock, ReceiveStatistics* receive_statistics, const FlexfecReceiveStreamImpl::Config& config, 
RtcpRttStats* rtt_stats) { - RtpRtcp::Configuration configuration; + RtpRtcpInterface::Configuration configuration; configuration.audio = false; configuration.receiver_only = true; configuration.clock = clock; @@ -132,7 +131,7 @@ std::unique_ptr CreateRtpRtcpModule( configuration.outgoing_transport = config.rtcp_send_transport; configuration.rtt_stats = rtt_stats; configuration.local_media_ssrc = config.local_ssrc; - return RtpRtcp::Create(configuration); + return ModuleRtpRtcpImpl2::Create(configuration); } } // namespace diff --git a/call/flexfec_receive_stream_impl.h b/call/flexfec_receive_stream_impl.h index d4fdc7431a..888dae9ebd 100644 --- a/call/flexfec_receive_stream_impl.h +++ b/call/flexfec_receive_stream_impl.h @@ -15,6 +15,7 @@ #include "call/flexfec_receive_stream.h" #include "call/rtp_packet_sink_interface.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "system_wrappers/include/clock.h" namespace webrtc { @@ -55,7 +56,7 @@ class FlexfecReceiveStreamImpl : public FlexfecReceiveStream { // RTCP reporting. const std::unique_ptr rtp_receive_statistics_; - const std::unique_ptr rtp_rtcp_; + const std::unique_ptr rtp_rtcp_; ProcessThread* process_thread_; std::unique_ptr rtp_stream_receiver_; diff --git a/call/rampup_tests.cc b/call/rampup_tests.cc index 64eab050cb..89fbe3dde7 100644 --- a/call/rampup_tests.cc +++ b/call/rampup_tests.cc @@ -362,14 +362,14 @@ void RampUpTester::AccumulateStats(const VideoSendStream::StreamStats& stream, void RampUpTester::TriggerTestDone() { RTC_DCHECK_GE(test_start_ms_, 0); - // TODO(holmer): Add audio send stats here too when those APIs are available. - if (!send_stream_) - return; - // Stop polling stats. // Corner case for field_trials=WebRTC-QuickPerfTest/Enabled/ SendTask(RTC_FROM_HERE, task_queue_, [this] { pending_task_.Stop(); }); + // TODO(holmer): Add audio send stats here too when those APIs are available. 
+ if (!send_stream_) + return; + VideoSendStream::Stats send_stats = send_stream_->GetStats(); send_stream_ = nullptr; // To avoid dereferencing a bad pointer. diff --git a/call/rtcp_demuxer.cc b/call/rtcp_demuxer.cc deleted file mode 100644 index 738109fa43..0000000000 --- a/call/rtcp_demuxer.cc +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "call/rtcp_demuxer.h" - -#include - -#include -#include - -#include "absl/types/optional.h" -#include "api/rtp_headers.h" -#include "call/rtcp_packet_sink_interface.h" -#include "call/rtp_rtcp_demuxer_helper.h" -#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "rtc_base/checks.h" - -namespace webrtc { - -RtcpDemuxer::RtcpDemuxer() = default; - -RtcpDemuxer::~RtcpDemuxer() { - RTC_DCHECK(ssrc_sinks_.empty()); - RTC_DCHECK(rsid_sinks_.empty()); - RTC_DCHECK(broadcast_sinks_.empty()); -} - -void RtcpDemuxer::AddSink(uint32_t sender_ssrc, RtcpPacketSinkInterface* sink) { - RTC_DCHECK(sink); - RTC_DCHECK(!ContainerHasKey(broadcast_sinks_, sink)); - RTC_DCHECK(!MultimapAssociationExists(ssrc_sinks_, sender_ssrc, sink)); - ssrc_sinks_.emplace(sender_ssrc, sink); -} - -void RtcpDemuxer::AddSink(const std::string& rsid, - RtcpPacketSinkInterface* sink) { - RTC_DCHECK(IsLegalRsidName(rsid)); - RTC_DCHECK(sink); - RTC_DCHECK(!ContainerHasKey(broadcast_sinks_, sink)); - RTC_DCHECK(!MultimapAssociationExists(rsid_sinks_, rsid, sink)); - rsid_sinks_.emplace(rsid, sink); -} - -void RtcpDemuxer::AddBroadcastSink(RtcpPacketSinkInterface* sink) { - RTC_DCHECK(sink); - RTC_DCHECK(!MultimapHasValue(ssrc_sinks_, sink)); - 
RTC_DCHECK(!MultimapHasValue(rsid_sinks_, sink)); - RTC_DCHECK(!ContainerHasKey(broadcast_sinks_, sink)); - broadcast_sinks_.push_back(sink); -} - -void RtcpDemuxer::RemoveSink(const RtcpPacketSinkInterface* sink) { - RTC_DCHECK(sink); - size_t removal_count = RemoveFromMultimapByValue(&ssrc_sinks_, sink) + - RemoveFromMultimapByValue(&rsid_sinks_, sink); - RTC_DCHECK_GT(removal_count, 0); -} - -void RtcpDemuxer::RemoveBroadcastSink(const RtcpPacketSinkInterface* sink) { - RTC_DCHECK(sink); - auto it = std::find(broadcast_sinks_.begin(), broadcast_sinks_.end(), sink); - RTC_DCHECK(it != broadcast_sinks_.end()); - broadcast_sinks_.erase(it); -} - -void RtcpDemuxer::OnRtcpPacket(rtc::ArrayView packet) { - // Perform sender-SSRC-based demuxing for packets with a sender-SSRC. - absl::optional sender_ssrc = ParseRtcpPacketSenderSsrc(packet); - if (sender_ssrc) { - auto it_range = ssrc_sinks_.equal_range(*sender_ssrc); - for (auto it = it_range.first; it != it_range.second; ++it) { - it->second->OnRtcpPacket(packet); - } - } - - // All packets, even those without a sender-SSRC, are broadcast to sinks - // which listen to broadcasts. - for (RtcpPacketSinkInterface* sink : broadcast_sinks_) { - sink->OnRtcpPacket(packet); - } -} - -void RtcpDemuxer::OnSsrcBoundToRsid(const std::string& rsid, uint32_t ssrc) { - // Record the new SSRC association for all of the sinks that were associated - // with the RSID. - auto it_range = rsid_sinks_.equal_range(rsid); - for (auto it = it_range.first; it != it_range.second; ++it) { - RtcpPacketSinkInterface* sink = it->second; - // Watch out for pre-existing SSRC-based associations. - if (!MultimapAssociationExists(ssrc_sinks_, ssrc, sink)) { - AddSink(ssrc, sink); - } - } - - // RSIDs are uniquely associated with SSRCs; no need to keep in memory - // the RSID-to-sink association of resolved RSIDs. 
- rsid_sinks_.erase(it_range.first, it_range.second); -} - -} // namespace webrtc diff --git a/call/rtcp_demuxer.h b/call/rtcp_demuxer.h deleted file mode 100644 index 494e0cea4b..0000000000 --- a/call/rtcp_demuxer.h +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef CALL_RTCP_DEMUXER_H_ -#define CALL_RTCP_DEMUXER_H_ - -#include -#include -#include - -#include "api/array_view.h" -#include "call/ssrc_binding_observer.h" - -namespace webrtc { - -class RtcpPacketSinkInterface; - -// This class represents the RTCP demuxing, for a single RTP session (i.e., one -// SSRC space, see RFC 7656). It isn't thread aware, leaving responsibility of -// multithreading issues to the user of this class. -class RtcpDemuxer : public SsrcBindingObserver { - public: - RtcpDemuxer(); - ~RtcpDemuxer() override; - - // Registers a sink. The sink will be notified of incoming RTCP packets with - // that sender-SSRC. The same sink can be registered for multiple SSRCs, and - // the same SSRC can have multiple sinks. Null pointer is not allowed. - // Sinks may be associated with both an SSRC and an RSID. - // Sinks may be registered as SSRC/RSID-specific or broadcast, but not both. - void AddSink(uint32_t sender_ssrc, RtcpPacketSinkInterface* sink); - - // Registers a sink. Once the RSID is resolved to an SSRC, the sink will be - // notified of all RTCP packets with that sender-SSRC. - // The same sink can be registered for multiple RSIDs, and - // the same RSID can have multiple sinks. Null pointer is not allowed. - // Sinks may be associated with both an SSRC and an RSID. 
- // Sinks may be registered as SSRC/RSID-specific or broadcast, but not both. - void AddSink(const std::string& rsid, RtcpPacketSinkInterface* sink); - - // Registers a sink. The sink will be notified of any incoming RTCP packet. - // Null pointer is not allowed. - // Sinks may be registered as SSRC/RSID-specific or broadcast, but not both. - void AddBroadcastSink(RtcpPacketSinkInterface* sink); - - // Undo previous AddSink() calls with the given sink. - void RemoveSink(const RtcpPacketSinkInterface* sink); - - // Undo AddBroadcastSink(). - void RemoveBroadcastSink(const RtcpPacketSinkInterface* sink); - - // Process a new RTCP packet and forward it to the appropriate sinks. - void OnRtcpPacket(rtc::ArrayView packet); - - // Implement SsrcBindingObserver - become notified whenever RSIDs resolve to - // an SSRC. - void OnSsrcBoundToRsid(const std::string& rsid, uint32_t ssrc) override; - - // TODO(eladalon): Add the ability to resolve RSIDs and inform observers, - // like in the RtpDemuxer case, once the relevant standard is finalized. - - private: - // Records the association SSRCs to sinks. - std::multimap ssrc_sinks_; - - // Records the association RSIDs to sinks. - std::multimap rsid_sinks_; - - // Sinks which will receive notifications of all incoming RTCP packets. - // Additional/removal of sinks is expected to be significantly less frequent - // than RTCP message reception; container chosen for iteration performance. - std::vector broadcast_sinks_; -}; - -} // namespace webrtc - -#endif // CALL_RTCP_DEMUXER_H_ diff --git a/call/rtcp_demuxer_unittest.cc b/call/rtcp_demuxer_unittest.cc deleted file mode 100644 index 23c305c900..0000000000 --- a/call/rtcp_demuxer_unittest.cc +++ /dev/null @@ -1,505 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "call/rtcp_demuxer.h" - -#include - -#include - -#include "api/rtp_headers.h" -#include "call/rtcp_packet_sink_interface.h" -#include "modules/rtp_rtcp/source/rtcp_packet/bye.h" -#include "modules/rtp_rtcp/source/rtp_header_extensions.h" -#include "rtc_base/arraysize.h" -#include "rtc_base/buffer.h" -#include "rtc_base/checks.h" -#include "test/gmock.h" -#include "test/gtest.h" - -namespace webrtc { - -namespace { - -using ::testing::_; -using ::testing::AtLeast; -using ::testing::ElementsAreArray; -using ::testing::InSequence; -using ::testing::Matcher; -using ::testing::NiceMock; - -class MockRtcpPacketSink : public RtcpPacketSinkInterface { - public: - MOCK_METHOD1(OnRtcpPacket, void(rtc::ArrayView)); -}; - -class RtcpDemuxerTest : public ::testing::Test { - protected: - ~RtcpDemuxerTest() { - for (auto* sink : sinks_to_tear_down_) { - demuxer_.RemoveSink(sink); - } - for (auto* sink : broadcast_sinks_to_tear_down_) { - demuxer_.RemoveBroadcastSink(sink); - } - } - - void AddSsrcSink(uint32_t ssrc, RtcpPacketSinkInterface* sink) { - demuxer_.AddSink(ssrc, sink); - sinks_to_tear_down_.insert(sink); - } - - void AddRsidSink(const std::string& rsid, RtcpPacketSinkInterface* sink) { - demuxer_.AddSink(rsid, sink); - sinks_to_tear_down_.insert(sink); - } - - void RemoveSink(RtcpPacketSinkInterface* sink) { - sinks_to_tear_down_.erase(sink); - demuxer_.RemoveSink(sink); - } - - void AddBroadcastSink(RtcpPacketSinkInterface* sink) { - demuxer_.AddBroadcastSink(sink); - broadcast_sinks_to_tear_down_.insert(sink); - } - - void RemoveBroadcastSink(RtcpPacketSinkInterface* sink) { - broadcast_sinks_to_tear_down_.erase(sink); - demuxer_.RemoveBroadcastSink(sink); - } - - RtcpDemuxer demuxer_; - std::set sinks_to_tear_down_; - std::set broadcast_sinks_to_tear_down_; -}; - 
-// Produces a packet buffer representing an RTCP packet with a given SSRC, -// as it would look when sent over the wire. -// |distinguishing_string| allows different RTCP packets with the same SSRC -// to be distinguished. How this is set into the actual packet is -// unimportant, and depends on which RTCP message we choose to use. -rtc::Buffer CreateRtcpPacket(uint32_t ssrc, - const std::string& distinguishing_string = "") { - rtcp::Bye packet; - packet.SetSenderSsrc(ssrc); - if (distinguishing_string != "") { - // Actual way we use |distinguishing_string| is unimportant, so long - // as it ends up in the packet. - packet.SetReason(distinguishing_string); - } - return packet.Build(); -} - -static Matcher> SamePacketAs( - const rtc::Buffer& other) { - return ElementsAreArray(other.cbegin(), other.cend()); -} - -} // namespace - -TEST_F(RtcpDemuxerTest, OnRtcpPacketCalledOnCorrectSinkBySsrc) { - constexpr uint32_t ssrcs[] = {101, 202, 303}; - MockRtcpPacketSink sinks[arraysize(ssrcs)]; - for (size_t i = 0; i < arraysize(ssrcs); i++) { - AddSsrcSink(ssrcs[i], &sinks[i]); - } - - for (size_t i = 0; i < arraysize(ssrcs); i++) { - auto packet = CreateRtcpPacket(ssrcs[i]); - EXPECT_CALL(sinks[i], OnRtcpPacket(SamePacketAs(packet))).Times(1); - demuxer_.OnRtcpPacket(packet); - } -} - -TEST_F(RtcpDemuxerTest, OnRtcpPacketCalledOnResolvedRsidSink) { - // Set up some RSID sinks. - const std::string rsids[] = {"a", "b", "c"}; - MockRtcpPacketSink sinks[arraysize(rsids)]; - for (size_t i = 0; i < arraysize(rsids); i++) { - AddRsidSink(rsids[i], &sinks[i]); - } - - // Only resolve one of the sinks. - constexpr size_t resolved_sink_index = 0; - constexpr uint32_t ssrc = 345; - demuxer_.OnSsrcBoundToRsid(rsids[resolved_sink_index], ssrc); - - // The resolved sink gets notifications of RTCP messages with its SSRC. 
- auto packet = CreateRtcpPacket(ssrc); - EXPECT_CALL(sinks[resolved_sink_index], OnRtcpPacket(SamePacketAs(packet))) - .Times(1); - - // RTCP received; expected calls triggered. - demuxer_.OnRtcpPacket(packet); -} - -TEST_F(RtcpDemuxerTest, - SingleCallbackAfterResolutionOfAnRsidToAlreadyRegisteredSsrc) { - // Associate a sink with an SSRC. - MockRtcpPacketSink sink; - constexpr uint32_t ssrc = 999; - AddSsrcSink(ssrc, &sink); - - // Associate the same sink with an RSID. - const std::string rsid = "r"; - AddRsidSink(rsid, &sink); - - // Resolve the RSID to the aforementioned SSRC. - demuxer_.OnSsrcBoundToRsid(rsid, ssrc); - - // OnRtcpPacket still called only a single time for messages with this SSRC. - auto packet = CreateRtcpPacket(ssrc); - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))).Times(1); - demuxer_.OnRtcpPacket(packet); -} - -TEST_F(RtcpDemuxerTest, - OnRtcpPacketCalledOnAllBroadcastSinksForAllRtcpPackets) { - MockRtcpPacketSink sinks[3]; - for (MockRtcpPacketSink& sink : sinks) { - AddBroadcastSink(&sink); - } - - constexpr uint32_t ssrc = 747; - auto packet = CreateRtcpPacket(ssrc); - - for (MockRtcpPacketSink& sink : sinks) { - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))).Times(1); - } - - // RTCP received; expected calls triggered. 
- demuxer_.OnRtcpPacket(packet); -} - -TEST_F(RtcpDemuxerTest, PacketsDeliveredInRightOrderToNonBroadcastSink) { - constexpr uint32_t ssrc = 101; - MockRtcpPacketSink sink; - AddSsrcSink(ssrc, &sink); - - std::vector packets; - for (size_t i = 0; i < 5; i++) { - packets.push_back(CreateRtcpPacket(ssrc, std::to_string(i))); - } - - InSequence sequence; - for (const auto& packet : packets) { - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))).Times(1); - } - - for (const auto& packet : packets) { - demuxer_.OnRtcpPacket(packet); - } -} - -TEST_F(RtcpDemuxerTest, PacketsDeliveredInRightOrderToBroadcastSink) { - MockRtcpPacketSink sink; - AddBroadcastSink(&sink); - - std::vector packets; - for (size_t i = 0; i < 5; i++) { - constexpr uint32_t ssrc = 101; - packets.push_back(CreateRtcpPacket(ssrc, std::to_string(i))); - } - - InSequence sequence; - for (const auto& packet : packets) { - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))).Times(1); - } - - for (const auto& packet : packets) { - demuxer_.OnRtcpPacket(packet); - } -} - -TEST_F(RtcpDemuxerTest, MultipleSinksMappedToSameSsrc) { - MockRtcpPacketSink sinks[3]; - constexpr uint32_t ssrc = 404; - for (auto& sink : sinks) { - AddSsrcSink(ssrc, &sink); - } - - // Reception of an RTCP packet associated with the shared SSRC triggers the - // callback on all of the sinks associated with it. - auto packet = CreateRtcpPacket(ssrc); - for (auto& sink : sinks) { - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))); - } - - demuxer_.OnRtcpPacket(packet); -} - -TEST_F(RtcpDemuxerTest, SinkMappedToMultipleSsrcs) { - constexpr uint32_t ssrcs[] = {404, 505, 606}; - MockRtcpPacketSink sink; - for (uint32_t ssrc : ssrcs) { - AddSsrcSink(ssrc, &sink); - } - - // The sink which is associated with multiple SSRCs gets the callback - // triggered for each of those SSRCs. 
- for (uint32_t ssrc : ssrcs) { - auto packet = CreateRtcpPacket(ssrc); - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))); - demuxer_.OnRtcpPacket(packet); - } -} - -TEST_F(RtcpDemuxerTest, MultipleRsidsOnSameSink) { - // Sink associated with multiple sinks. - MockRtcpPacketSink sink; - const std::string rsids[] = {"a", "b", "c"}; - for (const auto& rsid : rsids) { - AddRsidSink(rsid, &sink); - } - - // RSIDs resolved to SSRCs. - uint32_t ssrcs[arraysize(rsids)]; - for (size_t i = 0; i < arraysize(rsids); i++) { - ssrcs[i] = 1000 + static_cast(i); - demuxer_.OnSsrcBoundToRsid(rsids[i], ssrcs[i]); - } - - // Set up packets to match those RSIDs/SSRCs. - std::vector packets; - for (size_t i = 0; i < arraysize(rsids); i++) { - packets.push_back(CreateRtcpPacket(ssrcs[i])); - } - - // The sink expects to receive all of the packets. - for (const auto& packet : packets) { - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))).Times(1); - } - - // Packet demuxed correctly; OnRtcpPacket() triggered on sink. - for (const auto& packet : packets) { - demuxer_.OnRtcpPacket(packet); - } -} - -TEST_F(RtcpDemuxerTest, RsidUsedByMultipleSinks) { - MockRtcpPacketSink sinks[3]; - const std::string shared_rsid = "a"; - - for (MockRtcpPacketSink& sink : sinks) { - AddRsidSink(shared_rsid, &sink); - } - - constexpr uint32_t shared_ssrc = 888; - demuxer_.OnSsrcBoundToRsid(shared_rsid, shared_ssrc); - - auto packet = CreateRtcpPacket(shared_ssrc); - - for (MockRtcpPacketSink& sink : sinks) { - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet))).Times(1); - } - - demuxer_.OnRtcpPacket(packet); -} - -TEST_F(RtcpDemuxerTest, NoCallbackOnSsrcSinkRemovedBeforeFirstPacket) { - constexpr uint32_t ssrc = 404; - MockRtcpPacketSink sink; - AddSsrcSink(ssrc, &sink); - - RemoveSink(&sink); - - // The removed sink does not get callbacks. - auto packet = CreateRtcpPacket(ssrc); - EXPECT_CALL(sink, OnRtcpPacket(_)).Times(0); // Not called. 
- demuxer_.OnRtcpPacket(packet); -} - -TEST_F(RtcpDemuxerTest, NoCallbackOnSsrcSinkRemovedAfterFirstPacket) { - constexpr uint32_t ssrc = 404; - NiceMock sink; - AddSsrcSink(ssrc, &sink); - - auto before_packet = CreateRtcpPacket(ssrc); - demuxer_.OnRtcpPacket(before_packet); - - RemoveSink(&sink); - - // The removed sink does not get callbacks. - auto after_packet = CreateRtcpPacket(ssrc); - EXPECT_CALL(sink, OnRtcpPacket(_)).Times(0); // Not called. - demuxer_.OnRtcpPacket(after_packet); -} - -TEST_F(RtcpDemuxerTest, NoCallbackOnRsidSinkRemovedBeforeRsidResolution) { - const std::string rsid = "a"; - constexpr uint32_t ssrc = 404; - MockRtcpPacketSink sink; - AddRsidSink(rsid, &sink); - - // Removal before resolution. - RemoveSink(&sink); - demuxer_.OnSsrcBoundToRsid(rsid, ssrc); - - // The removed sink does not get callbacks. - auto packet = CreateRtcpPacket(ssrc); - EXPECT_CALL(sink, OnRtcpPacket(_)).Times(0); // Not called. - demuxer_.OnRtcpPacket(packet); -} - -TEST_F(RtcpDemuxerTest, NoCallbackOnRsidSinkRemovedAfterRsidResolution) { - const std::string rsid = "a"; - constexpr uint32_t ssrc = 404; - MockRtcpPacketSink sink; - AddRsidSink(rsid, &sink); - - // Removal after resolution. - demuxer_.OnSsrcBoundToRsid(rsid, ssrc); - RemoveSink(&sink); - - // The removed sink does not get callbacks. - auto packet = CreateRtcpPacket(ssrc); - EXPECT_CALL(sink, OnRtcpPacket(_)).Times(0); // Not called. - demuxer_.OnRtcpPacket(packet); -} - -TEST_F(RtcpDemuxerTest, NoCallbackOnBroadcastSinkRemovedBeforeFirstPacket) { - MockRtcpPacketSink sink; - AddBroadcastSink(&sink); - - RemoveBroadcastSink(&sink); - - // The removed sink does not get callbacks. - constexpr uint32_t ssrc = 404; - auto packet = CreateRtcpPacket(ssrc); - EXPECT_CALL(sink, OnRtcpPacket(_)).Times(0); // Not called. 
- demuxer_.OnRtcpPacket(packet); -} - -TEST_F(RtcpDemuxerTest, NoCallbackOnBroadcastSinkRemovedAfterFirstPacket) { - NiceMock sink; - AddBroadcastSink(&sink); - - constexpr uint32_t ssrc = 404; - auto before_packet = CreateRtcpPacket(ssrc); - demuxer_.OnRtcpPacket(before_packet); - - RemoveBroadcastSink(&sink); - - // The removed sink does not get callbacks. - auto after_packet = CreateRtcpPacket(ssrc); - EXPECT_CALL(sink, OnRtcpPacket(_)).Times(0); // Not called. - demuxer_.OnRtcpPacket(after_packet); -} - -// The RSID to SSRC mapping should be one-to-one. If we end up receiving -// two (or more) packets with the same SSRC, but different RSIDs, we guarantee -// remembering the first one; no guarantees are made about further associations. -TEST_F(RtcpDemuxerTest, FirstResolutionOfRsidNotForgotten) { - MockRtcpPacketSink sink; - const std::string rsid = "a"; - AddRsidSink(rsid, &sink); - - constexpr uint32_t ssrc_a = 111; // First resolution - guaranteed effective. - demuxer_.OnSsrcBoundToRsid(rsid, ssrc_a); - - constexpr uint32_t ssrc_b = 222; // Second resolution - no guarantees. 
- demuxer_.OnSsrcBoundToRsid(rsid, ssrc_b); - - auto packet_a = CreateRtcpPacket(ssrc_a); - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet_a))).Times(1); - demuxer_.OnRtcpPacket(packet_a); - - auto packet_b = CreateRtcpPacket(ssrc_b); - EXPECT_CALL(sink, OnRtcpPacket(SamePacketAs(packet_b))).Times(AtLeast(0)); - demuxer_.OnRtcpPacket(packet_b); -} - -#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) - -TEST_F(RtcpDemuxerTest, RepeatedSsrcToSinkAssociationsDisallowed) { - MockRtcpPacketSink sink; - - constexpr uint32_t ssrc = 101; - AddSsrcSink(ssrc, &sink); - EXPECT_DEATH(AddSsrcSink(ssrc, &sink), ""); -} - -TEST_F(RtcpDemuxerTest, RepeatedRsidToSinkAssociationsDisallowed) { - MockRtcpPacketSink sink; - - const std::string rsid = "z"; - AddRsidSink(rsid, &sink); - EXPECT_DEATH(AddRsidSink(rsid, &sink), ""); -} - -TEST_F(RtcpDemuxerTest, RepeatedBroadcastSinkRegistrationDisallowed) { - MockRtcpPacketSink sink; - - AddBroadcastSink(&sink); - EXPECT_DEATH(AddBroadcastSink(&sink), ""); -} - -TEST_F(RtcpDemuxerTest, SsrcSinkCannotAlsoBeRegisteredAsBroadcast) { - MockRtcpPacketSink sink; - - constexpr uint32_t ssrc = 101; - AddSsrcSink(ssrc, &sink); - EXPECT_DEATH(AddBroadcastSink(&sink), ""); -} - -TEST_F(RtcpDemuxerTest, RsidSinkCannotAlsoBeRegisteredAsBroadcast) { - MockRtcpPacketSink sink; - - const std::string rsid = "z"; - AddRsidSink(rsid, &sink); - EXPECT_DEATH(AddBroadcastSink(&sink), ""); -} - -TEST_F(RtcpDemuxerTest, BroadcastSinkCannotAlsoBeRegisteredAsSsrcSink) { - MockRtcpPacketSink sink; - - AddBroadcastSink(&sink); - constexpr uint32_t ssrc = 101; - EXPECT_DEATH(AddSsrcSink(ssrc, &sink), ""); -} - -TEST_F(RtcpDemuxerTest, BroadcastSinkCannotAlsoBeRegisteredAsRsidSink) { - MockRtcpPacketSink sink; - - AddBroadcastSink(&sink); - const std::string rsid = "j"; - EXPECT_DEATH(AddRsidSink(rsid, &sink), ""); -} - -TEST_F(RtcpDemuxerTest, MayNotCallRemoveSinkOnNeverAddedSink) { - MockRtcpPacketSink sink; - 
EXPECT_DEATH(RemoveSink(&sink), ""); -} - -TEST_F(RtcpDemuxerTest, MayNotCallRemoveBroadcastSinkOnNeverAddedSink) { - MockRtcpPacketSink sink; - EXPECT_DEATH(RemoveBroadcastSink(&sink), ""); -} - -TEST_F(RtcpDemuxerTest, RsidMustBeNonEmpty) { - MockRtcpPacketSink sink; - EXPECT_DEATH(AddRsidSink("", &sink), ""); -} - -TEST_F(RtcpDemuxerTest, RsidMustBeAlphaNumeric) { - MockRtcpPacketSink sink; - EXPECT_DEATH(AddRsidSink("a_3", &sink), ""); -} - -TEST_F(RtcpDemuxerTest, RsidMustNotExceedMaximumLength) { - MockRtcpPacketSink sink; - std::string rsid(BaseRtpStringExtension::kMaxValueSizeBytes + 1, 'a'); - EXPECT_DEATH(AddRsidSink(rsid, &sink), ""); -} - -#endif - -} // namespace webrtc diff --git a/call/rtcp_packet_sink_interface.h b/call/rtcp_packet_sink_interface.h deleted file mode 100644 index 8ea3f7d21c..0000000000 --- a/call/rtcp_packet_sink_interface.h +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef CALL_RTCP_PACKET_SINK_INTERFACE_H_ -#define CALL_RTCP_PACKET_SINK_INTERFACE_H_ - -#include "api/array_view.h" - -namespace webrtc { - -// This class represents a receiver of unparsed RTCP packets. -// TODO(eladalon): Replace this by demuxing over parsed rather than raw data. -// Whether this should be over an entire RTCP packet, or over RTCP blocks, -// is still under discussion. 
-class RtcpPacketSinkInterface { - public: - virtual ~RtcpPacketSinkInterface() = default; - virtual void OnRtcpPacket(rtc::ArrayView packet) = 0; -}; - -} // namespace webrtc - -#endif // CALL_RTCP_PACKET_SINK_INTERFACE_H_ diff --git a/call/rtp_demuxer.cc b/call/rtp_demuxer.cc index 14725cf023..9fc4ba1c16 100644 --- a/call/rtp_demuxer.cc +++ b/call/rtp_demuxer.cc @@ -11,8 +11,6 @@ #include "call/rtp_demuxer.h" #include "call/rtp_packet_sink_interface.h" -#include "call/rtp_rtcp_demuxer_helper.h" -#include "call/ssrc_binding_observer.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/checks.h" @@ -20,10 +18,60 @@ #include "rtc_base/strings/string_builder.h" namespace webrtc { +namespace { + +template +size_t RemoveFromMultimapByValue(Container* multimap, const Value& value) { + size_t count = 0; + for (auto it = multimap->begin(); it != multimap->end();) { + if (it->second == value) { + it = multimap->erase(it); + ++count; + } else { + ++it; + } + } + return count; +} + +template +size_t RemoveFromMapByValue(Map* map, const Value& value) { + size_t count = 0; + for (auto it = map->begin(); it != map->end();) { + if (it->second == value) { + it = map->erase(it); + ++count; + } else { + ++it; + } + } + return count; +} + +} // namespace RtpDemuxerCriteria::RtpDemuxerCriteria() = default; RtpDemuxerCriteria::~RtpDemuxerCriteria() = default; +std::string RtpDemuxerCriteria::ToString() const { + rtc::StringBuilder sb; + sb << "{mid: " << (mid.empty() ? "" : mid) + << ", rsid: " << (rsid.empty() ? 
"" : rsid) << ", ssrcs: ["; + + for (auto ssrc : ssrcs) { + sb << ssrc << ", "; + } + + sb << "], payload_types = ["; + + for (auto pt : payload_types) { + sb << pt << ", "; + } + + sb << "]}"; + return sb.Release(); +} + // static std::string RtpDemuxer::DescribePacket(const RtpPacketReceived& packet) { rtc::StringBuilder sb; @@ -51,7 +99,6 @@ RtpDemuxer::~RtpDemuxer() { RTC_DCHECK(sinks_by_pt_.empty()); RTC_DCHECK(sink_by_mid_and_rsid_.empty()); RTC_DCHECK(sink_by_rsid_.empty()); - RTC_DCHECK(ssrc_binding_observers_.empty()); } bool RtpDemuxer::AddSink(const RtpDemuxerCriteria& criteria, @@ -66,6 +113,8 @@ bool RtpDemuxer::AddSink(const RtpDemuxerCriteria& criteria, // criteria because new sinks are created according to user-specified SDP and // we do not want to crash due to a data validation error. if (CriteriaWouldConflict(criteria)) { + RTC_LOG(LS_ERROR) << "Unable to add sink = " << sink + << " due conflicting criteria " << criteria.ToString(); return false; } @@ -92,6 +141,9 @@ bool RtpDemuxer::AddSink(const RtpDemuxerCriteria& criteria, RefreshKnownMids(); + RTC_LOG(LS_INFO) << "Added sink = " << sink << " for criteria " + << criteria.ToString(); + return true; } @@ -105,25 +157,40 @@ bool RtpDemuxer::CriteriaWouldConflict( // Adding this criteria would cause one of these rules to be shadowed, so // reject this new criteria. if (known_mids_.find(criteria.mid) != known_mids_.end()) { + RTC_LOG(LS_INFO) << criteria.ToString() + << " would conflict with known mid"; return true; } } else { // If the exact rule already exists, then reject this duplicate. 
- if (sink_by_mid_and_rsid_.find(std::make_pair( - criteria.mid, criteria.rsid)) != sink_by_mid_and_rsid_.end()) { + const auto sink_by_mid_and_rsid = sink_by_mid_and_rsid_.find( + std::make_pair(criteria.mid, criteria.rsid)); + if (sink_by_mid_and_rsid != sink_by_mid_and_rsid_.end()) { + RTC_LOG(LS_INFO) << criteria.ToString() + << " would conflict with existing sink = " + << sink_by_mid_and_rsid->second + << " by mid+rsid binding"; return true; } // If there is already a sink registered for the bare MID, then this // criteria will never receive any packets because they will just be // directed to that MID sink, so reject this new criteria. - if (sink_by_mid_.find(criteria.mid) != sink_by_mid_.end()) { + const auto sink_by_mid = sink_by_mid_.find(criteria.mid); + if (sink_by_mid != sink_by_mid_.end()) { + RTC_LOG(LS_INFO) << criteria.ToString() + << " would conflict with existing sink = " + << sink_by_mid->second << " by mid binding"; return true; } } } for (uint32_t ssrc : criteria.ssrcs) { - if (sink_by_ssrc_.find(ssrc) != sink_by_ssrc_.end()) { + const auto sink_by_ssrc = sink_by_ssrc_.find(ssrc); + if (sink_by_ssrc != sink_by_ssrc_.end()) { + RTC_LOG(LS_INFO) << criteria.ToString() + << " would conflict with existing sink = " + << sink_by_ssrc->second << " binding by SSRC=" << ssrc; return true; } } @@ -168,7 +235,11 @@ bool RtpDemuxer::RemoveSink(const RtpPacketSinkInterface* sink) { RemoveFromMapByValue(&sink_by_mid_and_rsid_, sink) + RemoveFromMapByValue(&sink_by_rsid_, sink); RefreshKnownMids(); - return num_removed > 0; + bool removed = num_removed > 0; + if (removed) { + RTC_LOG(LS_INFO) << "Removed sink = " << sink << " bindings"; + } + return removed; } bool RtpDemuxer::OnRtpPacket(const RtpPacketReceived& packet) { @@ -284,12 +355,7 @@ RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByMid(const std::string& mid, const auto it = sink_by_mid_.find(mid); if (it != sink_by_mid_.end()) { RtpPacketSinkInterface* sink = it->second; - bool notify = 
AddSsrcSinkBinding(ssrc, sink); - if (notify) { - for (auto* observer : ssrc_binding_observers_) { - observer->OnSsrcBoundToMid(mid, ssrc); - } - } + AddSsrcSinkBinding(ssrc, sink); return sink; } return nullptr; @@ -302,39 +368,22 @@ RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByMidRsid( const auto it = sink_by_mid_and_rsid_.find(std::make_pair(mid, rsid)); if (it != sink_by_mid_and_rsid_.end()) { RtpPacketSinkInterface* sink = it->second; - bool notify = AddSsrcSinkBinding(ssrc, sink); - if (notify) { - for (auto* observer : ssrc_binding_observers_) { - observer->OnSsrcBoundToMidRsid(mid, rsid, ssrc); - } - } + AddSsrcSinkBinding(ssrc, sink); return sink; } return nullptr; } -void RtpDemuxer::RegisterRsidResolutionObserver(SsrcBindingObserver* observer) { - RegisterSsrcBindingObserver(observer); -} RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByRsid(const std::string& rsid, uint32_t ssrc) { const auto it = sink_by_rsid_.find(rsid); if (it != sink_by_rsid_.end()) { RtpPacketSinkInterface* sink = it->second; - bool notify = AddSsrcSinkBinding(ssrc, sink); - if (notify) { - for (auto* observer : ssrc_binding_observers_) { - observer->OnSsrcBoundToRsid(rsid, ssrc); - } - } + AddSsrcSinkBinding(ssrc, sink); return sink; } return nullptr; } -void RtpDemuxer::DeregisterRsidResolutionObserver( - const SsrcBindingObserver* observer) { - DeregisterSsrcBindingObserver(observer); -} RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByPayloadType( uint8_t payload_type, @@ -345,54 +394,33 @@ RtpPacketSinkInterface* RtpDemuxer::ResolveSinkByPayloadType( const auto end = range.second; if (std::next(it) == end) { RtpPacketSinkInterface* sink = it->second; - bool notify = AddSsrcSinkBinding(ssrc, sink); - if (notify) { - for (auto* observer : ssrc_binding_observers_) { - observer->OnSsrcBoundToPayloadType(payload_type, ssrc); - } - } + AddSsrcSinkBinding(ssrc, sink); return sink; } } return nullptr; } -bool RtpDemuxer::AddSsrcSinkBinding(uint32_t ssrc, +void 
RtpDemuxer::AddSsrcSinkBinding(uint32_t ssrc, RtpPacketSinkInterface* sink) { if (sink_by_ssrc_.size() >= kMaxSsrcBindings) { RTC_LOG(LS_WARNING) << "New SSRC=" << ssrc << " sink binding ignored; limit of" << kMaxSsrcBindings << " bindings has been reached."; - return false; + return; } auto result = sink_by_ssrc_.emplace(ssrc, sink); auto it = result.first; bool inserted = result.second; if (inserted) { - return true; - } - if (it->second != sink) { + RTC_LOG(LS_INFO) << "Added sink = " << sink + << " binding with SSRC=" << ssrc; + } else if (it->second != sink) { + RTC_LOG(LS_INFO) << "Updated sink = " << sink + << " binding with SSRC=" << ssrc; it->second = sink; - return true; } - return false; -} - -void RtpDemuxer::RegisterSsrcBindingObserver(SsrcBindingObserver* observer) { - RTC_DCHECK(observer); - RTC_DCHECK(!ContainerHasKey(ssrc_binding_observers_, observer)); - - ssrc_binding_observers_.push_back(observer); -} - -void RtpDemuxer::DeregisterSsrcBindingObserver( - const SsrcBindingObserver* observer) { - RTC_DCHECK(observer); - auto it = std::find(ssrc_binding_observers_.begin(), - ssrc_binding_observers_.end(), observer); - RTC_DCHECK(it != ssrc_binding_observers_.end()); - ssrc_binding_observers_.erase(it); } } // namespace webrtc diff --git a/call/rtp_demuxer.h b/call/rtp_demuxer.h index c815c47f72..3aa7e9df26 100644 --- a/call/rtp_demuxer.h +++ b/call/rtp_demuxer.h @@ -21,7 +21,6 @@ namespace webrtc { class RtpPacketReceived; class RtpPacketSinkInterface; -class SsrcBindingObserver; // This struct describes the criteria that will be used to match packets to a // specific sink. @@ -44,6 +43,9 @@ struct RtpDemuxerCriteria { // Will match packets with any of these payload types. 
std::set payload_types; + + // Return string representation of demux criteria to facilitate logging + std::string ToString() const; }; // This class represents the RTP demuxing, for a single RTP session (i.e., one @@ -130,17 +132,6 @@ class RtpDemuxer { // if the packet was forwarded and false if the packet was dropped. bool OnRtpPacket(const RtpPacketReceived& packet); - // The Observer will be notified when an attribute (e.g., RSID, MID, etc.) is - // bound to an SSRC. - void RegisterSsrcBindingObserver(SsrcBindingObserver* observer); - // Deprecated: Use the above method. - void RegisterRsidResolutionObserver(SsrcBindingObserver* observer); - - // Undo a previous RegisterSsrcBindingObserver(). - void DeregisterSsrcBindingObserver(const SsrcBindingObserver* observer); - // Deprecated: Use the above method. - void DeregisterRsidResolutionObserver(const SsrcBindingObserver* observer); - // Configure whether to look at the MID header extension when demuxing // incoming RTP packets. By default this is enabled. void set_use_mid(bool use_mid) { use_mid_ = use_mid; } @@ -197,14 +188,8 @@ class RtpDemuxer { std::map mid_by_ssrc_; std::map rsid_by_ssrc_; - // Adds a binding from the SSRC to the given sink. Returns true if there was - // not already a sink bound to the SSRC or if the sink replaced a different - // sink. Returns false if the binding was unchanged. - bool AddSsrcSinkBinding(uint32_t ssrc, RtpPacketSinkInterface* sink); - - // Observers which will be notified when an RSID association to an SSRC is - // resolved by this object. - std::vector ssrc_binding_observers_; + // Adds a binding from the SSRC to the given sink. 
+ void AddSsrcSinkBinding(uint32_t ssrc, RtpPacketSinkInterface* sink); bool use_mid_ = true; }; diff --git a/call/rtp_demuxer_unittest.cc b/call/rtp_demuxer_unittest.cc index 86b458a0cc..a4abab73ed 100644 --- a/call/rtp_demuxer_unittest.cc +++ b/call/rtp_demuxer_unittest.cc @@ -14,7 +14,6 @@ #include #include -#include "call/ssrc_binding_observer.h" #include "call/test/mock_rtp_packet_sink_interface.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" @@ -31,31 +30,15 @@ namespace { using ::testing::_; using ::testing::AtLeast; -using ::testing::AtMost; using ::testing::InSequence; using ::testing::NiceMock; -class MockSsrcBindingObserver : public SsrcBindingObserver { - public: - MOCK_METHOD2(OnSsrcBoundToRsid, void(const std::string& rsid, uint32_t ssrc)); - MOCK_METHOD2(OnSsrcBoundToMid, void(const std::string& mid, uint32_t ssrc)); - MOCK_METHOD3(OnSsrcBoundToMidRsid, - void(const std::string& mid, - const std::string& rsid, - uint32_t ssrc)); - MOCK_METHOD2(OnSsrcBoundToPayloadType, - void(uint8_t payload_type, uint32_t ssrc)); -}; - class RtpDemuxerTest : public ::testing::Test { protected: ~RtpDemuxerTest() { for (auto* sink : sinks_to_tear_down_) { demuxer_.RemoveSink(sink); } - for (auto* observer : observers_to_tear_down_) { - demuxer_.DeregisterSsrcBindingObserver(observer); - } } // These are convenience methods for calling demuxer.AddSink with different @@ -103,20 +86,6 @@ class RtpDemuxerTest : public ::testing::Test { return demuxer_.RemoveSink(sink); } - // These are convenience methods for calling - // demuxer.{Register|Unregister}SsrcBindingObserver such that observers are - // automatically removed when the test finishes. 
- - void RegisterSsrcBindingObserver(SsrcBindingObserver* observer) { - demuxer_.RegisterSsrcBindingObserver(observer); - observers_to_tear_down_.insert(observer); - } - - void DeregisterSsrcBindingObserver(SsrcBindingObserver* observer) { - demuxer_.DeregisterSsrcBindingObserver(observer); - observers_to_tear_down_.erase(observer); - } - // The CreatePacket* methods are helpers for creating new RTP packets with // various attributes set. Tests should use the helper that provides the // minimum information needed to exercise the behavior under test. Tests also @@ -206,10 +175,11 @@ class RtpDemuxerTest : public ::testing::Test { RtpDemuxer demuxer_; std::set sinks_to_tear_down_; - std::set observers_to_tear_down_; uint16_t next_sequence_number_ = 1; }; +class RtpDemuxerDeathTest : public RtpDemuxerTest {}; + MATCHER_P(SamePacketAs, other, "") { return arg.Ssrc() == other.Ssrc() && arg.SequenceNumber() == other.SequenceNumber(); @@ -746,73 +716,6 @@ TEST_F(RtpDemuxerTest, AssociatingByRsidAndBySsrcCannotTriggerDoubleCall) { EXPECT_TRUE(demuxer_.OnRtpPacket(*packet)); } -TEST_F(RtpDemuxerTest, ObserversNotifiedOfSsrcBoundToMid) { - const std::string mid = "v"; - constexpr uint32_t ssrc = 10; - - NiceMock sink; - AddSinkOnlyMid(mid, &sink); - - MockSsrcBindingObserver observer; - RegisterSsrcBindingObserver(&observer); - - auto packet = CreatePacketWithSsrcMid(ssrc, mid); - EXPECT_CALL(observer, OnSsrcBoundToMid(mid, ssrc)); - EXPECT_TRUE(demuxer_.OnRtpPacket(*packet)); -} - -TEST_F(RtpDemuxerTest, ObserversNotifiedOfSsrcBoundToRsid) { - const std::string rsid = "1"; - constexpr uint32_t ssrc = 111; - - // Only RSIDs which the demuxer knows may be resolved. 
- NiceMock sink; - AddSinkOnlyRsid(rsid, &sink); - - NiceMock rsid_resolution_observers[3]; - for (auto& observer : rsid_resolution_observers) { - RegisterSsrcBindingObserver(&observer); - EXPECT_CALL(observer, OnSsrcBoundToRsid(rsid, ssrc)).Times(1); - } - - // The expected calls to OnSsrcBoundToRsid() will be triggered by this. - auto packet = CreatePacketWithSsrcRsid(ssrc, rsid); - EXPECT_TRUE(demuxer_.OnRtpPacket(*packet)); -} - -TEST_F(RtpDemuxerTest, ObserversNotifiedOfSsrcBoundToMidRsid) { - const std::string mid = "v"; - const std::string rsid = "1"; - constexpr uint32_t ssrc = 10; - - NiceMock sink; - AddSinkBothMidRsid(mid, rsid, &sink); - - MockSsrcBindingObserver observer; - RegisterSsrcBindingObserver(&observer); - - auto packet = CreatePacketWithSsrcMidRsid(ssrc, mid, rsid); - EXPECT_CALL(observer, OnSsrcBoundToMidRsid(mid, rsid, ssrc)); - EXPECT_TRUE(demuxer_.OnRtpPacket(*packet)); -} - -TEST_F(RtpDemuxerTest, ObserversNotifiedOfSsrcBoundToPayloadType) { - constexpr uint8_t payload_type = 3; - constexpr uint32_t ssrc = 10; - - RtpDemuxerCriteria criteria; - criteria.payload_types = {payload_type}; - NiceMock sink; - AddSink(criteria, &sink); - - MockSsrcBindingObserver observer; - RegisterSsrcBindingObserver(&observer); - - auto packet = CreatePacketWithSsrc(ssrc); - packet->SetPayloadType(payload_type); - EXPECT_CALL(observer, OnSsrcBoundToPayloadType(payload_type, ssrc)); - EXPECT_TRUE(demuxer_.OnRtpPacket(*packet)); -} // If one sink is associated with SSRC x, and another sink with RSID y, then if // we receive a packet with both SSRC x and RSID y, route that to only the sink @@ -847,9 +750,6 @@ TEST_F(RtpDemuxerTest, NiceMock rsid_sink; AddSinkOnlyRsid(rsid, &rsid_sink); - NiceMock observer; - RegisterSsrcBindingObserver(&observer); - // The SSRC was mapped to an SSRC sink, but was even active (packets flowed // over it). auto packet = CreatePacketWithSsrcRsid(ssrc, rsid); @@ -860,7 +760,6 @@ TEST_F(RtpDemuxerTest, // is guaranteed. 
RemoveSink(&ssrc_sink); EXPECT_CALL(rsid_sink, OnRtpPacket(SamePacketAs(*packet))).Times(AtLeast(0)); - EXPECT_CALL(observer, OnSsrcBoundToRsid(rsid, ssrc)).Times(AtLeast(0)); EXPECT_TRUE(demuxer_.OnRtpPacket(*packet)); } @@ -1355,169 +1254,36 @@ TEST_F(RtpDemuxerTest, PacketWithMidAndUnknownRsidIsNotRoutedByPayloadType) { EXPECT_FALSE(demuxer_.OnRtpPacket(*packet)); } -// Observers are only notified of an SSRC binding to an RSID if we care about -// the RSID (i.e., have a sink added for that RSID). -TEST_F(RtpDemuxerTest, ObserversNotNotifiedOfUntrackedRsids) { - const std::string rsid = "1"; - constexpr uint32_t ssrc = 111; - - MockSsrcBindingObserver rsid_resolution_observers[3]; - for (auto& observer : rsid_resolution_observers) { - RegisterSsrcBindingObserver(&observer); - EXPECT_CALL(observer, OnSsrcBoundToRsid(_, _)).Times(0); - } - - // Since no sink is registered for this SSRC/RSID, expect the packet to not be - // routed and no observers notified of the SSRC -> RSID binding. - EXPECT_FALSE(demuxer_.OnRtpPacket(*CreatePacketWithSsrcRsid(ssrc, rsid))); -} - -// Ensure that observers are notified of SSRC bindings only once per unique -// binding source (e.g., SSRC -> MID, SSRC -> RSID, etc.) -TEST_F(RtpDemuxerTest, ObserversNotifiedOfSsrcBoundtoMidOnlyOnce) { - const std::string mid = "v"; - constexpr uint32_t ssrc = 10; - - NiceMock sink; - AddSinkOnlyMid(mid, &sink); - - MockSsrcBindingObserver observer; - RegisterSsrcBindingObserver(&observer); - - EXPECT_CALL(observer, OnSsrcBoundToMid(mid, ssrc)).Times(1); - - demuxer_.OnRtpPacket(*CreatePacketWithSsrcMid(ssrc, mid)); - demuxer_.OnRtpPacket(*CreatePacketWithSsrcMid(ssrc, mid)); -} - -// Ensure that when a new SSRC -> MID binding is discovered observers are also -// notified of that, even if there has already been an SSRC bound to the MID. 
-TEST_F(RtpDemuxerTest, ObserversNotifiedOfSsrcBoundtoMidWhenSsrcChanges) { - const std::string mid = "v"; - constexpr uint32_t ssrc1 = 10; - constexpr uint32_t ssrc2 = 11; - - NiceMock sink; - AddSinkOnlyMid(mid, &sink); - - MockSsrcBindingObserver observer; - RegisterSsrcBindingObserver(&observer); - - InSequence seq; - EXPECT_CALL(observer, OnSsrcBoundToMid(mid, ssrc1)).Times(1); - EXPECT_CALL(observer, OnSsrcBoundToMid(mid, ssrc2)).Times(1); - - auto p1 = CreatePacketWithSsrcMid(ssrc1, mid); - demuxer_.OnRtpPacket(*p1); - - auto p2 = CreatePacketWithSsrcMid(ssrc2, mid); - demuxer_.OnRtpPacket(*p2); -} - -TEST_F(RtpDemuxerTest, DeregisteredRsidObserversNotInformedOfResolutions) { - constexpr uint32_t ssrc = 111; - const std::string rsid = "a"; - NiceMock sink; - AddSinkOnlyRsid(rsid, &sink); - - // Register several, then deregister only one, to show that not all of the - // observers had been forgotten when one was removed. - MockSsrcBindingObserver observer_1; - MockSsrcBindingObserver observer_2_removed; - MockSsrcBindingObserver observer_3; - - RegisterSsrcBindingObserver(&observer_1); - RegisterSsrcBindingObserver(&observer_2_removed); - RegisterSsrcBindingObserver(&observer_3); - - DeregisterSsrcBindingObserver(&observer_2_removed); - - EXPECT_CALL(observer_1, OnSsrcBoundToRsid(rsid, ssrc)).Times(1); - EXPECT_CALL(observer_2_removed, OnSsrcBoundToRsid(_, _)).Times(0); - EXPECT_CALL(observer_3, OnSsrcBoundToRsid(rsid, ssrc)).Times(1); - - // The expected calls to OnSsrcBoundToRsid() will be triggered by this. 
- demuxer_.OnRtpPacket(*CreatePacketWithSsrcRsid(ssrc, rsid)); -} - -TEST_F(RtpDemuxerTest, - PacketFittingBothRsidSinkAndSsrcSinkTriggersResolutionCallbacks) { - constexpr uint32_t ssrc = 111; - NiceMock ssrc_sink; - AddSinkOnlySsrc(ssrc, &ssrc_sink); - - const std::string rsid = "a"; - NiceMock rsid_sink; - AddSinkOnlyRsid(rsid, &rsid_sink); - - MockSsrcBindingObserver observer; - RegisterSsrcBindingObserver(&observer); - - auto packet = CreatePacketWithSsrcRsid(ssrc, rsid); - EXPECT_CALL(observer, OnSsrcBoundToRsid(rsid, ssrc)).Times(1); - demuxer_.OnRtpPacket(*packet); -} - -TEST_F(RtpDemuxerTest, MaliciousPeerCannotCauseMemoryOveruse) { - const std::string mid = "v"; - - NiceMock sink; - AddSinkOnlyMid(mid, &sink); - - MockSsrcBindingObserver observer; - RegisterSsrcBindingObserver(&observer); - - EXPECT_CALL(observer, OnSsrcBoundToMid(_, _)) - .Times(AtMost(RtpDemuxer::kMaxSsrcBindings)); - - for (int i = 0; i < RtpDemuxer::kMaxSsrcBindings + 1; i++) { - auto packet = CreatePacketWithSsrcMid(i, mid); - EXPECT_TRUE(demuxer_.OnRtpPacket(*packet)); - } -} - #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST_F(RtpDemuxerTest, CriteriaMustBeNonEmpty) { +TEST_F(RtpDemuxerDeathTest, CriteriaMustBeNonEmpty) { MockRtpPacketSink sink; RtpDemuxerCriteria criteria; EXPECT_DEATH(AddSink(criteria, &sink), ""); } -TEST_F(RtpDemuxerTest, RsidMustBeAlphaNumeric) { +TEST_F(RtpDemuxerDeathTest, RsidMustBeAlphaNumeric) { MockRtpPacketSink sink; EXPECT_DEATH(AddSinkOnlyRsid("a_3", &sink), ""); } -TEST_F(RtpDemuxerTest, MidMustBeToken) { +TEST_F(RtpDemuxerDeathTest, MidMustBeToken) { MockRtpPacketSink sink; EXPECT_DEATH(AddSinkOnlyMid("a(3)", &sink), ""); } -TEST_F(RtpDemuxerTest, RsidMustNotExceedMaximumLength) { +TEST_F(RtpDemuxerDeathTest, RsidMustNotExceedMaximumLength) { MockRtpPacketSink sink; std::string rsid(BaseRtpStringExtension::kMaxValueSizeBytes + 1, 'a'); EXPECT_DEATH(AddSinkOnlyRsid(rsid, &sink), ""); } -TEST_F(RtpDemuxerTest, 
MidMustNotExceedMaximumLength) { +TEST_F(RtpDemuxerDeathTest, MidMustNotExceedMaximumLength) { MockRtpPacketSink sink; std::string mid(BaseRtpStringExtension::kMaxValueSizeBytes + 1, 'a'); EXPECT_DEATH(AddSinkOnlyMid(mid, &sink), ""); } -TEST_F(RtpDemuxerTest, DoubleRegisterationOfSsrcBindingObserverDisallowed) { - MockSsrcBindingObserver observer; - RegisterSsrcBindingObserver(&observer); - EXPECT_DEATH(RegisterSsrcBindingObserver(&observer), ""); -} - -TEST_F(RtpDemuxerTest, - DregisterationOfNeverRegisteredSsrcBindingObserverDisallowed) { - MockSsrcBindingObserver observer; - EXPECT_DEATH(DeregisterSsrcBindingObserver(&observer), ""); -} - #endif } // namespace diff --git a/call/rtp_payload_params.cc b/call/rtp_payload_params.cc index 279eb588d7..ad979a590a 100644 --- a/call/rtp_payload_params.cc +++ b/call/rtp_payload_params.cc @@ -93,15 +93,6 @@ void PopulateRtpWithCodecSpecifics(const CodecSpecificInfo& info, h264_header.packetization_mode = info.codecSpecific.H264.packetization_mode; rtp->simulcastIdx = spatial_index.value_or(0); - rtp->frame_marking.temporal_id = kNoTemporalIdx; - if (info.codecSpecific.H264.temporal_idx != kNoTemporalIdx) { - rtp->frame_marking.temporal_id = info.codecSpecific.H264.temporal_idx; - rtp->frame_marking.layer_id = 0; - rtp->frame_marking.independent_frame = - info.codecSpecific.H264.idr_frame; - rtp->frame_marking.base_layer_sync = - info.codecSpecific.H264.base_layer_sync; - } return; } case kVideoCodecMultiplex: @@ -139,10 +130,7 @@ RtpPayloadParams::RtpPayloadParams(const uint32_t ssrc, : ssrc_(ssrc), generic_picture_id_experiment_( absl::StartsWith(trials.Lookup("WebRTC-GenericPictureId"), - "Enabled")), - generic_descriptor_experiment_( - !absl::StartsWith(trials.Lookup("WebRTC-GenericDescriptor"), - "Disabled")) { + "Enabled")) { for (auto& spatial_layer : last_shared_frame_id_) spatial_layer.fill(-1); @@ -186,9 +174,8 @@ RTPVideoHeader RtpPayloadParams::GetRtpVideoHeader( SetCodecSpecific(&rtp_video_header, 
first_frame_in_picture); - if (generic_descriptor_experiment_) - SetGeneric(codec_specific_info, shared_frame_id, is_keyframe, - &rtp_video_header); + SetGeneric(codec_specific_info, shared_frame_id, is_keyframe, + &rtp_video_header); return rtp_video_header; } @@ -237,14 +224,6 @@ void RtpPayloadParams::SetCodecSpecific(RTPVideoHeader* rtp_video_header, vp9_header.tl0_pic_idx = state_.tl0_pic_idx; } } - if (rtp_video_header->codec == kVideoCodecH264) { - if (rtp_video_header->frame_marking.temporal_id != kNoTemporalIdx) { - if (rtp_video_header->frame_marking.temporal_id == 0) { - ++state_.tl0_pic_idx; - } - rtp_video_header->frame_marking.tl0_pic_idx = state_.tl0_pic_idx; - } - } if (generic_picture_id_experiment_ && rtp_video_header->codec == kVideoCodecGeneric) { rtp_video_header->video_type_header.emplace() @@ -261,9 +240,12 @@ RtpPayloadParams::GenericDescriptorFromFrameInfo( generic.frame_id = frame_id; generic.dependencies = dependencies_calculator_.FromBuffersUsage( frame_type, frame_id, frame_info.encoder_buffers); + generic.chain_diffs = + chains_calculator_.From(frame_id, frame_info.part_of_chain); generic.spatial_index = frame_info.spatial_id; generic.temporal_index = frame_info.temporal_id; generic.decode_target_indications = frame_info.decode_target_indications; + generic.active_decode_targets = frame_info.active_decode_targets; return generic; } @@ -273,6 +255,11 @@ void RtpPayloadParams::SetGeneric(const CodecSpecificInfo* codec_specific_info, RTPVideoHeader* rtp_video_header) { if (codec_specific_info && codec_specific_info->generic_frame_info && !codec_specific_info->generic_frame_info->encoder_buffers.empty()) { + if (is_keyframe) { + // Key frame resets all chains it is in. 
+ chains_calculator_.Reset( + codec_specific_info->generic_frame_info->part_of_chain); + } rtp_video_header->generic = GenericDescriptorFromFrameInfo(*codec_specific_info->generic_frame_info, frame_id, rtp_video_header->frame_type); diff --git a/call/rtp_payload_params.h b/call/rtp_payload_params.h index 13b1050378..2e0faeb5c9 100644 --- a/call/rtp_payload_params.h +++ b/call/rtp_payload_params.h @@ -19,6 +19,7 @@ #include "call/rtp_config.h" #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "modules/video_coding/chain_diff_calculator.h" #include "modules/video_coding/frame_dependencies_calculator.h" #include "modules/video_coding/include/video_codec_interface.h" @@ -88,6 +89,7 @@ class RtpPayloadParams final { RTPVideoHeader::GenericDescriptorInfo* generic); FrameDependenciesCalculator dependencies_calculator_; + ChainDiffCalculator chains_calculator_; // TODO(bugs.webrtc.org/10242): Remove once all encoder-wrappers are updated. // Holds the last shared frame id for a given (spatial, temporal) layer. 
std::array, @@ -112,7 +114,6 @@ class RtpPayloadParams final { RtpPayloadState state_; const bool generic_picture_id_experiment_; - const bool generic_descriptor_experiment_; }; } // namespace webrtc #endif // CALL_RTP_PAYLOAD_PARAMS_H_ diff --git a/call/rtp_payload_params_unittest.cc b/call/rtp_payload_params_unittest.cc index 1045504b44..a5510b0240 100644 --- a/call/rtp_payload_params_unittest.cc +++ b/call/rtp_payload_params_unittest.cc @@ -32,6 +32,7 @@ using ::testing::ElementsAre; using ::testing::IsEmpty; +using ::testing::SizeIs; namespace webrtc { namespace { @@ -147,54 +148,6 @@ TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_Vp9) { codec_info.codecSpecific.VP9.end_of_picture); } -TEST(RtpPayloadParamsTest, InfoMappedToRtpVideoHeader_H264) { - RtpPayloadState state; - state.picture_id = kPictureId; - state.tl0_pic_idx = kInitialTl0PicIdx1; - RtpPayloadParams params(kSsrc1, &state, FieldTrialBasedConfig()); - - EncodedImage encoded_image; - CodecSpecificInfo codec_info; - CodecSpecificInfoH264* h264info = &codec_info.codecSpecific.H264; - codec_info.codecType = kVideoCodecH264; - h264info->packetization_mode = H264PacketizationMode::SingleNalUnit; - h264info->temporal_idx = kNoTemporalIdx; - - RTPVideoHeader header = - params.GetRtpVideoHeader(encoded_image, &codec_info, 10); - - EXPECT_EQ(0, header.simulcastIdx); - EXPECT_EQ(kVideoCodecH264, header.codec); - const auto& h264 = absl::get(header.video_type_header); - EXPECT_EQ(H264PacketizationMode::SingleNalUnit, h264.packetization_mode); - - // test temporal param 1 - h264info->temporal_idx = 1; - h264info->base_layer_sync = true; - h264info->idr_frame = false; - - header = params.GetRtpVideoHeader(encoded_image, &codec_info, 20); - - EXPECT_EQ(kVideoCodecH264, header.codec); - EXPECT_EQ(header.frame_marking.tl0_pic_idx, kInitialTl0PicIdx1); - EXPECT_EQ(header.frame_marking.temporal_id, h264info->temporal_idx); - EXPECT_EQ(header.frame_marking.base_layer_sync, h264info->base_layer_sync); - 
EXPECT_EQ(header.frame_marking.independent_frame, h264info->idr_frame); - - // test temporal param 2 - h264info->temporal_idx = 0; - h264info->base_layer_sync = false; - h264info->idr_frame = true; - - header = params.GetRtpVideoHeader(encoded_image, &codec_info, 30); - - EXPECT_EQ(kVideoCodecH264, header.codec); - EXPECT_EQ(header.frame_marking.tl0_pic_idx, kInitialTl0PicIdx1 + 1); - EXPECT_EQ(header.frame_marking.temporal_id, h264info->temporal_idx); - EXPECT_EQ(header.frame_marking.base_layer_sync, h264info->base_layer_sync); - EXPECT_EQ(header.frame_marking.independent_frame, h264info->idr_frame); -} - TEST(RtpPayloadParamsTest, PictureIdIsSetForVp8) { RtpPayloadState state; state.picture_id = kInitialPictureId1; @@ -349,8 +302,6 @@ TEST(RtpPayloadParamsTest, PictureIdForOldGenericFormat) { } TEST(RtpPayloadParamsTest, GenericDescriptorForGenericCodec) { - test::ScopedFieldTrials generic_picture_id( - "WebRTC-GenericDescriptor/Enabled/"); RtpPayloadState state{}; EncodedImage encoded_image; @@ -375,8 +326,6 @@ TEST(RtpPayloadParamsTest, GenericDescriptorForGenericCodec) { } TEST(RtpPayloadParamsTest, SetsGenericFromGenericFrameInfo) { - test::ScopedFieldTrials generic_picture_id( - "WebRTC-GenericDescriptor/Enabled/"); RtpPayloadState state; EncodedImage encoded_image; CodecSpecificInfo codec_info; @@ -388,6 +337,7 @@ TEST(RtpPayloadParamsTest, SetsGenericFromGenericFrameInfo) { GenericFrameInfo::Builder().S(1).T(0).Dtis("S").Build(); codec_info.generic_frame_info->encoder_buffers = { {/*id=*/0, /*referenced=*/false, /*updated=*/true}}; + codec_info.generic_frame_info->part_of_chain = {true, false}; RTPVideoHeader key_header = params.GetRtpVideoHeader(encoded_image, &codec_info, /*frame_id=*/1); @@ -398,12 +348,14 @@ TEST(RtpPayloadParamsTest, SetsGenericFromGenericFrameInfo) { EXPECT_THAT(key_header.generic->dependencies, IsEmpty()); EXPECT_THAT(key_header.generic->decode_target_indications, ElementsAre(DecodeTargetIndication::kSwitch)); + 
EXPECT_THAT(key_header.generic->chain_diffs, SizeIs(2)); encoded_image._frameType = VideoFrameType::kVideoFrameDelta; codec_info.generic_frame_info = GenericFrameInfo::Builder().S(2).T(3).Dtis("D").Build(); codec_info.generic_frame_info->encoder_buffers = { {/*id=*/0, /*referenced=*/true, /*updated=*/false}}; + codec_info.generic_frame_info->part_of_chain = {false, false}; RTPVideoHeader delta_header = params.GetRtpVideoHeader(encoded_image, &codec_info, /*frame_id=*/3); @@ -414,6 +366,7 @@ TEST(RtpPayloadParamsTest, SetsGenericFromGenericFrameInfo) { EXPECT_THAT(delta_header.generic->dependencies, ElementsAre(1)); EXPECT_THAT(delta_header.generic->decode_target_indications, ElementsAre(DecodeTargetIndication::kDiscardable)); + EXPECT_THAT(delta_header.generic->chain_diffs, SizeIs(2)); } class RtpPayloadParamsVp8ToGenericTest : public ::testing::Test { @@ -421,9 +374,7 @@ class RtpPayloadParamsVp8ToGenericTest : public ::testing::Test { enum LayerSync { kNoSync, kSync }; RtpPayloadParamsVp8ToGenericTest() - : generic_descriptor_field_trial_("WebRTC-GenericDescriptor/Enabled/"), - state_(), - params_(123, &state_, trials_config_) {} + : state_(), params_(123, &state_, trials_config_) {} void ConvertAndCheck(int temporal_index, int64_t shared_frame_id, @@ -459,7 +410,6 @@ class RtpPayloadParamsVp8ToGenericTest : public ::testing::Test { } protected: - test::ScopedFieldTrials generic_descriptor_field_trial_; FieldTrialBasedConfig trials_config_; RtpPayloadState state_; RtpPayloadParams params_; @@ -518,9 +468,7 @@ class RtpPayloadParamsH264ToGenericTest : public ::testing::Test { enum LayerSync { kNoSync, kSync }; RtpPayloadParamsH264ToGenericTest() - : generic_descriptor_field_trial_("WebRTC-GenericDescriptor/Enabled/"), - state_(), - params_(123, &state_, trials_config_) {} + : state_(), params_(123, &state_, trials_config_) {} void ConvertAndCheck(int temporal_index, int64_t shared_frame_id, @@ -556,7 +504,6 @@ class RtpPayloadParamsH264ToGenericTest : public 
::testing::Test { } protected: - test::ScopedFieldTrials generic_descriptor_field_trial_; FieldTrialBasedConfig trials_config_; RtpPayloadState state_; RtpPayloadParams params_; diff --git a/call/rtp_rtcp_demuxer_helper.cc b/call/rtp_rtcp_demuxer_helper.cc deleted file mode 100644 index 125169b077..0000000000 --- a/call/rtp_rtcp_demuxer_helper.cc +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "call/rtp_rtcp_demuxer_helper.h" - -#include "modules/rtp_rtcp/source/byte_io.h" -#include "modules/rtp_rtcp/source/rtcp_packet/bye.h" -#include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" -#include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" -#include "modules/rtp_rtcp/source/rtcp_packet/psfb.h" -#include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" -#include "modules/rtp_rtcp/source/rtcp_packet/rtpfb.h" -#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" - -namespace webrtc { - -absl::optional ParseRtcpPacketSenderSsrc( - rtc::ArrayView packet) { - rtcp::CommonHeader header; - for (const uint8_t* next_packet = packet.begin(); next_packet < packet.end(); - next_packet = header.NextPacket()) { - if (!header.Parse(next_packet, packet.end() - next_packet)) { - return absl::nullopt; - } - - switch (header.type()) { - case rtcp::Bye::kPacketType: - case rtcp::ExtendedReports::kPacketType: - case rtcp::Psfb::kPacketType: - case rtcp::ReceiverReport::kPacketType: - case rtcp::Rtpfb::kPacketType: - case rtcp::SenderReport::kPacketType: { - // Sender SSRC at the beginning of the RTCP payload. 
- if (header.payload_size_bytes() >= sizeof(uint32_t)) { - const uint32_t ssrc_sender = - ByteReader::ReadBigEndian(header.payload()); - return ssrc_sender; - } else { - return absl::nullopt; - } - } - } - } - - return absl::nullopt; -} - -} // namespace webrtc diff --git a/call/rtp_rtcp_demuxer_helper.h b/call/rtp_rtcp_demuxer_helper.h deleted file mode 100644 index 6134d56143..0000000000 --- a/call/rtp_rtcp_demuxer_helper.h +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef CALL_RTP_RTCP_DEMUXER_HELPER_H_ -#define CALL_RTP_RTCP_DEMUXER_HELPER_H_ - -#include -#include - -#include "absl/types/optional.h" -#include "api/array_view.h" - -namespace webrtc { - -// TODO(eladalon): Remove this in the next CL. 
-template -bool MultimapAssociationExists(const Container& multimap, - const typename Container::key_type& key, - const typename Container::mapped_type& val) { - auto it_range = multimap.equal_range(key); - using Reference = typename Container::const_reference; - return std::any_of(it_range.first, it_range.second, - [val](Reference elem) { return elem.second == val; }); -} - -template -size_t RemoveFromMultimapByValue(Container* multimap, const Value& value) { - size_t count = 0; - for (auto it = multimap->begin(); it != multimap->end();) { - if (it->second == value) { - it = multimap->erase(it); - ++count; - } else { - ++it; - } - } - return count; -} - -template -size_t RemoveFromMapByValue(Map* map, const Value& value) { - size_t count = 0; - for (auto it = map->begin(); it != map->end();) { - if (it->second == value) { - it = map->erase(it); - ++count; - } else { - ++it; - } - } - return count; -} - -template -bool ContainerHasKey(const Container& c, const Key& k) { - return std::find(c.cbegin(), c.cend(), k) != c.cend(); -} - -// TODO(eladalon): Remove this in the next CL. 
-template -bool MultimapHasValue(const Container& c, - const typename Container::mapped_type& v) { - auto predicate = [v](const typename Container::value_type& it) { - return it.second == v; - }; - return std::any_of(c.cbegin(), c.cend(), predicate); -} - -template -bool MapHasValue(const Map& map, const typename Map::mapped_type& value) { - auto predicate = [value](const typename Map::value_type& it) { - return it.second == value; - }; - return std::any_of(map.cbegin(), map.cend(), predicate); -} - -template -bool MultimapHasKey(const Container& c, - const typename Container::key_type& key) { - auto it_range = c.equal_range(key); - return it_range.first != it_range.second; -} - -absl::optional ParseRtcpPacketSenderSsrc( - rtc::ArrayView packet); - -} // namespace webrtc - -#endif // CALL_RTP_RTCP_DEMUXER_HELPER_H_ diff --git a/call/rtp_rtcp_demuxer_helper_unittest.cc b/call/rtp_rtcp_demuxer_helper_unittest.cc deleted file mode 100644 index 17e6617fb0..0000000000 --- a/call/rtp_rtcp_demuxer_helper_unittest.cc +++ /dev/null @@ -1,120 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "call/rtp_rtcp_demuxer_helper.h" - -#include - -#include - -#include "modules/rtp_rtcp/source/rtcp_packet/bye.h" -#include "modules/rtp_rtcp/source/rtcp_packet/extended_jitter_report.h" -#include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" -#include "modules/rtp_rtcp/source/rtcp_packet/pli.h" -#include "modules/rtp_rtcp/source/rtcp_packet/rapid_resync_request.h" -#include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" -#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" -#include "rtc_base/arraysize.h" -#include "rtc_base/buffer.h" -#include "test/gtest.h" - -namespace webrtc { - -namespace { -constexpr uint32_t kSsrc = 8374; -} // namespace - -TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_ByePacket) { - webrtc::rtcp::Bye rtcp_packet; - rtcp_packet.SetSenderSsrc(kSsrc); - rtc::Buffer raw_packet = rtcp_packet.Build(); - - absl::optional ssrc = ParseRtcpPacketSenderSsrc(raw_packet); - EXPECT_EQ(ssrc, kSsrc); -} - -TEST(RtpRtcpDemuxerHelperTest, - ParseRtcpPacketSenderSsrc_ExtendedReportsPacket) { - webrtc::rtcp::ExtendedReports rtcp_packet; - rtcp_packet.SetSenderSsrc(kSsrc); - rtc::Buffer raw_packet = rtcp_packet.Build(); - - absl::optional ssrc = ParseRtcpPacketSenderSsrc(raw_packet); - EXPECT_EQ(ssrc, kSsrc); -} - -TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_PsfbPacket) { - webrtc::rtcp::Pli rtcp_packet; // Psfb is abstract; use a subclass. 
- rtcp_packet.SetSenderSsrc(kSsrc); - rtc::Buffer raw_packet = rtcp_packet.Build(); - - absl::optional ssrc = ParseRtcpPacketSenderSsrc(raw_packet); - EXPECT_EQ(ssrc, kSsrc); -} - -TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_ReceiverReportPacket) { - webrtc::rtcp::ReceiverReport rtcp_packet; - rtcp_packet.SetSenderSsrc(kSsrc); - rtc::Buffer raw_packet = rtcp_packet.Build(); - - absl::optional ssrc = ParseRtcpPacketSenderSsrc(raw_packet); - EXPECT_EQ(ssrc, kSsrc); -} - -TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_RtpfbPacket) { - // Rtpfb is abstract; use a subclass. - webrtc::rtcp::RapidResyncRequest rtcp_packet; - rtcp_packet.SetSenderSsrc(kSsrc); - rtc::Buffer raw_packet = rtcp_packet.Build(); - - absl::optional ssrc = ParseRtcpPacketSenderSsrc(raw_packet); - EXPECT_EQ(ssrc, kSsrc); -} - -TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_SenderReportPacket) { - webrtc::rtcp::SenderReport rtcp_packet; - rtcp_packet.SetSenderSsrc(kSsrc); - rtc::Buffer raw_packet = rtcp_packet.Build(); - - absl::optional ssrc = ParseRtcpPacketSenderSsrc(raw_packet); - EXPECT_EQ(ssrc, kSsrc); -} - -TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_MalformedRtcpPacket) { - uint8_t garbage[100]; - memset(&garbage[0], 0, arraysize(garbage)); - - absl::optional ssrc = ParseRtcpPacketSenderSsrc(garbage); - EXPECT_FALSE(ssrc); -} - -TEST(RtpRtcpDemuxerHelperTest, - ParseRtcpPacketSenderSsrc_RtcpMessageWithoutSenderSsrc) { - webrtc::rtcp::ExtendedJitterReport rtcp_packet; // Has no sender SSRC. 
- rtc::Buffer raw_packet = rtcp_packet.Build(); - - absl::optional ssrc = ParseRtcpPacketSenderSsrc(raw_packet); - EXPECT_FALSE(ssrc); -} - -TEST(RtpRtcpDemuxerHelperTest, ParseRtcpPacketSenderSsrc_TruncatedRtcpMessage) { - webrtc::rtcp::Bye rtcp_packet; - rtcp_packet.SetSenderSsrc(kSsrc); - rtc::Buffer raw_packet = rtcp_packet.Build(); - - constexpr size_t rtcp_length_bytes = 8; - ASSERT_EQ(rtcp_length_bytes, raw_packet.size()); - - absl::optional ssrc = ParseRtcpPacketSenderSsrc( - rtc::ArrayView(raw_packet.data(), rtcp_length_bytes - 1)); - EXPECT_FALSE(ssrc); -} - -} // namespace webrtc diff --git a/call/rtp_stream_receiver_controller.h b/call/rtp_stream_receiver_controller.h index 045af3cf8d..62447aa521 100644 --- a/call/rtp_stream_receiver_controller.h +++ b/call/rtp_stream_receiver_controller.h @@ -14,7 +14,7 @@ #include "call/rtp_demuxer.h" #include "call/rtp_stream_receiver_controller_interface.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/deprecated/recursive_critical_section.h" namespace webrtc { @@ -63,7 +63,7 @@ class RtpStreamReceiverController // to be called on the same thread, and OnRtpPacket to be called // by a single, but possibly distinct, thread. But applications not // using Call may have use threads differently. - rtc::CriticalSection lock_; + rtc::RecursiveCriticalSection lock_; RtpDemuxer demuxer_ RTC_GUARDED_BY(&lock_); }; diff --git a/call/rtp_transport_controller_send.cc b/call/rtp_transport_controller_send.cc index 56c5e55ca1..9baf164a60 100644 --- a/call/rtp_transport_controller_send.cc +++ b/call/rtp_transport_controller_send.cc @@ -91,13 +91,16 @@ RtpTransportControllerSend::RtpTransportControllerSend( event_log, trials, process_thread_.get())), - task_queue_pacer_(use_task_queue_pacer_ - ? new TaskQueuePacedSender(clock, - &packet_router_, - event_log, - trials, - task_queue_factory) - : nullptr), + task_queue_pacer_( + use_task_queue_pacer_ + ? 
new TaskQueuePacedSender( + clock, + &packet_router_, + event_log, + trials, + task_queue_factory, + /*hold_back_window = */ PacingController::kMinSleepTime) + : nullptr), observer_(nullptr), controller_factory_override_(controller_factory), controller_factory_fallback_( diff --git a/call/rtp_video_sender.cc b/call/rtp_video_sender.cc index 8c31a848aa..fb6825e719 100644 --- a/call/rtp_video_sender.cc +++ b/call/rtp_video_sender.cc @@ -22,21 +22,22 @@ #include "api/video_codecs/video_codec.h" #include "call/rtp_transport_controller_send_interface.h" #include "modules/pacing/packet_router.h" -#include "modules/rtp_rtcp/include/rtp_rtcp.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "modules/rtp_rtcp/source/rtp_sender.h" #include "modules/utility/include/process_thread.h" #include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" +#include "rtc_base/task_queue.h" namespace webrtc { namespace webrtc_internal_rtp_video_sender { RtpStreamSender::RtpStreamSender( - std::unique_ptr rtp_rtcp, + std::unique_ptr rtp_rtcp, std::unique_ptr sender_video, std::unique_ptr fec_generator) : rtp_rtcp(std::move(rtp_rtcp)), @@ -196,10 +197,11 @@ std::vector CreateRtpStreamSenders( FrameEncryptorInterface* frame_encryptor, const CryptoOptions& crypto_options, rtc::scoped_refptr frame_transformer, + bool use_deferred_fec, const WebRtcKeyValueConfig& trials) { RTC_DCHECK_GT(rtp_config.ssrcs.size(), 0); - RtpRtcp::Configuration configuration; + RtpRtcpInterface::Configuration configuration; configuration.clock = clock; configuration.audio = false; configuration.receiver_only = false; @@ -243,7 +245,9 @@ std::vector CreateRtpStreamSenders( std::unique_ptr fec_generator = MaybeCreateFecGenerator(clock, rtp_config, suspended_ssrcs, i, trials); configuration.fec_generator = fec_generator.get(); - video_config.fec_generator = 
fec_generator.get(); + if (!use_deferred_fec) { + video_config.fec_generator = fec_generator.get(); + } configuration.rtx_send_ssrc = rtp_config.GetRtxSsrcAssociatedWithMediaSsrc(rtp_config.ssrcs[i]); @@ -252,7 +256,8 @@ std::vector CreateRtpStreamSenders( configuration.need_rtp_packet_infos = rtp_config.lntf.enabled; - auto rtp_rtcp = RtpRtcp::Create(configuration); + std::unique_ptr rtp_rtcp( + ModuleRtpRtcpImpl2::Create(configuration)); rtp_rtcp->SetSendingStatus(false); rtp_rtcp->SetSendingMediaStatus(false); rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound); @@ -281,6 +286,7 @@ std::vector CreateRtpStreamSenders( video_config.fec_overhead_bytes = fec_generator->MaxPacketOverhead(); } video_config.frame_transformer = frame_transformer; + video_config.send_transport_queue = transport->GetWorkerQueue()->Get(); auto sender_video = std::make_unique(video_config); rtp_streams.emplace_back(std::move(rtp_rtcp), std::move(sender_video), std::move(fec_generator)); @@ -335,6 +341,9 @@ RtpVideoSender::RtpVideoSender( field_trials_.Lookup("WebRTC-UseEarlyLossDetection"), "Disabled")), has_packet_feedback_(TransportSeqNumExtensionConfigured(rtp_config)), + use_deferred_fec_( + absl::StartsWith(field_trials_.Lookup("WebRTC-DeferredFecGeneration"), + "Enabled")), active_(false), module_process_thread_(nullptr), suspended_ssrcs_(std::move(suspended_ssrcs)), @@ -353,6 +362,7 @@ RtpVideoSender::RtpVideoSender( frame_encryptor, crypto_options, std::move(frame_transformer), + use_deferred_fec_, field_trials_)), rtp_config_(rtp_config), codec_type_(GetVideoCodecType(rtp_config)), @@ -457,15 +467,20 @@ void RtpVideoSender::DeRegisterProcessThread() { } void RtpVideoSender::SetActive(bool active) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); if (active_ == active) return; const std::vector active_modules(rtp_streams_.size(), active); - SetActiveModules(active_modules); + SetActiveModulesLocked(active_modules); } void RtpVideoSender::SetActiveModules(const std::vector 
active_modules) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); + return SetActiveModulesLocked(active_modules); +} + +void RtpVideoSender::SetActiveModulesLocked( + const std::vector active_modules) { RTC_DCHECK_EQ(rtp_streams_.size(), active_modules.size()); active_ = false; for (size_t i = 0; i < active_modules.size(); ++i) { @@ -480,17 +495,21 @@ void RtpVideoSender::SetActiveModules(const std::vector active_modules) { } bool RtpVideoSender::IsActive() { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); + return IsActiveLocked(); +} + +bool RtpVideoSender::IsActiveLocked() { return active_ && !rtp_streams_.empty(); } EncodedImageCallback::Result RtpVideoSender::OnEncodedImage( const EncodedImage& encoded_image, const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) { + const RTPFragmentationHeader* /*fragmentation*/) { fec_controller_->UpdateWithEncodedData(encoded_image.size(), encoded_image._frameType); - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); RTC_DCHECK(!rtp_streams_.empty()); if (!active_) return Result(Result::ERROR_SEND_FAILED); @@ -540,7 +559,6 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage( bool send_result = rtp_streams_[stream_index].sender_video->SendEncodedImage( rtp_config_.payload_type, codec_type_, rtp_timestamp, encoded_image, - fragmentation, params_[stream_index].GetRtpVideoHeader( encoded_image, codec_specific_info, shared_frame_id_), expected_retransmission_time_ms); @@ -564,8 +582,8 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage( void RtpVideoSender::OnBitrateAllocationUpdated( const VideoBitrateAllocation& bitrate) { - rtc::CritScope lock(&crit_); - if (IsActive()) { + MutexLock lock(&mutex_); + if (IsActiveLocked()) { if (rtp_streams_.size() == 1) { // If spatial scalability is enabled, it is covered by a single stream. 
rtp_streams_[0].rtp_rtcp->SetVideoBitrateAllocation(bitrate); @@ -617,7 +635,7 @@ void RtpVideoSender::ConfigureSsrcs() { RTC_CHECK(ssrc_to_rtp_module_.empty()); for (size_t i = 0; i < rtp_config_.ssrcs.size(); ++i) { uint32_t ssrc = rtp_config_.ssrcs[i]; - RtpRtcp* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get(); + RtpRtcpInterface* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get(); // Restore RTP state if previous existed. auto it = suspended_ssrcs_.find(ssrc); @@ -634,7 +652,7 @@ void RtpVideoSender::ConfigureSsrcs() { RTC_DCHECK_EQ(rtp_config_.rtx.ssrcs.size(), rtp_config_.ssrcs.size()); for (size_t i = 0; i < rtp_config_.rtx.ssrcs.size(); ++i) { uint32_t ssrc = rtp_config_.rtx.ssrcs[i]; - RtpRtcp* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get(); + RtpRtcpInterface* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get(); auto it = suspended_ssrcs_.find(ssrc); if (it != suspended_ssrcs_.end()) rtp_rtcp->SetRtxState(it->second); @@ -707,7 +725,7 @@ std::map RtpVideoSender::GetRtpStates() const { std::map RtpVideoSender::GetRtpPayloadStates() const { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); std::map payload_states; for (const auto& param : params_) { payload_states[param.ssrc()] = param.state(); @@ -718,7 +736,7 @@ std::map RtpVideoSender::GetRtpPayloadStates() void RtpVideoSender::OnTransportOverheadChanged( size_t transport_overhead_bytes_per_packet) { - rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); transport_overhead_bytes_per_packet_ = transport_overhead_bytes_per_packet; size_t max_rtp_packet_size = @@ -732,7 +750,7 @@ void RtpVideoSender::OnTransportOverheadChanged( void RtpVideoSender::OnBitrateUpdated(BitrateAllocationUpdate update, int framerate) { // Substract overhead from bitrate. 
- rtc::CritScope lock(&crit_); + MutexLock lock(&mutex_); size_t num_active_streams = 0; size_t overhead_bytes_per_packet = 0; for (const auto& stream : rtp_streams_) { @@ -836,27 +854,39 @@ int RtpVideoSender::ProtectionRequest(const FecProtectionParams* delta_params, *sent_nack_rate_bps = 0; *sent_fec_rate_bps = 0; for (const RtpStreamSender& stream : rtp_streams_) { - if (stream.fec_generator) { - stream.fec_generator->SetProtectionParameters(*delta_params, *key_params); - *sent_fec_rate_bps += stream.fec_generator->CurrentFecRate().bps(); + if (use_deferred_fec_) { + stream.rtp_rtcp->SetFecProtectionParams(*delta_params, *key_params); + + auto send_bitrate = stream.rtp_rtcp->GetSendRates(); + *sent_video_rate_bps += send_bitrate[RtpPacketMediaType::kVideo].bps(); + *sent_fec_rate_bps += + send_bitrate[RtpPacketMediaType::kForwardErrorCorrection].bps(); + *sent_nack_rate_bps += + send_bitrate[RtpPacketMediaType::kRetransmission].bps(); + } else { + if (stream.fec_generator) { + stream.fec_generator->SetProtectionParameters(*delta_params, + *key_params); + *sent_fec_rate_bps += stream.fec_generator->CurrentFecRate().bps(); + } + *sent_video_rate_bps += stream.sender_video->VideoBitrateSent(); + *sent_nack_rate_bps += + stream.rtp_rtcp->GetSendRates()[RtpPacketMediaType::kRetransmission] + .bps(); } - *sent_video_rate_bps += stream.sender_video->VideoBitrateSent(); - *sent_nack_rate_bps += - stream.rtp_rtcp->GetSendRates()[RtpPacketMediaType::kRetransmission] - .bps(); } return 0; } void RtpVideoSender::SetFecAllowed(bool fec_allowed) { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); fec_allowed_ = fec_allowed; } void RtpVideoSender::OnPacketFeedbackVector( std::vector packet_feedback_vector) { if (fec_controller_->UseLossVectorMask()) { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); for (const StreamPacketInfo& packet : packet_feedback_vector) { loss_mask_vector_.push_back(!packet.received); } diff --git a/call/rtp_video_sender.h 
b/call/rtp_video_sender.h index f7d8c763d2..876f6e9cb2 100644 --- a/call/rtp_video_sender.h +++ b/call/rtp_video_sender.h @@ -29,14 +29,15 @@ #include "call/rtp_transport_controller_send_interface.h" #include "call/rtp_video_sender_interface.h" #include "modules/rtp_rtcp/include/flexfec_sender.h" +#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "modules/rtp_rtcp/source/rtp_sender.h" #include "modules/rtp_rtcp/source/rtp_sender_video.h" #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" #include "modules/utility/include/process_thread.h" #include "rtc_base/constructor_magic.h" -#include "rtc_base/critical_section.h" #include "rtc_base/rate_limiter.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/thread_checker.h" @@ -44,14 +45,13 @@ namespace webrtc { class FrameEncryptorInterface; class RTPFragmentationHeader; -class RtpRtcp; class RtpTransportControllerSendInterface; namespace webrtc_internal_rtp_video_sender { // RTP state for a single simulcast stream. Internal to the implementation of // RtpVideoSender. struct RtpStreamSender { - RtpStreamSender(std::unique_ptr rtp_rtcp, + RtpStreamSender(std::unique_ptr rtp_rtcp, std::unique_ptr sender_video, std::unique_ptr fec_generator); ~RtpStreamSender(); @@ -60,7 +60,7 @@ struct RtpStreamSender { RtpStreamSender& operator=(RtpStreamSender&&) = default; // Note: Needs pointer stability. - std::unique_ptr rtp_rtcp; + std::unique_ptr rtp_rtcp; std::unique_ptr sender_video; std::unique_ptr fec_generator; }; @@ -96,62 +96,75 @@ class RtpVideoSender : public RtpVideoSenderInterface, // |module_process_thread| was created (libjingle's worker thread). // TODO(perkj): Replace the use of |module_process_thread| with a TaskQueue, // maybe |worker_queue|. 
- void RegisterProcessThread(ProcessThread* module_process_thread) override; - void DeRegisterProcessThread() override; + void RegisterProcessThread(ProcessThread* module_process_thread) + RTC_LOCKS_EXCLUDED(mutex_) override; + void DeRegisterProcessThread() RTC_LOCKS_EXCLUDED(mutex_) override; // RtpVideoSender will only route packets if being active, all packets will be // dropped otherwise. - void SetActive(bool active) override; + void SetActive(bool active) RTC_LOCKS_EXCLUDED(mutex_) override; // Sets the sending status of the rtp modules and appropriately sets the // payload router to active if any rtp modules are active. - void SetActiveModules(const std::vector active_modules) override; - bool IsActive() override; + void SetActiveModules(const std::vector active_modules) + RTC_LOCKS_EXCLUDED(mutex_) override; + bool IsActive() RTC_LOCKS_EXCLUDED(mutex_) override; - void OnNetworkAvailability(bool network_available) override; - std::map GetRtpStates() const override; - std::map GetRtpPayloadStates() const override; + void OnNetworkAvailability(bool network_available) + RTC_LOCKS_EXCLUDED(mutex_) override; + std::map GetRtpStates() const + RTC_LOCKS_EXCLUDED(mutex_) override; + std::map GetRtpPayloadStates() const + RTC_LOCKS_EXCLUDED(mutex_) override; - void DeliverRtcp(const uint8_t* packet, size_t length) override; + void DeliverRtcp(const uint8_t* packet, size_t length) + RTC_LOCKS_EXCLUDED(mutex_) override; // Implements webrtc::VCMProtectionCallback. int ProtectionRequest(const FecProtectionParams* delta_params, const FecProtectionParams* key_params, uint32_t* sent_video_rate_bps, uint32_t* sent_nack_rate_bps, - uint32_t* sent_fec_rate_bps) override; + uint32_t* sent_fec_rate_bps) + RTC_LOCKS_EXCLUDED(mutex_) override; // Implements FecControllerOverride. - void SetFecAllowed(bool fec_allowed) override; + void SetFecAllowed(bool fec_allowed) RTC_LOCKS_EXCLUDED(mutex_) override; // Implements EncodedImageCallback. 
// Returns 0 if the packet was routed / sent, -1 otherwise. EncodedImageCallback::Result OnEncodedImage( const EncodedImage& encoded_image, const CodecSpecificInfo* codec_specific_info, - const RTPFragmentationHeader* fragmentation) override; + const RTPFragmentationHeader* fragmentation) + RTC_LOCKS_EXCLUDED(mutex_) override; - void OnBitrateAllocationUpdated( - const VideoBitrateAllocation& bitrate) override; + void OnBitrateAllocationUpdated(const VideoBitrateAllocation& bitrate) + RTC_LOCKS_EXCLUDED(mutex_) override; - void OnTransportOverheadChanged( - size_t transport_overhead_bytes_per_packet) override; - void OnBitrateUpdated(BitrateAllocationUpdate update, int framerate) override; - uint32_t GetPayloadBitrateBps() const override; - uint32_t GetProtectionBitrateBps() const override; - void SetEncodingData(size_t width, - size_t height, - size_t num_temporal_layers) override; + void OnTransportOverheadChanged(size_t transport_overhead_bytes_per_packet) + RTC_LOCKS_EXCLUDED(mutex_) override; + void OnBitrateUpdated(BitrateAllocationUpdate update, int framerate) + RTC_LOCKS_EXCLUDED(mutex_) override; + uint32_t GetPayloadBitrateBps() const RTC_LOCKS_EXCLUDED(mutex_) override; + uint32_t GetProtectionBitrateBps() const RTC_LOCKS_EXCLUDED(mutex_) override; + void SetEncodingData(size_t width, size_t height, size_t num_temporal_layers) + RTC_LOCKS_EXCLUDED(mutex_) override; std::vector GetSentRtpPacketInfos( uint32_t ssrc, - rtc::ArrayView sequence_numbers) const override; + rtc::ArrayView sequence_numbers) const + RTC_LOCKS_EXCLUDED(mutex_) override; // From StreamFeedbackObserver. 
void OnPacketFeedbackVector( - std::vector packet_feedback_vector) override; + std::vector packet_feedback_vector) + RTC_LOCKS_EXCLUDED(mutex_) override; private: - void UpdateModuleSendingState() RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + bool IsActiveLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + void SetActiveModulesLocked(const std::vector active_modules) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + void UpdateModuleSendingState() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void ConfigureProtection(); void ConfigureSsrcs(); void ConfigureRids(); @@ -163,18 +176,19 @@ class RtpVideoSender : public RtpVideoSenderInterface, const bool account_for_packetization_overhead_; const bool use_early_loss_detection_; const bool has_packet_feedback_; + const bool use_deferred_fec_; - // TODO(holmer): Remove crit_ once RtpVideoSender runs on the + // TODO(holmer): Remove mutex_ once RtpVideoSender runs on the // transport task queue. - rtc::CriticalSection crit_; - bool active_ RTC_GUARDED_BY(crit_); + mutable Mutex mutex_; + bool active_ RTC_GUARDED_BY(mutex_); ProcessThread* module_process_thread_; rtc::ThreadChecker module_process_thread_checker_; std::map suspended_ssrcs_; const std::unique_ptr fec_controller_; - bool fec_allowed_ RTC_GUARDED_BY(crit_); + bool fec_allowed_ RTC_GUARDED_BY(mutex_); // Rtp modules are assumed to be sorted in simulcast index order. const std::vector @@ -188,21 +202,21 @@ class RtpVideoSender : public RtpVideoSenderInterface, // rewrite the frame id), therefore |shared_frame_id| has to live in a place // where we are aware of all the different streams. 
int64_t shared_frame_id_ = 0; - std::vector params_ RTC_GUARDED_BY(crit_); + std::vector params_ RTC_GUARDED_BY(mutex_); - size_t transport_overhead_bytes_per_packet_ RTC_GUARDED_BY(crit_); + size_t transport_overhead_bytes_per_packet_ RTC_GUARDED_BY(mutex_); uint32_t protection_bitrate_bps_; uint32_t encoder_target_rate_bps_; - std::vector loss_mask_vector_ RTC_GUARDED_BY(crit_); + std::vector loss_mask_vector_ RTC_GUARDED_BY(mutex_); - std::vector frame_counts_ RTC_GUARDED_BY(crit_); + std::vector frame_counts_ RTC_GUARDED_BY(mutex_); FrameCountObserver* const frame_count_observer_; // Effectively const map from SSRC to RtpRtcp, for all media SSRCs. // This map is set at construction time and never changed, but it's // non-trivial to make it properly const. - std::map ssrc_to_rtp_module_; + std::map ssrc_to_rtp_module_; RTC_DISALLOW_COPY_AND_ASSIGN(RtpVideoSender); }; diff --git a/call/rtp_video_sender_unittest.cc b/call/rtp_video_sender_unittest.cc index a87196111a..8a88a24e3b 100644 --- a/call/rtp_video_sender_unittest.cc +++ b/call/rtp_video_sender_unittest.cc @@ -56,7 +56,7 @@ const int kDependencyDescriptorExtensionId = 8; class MockRtcpIntraFrameObserver : public RtcpIntraFrameObserver { public: - MOCK_METHOD1(OnReceivedIntraFrameRequest, void(uint32_t)); + MOCK_METHOD(void, OnReceivedIntraFrameRequest, (uint32_t), (override)); }; RtpSenderObservers CreateObservers( @@ -361,8 +361,10 @@ TEST(RtpVideoSenderTest, CreateWithPreviousStates) { TEST(RtpVideoSenderTest, FrameCountCallbacks) { class MockFrameCountObserver : public FrameCountObserver { public: - MOCK_METHOD2(FrameCountUpdated, - void(const FrameCounts& frame_counts, uint32_t ssrc)); + MOCK_METHOD(void, + FrameCountUpdated, + (const FrameCounts& frame_counts, uint32_t ssrc), + (override)); } callback; RtpVideoSenderTestFixture test({kSsrc1}, {kRtxSsrc1}, kPayloadType, {}, @@ -676,8 +678,6 @@ TEST(RtpVideoSenderTest, EarlyRetransmits) { } TEST(RtpVideoSenderTest, SupportsDependencyDescriptor) { - 
test::ScopedFieldTrials trials("WebRTC-GenericDescriptor/Enabled/"); - RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}); test.router()->SetActive(true); @@ -705,9 +705,9 @@ TEST(RtpVideoSenderTest, SupportsDependencyDescriptor) { codec_specific.template_structure.emplace(); codec_specific.template_structure->num_decode_targets = 1; codec_specific.template_structure->templates = { - GenericFrameInfo::Builder().T(0).Dtis("S").Build(), - GenericFrameInfo::Builder().T(0).Dtis("S").Fdiffs({2}).Build(), - GenericFrameInfo::Builder().T(1).Dtis("D").Fdiffs({1}).Build(), + FrameDependencyTemplate().T(0).Dtis("S"), + FrameDependencyTemplate().T(0).Dtis("S").FrameDiffs({2}), + FrameDependencyTemplate().T(1).Dtis("D").FrameDiffs({1}), }; // Send two tiny images, mapping to single RTP packets. @@ -742,8 +742,6 @@ TEST(RtpVideoSenderTest, SupportsDependencyDescriptor) { } TEST(RtpVideoSenderTest, SupportsStoppingUsingDependencyDescriptor) { - test::ScopedFieldTrials trials("WebRTC-GenericDescriptor/Enabled/"); - RtpVideoSenderTestFixture test({kSsrc1}, {}, kPayloadType, {}); test.router()->SetActive(true); @@ -771,9 +769,9 @@ TEST(RtpVideoSenderTest, SupportsStoppingUsingDependencyDescriptor) { codec_specific.template_structure.emplace(); codec_specific.template_structure->num_decode_targets = 1; codec_specific.template_structure->templates = { - GenericFrameInfo::Builder().T(0).Dtis("S").Build(), - GenericFrameInfo::Builder().T(0).Dtis("S").Fdiffs({2}).Build(), - GenericFrameInfo::Builder().T(1).Dtis("D").Fdiffs({1}).Build(), + FrameDependencyTemplate().T(0).Dtis("S"), + FrameDependencyTemplate().T(0).Dtis("S").FrameDiffs({2}), + FrameDependencyTemplate().T(1).Dtis("D").FrameDiffs({1}), }; // Send two tiny images, mapping to single RTP packets. 
diff --git a/call/simulated_network.cc b/call/simulated_network.cc index b298fdb4e2..2ed9140fa5 100644 --- a/call/simulated_network.cc +++ b/call/simulated_network.cc @@ -87,7 +87,7 @@ SimulatedNetwork::SimulatedNetwork(Config config, uint64_t random_seed) SimulatedNetwork::~SimulatedNetwork() = default; void SimulatedNetwork::SetConfig(const Config& config) { - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); config_state_.config = config; // Shallow copy of the struct. double prob_loss = config.loss_percent / 100.0; if (config_state_.config.avg_burst_loss_length == -1) { @@ -113,12 +113,12 @@ void SimulatedNetwork::SetConfig(const Config& config) { void SimulatedNetwork::UpdateConfig( std::function config_modifier) { - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); config_modifier(&config_state_.config); } void SimulatedNetwork::PauseTransmissionUntil(int64_t until_us) { - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); config_state_.pause_transmission_until_us = until_us; } @@ -260,7 +260,7 @@ void SimulatedNetwork::UpdateCapacityQueue(ConfigState state, } SimulatedNetwork::ConfigState SimulatedNetwork::GetConfigState() const { - rtc::CritScope crit(&config_lock_); + MutexLock lock(&config_lock_); return config_state_; } diff --git a/call/simulated_network.h b/call/simulated_network.h index 2ff90ec284..b53ecc0ddb 100644 --- a/call/simulated_network.h +++ b/call/simulated_network.h @@ -20,9 +20,9 @@ #include "api/test/simulated_network.h" #include "api/units/data_size.h" #include "api/units/timestamp.h" -#include "rtc_base/critical_section.h" #include "rtc_base/race_checker.h" #include "rtc_base/random.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/thread_checker.h" @@ -96,7 +96,7 @@ class SimulatedNetwork : public SimulatedNetworkInterface { RTC_RUN_ON(&process_checker_); ConfigState GetConfigState() const; - rtc::CriticalSection 
config_lock_; + mutable Mutex config_lock_; // |process_checker_| guards the data structures involved in delay and loss // processes, such as the packet queues. diff --git a/call/ssrc_binding_observer.h b/call/ssrc_binding_observer.h deleted file mode 100644 index ada505610f..0000000000 --- a/call/ssrc_binding_observer.h +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef CALL_SSRC_BINDING_OBSERVER_H_ -#define CALL_SSRC_BINDING_OBSERVER_H_ - -#include - -namespace webrtc { - -// With newer versions of SDP, SSRC is often not explicitly signaled and must -// be learned on the fly. This happens by correlating packet SSRCs with included -// RTP extension headers like MID and RSID, or by receiving information from -// RTCP messages. -// SsrcBindingObservers will be notified when a new binding is learned, which -// can happen during call setup and/or during the call. 
-class SsrcBindingObserver { - public: - virtual ~SsrcBindingObserver() = default; - - virtual void OnSsrcBoundToRsid(const std::string& rsid, uint32_t ssrc) {} - - virtual void OnSsrcBoundToMid(const std::string& mid, uint32_t ssrc) {} - - virtual void OnSsrcBoundToMidRsid(const std::string& mid, - const std::string& rsid, - uint32_t ssrc) {} - - virtual void OnSsrcBoundToPayloadType(uint8_t payload_type, uint32_t ssrc) {} -}; - -} // namespace webrtc - -#endif // CALL_SSRC_BINDING_OBSERVER_H_ diff --git a/call/test/mock_audio_send_stream.h b/call/test/mock_audio_send_stream.h index 489e826d0e..4164dd550e 100644 --- a/call/test/mock_audio_send_stream.h +++ b/call/test/mock_audio_send_stream.h @@ -21,23 +21,26 @@ namespace test { class MockAudioSendStream : public AudioSendStream { public: - MOCK_CONST_METHOD0(GetConfig, const webrtc::AudioSendStream::Config&()); - MOCK_METHOD1(Reconfigure, void(const Config& config)); - MOCK_METHOD0(Start, void()); - MOCK_METHOD0(Stop, void()); + MOCK_METHOD(const webrtc::AudioSendStream::Config&, + GetConfig, + (), + (const, override)); + MOCK_METHOD(void, Reconfigure, (const Config& config), (override)); + MOCK_METHOD(void, Start, (), (override)); + MOCK_METHOD(void, Stop, (), (override)); // GMock doesn't like move-only types, such as std::unique_ptr. 
- virtual void SendAudioData(std::unique_ptr audio_frame) { + void SendAudioData(std::unique_ptr audio_frame) override { SendAudioDataForMock(audio_frame.get()); } - MOCK_METHOD1(SendAudioDataForMock, void(webrtc::AudioFrame* audio_frame)); - MOCK_METHOD4(SendTelephoneEvent, - bool(int payload_type, - int payload_frequency, - int event, - int duration_ms)); - MOCK_METHOD1(SetMuted, void(bool muted)); - MOCK_CONST_METHOD0(GetStats, Stats()); - MOCK_CONST_METHOD1(GetStats, Stats(bool has_remote_tracks)); + MOCK_METHOD(void, SendAudioDataForMock, (webrtc::AudioFrame*)); + MOCK_METHOD( + bool, + SendTelephoneEvent, + (int payload_type, int payload_frequency, int event, int duration_ms), + (override)); + MOCK_METHOD(void, SetMuted, (bool muted), (override)); + MOCK_METHOD(Stats, GetStats, (), (const, override)); + MOCK_METHOD(Stats, GetStats, (bool has_remote_tracks), (const, override)); }; } // namespace test } // namespace webrtc diff --git a/call/test/mock_bitrate_allocator.h b/call/test/mock_bitrate_allocator.h index f00ed79c59..b08916fe4f 100644 --- a/call/test/mock_bitrate_allocator.h +++ b/call/test/mock_bitrate_allocator.h @@ -18,10 +18,15 @@ namespace webrtc { class MockBitrateAllocator : public BitrateAllocatorInterface { public: - MOCK_METHOD2(AddObserver, - void(BitrateAllocatorObserver*, MediaStreamAllocationConfig)); - MOCK_METHOD1(RemoveObserver, void(BitrateAllocatorObserver*)); - MOCK_CONST_METHOD1(GetStartBitrate, int(BitrateAllocatorObserver*)); + MOCK_METHOD(void, + AddObserver, + (BitrateAllocatorObserver*, MediaStreamAllocationConfig), + (override)); + MOCK_METHOD(void, RemoveObserver, (BitrateAllocatorObserver*), (override)); + MOCK_METHOD(int, + GetStartBitrate, + (BitrateAllocatorObserver*), + (const, override)); }; } // namespace webrtc #endif // CALL_TEST_MOCK_BITRATE_ALLOCATOR_H_ diff --git a/call/test/mock_rtp_packet_sink_interface.h b/call/test/mock_rtp_packet_sink_interface.h index adc804f941..e6d14f05c5 100644 --- 
a/call/test/mock_rtp_packet_sink_interface.h +++ b/call/test/mock_rtp_packet_sink_interface.h @@ -17,7 +17,7 @@ namespace webrtc { class MockRtpPacketSink : public RtpPacketSinkInterface { public: - MOCK_METHOD1(OnRtpPacket, void(const RtpPacketReceived&)); + MOCK_METHOD(void, OnRtpPacket, (const RtpPacketReceived&), (override)); }; } // namespace webrtc diff --git a/call/test/mock_rtp_transport_controller_send.h b/call/test/mock_rtp_transport_controller_send.h index afc8400f73..308c087a40 100644 --- a/call/test/mock_rtp_transport_controller_send.h +++ b/call/test/mock_rtp_transport_controller_send.h @@ -32,45 +32,73 @@ namespace webrtc { class MockRtpTransportControllerSend : public RtpTransportControllerSendInterface { public: - MOCK_METHOD10( - CreateRtpVideoSender, - RtpVideoSenderInterface*(std::map, - const std::map&, - const RtpConfig&, - int rtcp_report_interval_ms, - Transport*, - const RtpSenderObservers&, - RtcEventLog*, - std::unique_ptr, - const RtpSenderFrameEncryptionConfig&, - rtc::scoped_refptr)); - MOCK_METHOD1(DestroyRtpVideoSender, void(RtpVideoSenderInterface*)); - MOCK_METHOD0(GetWorkerQueue, rtc::TaskQueue*()); - MOCK_METHOD0(packet_router, PacketRouter*()); - MOCK_METHOD0(network_state_estimate_observer, - NetworkStateEstimateObserver*()); - MOCK_METHOD0(transport_feedback_observer, TransportFeedbackObserver*()); - MOCK_METHOD0(packet_sender, RtpPacketSender*()); - MOCK_METHOD1(SetAllocatedSendBitrateLimits, void(BitrateAllocationLimits)); - MOCK_METHOD1(SetPacingFactor, void(float)); - MOCK_METHOD1(SetQueueTimeLimit, void(int)); - MOCK_METHOD0(GetStreamFeedbackProvider, StreamFeedbackProvider*()); - MOCK_METHOD1(RegisterTargetTransferRateObserver, - void(TargetTransferRateObserver*)); - MOCK_METHOD2(OnNetworkRouteChanged, - void(const std::string&, const rtc::NetworkRoute&)); - MOCK_METHOD1(OnNetworkAvailability, void(bool)); - MOCK_METHOD0(GetBandwidthObserver, RtcpBandwidthObserver*()); - MOCK_CONST_METHOD0(GetPacerQueuingDelayMs, 
int64_t()); - MOCK_CONST_METHOD0(GetFirstPacketTime, absl::optional()); - MOCK_METHOD1(EnablePeriodicAlrProbing, void(bool)); - MOCK_METHOD1(OnSentPacket, void(const rtc::SentPacket&)); - MOCK_METHOD1(SetSdpBitrateParameters, void(const BitrateConstraints&)); - MOCK_METHOD1(SetClientBitratePreferences, void(const BitrateSettings&)); - MOCK_METHOD1(OnTransportOverheadChanged, void(size_t)); - MOCK_METHOD1(AccountForAudioPacketsInPacedSender, void(bool)); - MOCK_METHOD0(IncludeOverheadInPacedSender, void()); - MOCK_METHOD1(OnReceivedPacket, void(const ReceivedPacket&)); + MOCK_METHOD(RtpVideoSenderInterface*, + CreateRtpVideoSender, + ((std::map), + (const std::map&), + const RtpConfig&, + int rtcp_report_interval_ms, + Transport*, + const RtpSenderObservers&, + RtcEventLog*, + std::unique_ptr, + const RtpSenderFrameEncryptionConfig&, + rtc::scoped_refptr), + (override)); + MOCK_METHOD(void, + DestroyRtpVideoSender, + (RtpVideoSenderInterface*), + (override)); + MOCK_METHOD(rtc::TaskQueue*, GetWorkerQueue, (), (override)); + MOCK_METHOD(PacketRouter*, packet_router, (), (override)); + MOCK_METHOD(NetworkStateEstimateObserver*, + network_state_estimate_observer, + (), + (override)); + MOCK_METHOD(TransportFeedbackObserver*, + transport_feedback_observer, + (), + (override)); + MOCK_METHOD(RtpPacketSender*, packet_sender, (), (override)); + MOCK_METHOD(void, + SetAllocatedSendBitrateLimits, + (BitrateAllocationLimits), + (override)); + MOCK_METHOD(void, SetPacingFactor, (float), (override)); + MOCK_METHOD(void, SetQueueTimeLimit, (int), (override)); + MOCK_METHOD(StreamFeedbackProvider*, + GetStreamFeedbackProvider, + (), + (override)); + MOCK_METHOD(void, + RegisterTargetTransferRateObserver, + (TargetTransferRateObserver*), + (override)); + MOCK_METHOD(void, + OnNetworkRouteChanged, + (const std::string&, const rtc::NetworkRoute&), + (override)); + MOCK_METHOD(void, OnNetworkAvailability, (bool), (override)); + MOCK_METHOD(RtcpBandwidthObserver*, 
GetBandwidthObserver, (), (override)); + MOCK_METHOD(int64_t, GetPacerQueuingDelayMs, (), (const, override)); + MOCK_METHOD(absl::optional, + GetFirstPacketTime, + (), + (const, override)); + MOCK_METHOD(void, EnablePeriodicAlrProbing, (bool), (override)); + MOCK_METHOD(void, OnSentPacket, (const rtc::SentPacket&), (override)); + MOCK_METHOD(void, + SetSdpBitrateParameters, + (const BitrateConstraints&), + (override)); + MOCK_METHOD(void, + SetClientBitratePreferences, + (const BitrateSettings&), + (override)); + MOCK_METHOD(void, OnTransportOverheadChanged, (size_t), (override)); + MOCK_METHOD(void, AccountForAudioPacketsInPacedSender, (bool), (override)); + MOCK_METHOD(void, IncludeOverheadInPacedSender, (), (override)); + MOCK_METHOD(void, OnReceivedPacket, (const ReceivedPacket&), (override)); }; } // namespace webrtc #endif // CALL_TEST_MOCK_RTP_TRANSPORT_CONTROLLER_SEND_H_ diff --git a/call/video_send_stream.h b/call/video_send_stream.h index 392c955f47..715d5d73e7 100644 --- a/call/video_send_stream.h +++ b/call/video_send_stream.h @@ -18,10 +18,12 @@ #include #include "absl/types/optional.h" +#include "api/adaptation/resource.h" #include "api/call/transport.h" #include "api/crypto/crypto_options.h" #include "api/frame_transformer_interface.h" #include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" #include "api/video/video_content_type.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" @@ -215,6 +217,15 @@ class VideoSendStream { // When a stream is stopped, it can't receive, process or deliver packets. virtual void Stop() = 0; + // If the resource is overusing, the VideoSendStream will try to reduce + // resolution or frame rate until no resource is overusing. + // TODO(https://crbug.com/webrtc/11565): When the ResourceAdaptationProcessor + // is moved to Call this method could be deleted altogether in favor of + // Call-level APIs only. 
+ virtual void AddAdaptationResource(rtc::scoped_refptr resource) = 0; + virtual std::vector> + GetAdaptationResources() = 0; + virtual void SetSource( rtc::VideoSourceInterface* source, const DegradationPreference& degradation_preference) = 0; diff --git a/common_audio/BUILD.gn b/common_audio/BUILD.gn index 72eed1f003..4077486d87 100644 --- a/common_audio/BUILD.gn +++ b/common_audio/BUILD.gn @@ -56,8 +56,8 @@ rtc_library("common_audio") { "../system_wrappers", "../system_wrappers:cpu_features_api", "third_party/ooura:fft_size_256", - "//third_party/abseil-cpp/absl/types:optional", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] defines = [] diff --git a/common_audio/OWNERS.webrtc b/common_audio/OWNERS.webrtc index 7c9c9af12a..ba1c8b11f4 100644 --- a/common_audio/OWNERS.webrtc +++ b/common_audio/OWNERS.webrtc @@ -1,2 +1,3 @@ henrik.lundin@webrtc.org kwiberg@webrtc.org +peah@webrtc.org diff --git a/common_audio/channel_buffer_unittest.cc b/common_audio/channel_buffer_unittest.cc index 8ec42346d1..a8b64891d6 100644 --- a/common_audio/channel_buffer_unittest.cc +++ b/common_audio/channel_buffer_unittest.cc @@ -53,12 +53,12 @@ TEST(IFChannelBufferTest, SettingNumChannelsOfOneChannelBufferSetsTheOther) { } #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST(ChannelBufferTest, SetNumChannelsDeathTest) { +TEST(ChannelBufferDeathTest, SetNumChannelsDeathTest) { ChannelBuffer chb(kNumFrames, kMono); RTC_EXPECT_DEATH(chb.set_num_channels(kStereo), "num_channels"); } -TEST(IFChannelBufferTest, SetNumChannelsDeathTest) { +TEST(IFChannelBufferDeathTest, SetNumChannelsDeathTest) { IFChannelBuffer ifchb(kNumFrames, kMono); RTC_EXPECT_DEATH(ifchb.ibuf()->set_num_channels(kStereo), "num_channels"); } diff --git a/common_audio/mocks/mock_smoothing_filter.h b/common_audio/mocks/mock_smoothing_filter.h index 712049fa6a..9df49dd11a 100644 --- a/common_audio/mocks/mock_smoothing_filter.h +++ b/common_audio/mocks/mock_smoothing_filter.h @@ 
-18,9 +18,9 @@ namespace webrtc { class MockSmoothingFilter : public SmoothingFilter { public: - MOCK_METHOD1(AddSample, void(float)); - MOCK_METHOD0(GetAverage, absl::optional()); - MOCK_METHOD1(SetTimeConstantMs, bool(int)); + MOCK_METHOD(void, AddSample, (float), (override)); + MOCK_METHOD(absl::optional, GetAverage, (), (override)); + MOCK_METHOD(bool, SetTimeConstantMs, (int), (override)); }; } // namespace webrtc diff --git a/common_audio/resampler/include/resampler.h b/common_audio/resampler/include/resampler.h index 04c487b331..41940f9a12 100644 --- a/common_audio/resampler/include/resampler.h +++ b/common_audio/resampler/include/resampler.h @@ -90,8 +90,8 @@ class Resampler { size_t num_channels_; // Extra instance for stereo - Resampler* slave_left_; - Resampler* slave_right_; + Resampler* helper_left_; + Resampler* helper_right_; }; } // namespace webrtc diff --git a/common_audio/resampler/push_resampler_unittest.cc b/common_audio/resampler/push_resampler_unittest.cc index 61b9725b3a..4724833fbb 100644 --- a/common_audio/resampler/push_resampler_unittest.cc +++ b/common_audio/resampler/push_resampler_unittest.cc @@ -31,19 +31,19 @@ TEST(PushResamplerTest, VerifiesInputParameters) { } #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) -TEST(PushResamplerTest, VerifiesBadInputParameters1) { +TEST(PushResamplerDeathTest, VerifiesBadInputParameters1) { PushResampler resampler; RTC_EXPECT_DEATH(resampler.InitializeIfNeeded(-1, 16000, 1), "src_sample_rate_hz"); } -TEST(PushResamplerTest, VerifiesBadInputParameters2) { +TEST(PushResamplerDeathTest, VerifiesBadInputParameters2) { PushResampler resampler; RTC_EXPECT_DEATH(resampler.InitializeIfNeeded(16000, -1, 1), "dst_sample_rate_hz"); } -TEST(PushResamplerTest, VerifiesBadInputParameters3) { +TEST(PushResamplerDeathTest, VerifiesBadInputParameters3) { PushResampler resampler; RTC_EXPECT_DEATH(resampler.InitializeIfNeeded(16000, 16000, 0), "num_channels"); diff --git 
a/common_audio/resampler/resampler.cc b/common_audio/resampler/resampler.cc index ce38ef56de..ccfed5a014 100644 --- a/common_audio/resampler/resampler.cc +++ b/common_audio/resampler/resampler.cc @@ -37,8 +37,8 @@ Resampler::Resampler() my_out_frequency_khz_(0), my_mode_(kResamplerMode1To1), num_channels_(0), - slave_left_(nullptr), - slave_right_(nullptr) {} + helper_left_(nullptr), + helper_right_(nullptr) {} Resampler::Resampler(int inFreq, int outFreq, size_t num_channels) : Resampler() { @@ -61,11 +61,11 @@ Resampler::~Resampler() { if (out_buffer_) { free(out_buffer_); } - if (slave_left_) { - delete slave_left_; + if (helper_left_) { + delete helper_left_; } - if (slave_right_) { - delete slave_right_; + if (helper_right_) { + delete helper_right_; } } @@ -120,13 +120,13 @@ int Resampler::Reset(int inFreq, int outFreq, size_t num_channels) { free(out_buffer_); out_buffer_ = nullptr; } - if (slave_left_) { - delete slave_left_; - slave_left_ = nullptr; + if (helper_left_) { + delete helper_left_; + helper_left_ = nullptr; } - if (slave_right_) { - delete slave_right_; - slave_right_ = nullptr; + if (helper_right_) { + delete helper_right_; + helper_right_ = nullptr; } in_buffer_size_ = 0; @@ -140,8 +140,8 @@ int Resampler::Reset(int inFreq, int outFreq, size_t num_channels) { if (num_channels_ == 2) { // Create two mono resamplers. - slave_left_ = new Resampler(inFreq, outFreq, 1); - slave_right_ = new Resampler(inFreq, outFreq, 1); + helper_left_ = new Resampler(inFreq, outFreq, 1); + helper_right_ = new Resampler(inFreq, outFreq, 1); } // Now create the states we need. 
@@ -401,7 +401,7 @@ int Resampler::Push(const int16_t* samplesIn, size_t maxLen, size_t& outLen) { if (num_channels_ == 2) { - // Split up the signal and call the slave object for each channel + // Split up the signal and call the helper object for each channel int16_t* left = static_cast(malloc(lengthIn * sizeof(int16_t) / 2)); int16_t* right = @@ -422,10 +422,10 @@ int Resampler::Push(const int16_t* samplesIn, size_t actualOutLen_left = 0; size_t actualOutLen_right = 0; // Do resampling for right channel - res |= slave_left_->Push(left, lengthIn, out_left, maxLen / 2, - actualOutLen_left); - res |= slave_right_->Push(right, lengthIn, out_right, maxLen / 2, - actualOutLen_right); + res |= helper_left_->Push(left, lengthIn, out_left, maxLen / 2, + actualOutLen_left); + res |= helper_right_->Push(right, lengthIn, out_right, maxLen / 2, + actualOutLen_right); if (res || (actualOutLen_left != actualOutLen_right)) { free(left); free(right); diff --git a/common_audio/resampler/sinc_resampler_unittest.cc b/common_audio/resampler/sinc_resampler_unittest.cc index 7bcd7f146e..b067b23b88 100644 --- a/common_audio/resampler/sinc_resampler_unittest.cc +++ b/common_audio/resampler/sinc_resampler_unittest.cc @@ -40,7 +40,7 @@ static const double kKernelInterpolationFactor = 0.5; // Helper class to ensure ChunkedResample() functions properly. 
class MockSource : public SincResamplerCallback { public: - MOCK_METHOD2(Run, void(size_t frames, float* destination)); + MOCK_METHOD(void, Run, (size_t frames, float* destination), (override)); }; ACTION(ClearBuffer) { diff --git a/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc b/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc index 2918374bba..6b6d6f1fd7 100644 --- a/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc +++ b/common_audio/third_party/ooura/fft_size_128/ooura_fft.cc @@ -313,6 +313,14 @@ static void rftbsub_128_C(float* a) { } // namespace +OouraFft::OouraFft(bool sse2_available) { +#if defined(WEBRTC_ARCH_X86_FAMILY) + use_sse2_ = sse2_available; +#else + use_sse2_ = false; +#endif +} + OouraFft::OouraFft() { #if defined(WEBRTC_ARCH_X86_FAMILY) use_sse2_ = (WebRtc_GetCPUInfo(kSSE2) != 0); diff --git a/common_audio/third_party/ooura/fft_size_128/ooura_fft.h b/common_audio/third_party/ooura/fft_size_128/ooura_fft.h index 0cdd6aa66f..8273dfe58e 100644 --- a/common_audio/third_party/ooura/fft_size_128/ooura_fft.h +++ b/common_audio/third_party/ooura/fft_size_128/ooura_fft.h @@ -38,6 +38,10 @@ void rftbsub_128_neon(float* a); class OouraFft { public: + // Ctor allowing the availability of SSE2 support to be specified. + explicit OouraFft(bool sse2_available); + + // Deprecated: This Ctor will soon be removed. 
OouraFft(); ~OouraFft(); void Fft(float* a) const; diff --git a/common_audio/vad/mock/mock_vad.h b/common_audio/vad/mock/mock_vad.h index afe80ef5e1..5a554ce1f9 100644 --- a/common_audio/vad/mock/mock_vad.h +++ b/common_audio/vad/mock/mock_vad.h @@ -18,14 +18,14 @@ namespace webrtc { class MockVad : public Vad { public: - virtual ~MockVad() { Die(); } - MOCK_METHOD0(Die, void()); + ~MockVad() override { Die(); } + MOCK_METHOD(void, Die, ()); - MOCK_METHOD3(VoiceActivity, - enum Activity(const int16_t* audio, - size_t num_samples, - int sample_rate_hz)); - MOCK_METHOD0(Reset, void()); + MOCK_METHOD(enum Activity, + VoiceActivity, + (const int16_t* audio, size_t num_samples, int sample_rate_hz), + (override)); + MOCK_METHOD(void, Reset, (), (override)); }; } // namespace webrtc diff --git a/common_video/BUILD.gn b/common_video/BUILD.gn index ddf4c2d495..8c25eb0953 100644 --- a/common_video/BUILD.gn +++ b/common_video/BUILD.gn @@ -58,11 +58,12 @@ rtc_library("common_video") { "../rtc_base:checks", "../rtc_base:rtc_task_queue", "../rtc_base:safe_minmax", + "../rtc_base/synchronization:mutex", "../rtc_base/system:rtc_export", "../system_wrappers:metrics", - "//third_party/abseil-cpp/absl/types:optional", "//third_party/libyuv", ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } if (rtc_include_tests) { @@ -113,6 +114,7 @@ if (rtc_include_tests) { "../test:test_support", "../test:video_test_common", "//testing/gtest", + "//third_party/abseil-cpp/absl/types:optional", "//third_party/libyuv", ] diff --git a/common_video/bitrate_adjuster.cc b/common_video/bitrate_adjuster.cc index ca52ed9e69..c53c3a02f6 100644 --- a/common_video/bitrate_adjuster.cc +++ b/common_video/bitrate_adjuster.cc @@ -39,7 +39,7 @@ BitrateAdjuster::BitrateAdjuster(float min_adjusted_bitrate_pct, } void BitrateAdjuster::SetTargetBitrateBps(uint32_t bitrate_bps) { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); // If the change in target bitrate is large, update the adjusted 
bitrate // immediately since it's likely we have gained or lost a sizeable amount of // bandwidth and we'll want to respond quickly. @@ -58,22 +58,22 @@ void BitrateAdjuster::SetTargetBitrateBps(uint32_t bitrate_bps) { } uint32_t BitrateAdjuster::GetTargetBitrateBps() const { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); return target_bitrate_bps_; } uint32_t BitrateAdjuster::GetAdjustedBitrateBps() const { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); return adjusted_bitrate_bps_; } absl::optional BitrateAdjuster::GetEstimatedBitrateBps() { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); return bitrate_tracker_.Rate(rtc::TimeMillis()); } void BitrateAdjuster::Update(size_t frame_size) { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); uint32_t current_time_ms = rtc::TimeMillis(); bitrate_tracker_.Update(frame_size, current_time_ms); UpdateBitrate(current_time_ms); @@ -100,7 +100,7 @@ uint32_t BitrateAdjuster::GetMaxAdjustedBitrateBps() const { // Only safe to call this after Update calls have stopped void BitrateAdjuster::Reset() { - rtc::CritScope cs(&crit_); + MutexLock lock(&mutex_); target_bitrate_bps_ = 0; adjusted_bitrate_bps_ = 0; last_adjusted_target_bitrate_bps_ = 0; diff --git a/common_video/generic_frame_descriptor/BUILD.gn b/common_video/generic_frame_descriptor/BUILD.gn index 05a4e2396c..ab97e887f2 100644 --- a/common_video/generic_frame_descriptor/BUILD.gn +++ b/common_video/generic_frame_descriptor/BUILD.gn @@ -19,6 +19,8 @@ rtc_library("generic_frame_descriptor") { "../../api/transport/rtp:dependency_descriptor", "../../api/video:video_codec_constants", "../../rtc_base:checks", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/container:inlined_vector", "//third_party/abseil-cpp/absl/strings", "//third_party/abseil-cpp/absl/types:optional", diff --git a/common_video/generic_frame_descriptor/generic_frame_info.cc b/common_video/generic_frame_descriptor/generic_frame_info.cc index ca61072799..af66bbaf67 100644 --- 
a/common_video/generic_frame_descriptor/generic_frame_info.cc +++ b/common_video/generic_frame_descriptor/generic_frame_info.cc @@ -15,33 +15,6 @@ namespace webrtc { -absl::InlinedVector -GenericFrameInfo::DecodeTargetInfo(absl::string_view indication_symbols) { - absl::InlinedVector decode_targets; - for (char symbol : indication_symbols) { - DecodeTargetIndication indication; - switch (symbol) { - case '-': - indication = DecodeTargetIndication::kNotPresent; - break; - case 'D': - indication = DecodeTargetIndication::kDiscardable; - break; - case 'R': - indication = DecodeTargetIndication::kRequired; - break; - case 'S': - indication = DecodeTargetIndication::kSwitch; - break; - default: - RTC_NOTREACHED(); - } - decode_targets.push_back(indication); - } - - return decode_targets; -} - GenericFrameInfo::GenericFrameInfo() = default; GenericFrameInfo::GenericFrameInfo(const GenericFrameInfo&) = default; GenericFrameInfo::~GenericFrameInfo() = default; @@ -65,14 +38,8 @@ GenericFrameInfo::Builder& GenericFrameInfo::Builder::S(int spatial_id) { GenericFrameInfo::Builder& GenericFrameInfo::Builder::Dtis( absl::string_view indication_symbols) { - info_.decode_target_indications = DecodeTargetInfo(indication_symbols); - return *this; -} - -GenericFrameInfo::Builder& GenericFrameInfo::Builder::Fdiffs( - std::initializer_list frame_diffs) { - info_.frame_diffs.insert(info_.frame_diffs.end(), frame_diffs.begin(), - frame_diffs.end()); + info_.decode_target_indications = + webrtc_impl::StringToDecodeTargetIndications(indication_symbols); return *this; } diff --git a/common_video/generic_frame_descriptor/generic_frame_info.h b/common_video/generic_frame_descriptor/generic_frame_info.h index b602ee06a6..19f413b5d4 100644 --- a/common_video/generic_frame_descriptor/generic_frame_info.h +++ b/common_video/generic_frame_descriptor/generic_frame_info.h @@ -11,7 +11,9 @@ #ifndef COMMON_VIDEO_GENERIC_FRAME_DESCRIPTOR_GENERIC_FRAME_INFO_H_ #define 
COMMON_VIDEO_GENERIC_FRAME_DESCRIPTOR_GENERIC_FRAME_INFO_H_ +#include #include +#include #include "absl/container/inlined_vector.h" #include "absl/strings/string_view.h" @@ -31,17 +33,15 @@ struct CodecBufferUsage { }; struct GenericFrameInfo : public FrameDependencyTemplate { - static absl::InlinedVector DecodeTargetInfo( - absl::string_view indication_symbols); - class Builder; GenericFrameInfo(); GenericFrameInfo(const GenericFrameInfo&); ~GenericFrameInfo(); - int64_t frame_id = 0; absl::InlinedVector encoder_buffers; + std::vector part_of_chain; + std::bitset<32> active_decode_targets = ~uint32_t{0}; }; class GenericFrameInfo::Builder { @@ -53,7 +53,6 @@ class GenericFrameInfo::Builder { Builder& T(int temporal_id); Builder& S(int spatial_id); Builder& Dtis(absl::string_view indication_symbols); - Builder& Fdiffs(std::initializer_list frame_diffs); private: GenericFrameInfo info_; diff --git a/common_video/include/bitrate_adjuster.h b/common_video/include/bitrate_adjuster.h index aea1872216..4b208307a1 100644 --- a/common_video/include/bitrate_adjuster.h +++ b/common_video/include/bitrate_adjuster.h @@ -15,8 +15,8 @@ #include #include "absl/types/optional.h" -#include "rtc_base/critical_section.h" #include "rtc_base/rate_statistics.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/thread_annotations.h" @@ -60,29 +60,31 @@ class RTC_EXPORT BitrateAdjuster { bool IsWithinTolerance(uint32_t bitrate_bps, uint32_t target_bitrate_bps); // Returns smallest possible adjusted value. - uint32_t GetMinAdjustedBitrateBps() const RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + uint32_t GetMinAdjustedBitrateBps() const + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); // Returns largest possible adjusted value. 
- uint32_t GetMaxAdjustedBitrateBps() const RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + uint32_t GetMaxAdjustedBitrateBps() const + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void Reset(); void UpdateBitrate(uint32_t current_time_ms) - RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_); + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - rtc::CriticalSection crit_; + mutable Mutex mutex_; const float min_adjusted_bitrate_pct_; const float max_adjusted_bitrate_pct_; // The bitrate we want. - volatile uint32_t target_bitrate_bps_ RTC_GUARDED_BY(crit_); + volatile uint32_t target_bitrate_bps_ RTC_GUARDED_BY(mutex_); // The bitrate we use to get what we want. - volatile uint32_t adjusted_bitrate_bps_ RTC_GUARDED_BY(crit_); + volatile uint32_t adjusted_bitrate_bps_ RTC_GUARDED_BY(mutex_); // The target bitrate that the adjusted bitrate was computed from. - volatile uint32_t last_adjusted_target_bitrate_bps_ RTC_GUARDED_BY(crit_); + volatile uint32_t last_adjusted_target_bitrate_bps_ RTC_GUARDED_BY(mutex_); // Used to estimate bitrate. - RateStatistics bitrate_tracker_ RTC_GUARDED_BY(crit_); + RateStatistics bitrate_tracker_ RTC_GUARDED_BY(mutex_); // The last time we tried to adjust the bitrate. - uint32_t last_bitrate_update_time_ms_ RTC_GUARDED_BY(crit_); + uint32_t last_bitrate_update_time_ms_ RTC_GUARDED_BY(mutex_); // The number of frames since the last time we tried to adjust the bitrate. - uint32_t frames_since_last_update_ RTC_GUARDED_BY(crit_); + uint32_t frames_since_last_update_ RTC_GUARDED_BY(mutex_); }; } // namespace webrtc diff --git a/docs/faq.md b/docs/faq.md index ed9143812a..9f31f31ee4 100644 --- a/docs/faq.md +++ b/docs/faq.md @@ -231,7 +231,7 @@ Yes, you still have the right to redistribute and you still have a patent license for Google's patents that cover the code that Google released. -### What if my competitor uses the code and brings patent litigation against me for something unrelated to the code. Does he or she still have a patent license? 
+### What if my competitor uses the code and brings patent litigation against me for something unrelated to the code. Do they still have a patent license? -Yes, he/she still has the right to redistribute and he/she still has a patent +Yes, they still have the right to redistribute and they still have a patent license for Google's patents that cover the code that Google released. diff --git a/docs/native-code/android/index.md b/docs/native-code/android/index.md index 07491fdc29..82078210d1 100644 --- a/docs/native-code/android/index.md +++ b/docs/native-code/android/index.md @@ -23,6 +23,7 @@ build config. See [Development][webrtc-development] for instructions on how to update the code, building etc. + ## Compiling 1. Generate projects using GN. @@ -44,9 +45,12 @@ to enable managing multiple configurations in parallel. 2. Compile using: ``` -$ ninja -C out/Debug +$ autoninja -C out/Debug ``` +(To list all available targets, run `autoninja -C out/Debug -t targets all`.) + + ## Using the Bundled Android SDK/NDK In order to use the Android SDK and NDK that is bundled in @@ -59,6 +63,7 @@ $ . build/android/envsetup.sh Then you'll have `adb` and all the other Android tools in your `PATH`. + ## Running the AppRTCMobile App AppRTCMobile is an Android application using WebRTC Native APIs via JNI (JNI @@ -77,7 +82,7 @@ https://bugs.webrtc.org/9282* generating the build files using GN): ``` -$ ninja -C out/Debug AppRTCMobile +$ autoninja -C out/Debug AppRTCMobile ``` 2. Generate the project files: @@ -97,51 +102,54 @@ Android Studio's SDK. When asked whether to use the Gradle wrapper, press AppRTCMobile should now start on the device. If you do any changes to the C++ code, you have to compile the project using -ninja after the changes (see step 1). +autoninja after the changes (see step 1). *Note: Only "arm" is supported as the target_cpu when using Android Studio. This still allows you to run the application on 64-bit ARM devices. 
x86-based devices are not supported right now.* -## Running WebRTC Native Tests on an Android Device +## Running Tests on an Android Device To build APKs with the WebRTC native tests, follow these instructions. -1. Ensure you have an Android device set in Developer mode connected via -USB. +1. Ensure you have an Android device set in Developer mode connected via USB. -2. Compile as described in the section above. - -3. To see which tests are available: look in `out/Debug/bin`. - -4. Run a test on your device: +2. Compile unit tests and/or instrumentation tests: ``` -$ out/Debug/bin/run_modules_unittests +$ autoninja -C out/Debug android_instrumentation_test_apk +$ autoninja -C out/Debug rtc_unittests ``` -5. If you want to limit to a subset of tests, use the `--gtest_filter flag`, e.g. +3. You can find the generated test binaries in `out/Debug/bin`. To run instrumentation tests: ``` -$ out/Debug/bin/run_modules_unittests \ - --gtest_filter=RtpRtcpAPITest.SSRC:RtpRtcpRtcpTest.* +$ out/Debug/bin/run_android_instrumentation_test_apk -v ``` -6. **NOTICE:** The first time you run a test, you must accept a dialog on +To run unit tests: + +``` +$ out/Debug/bin/run_rtc_unittests -v +``` + +Show verbose output with `-v` and filter tests with `--gtest-filter=SomeTest.*`. For example: + +``` +$ out/Debug/bin/run_android_instrumentation_test_apk -v \ + --gtest_filter=VideoFrameBufferTest.* +``` + +For a full list of command line arguments, use `--help`. + +5. **NOTICE:** The first time you run a test, you must accept a dialog on the device! If want to run Release builds instead; pass `is_debug=false` to GN (and preferably generate the projects files into a directory like `out/Release`). Then use the scripts generated in `out/Release/bin` instead. 
- -## Running WebRTC Instrumentation Tests on an Android Device - -The instrumentation tests (like AppRTCMobileTest and -libjingle_peerconnection_android_unittest) gets scripts generated in the same -location as the native tests described in the previous section. - [webrtc-prerequisite-sw]: https://webrtc.googlesource.com/src/+/refs/heads/master/docs/native-code/development/prerequisite-sw/index.md [webrtc-jni-doc]: https://webrtc.googlesource.com/src/+/master/sdk/android/README [apprtc-doc]: https://webrtc.googlesource.com/src/+/master/examples/androidapp/README diff --git a/examples/BUILD.gn b/examples/BUILD.gn index 4d6d14d0d9..f0c5fa8be1 100644 --- a/examples/BUILD.gn +++ b/examples/BUILD.gn @@ -27,6 +27,7 @@ group("examples") { ":AppRTCMobile", ":AppRTCMobile_test_apk", ":libwebrtc_unity", + "androidvoip", ] # TODO(sakal): We include some code from the tests. Remove this dependency @@ -87,7 +88,7 @@ if (is_android) { testonly = true apk_name = "AppRTCMobile" android_manifest = "androidapp/AndroidManifest.xml" - min_sdk_version = 16 + min_sdk_version = 21 target_sdk_version = 29 deps = [ @@ -101,7 +102,7 @@ if (is_android) { rtc_android_library("AppRTCMobile_javalib") { testonly = true - android_manifest_for_lint = "androidapp/AndroidManifest.xml" + android_manifest = "androidapp/AndroidManifest.xml" sources = [ "androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java", @@ -180,10 +181,10 @@ if (is_android) { "androidapp/res/layout/fragment_call.xml", "androidapp/res/layout/fragment_hud.xml", "androidapp/res/menu/connect_menu.xml", - "androidapp/res/values/arrays.xml", - "androidapp/res/values/strings.xml", "androidapp/res/values-v17/styles.xml", "androidapp/res/values-v21/styles.xml", + "androidapp/res/values/arrays.xml", + "androidapp/res/values/strings.xml", "androidapp/res/xml/preferences.xml", ] custom_package = "org.appspot.apprtc" @@ -196,7 +197,7 @@ if (is_android) { rtc_instrumentation_test_apk("AppRTCMobile_test_apk") { apk_name = "AppRTCMobileTest" 
android_manifest = "androidtests/AndroidManifest.xml" - min_sdk_version = 16 + min_sdk_version = 21 target_sdk_version = 21 sources = [ @@ -207,7 +208,11 @@ if (is_android) { deps = [ ":AppRTCMobile_javalib", + "../sdk/android:base_java", + "../sdk/android:camera_java", "../sdk/android:libjingle_peerconnection_java", + "../sdk/android:peerconnection_java", + "../sdk/android:video_api_java", "../sdk/android:video_java", "//third_party/android_support_test_runner:runner_java", "//third_party/junit", @@ -303,7 +308,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) { } else { deps += [ "../sdk:mac_framework_objc+link" ] } - libs = [ + frameworks = [ "CoreMedia.framework", "QuartzCore.framework", ] @@ -345,7 +350,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) { "../sdk:ios_framework_bundle", ] - libs = [ "AVFoundation.framework" ] + frameworks = [ "AVFoundation.framework" ] } ios_app_bundle("AppRTCMobile") { @@ -379,14 +384,18 @@ if (is_ios || (is_mac && target_cpu != "x86")) { if (rtc_apprtcmobile_broadcast_extension) { bundle_data("AppRTCMobileBroadcastUpload_extension_bundle") { testonly = true - public_deps = [ ":AppRTCMobileBroadcastUpload" ] # no-presubmit-check TODO(webrtc:8603) + public_deps = [ # no-presubmit-check TODO(webrtc:8603) + ":AppRTCMobileBroadcastUpload", # prevent code format + ] sources = [ "$root_out_dir/AppRTCMobileBroadcastUpload.appex" ] outputs = [ "{{bundle_contents_dir}}/Plugins/{{source_file_part}}" ] } bundle_data("AppRTCMobileBroadcastSetupUI_extension_bundle") { testonly = true - public_deps = [ ":AppRTCMobileBroadcastSetupUI" ] # no-presubmit-check TODO(webrtc:8603) + public_deps = [ # no-presubmit-check TODO(webrtc:8603) + ":AppRTCMobileBroadcastSetupUI", # prevent code format + ] sources = [ "$root_out_dir/AppRTCMobileBroadcastSetupUI.appex" ] outputs = [ "{{bundle_contents_dir}}/Plugins/{{source_file_part}}" ] } @@ -404,7 +413,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) { "../sdk:ios_framework_bundle", ] - libs = [ 
"ReplayKit.framework" ] + frameworks = [ "ReplayKit.framework" ] } ios_appex_bundle("AppRTCMobileBroadcastUpload") { @@ -428,7 +437,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) { info_plist = "objc/AppRTCMobile/ios/broadcast_extension/BroadcastSetupUIInfo.plist" - libs = [ "ReplayKit.framework" ] + frameworks = [ "ReplayKit.framework" ] deps = [ ":AppRTCMobile_ios_bundle_data" ] } @@ -484,6 +493,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) { "../modules/audio_processing:api", "../pc:libjingle_peerconnection", "../rtc_base", + "../rtc_base/synchronization:mutex", "../sdk:base_objc", "../sdk:default_codec_factory_objc", "../sdk:helpers_objc", @@ -542,7 +552,7 @@ if (is_ios || (is_mac && target_cpu != "x86")) { info_plist = "objc/AppRTCMobile/mac/Info.plist" - libs = [ "AppKit.framework" ] + frameworks = [ "AppKit.framework" ] ldflags = [ "-rpath", @@ -587,10 +597,10 @@ if (is_ios || (is_mac && target_cpu != "x86")) { configs += [ ":socketrocket_warning_config" ] public_configs = [ ":socketrocket_include_config" ] - libs = [ + libs = [ "icucore" ] + frameworks = [ "CFNetwork.framework", "Security.framework", - "icucore", ] } @@ -829,6 +839,7 @@ if (is_android) { "../sdk/android:camera_java", "../sdk/android:libjingle_peerconnection_java", "../sdk/android:peerconnection_java", + "../sdk/android:video_api_java", "../sdk/android:video_java", "//third_party/android_deps:com_android_support_support_annotations_java", ] @@ -859,6 +870,7 @@ if (is_android) { deps = [ ":AppRTCMobile_javalib", + "../sdk/android:peerconnection_java", "//base:base_java_test_support", "//third_party/google-truth:google_truth_java", ] diff --git a/examples/aarproject/app/build.gradle b/examples/aarproject/app/build.gradle index dde0707ace..37499d468b 100644 --- a/examples/aarproject/app/build.gradle +++ b/examples/aarproject/app/build.gradle @@ -5,7 +5,7 @@ android { buildToolsVersion "27.0.1" defaultConfig { applicationId "org.appspot.apprtc" - minSdkVersion 16 + minSdkVersion 21 
targetSdkVersion 21 versionCode 1 versionName "1.0" diff --git a/examples/androidapp/AndroidManifest.xml b/examples/androidapp/AndroidManifest.xml index 8a9035e782..c4e1e797d0 100644 --- a/examples/androidapp/AndroidManifest.xml +++ b/examples/androidapp/AndroidManifest.xml @@ -8,7 +8,7 @@ - + diff --git a/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java b/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java index 7ae3d838dd..c32ab964ad 100644 --- a/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java +++ b/examples/androidapp/src/org/appspot/apprtc/AppRTCAudioManager.java @@ -185,8 +185,8 @@ public class AppRTCAudioManager { // Note that, the sensor will not be active until start() has been called. proximitySensor = AppRTCProximitySensor.create(context, // This method will be called each time a state change is detected. - // Example: user holds his hand over the device (closer than ~5 cm), - // or removes his hand from the device. + // Example: user holds their hand over the device (closer than ~5 cm), + // or removes their hand from the device. 
this ::onProximitySensorChangedState); Log.d(TAG, "defaultAudioDevice: " + defaultAudioDevice); diff --git a/examples/androidnativeapi/AndroidManifest.xml b/examples/androidnativeapi/AndroidManifest.xml index f10f55a1b6..9257c4132e 100644 --- a/examples/androidnativeapi/AndroidManifest.xml +++ b/examples/androidnativeapi/AndroidManifest.xml @@ -2,7 +2,7 @@ - + diff --git a/examples/androidnativeapi/BUILD.gn b/examples/androidnativeapi/BUILD.gn index 9c114e859c..9253c0bcd9 100644 --- a/examples/androidnativeapi/BUILD.gn +++ b/examples/androidnativeapi/BUILD.gn @@ -5,7 +5,7 @@ if (is_android) { testonly = true apk_name = "androidnativeapi" android_manifest = "AndroidManifest.xml" - min_sdk_version = 19 + min_sdk_version = 21 target_sdk_version = 27 sources = [ @@ -16,6 +16,7 @@ if (is_android) { deps = [ ":resources", "//modules/audio_device:audio_device_java", + "//rtc_base:base_java", "//sdk/android:camera_java", "//sdk/android:surfaceviewrenderer_java", "//sdk/android:video_api_java", @@ -47,6 +48,7 @@ if (is_android) { deps = [ ":generated_jni", "../../api:scoped_refptr", + "../../rtc_base/synchronization:mutex", "//api:libjingle_peerconnection_api", "//api/rtc_event_log:rtc_event_log_factory", "//api/task_queue:default_task_queue_factory", diff --git a/examples/androidnativeapi/jni/android_call_client.cc b/examples/androidnativeapi/jni/android_call_client.cc index 03968335d9..f0b060632d 100644 --- a/examples/androidnativeapi/jni/android_call_client.cc +++ b/examples/androidnativeapi/jni/android_call_client.cc @@ -43,7 +43,7 @@ class AndroidCallClient::PCObserver : public webrtc::PeerConnectionObserver { void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override; private: - const AndroidCallClient* client_; + AndroidCallClient* const client_; }; namespace { @@ -88,7 +88,7 @@ void AndroidCallClient::Call(JNIEnv* env, const webrtc::JavaRef& remote_sink) { RTC_DCHECK_RUN_ON(&thread_checker_); - rtc::CritScope lock(&pc_mutex_); + webrtc::MutexLock 
lock(&pc_mutex_); if (call_started_) { RTC_LOG(LS_WARNING) << "Call already started."; return; @@ -112,7 +112,7 @@ void AndroidCallClient::Hangup(JNIEnv* env) { call_started_ = false; { - rtc::CritScope lock(&pc_mutex_); + webrtc::MutexLock lock(&pc_mutex_); if (pc_ != nullptr) { pc_->Close(); pc_ = nullptr; @@ -174,7 +174,7 @@ void AndroidCallClient::CreatePeerConnectionFactory() { } void AndroidCallClient::CreatePeerConnection() { - rtc::CritScope lock(&pc_mutex_); + webrtc::MutexLock lock(&pc_mutex_); webrtc::PeerConnectionInterface::RTCConfiguration config; config.sdp_semantics = webrtc::SdpSemantics::kUnifiedPlan; // DTLS SRTP has to be disabled for loopback to work. @@ -205,7 +205,7 @@ void AndroidCallClient::CreatePeerConnection() { } void AndroidCallClient::Connect() { - rtc::CritScope lock(&pc_mutex_); + webrtc::MutexLock lock(&pc_mutex_); pc_->CreateOffer(new rtc::RefCountedObject(pc_), webrtc::PeerConnectionInterface::RTCOfferAnswerOptions()); } @@ -240,7 +240,7 @@ void AndroidCallClient::PCObserver::OnIceGatheringChange( void AndroidCallClient::PCObserver::OnIceCandidate( const webrtc::IceCandidateInterface* candidate) { RTC_LOG(LS_INFO) << "OnIceCandidate: " << candidate->server_url(); - rtc::CritScope lock(&client_->pc_mutex_); + webrtc::MutexLock lock(&client_->pc_mutex_); RTC_DCHECK(client_->pc_ != nullptr); client_->pc_->AddIceCandidate(candidate); } diff --git a/examples/androidnativeapi/jni/android_call_client.h b/examples/androidnativeapi/jni/android_call_client.h index 13992f5960..f3f61a4695 100644 --- a/examples/androidnativeapi/jni/android_call_client.h +++ b/examples/androidnativeapi/jni/android_call_client.h @@ -18,7 +18,7 @@ #include "api/peer_connection_interface.h" #include "api/scoped_refptr.h" -#include "rtc_base/critical_section.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_checker.h" #include "sdk/android/native_api/jni/scoped_java_ref.h" #include "sdk/android/native_api/video/video_source.h" @@ -66,7 
+66,7 @@ class AndroidCallClient { rtc::scoped_refptr video_source_ RTC_GUARDED_BY(thread_checker_); - rtc::CriticalSection pc_mutex_; + webrtc::Mutex pc_mutex_; rtc::scoped_refptr pc_ RTC_GUARDED_BY(pc_mutex_); }; diff --git a/examples/androidtests/AndroidManifest.xml b/examples/androidtests/AndroidManifest.xml index dae2e980a6..8e995366dc 100644 --- a/examples/androidtests/AndroidManifest.xml +++ b/examples/androidtests/AndroidManifest.xml @@ -14,7 +14,7 @@ package="org.appspot.apprtc.test"> - + diff --git a/examples/androidvoip/AndroidManifest.xml b/examples/androidvoip/AndroidManifest.xml new file mode 100644 index 0000000000..106f71171d --- /dev/null +++ b/examples/androidvoip/AndroidManifest.xml @@ -0,0 +1,38 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/examples/androidvoip/BUILD.gn b/examples/androidvoip/BUILD.gn new file mode 100644 index 0000000000..74341a78ac --- /dev/null +++ b/examples/androidvoip/BUILD.gn @@ -0,0 +1,88 @@ +# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("//webrtc.gni") + +if (is_android) { + rtc_android_apk("androidvoip") { + testonly = true + apk_name = "androidvoip" + android_manifest = "AndroidManifest.xml" + min_sdk_version = 21 + target_sdk_version = 27 + + sources = [ + "java/org/webrtc/examples/androidvoip/MainActivity.java", + "java/org/webrtc/examples/androidvoip/OnVoipClientTaskCompleted.java", + "java/org/webrtc/examples/androidvoip/VoipClient.java", + ] + + deps = [ + ":resources", + "//modules/audio_device:audio_device_java", + "//rtc_base:base_java", + "//sdk/android:java_audio_device_module_java", + "//sdk/android:video_java", + "//third_party/android_deps:androidx_core_core_java", + "//third_party/android_deps:androidx_legacy_legacy_support_v4_java", + ] + + shared_libraries = [ ":examples_androidvoip_jni" ] + } + + generate_jni("generated_jni") { + testonly = true + sources = [ "java/org/webrtc/examples/androidvoip/VoipClient.java" ] + namespace = "webrtc_examples" + jni_generator_include = "//sdk/android/src/jni/jni_generator_helper.h" + } + + rtc_shared_library("examples_androidvoip_jni") { + testonly = true + sources = [ + "jni/android_voip_client.cc", + "jni/android_voip_client.h", + "jni/onload.cc", + ] + + suppressed_configs += [ "//build/config/android:hide_all_but_jni_onload" ] + configs += [ "//build/config/android:hide_all_but_jni" ] + + deps = [ + ":generated_jni", + "//api:transport_api", + "//api/audio_codecs:audio_codecs_api", + "//api/audio_codecs:builtin_audio_decoder_factory", + "//api/audio_codecs:builtin_audio_encoder_factory", + "//api/task_queue:default_task_queue_factory", + "//api/voip:voip_api", + "//api/voip:voip_engine_factory", + "//modules/utility:utility", + "//rtc_base", + "//rtc_base/third_party/sigslot:sigslot", + "//sdk/android:native_api_audio_device_module", + "//sdk/android:native_api_base", + "//sdk/android:native_api_jni", + "//third_party/abseil-cpp/absl/memory:memory", + ] + } + + android_resources("resources") { + testonly = true + custom_package 
= "org.webrtc.examples.androidvoip" + sources = [ + "res/layout/activity_main.xml", + "res/values/colors.xml", + "res/values/strings.xml", + ] + + # Needed for Bazel converter. + resource_dirs = [ "res" ] + assert(resource_dirs != []) # Mark as used. + } +} diff --git a/examples/androidvoip/DEPS b/examples/androidvoip/DEPS new file mode 100644 index 0000000000..edb714dd44 --- /dev/null +++ b/examples/androidvoip/DEPS @@ -0,0 +1,3 @@ +include_rules = [ + "+sdk/android/native_api", +] diff --git a/examples/androidvoip/OWNERS b/examples/androidvoip/OWNERS new file mode 100644 index 0000000000..0fe5182450 --- /dev/null +++ b/examples/androidvoip/OWNERS @@ -0,0 +1,2 @@ +natim@webrtc.org +sakal@webrtc.org diff --git a/examples/androidvoip/java/org/webrtc/examples/androidvoip/MainActivity.java b/examples/androidvoip/java/org/webrtc/examples/androidvoip/MainActivity.java new file mode 100644 index 0000000000..d787de59a0 --- /dev/null +++ b/examples/androidvoip/java/org/webrtc/examples/androidvoip/MainActivity.java @@ -0,0 +1,339 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc.examples.androidvoip; + +import android.Manifest.permission; +import android.app.Activity; +import android.app.AlertDialog; +import android.content.Context; +import android.content.pm.PackageManager; +import android.os.Bundle; +import android.view.Gravity; +import android.view.View; +import android.widget.AdapterView; +import android.widget.ArrayAdapter; +import android.widget.Button; +import android.widget.EditText; +import android.widget.RelativeLayout; +import android.widget.ScrollView; +import android.widget.Spinner; +import android.widget.Switch; +import android.widget.TextView; +import android.widget.Toast; +import android.widget.ToggleButton; +import androidx.core.app.ActivityCompat; +import androidx.core.content.ContextCompat; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import org.webrtc.ContextUtils; + +public class MainActivity extends Activity implements OnVoipClientTaskCompleted { + private static final int NUM_SUPPORTED_CODECS = 6; + + private VoipClient voipClient; + private List supportedCodecs; + private boolean[] isDecoderSelected; + private Set selectedDecoders; + + private Toast toast; + private ScrollView scrollView; + private TextView localIPAddressTextView; + private EditText localPortNumberEditText; + private EditText remoteIPAddressEditText; + private EditText remotePortNumberEditText; + private Spinner encoderSpinner; + private Button decoderSelectionButton; + private TextView decodersTextView; + private ToggleButton sessionButton; + private RelativeLayout switchLayout; + private Switch sendSwitch; + private Switch playoutSwitch; + + @Override + protected void onCreate(Bundle savedInstance) { + ContextUtils.initialize(getApplicationContext()); + + super.onCreate(savedInstance); + setContentView(R.layout.activity_main); + + System.loadLibrary("examples_androidvoip_jni"); + + voipClient = new 
VoipClient(getApplicationContext(), this); + voipClient.getAndSetUpLocalIPAddress(); + voipClient.getAndSetUpSupportedCodecs(); + + isDecoderSelected = new boolean[NUM_SUPPORTED_CODECS]; + selectedDecoders = new HashSet<>(); + + toast = Toast.makeText(this, "", Toast.LENGTH_SHORT); + + scrollView = (ScrollView) findViewById(R.id.scroll_view); + localIPAddressTextView = (TextView) findViewById(R.id.local_ip_address_text_view); + localPortNumberEditText = (EditText) findViewById(R.id.local_port_number_edit_text); + remoteIPAddressEditText = (EditText) findViewById(R.id.remote_ip_address_edit_text); + remotePortNumberEditText = (EditText) findViewById(R.id.remote_port_number_edit_text); + encoderSpinner = (Spinner) findViewById(R.id.encoder_spinner); + decoderSelectionButton = (Button) findViewById(R.id.decoder_selection_button); + decodersTextView = (TextView) findViewById(R.id.decoders_text_view); + sessionButton = (ToggleButton) findViewById(R.id.session_button); + switchLayout = (RelativeLayout) findViewById(R.id.switch_layout); + sendSwitch = (Switch) findViewById(R.id.start_send_switch); + playoutSwitch = (Switch) findViewById(R.id.start_playout_switch); + + setUpSessionButton(); + setUpSendAndPlayoutSwitch(); + } + + private void setUpEncoderSpinner(List supportedCodecs) { + ArrayAdapter encoderAdapter = + new ArrayAdapter(this, android.R.layout.simple_spinner_item, supportedCodecs); + encoderAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); + encoderSpinner.setAdapter(encoderAdapter); + encoderSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() { + @Override + public void onItemSelected(AdapterView parent, View view, int position, long id) { + voipClient.setEncoder((String) parent.getSelectedItem()); + } + @Override + public void onNothingSelected(AdapterView parent) {} + }); + } + + private List getSelectedDecoders() { + List decoders = new ArrayList<>(); + for (int i = 0; i < supportedCodecs.size(); i++) { 
+ if (selectedDecoders.contains(i)) { + decoders.add(supportedCodecs.get(i)); + } + } + return decoders; + } + + private void setUpDecoderSelectionButton(List supportedCodecs) { + decoderSelectionButton.setOnClickListener((view) -> { + AlertDialog.Builder dialogBuilder = new AlertDialog.Builder(this); + dialogBuilder.setTitle(R.string.dialog_title); + + // Populate multi choice items with supported decoders. + String[] supportedCodecsArray = supportedCodecs.toArray(new String[0]); + dialogBuilder.setMultiChoiceItems( + supportedCodecsArray, isDecoderSelected, (dialog, position, isChecked) -> { + if (isChecked) { + selectedDecoders.add(position); + } else if (!isChecked) { + selectedDecoders.remove(position); + } + }); + + // "Ok" button. + dialogBuilder.setPositiveButton(R.string.ok_label, (dialog, position) -> { + List decoders = getSelectedDecoders(); + String result = decoders.stream().collect(Collectors.joining(", ")); + if (result.isEmpty()) { + decodersTextView.setText(R.string.decoders_text_view_default); + } else { + decodersTextView.setText(result); + } + voipClient.setDecoders(decoders); + }); + + // "Dismiss" button. + dialogBuilder.setNegativeButton( + R.string.dismiss_label, (dialog, position) -> { dialog.dismiss(); }); + + // "Clear All" button. + dialogBuilder.setNeutralButton(R.string.clear_all_label, (dialog, position) -> { + Arrays.fill(isDecoderSelected, false); + selectedDecoders.clear(); + decodersTextView.setText(R.string.decoders_text_view_default); + }); + + AlertDialog dialog = dialogBuilder.create(); + dialog.show(); + }); + } + + private void setUpSessionButton() { + sessionButton.setOnCheckedChangeListener((button, isChecked) -> { + // Ask for permission on RECORD_AUDIO if not granted. 
+ if (ContextCompat.checkSelfPermission(this, permission.RECORD_AUDIO) + != PackageManager.PERMISSION_GRANTED) { + String[] sList = {permission.RECORD_AUDIO}; + ActivityCompat.requestPermissions(this, sList, 1); + } + + if (isChecked) { + // Order matters here, addresses have to be set before starting session + // before setting codec. + voipClient.setLocalAddress(localIPAddressTextView.getText().toString(), + Integer.parseInt(localPortNumberEditText.getText().toString())); + voipClient.setRemoteAddress(remoteIPAddressEditText.getText().toString(), + Integer.parseInt(remotePortNumberEditText.getText().toString())); + voipClient.startSession(); + voipClient.setEncoder((String) encoderSpinner.getSelectedItem()); + voipClient.setDecoders(getSelectedDecoders()); + } else { + voipClient.stopSession(); + } + }); + } + + private void setUpSendAndPlayoutSwitch() { + sendSwitch.setOnCheckedChangeListener((button, isChecked) -> { + if (isChecked) { + voipClient.startSend(); + } else { + voipClient.stopSend(); + } + }); + + playoutSwitch.setOnCheckedChangeListener((button, isChecked) -> { + if (isChecked) { + voipClient.startPlayout(); + } else { + voipClient.stopPlayout(); + } + }); + } + + private void setUpIPAddressEditTexts(String localIPAddress) { + if (localIPAddress.isEmpty()) { + showToast("Please check your network configuration"); + } else { + localIPAddressTextView.setText(localIPAddress); + // By default remote IP address is the same as local IP address. 
+ remoteIPAddressEditText.setText(localIPAddress); + } + } + + private void showToast(String message) { + toast.cancel(); + toast = Toast.makeText(this, message, Toast.LENGTH_SHORT); + toast.setGravity(Gravity.TOP, 0, 200); + toast.show(); + } + + @Override + protected void onDestroy() { + voipClient.close(); + voipClient = null; + + super.onDestroy(); + } + + @Override + public void onGetLocalIPAddressCompleted(String localIPAddress) { + runOnUiThread(() -> { setUpIPAddressEditTexts(localIPAddress); }); + } + + @Override + public void onGetSupportedCodecsCompleted(List supportedCodecs) { + runOnUiThread(() -> { + this.supportedCodecs = supportedCodecs; + setUpEncoderSpinner(supportedCodecs); + setUpDecoderSelectionButton(supportedCodecs); + }); + } + + @Override + public void onVoipClientInitializationCompleted(boolean isSuccessful) { + runOnUiThread(() -> { + if (!isSuccessful) { + showToast("Error initializing audio device"); + } + }); + } + + @Override + public void onStartSessionCompleted(boolean isSuccessful) { + runOnUiThread(() -> { + if (isSuccessful) { + showToast("Session started"); + switchLayout.setVisibility(View.VISIBLE); + scrollView.post(() -> { scrollView.fullScroll(ScrollView.FOCUS_DOWN); }); + } else { + showToast("Failed to start session"); + } + }); + } + + @Override + public void onStopSessionCompleted(boolean isSuccessful) { + runOnUiThread(() -> { + if (isSuccessful) { + showToast("Session stopped"); + // Set listeners to null so the checked state can be changed programmatically. + sendSwitch.setOnCheckedChangeListener(null); + playoutSwitch.setOnCheckedChangeListener(null); + sendSwitch.setChecked(false); + playoutSwitch.setChecked(false); + // Redo the switch listener setup. 
+ setUpSendAndPlayoutSwitch(); + switchLayout.setVisibility(View.GONE); + } else { + showToast("Failed to stop session"); + } + }); + } + + @Override + public void onStartSendCompleted(boolean isSuccessful) { + runOnUiThread(() -> { + if (isSuccessful) { + showToast("Started sending"); + } else { + showToast("Error initializing microphone"); + } + }); + } + + @Override + public void onStopSendCompleted(boolean isSuccessful) { + runOnUiThread(() -> { + if (isSuccessful) { + showToast("Stopped sending"); + } else { + showToast("Microphone termination failed"); + } + }); + } + + @Override + public void onStartPlayoutCompleted(boolean isSuccessful) { + runOnUiThread(() -> { + if (isSuccessful) { + showToast("Started playout"); + } else { + showToast("Error initializing speaker"); + } + }); + } + + @Override + public void onStopPlayoutCompleted(boolean isSuccessful) { + runOnUiThread(() -> { + if (isSuccessful) { + showToast("Stopped playout"); + } else { + showToast("Speaker termination failed"); + } + }); + } + + @Override + public void onUninitializedVoipClient() { + runOnUiThread(() -> { showToast("Voip client is uninitialized"); }); + } +} diff --git a/examples/androidvoip/java/org/webrtc/examples/androidvoip/OnVoipClientTaskCompleted.java b/examples/androidvoip/java/org/webrtc/examples/androidvoip/OnVoipClientTaskCompleted.java new file mode 100644 index 0000000000..bb85e048bb --- /dev/null +++ b/examples/androidvoip/java/org/webrtc/examples/androidvoip/OnVoipClientTaskCompleted.java @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc.examples.androidvoip; + +import java.util.List; + +public interface OnVoipClientTaskCompleted { + void onGetLocalIPAddressCompleted(String localIPAddress); + void onGetSupportedCodecsCompleted(List supportedCodecs); + void onVoipClientInitializationCompleted(boolean isSuccessful); + void onStartSessionCompleted(boolean isSuccessful); + void onStopSessionCompleted(boolean isSuccessful); + void onStartSendCompleted(boolean isSuccessful); + void onStopSendCompleted(boolean isSuccessful); + void onStartPlayoutCompleted(boolean isSuccessful); + void onStopPlayoutCompleted(boolean isSuccessful); + void onUninitializedVoipClient(); +} diff --git a/examples/androidvoip/java/org/webrtc/examples/androidvoip/VoipClient.java b/examples/androidvoip/java/org/webrtc/examples/androidvoip/VoipClient.java new file mode 100644 index 0000000000..2dcbd99b1d --- /dev/null +++ b/examples/androidvoip/java/org/webrtc/examples/androidvoip/VoipClient.java @@ -0,0 +1,188 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc.examples.androidvoip; + +import android.content.Context; +import android.os.Handler; +import android.os.HandlerThread; +import java.util.ArrayList; +import java.util.List; + +public class VoipClient { + private static final String TAG = "VoipClient"; + + private final HandlerThread thread; + private final Handler handler; + + private long nativeClient; + private OnVoipClientTaskCompleted listener; + + public VoipClient(Context applicationContext, OnVoipClientTaskCompleted listener) { + this.listener = listener; + thread = new HandlerThread(TAG + "Thread"); + thread.start(); + handler = new Handler(thread.getLooper()); + + handler.post(() -> { + nativeClient = nativeCreateClient(applicationContext); + listener.onVoipClientInitializationCompleted(/* isSuccessful */ nativeClient != 0); + }); + } + + private boolean isInitialized() { + return nativeClient != 0; + } + + public void getAndSetUpSupportedCodecs() { + handler.post(() -> { + if (isInitialized()) { + listener.onGetSupportedCodecsCompleted(nativeGetSupportedCodecs(nativeClient)); + } else { + listener.onUninitializedVoipClient(); + } + }); + } + + public void getAndSetUpLocalIPAddress() { + handler.post(() -> { + if (isInitialized()) { + listener.onGetLocalIPAddressCompleted(nativeGetLocalIPAddress(nativeClient)); + } else { + listener.onUninitializedVoipClient(); + } + }); + } + + public void setEncoder(String encoder) { + handler.post(() -> { + if (isInitialized()) { + nativeSetEncoder(nativeClient, encoder); + } else { + listener.onUninitializedVoipClient(); + } + }); + } + + public void setDecoders(List decoders) { + handler.post(() -> { + if (isInitialized()) { + nativeSetDecoders(nativeClient, decoders); + } else { + listener.onUninitializedVoipClient(); + } + }); + } + + public void setLocalAddress(String ipAddress, int portNumber) { + handler.post(() -> { + if (isInitialized()) { + nativeSetLocalAddress(nativeClient, ipAddress, portNumber); + } else { + 
listener.onUninitializedVoipClient(); + } + }); + } + + public void setRemoteAddress(String ipAddress, int portNumber) { + handler.post(() -> { + if (isInitialized()) { + nativeSetRemoteAddress(nativeClient, ipAddress, portNumber); + } else { + listener.onUninitializedVoipClient(); + } + }); + } + + public void startSession() { + handler.post(() -> { + if (isInitialized()) { + listener.onStartSessionCompleted(nativeStartSession(nativeClient)); + } else { + listener.onUninitializedVoipClient(); + } + }); + } + + public void stopSession() { + handler.post(() -> { + if (isInitialized()) { + listener.onStopSessionCompleted(nativeStopSession(nativeClient)); + } else { + listener.onUninitializedVoipClient(); + } + }); + } + + public void startSend() { + handler.post(() -> { + if (isInitialized()) { + listener.onStartSendCompleted(nativeStartSend(nativeClient)); + } else { + listener.onUninitializedVoipClient(); + } + }); + } + + public void stopSend() { + handler.post(() -> { + if (isInitialized()) { + listener.onStopSendCompleted(nativeStopSend(nativeClient)); + } else { + listener.onUninitializedVoipClient(); + } + }); + } + + public void startPlayout() { + handler.post(() -> { + if (isInitialized()) { + listener.onStartPlayoutCompleted(nativeStartPlayout(nativeClient)); + } else { + listener.onUninitializedVoipClient(); + } + }); + } + + public void stopPlayout() { + handler.post(() -> { + if (isInitialized()) { + listener.onStopPlayoutCompleted(nativeStopPlayout(nativeClient)); + } else { + listener.onUninitializedVoipClient(); + } + }); + } + + public void close() { + handler.post(() -> { + nativeDelete(nativeClient); + nativeClient = 0; + }); + thread.quitSafely(); + } + + private static native long nativeCreateClient(Context applicationContext); + private static native List nativeGetSupportedCodecs(long nativeAndroidVoipClient); + private static native String nativeGetLocalIPAddress(long nativeAndroidVoipClient); + private static native void nativeSetEncoder(long 
nativeAndroidVoipClient, String encoder); + private static native void nativeSetDecoders(long nativeAndroidVoipClient, List decoders); + private static native void nativeSetLocalAddress( + long nativeAndroidVoipClient, String ipAddress, int portNumber); + private static native void nativeSetRemoteAddress( + long nativeAndroidVoipClient, String ipAddress, int portNumber); + private static native boolean nativeStartSession(long nativeAndroidVoipClient); + private static native boolean nativeStopSession(long nativeAndroidVoipClient); + private static native boolean nativeStartSend(long nativeAndroidVoipClient); + private static native boolean nativeStopSend(long nativeAndroidVoipClient); + private static native boolean nativeStartPlayout(long nativeAndroidVoipClient); + private static native boolean nativeStopPlayout(long nativeAndroidVoipClient); + private static native void nativeDelete(long nativeAndroidVoipClient); +} diff --git a/examples/androidvoip/jni/android_voip_client.cc b/examples/androidvoip/jni/android_voip_client.cc new file mode 100644 index 0000000000..13cadf2f3d --- /dev/null +++ b/examples/androidvoip/jni/android_voip_client.cc @@ -0,0 +1,405 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "examples/androidvoip/jni/android_voip_client.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "api/audio_codecs/builtin_audio_decoder_factory.h" +#include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "api/task_queue/default_task_queue_factory.h" +#include "api/voip/voip_codec.h" +#include "api/voip/voip_engine_factory.h" +#include "api/voip/voip_network.h" +#include "examples/androidvoip/generated_jni/VoipClient_jni.h" +#include "rtc_base/logging.h" +#include "rtc_base/network.h" +#include "rtc_base/socket_server.h" +#include "sdk/android/native_api/audio_device_module/audio_device_android.h" +#include "sdk/android/native_api/jni/java_types.h" + +namespace { + +// Connects a UDP socket to a public address and returns the local +// address associated with it. Since it binds to the "any" address +// internally, it returns the default local address on a multi-homed +// endpoint. Implementation copied from +// BasicNetworkManager::QueryDefaultLocalAddress. +rtc::IPAddress QueryDefaultLocalAddress(int family) { + const char kPublicIPv4Host[] = "8.8.8.8"; + const char kPublicIPv6Host[] = "2001:4860:4860::8888"; + const int kPublicPort = 53; + std::unique_ptr thread = rtc::Thread::CreateWithSocketServer(); + + RTC_DCHECK(thread->socketserver() != nullptr); + RTC_DCHECK(family == AF_INET || family == AF_INET6); + + std::unique_ptr socket( + thread->socketserver()->CreateAsyncSocket(family, SOCK_DGRAM)); + if (!socket) { + RTC_LOG_ERR(LERROR) << "Socket creation failed"; + return rtc::IPAddress(); + } + + auto host = family == AF_INET ? 
kPublicIPv4Host : kPublicIPv6Host; + if (socket->Connect(rtc::SocketAddress(host, kPublicPort)) < 0) { + if (socket->GetError() != ENETUNREACH && + socket->GetError() != EHOSTUNREACH) { + RTC_LOG(LS_INFO) << "Connect failed with " << socket->GetError(); + } + return rtc::IPAddress(); + } + return socket->GetLocalAddress().ipaddr(); +} + +// Assigned payload type for supported built-in codecs. PCMU, PCMA, +// and G722 have set payload types. Whereas opus, ISAC, and ILBC +// have dynamic payload types. +enum class PayloadType : int { + kPcmu = 0, + kPcma = 8, + kG722 = 9, + kOpus = 96, + kIsac = 97, + kIlbc = 98, +}; + +// Returns the payload type corresponding to codec_name. Only +// supports the built-in codecs. +int GetPayloadType(const std::string& codec_name) { + RTC_DCHECK(codec_name == "PCMU" || codec_name == "PCMA" || + codec_name == "G722" || codec_name == "opus" || + codec_name == "ISAC" || codec_name == "ILBC"); + + if (codec_name == "PCMU") { + return static_cast(PayloadType::kPcmu); + } else if (codec_name == "PCMA") { + return static_cast(PayloadType::kPcma); + } else if (codec_name == "G722") { + return static_cast(PayloadType::kG722); + } else if (codec_name == "opus") { + return static_cast(PayloadType::kOpus); + } else if (codec_name == "ISAC") { + return static_cast(PayloadType::kIsac); + } else if (codec_name == "ILBC") { + return static_cast(PayloadType::kIlbc); + } + + RTC_NOTREACHED(); + return -1; +} + +} // namespace + +namespace webrtc_examples { + +AndroidVoipClient::AndroidVoipClient( + JNIEnv* env, + const webrtc::JavaParamRef& application_context) { + voip_thread_ = rtc::Thread::CreateWithSocketServer(); + voip_thread_->Start(); + + webrtc::VoipEngineConfig config; + config.encoder_factory = webrtc::CreateBuiltinAudioEncoderFactory(); + config.decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory(); + config.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory(); + config.audio_device_module = + 
webrtc::CreateJavaAudioDeviceModule(env, application_context.obj()); + config.audio_processing = webrtc::AudioProcessingBuilder().Create(); + + supported_codecs_ = config.encoder_factory->GetSupportedEncoders(); + + // Due to consistent thread requirement on + // modules/audio_device/android/audio_device_template.h, + // code is invoked in the context of voip_thread_. + voip_thread_->Invoke(RTC_FROM_HERE, [&] { + voip_engine_ = webrtc::CreateVoipEngine(std::move(config)); + if (!voip_engine_) { + RTC_LOG(LS_ERROR) << "VoipEngine creation failed"; + } + }); +} + +AndroidVoipClient::~AndroidVoipClient() { + voip_thread_->Stop(); +} + +AndroidVoipClient* AndroidVoipClient::Create( + JNIEnv* env, + const webrtc::JavaParamRef& application_context) { + // Using `new` to access a non-public constructor. + auto voip_client = + absl::WrapUnique(new AndroidVoipClient(env, application_context)); + if (!voip_client->voip_engine_) { + return nullptr; + } + return voip_client.release(); +} + +webrtc::ScopedJavaLocalRef AndroidVoipClient::GetSupportedCodecs( + JNIEnv* env) { + std::vector names; + for (const webrtc::AudioCodecSpec& spec : supported_codecs_) { + names.push_back(spec.format.name); + } + webrtc::ScopedJavaLocalRef (*convert_function)( + JNIEnv*, const std::string&) = &webrtc::NativeToJavaString; + return NativeToJavaList(env, names, convert_function); +} + +webrtc::ScopedJavaLocalRef AndroidVoipClient::GetLocalIPAddress( + JNIEnv* env) { + rtc::IPAddress ipv4_address = QueryDefaultLocalAddress(AF_INET); + if (!ipv4_address.IsNil()) { + return webrtc::NativeToJavaString(env, ipv4_address.ToString()); + } + rtc::IPAddress ipv6_address = QueryDefaultLocalAddress(AF_INET6); + if (!ipv6_address.IsNil()) { + return webrtc::NativeToJavaString(env, ipv6_address.ToString()); + } + return webrtc::NativeToJavaString(env, ""); +} + +void AndroidVoipClient::SetEncoder( + JNIEnv* env, + const webrtc::JavaRef& j_encoder_string) { + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel 
has not been created"; + return; + } + const std::string& chosen_encoder = + webrtc::JavaToNativeString(env, j_encoder_string); + for (const webrtc::AudioCodecSpec& encoder : supported_codecs_) { + if (encoder.format.name == chosen_encoder) { + voip_engine_->Codec().SetSendCodec( + *channel_, GetPayloadType(encoder.format.name), encoder.format); + break; + } + } +} + +void AndroidVoipClient::SetDecoders( + JNIEnv* env, + const webrtc::JavaParamRef& j_decoder_strings) { + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel has not been created"; + return; + } + std::vector chosen_decoders = + webrtc::JavaListToNativeVector( + env, j_decoder_strings, &webrtc::JavaToNativeString); + std::map decoder_specs; + + for (const webrtc::AudioCodecSpec& decoder : supported_codecs_) { + if (std::find(chosen_decoders.begin(), chosen_decoders.end(), + decoder.format.name) != chosen_decoders.end()) { + decoder_specs.insert( + {GetPayloadType(decoder.format.name), decoder.format}); + } + } + + voip_engine_->Codec().SetReceiveCodecs(*channel_, decoder_specs); +} + +void AndroidVoipClient::SetLocalAddress( + JNIEnv* env, + const webrtc::JavaRef& j_ip_address_string, + jint j_port_number_int) { + const std::string& ip_address = + webrtc::JavaToNativeString(env, j_ip_address_string); + rtp_local_address_ = rtc::SocketAddress(ip_address, j_port_number_int); + rtcp_local_address_ = rtc::SocketAddress(ip_address, j_port_number_int + 1); +} + +void AndroidVoipClient::SetRemoteAddress( + JNIEnv* env, + const webrtc::JavaRef& j_ip_address_string, + jint j_port_number_int) { + const std::string& ip_address = + webrtc::JavaToNativeString(env, j_ip_address_string); + rtp_remote_address_ = rtc::SocketAddress(ip_address, j_port_number_int); + rtcp_remote_address_ = rtc::SocketAddress(ip_address, j_port_number_int + 1); +} + +jboolean AndroidVoipClient::StartSession(JNIEnv* env) { + // Due to consistent thread requirement on + // modules/utility/source/process_thread_impl.cc, + // code is invoked in 
the context of voip_thread_. + channel_ = voip_thread_->Invoke>( + RTC_FROM_HERE, + [this] { return voip_engine_->Base().CreateChannel(this, 0); }); + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel creation failed"; + return false; + } + + rtp_socket_.reset(rtc::AsyncUDPSocket::Create(voip_thread_->socketserver(), + rtp_local_address_)); + if (!rtp_socket_) { + RTC_LOG_ERR(LERROR) << "Socket creation failed"; + return false; + } + rtp_socket_->SignalReadPacket.connect( + this, &AndroidVoipClient::OnSignalReadRTPPacket); + + rtcp_socket_.reset(rtc::AsyncUDPSocket::Create(voip_thread_->socketserver(), + rtcp_local_address_)); + if (!rtcp_socket_) { + RTC_LOG_ERR(LERROR) << "Socket creation failed"; + return false; + } + rtcp_socket_->SignalReadPacket.connect( + this, &AndroidVoipClient::OnSignalReadRTCPPacket); + + return true; +} + +jboolean AndroidVoipClient::StopSession(JNIEnv* env) { + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel has not been created"; + return false; + } + if (!StopSend(env) || !StopPlayout(env)) { + return false; + } + + rtp_socket_->Close(); + rtcp_socket_->Close(); + // Due to consistent thread requirement on + // modules/utility/source/process_thread_impl.cc, + // code is invoked in the context of voip_thread_. + voip_thread_->Invoke(RTC_FROM_HERE, [this] { + voip_engine_->Base().ReleaseChannel(*channel_); + }); + channel_ = absl::nullopt; + return true; +} + +jboolean AndroidVoipClient::StartSend(JNIEnv* env) { + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel has not been created"; + return false; + } + // Due to consistent thread requirement on + // modules/audio_device/android/opensles_recorder.cc, + // code is invoked in the context of voip_thread_. 
+ return voip_thread_->Invoke(RTC_FROM_HERE, [this] { + return voip_engine_->Base().StartSend(*channel_); + }); +} + +jboolean AndroidVoipClient::StopSend(JNIEnv* env) { + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel has not been created"; + return false; + } + // Due to consistent thread requirement on + // modules/audio_device/android/opensles_recorder.cc, + // code is invoked in the context of voip_thread_. + return voip_thread_->Invoke(RTC_FROM_HERE, [this] { + return voip_engine_->Base().StopSend(*channel_); + }); +} + +jboolean AndroidVoipClient::StartPlayout(JNIEnv* env) { + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel has not been created"; + return false; + } + // Due to consistent thread requirement on + // modules/audio_device/android/opensles_player.cc, + // code is invoked in the context of voip_thread_. + return voip_thread_->Invoke(RTC_FROM_HERE, [this] { + return voip_engine_->Base().StartPlayout(*channel_); + }); +} + +jboolean AndroidVoipClient::StopPlayout(JNIEnv* env) { + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel has not been created"; + return false; + } + // Due to consistent thread requirement on + // modules/audio_device/android/opensles_player.cc, + // code is invoked in the context of voip_thread_. 
+ return voip_thread_->Invoke(RTC_FROM_HERE, [this] { + return voip_engine_->Base().StopPlayout(*channel_); + }); +} + +void AndroidVoipClient::Delete(JNIEnv* env) { + delete this; +} + +bool AndroidVoipClient::SendRtp(const uint8_t* packet, + size_t length, + const webrtc::PacketOptions& options) { + if (!rtp_socket_->SendTo(packet, length, rtp_remote_address_, + rtc::PacketOptions())) { + RTC_LOG(LS_ERROR) << "Failed to send RTP packet"; + return false; + } + return true; +} + +bool AndroidVoipClient::SendRtcp(const uint8_t* packet, size_t length) { + if (!rtcp_socket_->SendTo(packet, length, rtcp_remote_address_, + rtc::PacketOptions())) { + RTC_LOG(LS_ERROR) << "Failed to send RTCP packet"; + return false; + } + return true; +} + +void AndroidVoipClient::OnSignalReadRTPPacket(rtc::AsyncPacketSocket* socket, + const char* rtp_packet, + size_t size, + const rtc::SocketAddress& addr, + const int64_t& timestamp) { + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel has not been created"; + return; + } + voip_engine_->Network().ReceivedRTPPacket( + *channel_, rtc::ArrayView( + reinterpret_cast(rtp_packet), size)); +} + +void AndroidVoipClient::OnSignalReadRTCPPacket(rtc::AsyncPacketSocket* socket, + const char* rtcp_packet, + size_t size, + const rtc::SocketAddress& addr, + const int64_t& timestamp) { + if (!channel_) { + RTC_LOG(LS_ERROR) << "Channel has not been created"; + return; + } + voip_engine_->Network().ReceivedRTCPPacket( + *channel_, rtc::ArrayView( + reinterpret_cast(rtcp_packet), size)); +} + +static jlong JNI_VoipClient_CreateClient( + JNIEnv* env, + const webrtc::JavaParamRef& application_context) { + return webrtc::NativeToJavaPointer( + AndroidVoipClient::Create(env, application_context)); +} + +} // namespace webrtc_examples diff --git a/examples/androidvoip/jni/android_voip_client.h b/examples/androidvoip/jni/android_voip_client.h new file mode 100644 index 0000000000..aed652e281 --- /dev/null +++ b/examples/androidvoip/jni/android_voip_client.h 
@@ -0,0 +1,156 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef EXAMPLES_ANDROIDVOIP_JNI_ANDROID_VOIP_CLIENT_H_ +#define EXAMPLES_ANDROIDVOIP_JNI_ANDROID_VOIP_CLIENT_H_ + +#include + +#include +#include +#include + +#include "api/audio_codecs/audio_format.h" +#include "api/call/transport.h" +#include "api/voip/voip_base.h" +#include "api/voip/voip_engine.h" +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/async_udp_socket.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/third_party/sigslot/sigslot.h" +#include "rtc_base/thread.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc_examples { + +// AndroidVoipClient facilitates the use of the VoIP API defined in +// api/voip/voip_engine.h. One instance of AndroidVoipClient should +// suffice for most VoIP applications. AndroidVoipClient implements +// webrtc::Transport to send RTP/RTCP packets to the remote endpoint. +// It also creates methods (slots) for sockets to connect to in +// order to receive RTP/RTCP packets. AndroidVoipClient does all +// VoipBase related operations with rtc::Thread (voip_thread_), this +// is to comply with consistent thread usage requirement with +// ProcessThread used within VoipEngine. AndroidVoipClient is meant +// to be used by Java through JNI. +class AndroidVoipClient : public webrtc::Transport, + public sigslot::has_slots<> { + public: + // Returns a pointer to an AndroidVoipClient object. Clients should + // use this factory method to create AndroidVoipClient objects. The + // method will return a nullptr in case of initialization errors. 
+ // It is the client's responsibility to delete the pointer when + // they are done with it (this class provides a Delete() method). + static AndroidVoipClient* Create( + JNIEnv* env, + const webrtc::JavaParamRef& application_context); + + ~AndroidVoipClient() override; + + // Returns a Java List of Strings containing names of the built-in + // supported codecs. + webrtc::ScopedJavaLocalRef GetSupportedCodecs(JNIEnv* env); + + // Returns a Java String of the default local IPv4 address. If IPv4 + // address is not found, returns the default local IPv6 address. If + // IPv6 address is not found, returns an empty string. + webrtc::ScopedJavaLocalRef GetLocalIPAddress(JNIEnv* env); + + // Sets the encoder used by the VoIP API. + void SetEncoder(JNIEnv* env, + const webrtc::JavaRef& j_encoder_string); + + // Sets the decoders used by the VoIP API. + void SetDecoders(JNIEnv* env, + const webrtc::JavaParamRef& j_decoder_strings); + + // Sets two local/remote addresses, one for RTP packets, and another for + // RTCP packets. The RTP address will have IP address j_ip_address_string + // and port number j_port_number_int, the RTCP address will have IP address + // j_ip_address_string and port number j_port_number_int+1. + void SetLocalAddress(JNIEnv* env, + const webrtc::JavaRef& j_ip_address_string, + jint j_port_number_int); + void SetRemoteAddress(JNIEnv* env, + const webrtc::JavaRef& j_ip_address_string, + jint j_port_number_int); + + // Starts a VoIP session. The VoIP operations below can only be + // used after a session has already started. Returns true if session + // started successfully and false otherwise. + jboolean StartSession(JNIEnv* env); + + // Stops the current session. Returns true if session stopped + // successfully and false otherwise. + jboolean StopSession(JNIEnv* env); + + // Starts sending RTP/RTCP packets to the remote endpoint. Returns + // the return value of StartSend in api/voip/voip_base.h. 
+ jboolean StartSend(JNIEnv* env); + + // Stops sending RTP/RTCP packets to the remote endpoint. Returns + // the return value of StopSend in api/voip/voip_base.h. + jboolean StopSend(JNIEnv* env); + + // Starts playing out the voice data received from the remote endpoint. + // Returns the return value of StartPlayout in api/voip/voip_base.h. + jboolean StartPlayout(JNIEnv* env); + + // Stops playing out the voice data received from the remote endpoint. + // Returns the return value of StopPlayout in api/voip/voip_base.h. + jboolean StopPlayout(JNIEnv* env); + + // Deletes this object. Used by client when they are done. + void Delete(JNIEnv* env); + + // Implementation for Transport. + bool SendRtp(const uint8_t* packet, + size_t length, + const webrtc::PacketOptions& options) override; + bool SendRtcp(const uint8_t* packet, size_t length) override; + + // Slots for sockets to connect to. + void OnSignalReadRTPPacket(rtc::AsyncPacketSocket* socket, + const char* rtp_packet, + size_t size, + const rtc::SocketAddress& addr, + const int64_t& timestamp); + void OnSignalReadRTCPPacket(rtc::AsyncPacketSocket* socket, + const char* rtcp_packet, + size_t size, + const rtc::SocketAddress& addr, + const int64_t& timestamp); + + private: + AndroidVoipClient(JNIEnv* env, + const webrtc::JavaParamRef& application_context); + + // Used to invoke VoipBase operations and send/receive + // RTP/RTCP packets. + std::unique_ptr voip_thread_; + // A list of AudioCodecSpec supported by the built-in + // encoder/decoder factories. + std::vector supported_codecs_; + // The entry point to all VoIP APIs. + std::unique_ptr voip_engine_; + // Used by the VoIP API to facilitate a VoIP session. + absl::optional channel_; + // Members below are used for network related operations. 
+ std::unique_ptr rtp_socket_; + std::unique_ptr rtcp_socket_; + rtc::SocketAddress rtp_local_address_; + rtc::SocketAddress rtcp_local_address_; + rtc::SocketAddress rtp_remote_address_; + rtc::SocketAddress rtcp_remote_address_; +}; + +} // namespace webrtc_examples + +#endif // EXAMPLES_ANDROIDVOIP_JNI_ANDROID_VOIP_CLIENT_H_ diff --git a/examples/androidvoip/jni/onload.cc b/examples/androidvoip/jni/onload.cc new file mode 100644 index 0000000000..b952de348b --- /dev/null +++ b/examples/androidvoip/jni/onload.cc @@ -0,0 +1,28 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include + +#include "rtc_base/ssl_adapter.h" +#include "sdk/android/native_api/base/init.h" + +namespace webrtc_examples { + +extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* jvm, void* reserved) { + webrtc::InitAndroid(jvm); + RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()"; + return JNI_VERSION_1_6; +} + +extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM* jvm, void* reserved) { + RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()"; +} + +} // namespace webrtc_examples diff --git a/examples/androidvoip/res/layout/activity_main.xml b/examples/androidvoip/res/layout/activity_main.xml new file mode 100644 index 0000000000..c7fa5a9b31 --- /dev/null +++ b/examples/androidvoip/res/layout/activity_main.xml @@ -0,0 +1,303 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +