Merge remote-tracking branch 'upstream-master'

Bug: 153469641
Test: run cuttlefish locally
Change-Id: Ida3bfe62ef5c6549278f4c155a1f690b008e9b9d
Merged-In: Ida3bfe62ef5c6549278f4c155a1f690b008e9b9d
This commit is contained in:
Jorge E. Moreira
2020-07-23 13:35:20 -07:00
1236 changed files with 50564 additions and 32463 deletions

View File

@ -20,6 +20,7 @@ Cyril Lashkevich <notorca@gmail.com>
David Porter <david@porter.me>
Dax Booysen <dax@younow.com>
Danail Kirov <dkirovbroadsoft@gmail.com>
Dharmesh Chauhan <dharmesh.r.chauhan@gmail.com>
Dirk-Jan C. Binnema <djcb@djcbsoftware.nl>
Dmitry Lizin <sdkdimon@gmail.com>
Eric Rescorla, RTFM Inc. <ekr@rtfm.com>
@ -90,8 +91,11 @@ CZ Theng <cz.theng@gmail.com>
Miguel Paris <mparisdiaz@gmail.com>
Raman Budny <budnyjj@gmail.com>
Stephan Hartmann <stha09@googlemail.com>
Lennart Grahl <lennart.grahl@gmail.com>
&yet LLC <*@andyet.com>
8x8 Inc. <*@sip-communicator.org>
8x8 Inc. <*@8x8.com>
Agora IO <*@agora.io>
ARM Holdings <*@arm.com>
BroadSoft Inc. <*@broadsoft.com>
@ -108,6 +112,7 @@ Opera Software ASA <*@opera.com>
Optical Tone Ltd <*@opticaltone.com>
Pengutronix e.K. <*@pengutronix.de>
RingCentral, Inc. <*@ringcentral.com>
Signal Messenger, LLC <*@signal.org>
Sinch AB <*@sinch.com>
struktur AG <*@struktur.de>
Telenor Digital AS <*@telenor.com>
@ -124,3 +129,4 @@ Highfive, Inc. <*@highfive.com>
CoSMo Software Consulting, Pte Ltd <*@cosmosoftware.io>
Tuple, LLC <*@tuple.app>
Videona Socialmedia <*@videona.com>
Threema GmbH <*@threema.ch>

11095
Android.bp

File diff suppressed because it is too large Load Diff

View File

@ -265,6 +265,10 @@ config("common_config") {
defines += [ "WEBRTC_USE_H264" ]
}
if (rtc_use_absl_mutex) {
defines += [ "WEBRTC_ABSL_MUTEX" ]
}
if (rtc_disable_logging) {
defines += [ "RTC_DISABLE_LOGGING" ]
}
@ -410,7 +414,7 @@ config("common_config") {
}
config("common_objc") {
libs = [ "Foundation.framework" ]
frameworks = [ "Foundation.framework" ]
if (rtc_use_metal_rendering) {
defines = [ "RTC_SUPPORTS_METAL" ]
@ -580,6 +584,14 @@ if (rtc_include_tests) {
}
}
rtc_test("benchmarks") {
testonly = true
deps = [
"rtc_base/synchronization:mutex_benchmark",
"test:benchmark_main",
]
}
# This runs tests that must run in real time and therefore can take some
# time to execute. They are in a separate executable to avoid making the
# regular unittest suite too slow to run frequently.

726
DEPS

File diff suppressed because it is too large Load Diff

View File

@ -14,7 +14,7 @@ from collections import defaultdict
from contextlib import contextmanager
# Files and directories that are *skipped* by cpplint in the presubmit script.
CPPLINT_BLACKLIST = [
CPPLINT_EXCEPTIONS = [
'api/video_codecs/video_decoder.h',
'common_types.cc',
'common_types.h',
@ -45,12 +45,15 @@ CPPLINT_BLACKLIST = [
#
# Justifications for each filter:
# - build/c++11 : Rvalue ref checks are unreliable (false positives),
# include file and feature blacklists are
# include file and feature blocklists are
# google3-specific.
# - runtime/references : Mutable references are not banned by the Google
# C++ style guide anymore (starting from May 2020).
# - whitespace/operators: Same as above (doesn't seem sufficient to eliminate
# all move-related errors).
BLACKLIST_LINT_FILTERS = [
DISABLED_LINT_FILTERS = [
'-build/c++11',
'-runtime/references',
'-whitespace/operators',
]
@ -94,15 +97,20 @@ LEGACY_API_DIRS = (
API_DIRS = NATIVE_API_DIRS[:] + LEGACY_API_DIRS[:]
# TARGET_RE matches a GN target, and extracts the target name and the contents.
TARGET_RE = re.compile(r'(?P<indent>\s*)\w+\("(?P<target_name>\w+)"\) {'
r'(?P<target_contents>.*?)'
r'(?P=indent)}',
re.MULTILINE | re.DOTALL)
TARGET_RE = re.compile(
r'(?P<indent>\s*)(?P<target_type>\w+)\("(?P<target_name>\w+)"\) {'
r'(?P<target_contents>.*?)'
r'(?P=indent)}',
re.MULTILINE | re.DOTALL)
# SOURCES_RE matches a block of sources inside a GN target.
SOURCES_RE = re.compile(r'sources \+?= \[(?P<sources>.*?)\]',
re.MULTILINE | re.DOTALL)
# DEPS_RE matches a block of sources inside a GN target.
DEPS_RE = re.compile(r'\bdeps \+?= \[(?P<deps>.*?)\]',
re.MULTILINE | re.DOTALL)
# FILE_PATH_RE matchies a file path.
FILE_PATH_RE = re.compile(r'"(?P<file_path>(\w|\/)+)(?P<extension>\.\w+)"')
@ -168,7 +176,7 @@ def CheckNativeApiHeaderChanges(input_api, output_api):
"""Checks to remind proper changing of native APIs."""
files = []
source_file_filter = lambda x: input_api.FilterSourceFile(
x, white_list=[r'.+\.(gn|gni|h)$'])
x, allow_list=[r'.+\.(gn|gni|h)$'])
for f in input_api.AffectedSourceFiles(source_file_filter):
for path in API_DIRS:
dn = os.path.dirname(f.LocalPath())
@ -254,9 +262,9 @@ def CheckNoFRIEND_TEST(input_api, output_api, # pylint: disable=invalid-name
'use FRIEND_TEST_ALL_PREFIXES() instead.\n' + '\n'.join(problems))]
def IsLintBlacklisted(blacklist_paths, file_path):
""" Checks if a file is blacklisted for lint check."""
for path in blacklist_paths:
def IsLintDisabled(disabled_paths, file_path):
""" Checks if a file is disabled for lint check."""
for path in disabled_paths:
if file_path == path or os.path.dirname(file_path).startswith(path):
return True
return False
@ -264,7 +272,7 @@ def IsLintBlacklisted(blacklist_paths, file_path):
def CheckApprovedFilesLintClean(input_api, output_api,
source_file_filter=None):
"""Checks that all new or non-blacklisted .cc and .h files pass cpplint.py.
"""Checks that all new or non-exempt .cc and .h files pass cpplint.py.
This check is based on CheckChangeLintsClean in
depot_tools/presubmit_canned_checks.py but has less filters and only checks
added files."""
@ -277,22 +285,22 @@ def CheckApprovedFilesLintClean(input_api, output_api,
cpplint._cpplint_state.ResetErrorCounts()
lint_filters = cpplint._Filters()
lint_filters.extend(BLACKLIST_LINT_FILTERS)
lint_filters.extend(DISABLED_LINT_FILTERS)
cpplint._SetFilters(','.join(lint_filters))
# Create a platform independent blacklist for cpplint.
blacklist_paths = [input_api.os_path.join(*path.split('/'))
for path in CPPLINT_BLACKLIST]
# Create a platform independent exempt list for cpplint.
disabled_paths = [input_api.os_path.join(*path.split('/'))
for path in CPPLINT_EXCEPTIONS]
# Use the strictest verbosity level for cpplint.py (level 1) which is the
# default when running cpplint.py from command line. To make it possible to
# work with not-yet-converted code, we're only applying it to new (or
# moved/renamed) files and files not listed in CPPLINT_BLACKLIST.
# moved/renamed) files and files not listed in CPPLINT_EXCEPTIONS.
verbosity_level = 1
files = []
for f in input_api.AffectedSourceFiles(source_file_filter):
# Note that moved/renamed files also count as added.
if f.Action() == 'A' or not IsLintBlacklisted(blacklist_paths,
if f.Action() == 'A' or not IsLintDisabled(disabled_paths,
f.LocalPath()):
files.append(f.AbsoluteLocalPath())
@ -338,6 +346,37 @@ def CheckNoSourcesAbove(input_api, gn_files, output_api):
return []
def CheckAbseilDependencies(input_api, gn_files, output_api):
"""Checks that Abseil dependencies are declared in `absl_deps`."""
absl_re = re.compile(r'third_party/abseil-cpp', re.MULTILINE | re.DOTALL)
target_types_to_check = [
'rtc_library',
'rtc_source_set',
'rtc_static_library',
'webrtc_fuzzer_test',
]
error_msg = ('Abseil dependencies in target "%s" (file: %s) '
'should be moved to the "absl_deps" parameter.')
errors = []
for gn_file in gn_files:
gn_file_content = input_api.ReadFile(gn_file)
for target_match in TARGET_RE.finditer(gn_file_content):
target_type = target_match.group('target_type')
target_name = target_match.group('target_name')
target_contents = target_match.group('target_contents')
if target_type in target_types_to_check:
for deps_match in DEPS_RE.finditer(target_contents):
deps = deps_match.group('deps').splitlines()
for dep in deps:
if re.search(absl_re, dep):
errors.append(
output_api.PresubmitError(error_msg % (target_name,
gn_file.LocalPath())))
break # no need to warn more than once per target
return errors
def CheckNoMixingSources(input_api, gn_files, output_api):
"""Disallow mixing C, C++ and Obj-C/Obj-C++ in the same target.
@ -566,8 +605,8 @@ def CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api):
def CheckGnChanges(input_api, output_api):
file_filter = lambda x: (input_api.FilterSourceFile(
x, white_list=(r'.+\.(gn|gni)$',),
black_list=(r'.*/presubmit_checks_lib/testdata/.*',)))
x, allow_list=(r'.+\.(gn|gni)$',),
block_list=(r'.*/presubmit_checks_lib/testdata/.*',)))
gn_files = []
for f in input_api.AffectedSourceFiles(file_filter):
@ -577,6 +616,7 @@ def CheckGnChanges(input_api, output_api):
if gn_files:
result.extend(CheckNoSourcesAbove(input_api, gn_files, output_api))
result.extend(CheckNoMixingSources(input_api, gn_files, output_api))
result.extend(CheckAbseilDependencies(input_api, gn_files, output_api))
result.extend(CheckNoPackageBoundaryViolations(input_api, gn_files,
output_api))
result.extend(CheckPublicDepsIsNotUsed(gn_files, input_api, output_api))
@ -756,7 +796,7 @@ def RunPythonTests(input_api, output_api):
input_api,
output_api,
directory,
whitelist=[r'.+_test\.py$']))
allowlist=[r'.+_test\.py$']))
return input_api.RunTests(tests, parallel=True)
@ -810,17 +850,18 @@ def CommonChecks(input_api, output_api):
results = []
# Filter out files that are in objc or ios dirs from being cpplint-ed since
# they do not follow C++ lint rules.
black_list = input_api.DEFAULT_BLACK_LIST + (
exception_list = input_api.DEFAULT_BLACK_LIST + (
r".*\bobjc[\\\/].*",
r".*objc\.[hcm]+$",
)
source_file_filter = lambda x: input_api.FilterSourceFile(x, None, black_list)
source_file_filter = lambda x: input_api.FilterSourceFile(x, None,
exception_list)
results.extend(CheckApprovedFilesLintClean(
input_api, output_api, source_file_filter))
results.extend(input_api.canned_checks.CheckLicense(
input_api, output_api, _LicenseHeader(input_api)))
results.extend(input_api.canned_checks.RunPylint(input_api, output_api,
black_list=(r'^base[\\\/].*\.py$',
block_list=(r'^base[\\\/].*\.py$',
r'^build[\\\/].*\.py$',
r'^buildtools[\\\/].*\.py$',
r'^infra[\\\/].*\.py$',
@ -847,12 +888,12 @@ def CommonChecks(input_api, output_api):
# Also we will skip most checks for third_party directory.
third_party_filter_list = (r'^third_party[\\\/].+',)
eighty_char_sources = lambda x: input_api.FilterSourceFile(x,
black_list=build_file_filter_list + objc_filter_list +
block_list=build_file_filter_list + objc_filter_list +
third_party_filter_list)
hundred_char_sources = lambda x: input_api.FilterSourceFile(x,
white_list=objc_filter_list)
allow_list=objc_filter_list)
non_third_party_sources = lambda x: input_api.FilterSourceFile(x,
black_list=third_party_filter_list)
block_list=third_party_filter_list)
results.extend(input_api.canned_checks.CheckLongLines(
input_api, output_api, maxlen=80, source_file_filter=eighty_char_sources))
@ -900,6 +941,8 @@ def CommonChecks(input_api, output_api):
input_api, output_api, non_third_party_sources))
results.extend(CheckBannedAbslMakeUnique(
input_api, output_api, non_third_party_sources))
results.extend(CheckObjcApiSymbols(
input_api, output_api, non_third_party_sources))
return results
@ -976,6 +1019,35 @@ def CheckBannedAbslMakeUnique(input_api, output_api, source_file_filter):
files)]
return []
def CheckObjcApiSymbols(input_api, output_api, source_file_filter):
rtc_objc_export = re.compile(r'RTC_OBJC_EXPORT(.|\n){26}',
re.MULTILINE | re.DOTALL)
file_filter = lambda f: (f.LocalPath().endswith(('.h'))
and source_file_filter(f))
files = []
file_filter = lambda x: (input_api.FilterSourceFile(x)
and source_file_filter(x))
for f in input_api.AffectedSourceFiles(file_filter):
if not f.LocalPath().endswith('.h') or not 'sdk/objc' in f.LocalPath():
continue
contents = input_api.ReadFile(f)
for match in rtc_objc_export.finditer(contents):
export_block = match.group(0)
if 'RTC_OBJC_TYPE' not in export_block:
files.append(f.LocalPath())
if len(files):
return [output_api.PresubmitError(
'RTC_OBJC_EXPORT types must be wrapped into an RTC_OBJC_TYPE() ' +
'macro.\n\n' +
'For example:\n' +
'RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE(RtcFoo)\n\n' +
'RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE(RtcFoo)\n\n' +
'Please fix the following files:',
files)]
return []
def CheckAbslMemoryInclude(input_api, output_api, source_file_filter):
pattern = input_api.re.compile(
r'^#include\s*"absl/memory/memory.h"', input_api.re.MULTILINE)
@ -1033,7 +1105,7 @@ def CheckOrphanHeaders(input_api, output_api, source_file_filter):
# eval-ed and thus doesn't have __file__.
error_msg = """{} should be listed in {}."""
results = []
orphan_blacklist = [
exempt_paths = [
os.path.join('tools_webrtc', 'ios', 'SDK'),
]
with _AddToPath(input_api.os_path.join(
@ -1042,7 +1114,7 @@ def CheckOrphanHeaders(input_api, output_api, source_file_filter):
from check_orphan_headers import IsHeaderInBuildGn
file_filter = lambda x: input_api.FilterSourceFile(
x, black_list=orphan_blacklist) and source_file_filter(x)
x, block_list=exempt_paths) and source_file_filter(x)
for f in input_api.AffectedSourceFiles(file_filter):
if f.LocalPath().endswith('.h'):
file_path = os.path.abspath(f.LocalPath())
@ -1061,7 +1133,7 @@ def CheckNewlineAtTheEndOfProtoFiles(input_api, output_api, source_file_filter):
error_msg = 'File {} must end with exactly one newline.'
results = []
file_filter = lambda x: input_api.FilterSourceFile(
x, white_list=(r'.+\.proto$',)) and source_file_filter(x)
x, allow_list=(r'.+\.proto$',)) and source_file_filter(x)
for f in input_api.AffectedSourceFiles(file_filter):
file_path = f.LocalPath()
with open(file_path) as f:

View File

@ -1,6 +1,7 @@
Name: WebRTC
URL: http://www.webrtc.org
Version: 90
CPEPrefix: cpe:/a:webrtc_project:webrtc:90
License: BSD
License File: LICENSE

View File

@ -23,9 +23,11 @@ adds the first use.
* `absl::variant` and related stuff from `absl/types/variant.h`.
* The functions in `absl/algorithm/algorithm.h` and
`absl/algorithm/container.h`.
* `absl/base/const_init.h` for mutex initialization.
* The macros in `absl/base/attributes.h`, `absl/base/config.h` and
`absl/base/macros.h`.
## **Disallowed**
### `absl::make_unique`
@ -34,7 +36,7 @@ adds the first use.
### `absl::Mutex`
*Use `rtc::CriticalSection` instead.*
*Use `webrtc::Mutex` instead.*
Chromium has a ban on new static initializers, and `absl::Mutex` uses
one. To make `absl::Mutex` available, we would need to nicely ask the
@ -61,3 +63,12 @@ has decided if they will change `absl::Span` to match.
These are optimized for speed, not binary size. Even `StrCat` calls
with a modest number of arguments can easily add several hundred bytes
to the binary.
## How to depend on Abseil
For build targets `rtc_library`, `rtc_source_set` and `rtc_static_library`,
dependencies on Abseil need to be listed in `absl_deps` instead of `deps`.
This is needed in order to support the Abseil component build in Chromium. In
such build mode, WebRTC will depend on a unique Abseil build target what will
generate a shared library.

View File

@ -71,8 +71,8 @@ rtc_library("rtp_headers") {
"..:webrtc_common",
"units:timestamp",
"video:video_rtp_headers",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("rtp_packet_info") {
@ -90,8 +90,8 @@ rtc_library("rtp_packet_info") {
"..:webrtc_common",
"../rtc_base:rtc_base_approved",
"../rtc_base/system:rtc_export",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("media_stream_interface") {
@ -111,8 +111,8 @@ rtc_library("media_stream_interface") {
"../rtc_base/system:rtc_export",
"video:recordable_encoded_frame",
"video:video_frame",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("libjingle_peerconnection_api") {
@ -166,12 +166,14 @@ rtc_library("libjingle_peerconnection_api") {
":media_stream_interface",
":network_state_predictor_api",
":packet_socket_factory",
":priority",
":rtc_error",
":rtc_stats_api",
":rtp_packet_info",
":rtp_parameters",
":rtp_transceiver_direction",
":scoped_refptr",
"adaptation:resource_adaptation_api",
"audio:audio_mixer_api",
"audio_codecs:audio_codecs_api",
"crypto:frame_decryptor_interface",
@ -181,23 +183,15 @@ rtc_library("libjingle_peerconnection_api") {
"rtc_event_log",
"task_queue",
"transport:bitrate_settings",
"transport:datagram_transport_interface",
"transport:enums",
"transport:network_control",
"transport:webrtc_key_value_config",
"transport/media:audio_interfaces",
"transport/media:media_transport_interface",
"transport/media:video_interfaces",
"transport/rtp:rtp_source",
"units:data_rate",
"units:timestamp",
"video:encoded_image",
"video:video_frame",
"video:video_rtp_headers",
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
# Basically, don't add stuff here. You might break sensitive downstream
# targets like pnacl. API should not depend on anything outside of this
@ -212,6 +206,12 @@ rtc_library("libjingle_peerconnection_api") {
"../rtc_base:rtc_base_approved",
"../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/algorithm:container",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
}
rtc_source_set("frame_transformer_interface") {
@ -221,6 +221,7 @@ rtc_source_set("frame_transformer_interface") {
":scoped_refptr",
"../rtc_base:refcount",
"video:encoded_frame",
"video:video_frame_metadata",
]
}
@ -235,8 +236,8 @@ rtc_library("rtc_error") {
"../rtc_base:logging",
"../rtc_base:macromagic",
"../rtc_base/system:rtc_export",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("packet_socket_factory") {
@ -272,7 +273,6 @@ rtc_source_set("video_quality_test_fixture_api") {
"../test:video_test_common",
"transport:bitrate_settings",
"transport:network_control",
"transport/media:media_transport_interface",
"video_codecs:video_codecs_api",
]
}
@ -283,18 +283,23 @@ rtc_source_set("video_quality_analyzer_api") {
sources = [ "test/video_quality_analyzer_interface.h" ]
deps = [
":array_view",
":stats_observer_interface",
"video:encoded_image",
"video:video_frame",
"video:video_rtp_headers",
"video_codecs:video_codecs_api",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
}
rtc_source_set("track_id_stream_label_map") {
rtc_source_set("track_id_stream_info_map") {
visibility = [ "*" ]
sources = [ "test/track_id_stream_label_map.h" ]
sources = [ "test/track_id_stream_info_map.h" ]
absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_source_set("rtp_transceiver_direction") {
@ -302,6 +307,10 @@ rtc_source_set("rtp_transceiver_direction") {
sources = [ "rtp_transceiver_direction.h" ]
}
rtc_source_set("priority") {
sources = [ "priority.h" ]
}
rtc_library("rtp_parameters") {
visibility = [ "*" ]
sources = [
@ -312,18 +321,21 @@ rtc_library("rtp_parameters") {
]
deps = [
":array_view",
":priority",
":rtp_transceiver_direction",
"../rtc_base:checks",
"../rtc_base:stringutils",
"../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
}
if (is_android) {
java_cpp_enum("rtp_parameters_enums") {
sources = [ "rtp_parameters.h" ]
java_cpp_enum("priority_enums") {
sources = [ "priority.h" ]
}
}
@ -334,7 +346,7 @@ rtc_source_set("audio_quality_analyzer_api") {
deps = [
":stats_observer_interface",
":track_id_stream_label_map",
":track_id_stream_info_map",
]
}
@ -343,11 +355,9 @@ rtc_source_set("stats_observer_interface") {
testonly = true
sources = [ "test/stats_observer_interface.h" ]
deps = [
# For api/stats_types.h
":libjingle_peerconnection_api",
":rtp_parameters",
]
deps = [ ":rtc_stats_api" ]
absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_source_set("peer_connection_quality_test_fixture_api") {
@ -365,18 +375,21 @@ rtc_source_set("peer_connection_quality_test_fixture_api") {
":media_stream_interface",
":network_state_predictor_api",
":packet_socket_factory",
":rtp_parameters",
":simulated_network_api",
":stats_observer_interface",
":track_id_stream_info_map",
":video_quality_analyzer_api",
"../media:rtc_media_base",
"../rtc_base:rtc_base",
"rtc_event_log",
"task_queue",
"transport:network_control",
"transport/media:media_transport_interface",
"units:time_delta",
"video:video_frame",
"video_codecs:video_codecs_api",
]
absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
@ -391,8 +404,8 @@ rtc_source_set("frame_generator_api") {
deps = [
":scoped_refptr",
"video:video_frame",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("test_dependency_factory") {
@ -405,7 +418,7 @@ rtc_library("test_dependency_factory") {
deps = [
":video_quality_test_fixture_api",
"../rtc_base:checks",
"../rtc_base:thread_checker",
"../rtc_base:platform_thread_types",
]
}
@ -451,6 +464,7 @@ if (rtc_include_tests) {
deps = [
":audio_quality_analyzer_api",
":peer_connection_quality_test_fixture_api",
":time_controller",
":video_quality_analyzer_api",
"../test/pc/e2e:peerconnection_quality_test",
]
@ -469,8 +483,8 @@ rtc_library("create_frame_generator") {
"../rtc_base:checks",
"../system_wrappers",
"../test:frame_generator_impl",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("create_peer_connection_quality_test_frame_generator") {
@ -486,8 +500,8 @@ rtc_library("create_peer_connection_quality_test_frame_generator") {
":peer_connection_quality_test_fixture_api",
"../rtc_base:checks",
"../test:fileutils",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("libjingle_logging_api") {
@ -540,8 +554,8 @@ rtc_library("audio_options_api") {
":array_view",
"../rtc_base:stringutils",
"../rtc_base/system:rtc_export",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("transport_api") {
@ -565,11 +579,8 @@ rtc_source_set("bitrate_allocation") {
rtc_source_set("simulated_network_api") {
visibility = [ "*" ]
sources = [ "test/simulated_network.h" ]
deps = [
"../rtc_base",
"../rtc_base:criticalsection",
"//third_party/abseil-cpp/absl/types:optional",
]
deps = [ "../rtc_base" ]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
# TODO(srte): Move to network_emulation sub directory.
@ -702,6 +713,8 @@ if (rtc_include_tests) {
"../modules/audio_coding:neteq_test_factory",
"../rtc_base:checks",
"neteq:neteq_api",
]
absl_deps = [
"//third_party/abseil-cpp/absl/flags:flag",
"//third_party/abseil-cpp/absl/flags:parse",
"//third_party/abseil-cpp/absl/strings",
@ -852,6 +865,7 @@ if (rtc_include_tests) {
}
rtc_source_set("mock_peerconnectioninterface") {
visibility = [ "*" ]
testonly = true
sources = [ "test/mock_peerconnectioninterface.h" ]
@ -861,6 +875,17 @@ if (rtc_include_tests) {
]
}
rtc_source_set("mock_peer_connection_factory_interface") {
visibility = [ "*" ]
testonly = true
sources = [ "test/mock_peer_connection_factory_interface.h" ]
deps = [
":libjingle_peerconnection_api",
"../test:test_support",
]
}
rtc_source_set("mock_rtp") {
testonly = true
sources = [
@ -874,6 +899,16 @@ if (rtc_include_tests) {
]
}
rtc_source_set("mock_transformable_video_frame") {
testonly = true
sources = [ "test/mock_transformable_video_frame.h" ]
deps = [
":frame_transformer_interface",
"../test:test_support",
]
}
rtc_source_set("mock_video_bitrate_allocator") {
testonly = true
sources = [ "test/mock_video_bitrate_allocator.h" ]
@ -931,39 +966,6 @@ if (rtc_include_tests) {
]
}
rtc_source_set("fake_media_transport") {
testonly = true
sources = [
"test/fake_datagram_transport.h",
"test/fake_media_transport.h",
]
deps = [
"../rtc_base:checks",
"transport:datagram_transport_interface",
"transport/media:media_transport_interface",
"//third_party/abseil-cpp/absl/algorithm:container",
]
}
rtc_library("loopback_media_transport") {
testonly = true
sources = [
"test/loopback_media_transport.cc",
"test/loopback_media_transport.h",
]
deps = [
"../rtc_base",
"../rtc_base:checks",
"transport:datagram_transport_interface",
"transport/media:media_transport_interface",
"//third_party/abseil-cpp/absl/algorithm:container",
]
}
rtc_library("create_time_controller") {
visibility = [ "*" ]
testonly = true
@ -994,7 +996,6 @@ if (rtc_include_tests) {
"rtp_parameters_unittest.cc",
"scoped_refptr_unittest.cc",
"test/create_time_controller_unittest.cc",
"test/loopback_media_transport_unittest.cc",
]
deps = [
@ -1002,7 +1003,6 @@ if (rtc_include_tests) {
":create_time_controller",
":function_view",
":libjingle_peerconnection_api",
":loopback_media_transport",
":rtc_error",
":rtc_event_log_output_file",
":rtp_packet_info",
@ -1033,13 +1033,13 @@ if (rtc_include_tests) {
":dummy_peer_connection",
":fake_frame_decryptor",
":fake_frame_encryptor",
":fake_media_transport",
":loopback_media_transport",
":mock_audio_mixer",
":mock_frame_decryptor",
":mock_frame_encryptor",
":mock_peer_connection_factory_interface",
":mock_peerconnectioninterface",
":mock_rtp",
":mock_transformable_video_frame",
":mock_video_bitrate_allocator",
":mock_video_bitrate_allocator_factory",
":mock_video_codec_factory",

View File

@ -115,11 +115,6 @@ specific_include_rules = {
"+rtc_base/ref_count.h",
],
"media_transport_interface\.h": [
"+rtc_base/copy_on_write_buffer.h", # As used by datachannelinterface.h
"+rtc_base/network_route.h",
],
"packet_socket_factory\.h": [
"+rtc_base/proxy_info.h",
"+rtc_base/async_packet_socket.h",
@ -246,6 +241,10 @@ specific_include_rules = {
"+modules/audio_processing/include/audio_processing.h",
],
"echo_detector_creator\.h": [
"+modules/audio_processing/include/audio_processing.h",
],
"fake_frame_decryptor\.h": [
"+rtc_base/ref_counted_object.h",
],
@ -259,7 +258,6 @@ specific_include_rules = {
],
"simulated_network\.h": [
"+rtc_base/critical_section.h",
"+rtc_base/random.h",
"+rtc_base/thread_annotations.h",
],

23
api/adaptation/BUILD.gn Normal file
View File

@ -0,0 +1,23 @@
# Copyright(c) 2020 The WebRTC project authors.All Rights Reserved.
#
# Use of this source code is governed by a BSD - style license
# that can be found in the LICENSE file in the root of the source
# tree.An additional intellectual property rights grant can be found
# in the file PATENTS.All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import("../../webrtc.gni")
rtc_source_set("resource_adaptation_api") {
visibility = [ "*" ]
sources = [
"resource.cc",
"resource.h",
]
deps = [
"../../api:scoped_refptr",
"../../rtc_base:refcount",
"../../rtc_base:rtc_base_approved",
"../../rtc_base/system:rtc_export",
]
}

7
api/adaptation/DEPS Normal file
View File

@ -0,0 +1,7 @@
specific_include_rules = {
"resource\.h": [
# ref_count.h is a public_deps of rtc_base_approved. Necessary because of
# rtc::RefCountInterface.
"+rtc_base/ref_count.h",
],
}

View File

@ -0,0 +1,30 @@
/*
* Copyright 2019 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/adaptation/resource.h"
namespace webrtc {
const char* ResourceUsageStateToString(ResourceUsageState usage_state) {
switch (usage_state) {
case ResourceUsageState::kOveruse:
return "kOveruse";
case ResourceUsageState::kUnderuse:
return "kUnderuse";
}
}
ResourceListener::~ResourceListener() {}
Resource::Resource() {}
Resource::~Resource() {}
} // namespace webrtc

67
api/adaptation/resource.h Normal file
View File

@ -0,0 +1,67 @@
/*
* Copyright 2019 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_ADAPTATION_RESOURCE_H_
#define API_ADAPTATION_RESOURCE_H_
#include <string>
#include "api/scoped_refptr.h"
#include "rtc_base/ref_count.h"
#include "rtc_base/system/rtc_export.h"
namespace webrtc {
class Resource;
enum class ResourceUsageState {
// Action is needed to minimze the load on this resource.
kOveruse,
// Increasing the load on this resource is desired, if possible.
kUnderuse,
};
RTC_EXPORT const char* ResourceUsageStateToString(
ResourceUsageState usage_state);
class RTC_EXPORT ResourceListener {
public:
virtual ~ResourceListener();
virtual void OnResourceUsageStateMeasured(
rtc::scoped_refptr<Resource> resource,
ResourceUsageState usage_state) = 0;
};
// A Resource monitors an implementation-specific resource. It may report
// kOveruse or kUnderuse when resource usage is high or low enough that we
// should perform some sort of mitigation to fulfil the resource's constraints.
//
// The methods on this interface are invoked on the adaptation task queue.
// Resource usage measurements may be performed on an any task queue.
//
// The Resource is reference counted to prevent use-after-free when posting
// between task queues. As such, the implementation MUST NOT make any
// assumptions about which task queue Resource is destructed on.
class RTC_EXPORT Resource : public rtc::RefCountInterface {
public:
Resource();
// Destruction may happen on any task queue.
~Resource() override;
virtual std::string Name() const = 0;
// The |listener| may be informed of resource usage measurements on any task
// queue, but not after this method is invoked with the null argument.
virtual void SetResourceListener(ResourceListener* listener) = 0;
};
} // namespace webrtc
#endif // API_ADAPTATION_RESOURCE_H_

View File

@ -38,7 +38,7 @@ void CallFixed(ArrayView<T, N> av) {}
} // namespace
TEST(ArrayViewTest, TestConstructFromPtrAndArray) {
TEST(ArrayViewDeathTest, TestConstructFromPtrAndArray) {
char arr[] = "Arrr!";
const char carr[] = "Carrr!";
EXPECT_EQ(6u, Call<const char>(arr));
@ -409,7 +409,7 @@ TEST(FixArrayViewTest, TestSwapFixed) {
// swap(x, w); // Compile error, because different sizes.
}
TEST(ArrayViewTest, TestIndexing) {
TEST(ArrayViewDeathTest, TestIndexing) {
char arr[] = "abcdefg";
ArrayView<char> x(arr);
const ArrayView<char> y(arr);

View File

@ -61,8 +61,8 @@ rtc_library("aec3_config_json") {
"../../rtc_base:rtc_base_approved",
"../../rtc_base:rtc_json",
"../../rtc_base/system:rtc_export",
"//third_party/abseil-cpp/absl/strings",
]
absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("aec3_factory") {
@ -87,3 +87,17 @@ rtc_source_set("echo_control") {
sources = [ "echo_control.h" ]
deps = [ "../../rtc_base:checks" ]
}
rtc_source_set("echo_detector_creator") {
visibility = [ "*" ]
sources = [
"echo_detector_creator.cc",
"echo_detector_creator.h",
]
deps = [
"../../api:scoped_refptr",
"../../modules/audio_processing:api",
"../../modules/audio_processing:audio_processing",
"../../rtc_base:refcount",
]
}

View File

@ -11,6 +11,8 @@
#include "api/audio/audio_frame.h"
#include <string.h>
#include <algorithm>
#include <utility>
#include "rtc_base/checks.h"
#include "rtc_base/time_utils.h"
@ -22,6 +24,28 @@ AudioFrame::AudioFrame() {
static_assert(sizeof(data_) == kMaxDataSizeBytes, "kMaxDataSizeBytes");
}
void swap(AudioFrame& a, AudioFrame& b) {
using std::swap;
swap(a.timestamp_, b.timestamp_);
swap(a.elapsed_time_ms_, b.elapsed_time_ms_);
swap(a.ntp_time_ms_, b.ntp_time_ms_);
swap(a.samples_per_channel_, b.samples_per_channel_);
swap(a.sample_rate_hz_, b.sample_rate_hz_);
swap(a.num_channels_, b.num_channels_);
swap(a.channel_layout_, b.channel_layout_);
swap(a.speech_type_, b.speech_type_);
swap(a.vad_activity_, b.vad_activity_);
swap(a.profile_timestamp_ms_, b.profile_timestamp_ms_);
swap(a.packet_infos_, b.packet_infos_);
const size_t length_a = a.samples_per_channel_ * a.num_channels_;
const size_t length_b = b.samples_per_channel_ * b.num_channels_;
RTC_DCHECK_LE(length_a, AudioFrame::kMaxDataSizeSamples);
RTC_DCHECK_LE(length_b, AudioFrame::kMaxDataSizeSamples);
std::swap_ranges(a.data_, a.data_ + std::max(length_a, length_b), b.data_);
swap(a.muted_, b.muted_);
swap(a.absolute_capture_timestamp_ms_, b.absolute_capture_timestamp_ms_);
}
void AudioFrame::Reset() {
ResetWithoutMuting();
muted_ = true;

View File

@ -14,6 +14,8 @@
#include <stddef.h>
#include <stdint.h>
#include <utility>
#include "api/audio/channel_layout.h"
#include "api/rtp_packet_infos.h"
#include "rtc_base/constructor_magic.h"
@ -58,6 +60,8 @@ class AudioFrame {
AudioFrame();
friend void swap(AudioFrame& a, AudioFrame& b);
// Resets all members to their default state.
void Reset();
// Same as Reset(), but leaves mute state unchanged. Muting a frame requires

View File

@ -0,0 +1,21 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/audio/echo_detector_creator.h"
#include "modules/audio_processing/residual_echo_detector.h"
#include "rtc_base/ref_counted_object.h"
namespace webrtc {
rtc::scoped_refptr<EchoDetector> CreateEchoDetector() {
return new rtc::RefCountedObject<ResidualEchoDetector>();
}
} // namespace webrtc

View File

@ -0,0 +1,26 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_AUDIO_ECHO_DETECTOR_CREATOR_H_
#define API_AUDIO_ECHO_DETECTOR_CREATOR_H_
#include "api/scoped_refptr.h"
#include "modules/audio_processing/include/audio_processing.h"
namespace webrtc {
// Returns an instance of the WebRTC implementation of a residual echo detector.
// It can be provided to the webrtc::AudioProcessingBuilder to obtain the
// usual residual echo metrics.
rtc::scoped_refptr<EchoDetector> CreateEchoDetector();
} // namespace webrtc
#endif // API_AUDIO_ECHO_DETECTOR_CREATOR_H_

View File

@ -133,4 +133,54 @@ TEST(AudioFrameTest, CopyFrom) {
EXPECT_EQ(0, memcmp(frame2.data(), frame1.data(), sizeof(samples)));
}
TEST(AudioFrameTest, SwapFrames) {
AudioFrame frame1, frame2;
int16_t samples1[kNumChannelsMono * kSamplesPerChannel];
for (size_t i = 0; i < kNumChannelsMono * kSamplesPerChannel; ++i) {
samples1[i] = i;
}
frame1.UpdateFrame(kTimestamp, samples1, kSamplesPerChannel, kSampleRateHz,
AudioFrame::kPLC, AudioFrame::kVadActive,
kNumChannelsMono);
frame1.set_absolute_capture_timestamp_ms(12345678);
const auto frame1_channel_layout = frame1.channel_layout();
int16_t samples2[(kNumChannelsMono + 1) * (kSamplesPerChannel + 1)];
for (size_t i = 0; i < (kNumChannelsMono + 1) * (kSamplesPerChannel + 1);
++i) {
samples2[i] = 1000 + i;
}
frame2.UpdateFrame(kTimestamp + 1, samples2, kSamplesPerChannel + 1,
kSampleRateHz + 1, AudioFrame::kNormalSpeech,
AudioFrame::kVadPassive, kNumChannelsMono + 1);
const auto frame2_channel_layout = frame2.channel_layout();
swap(frame1, frame2);
EXPECT_EQ(kTimestamp + 1, frame1.timestamp_);
ASSERT_EQ(kSamplesPerChannel + 1, frame1.samples_per_channel_);
EXPECT_EQ(kSampleRateHz + 1, frame1.sample_rate_hz_);
EXPECT_EQ(AudioFrame::kNormalSpeech, frame1.speech_type_);
EXPECT_EQ(AudioFrame::kVadPassive, frame1.vad_activity_);
ASSERT_EQ(kNumChannelsMono + 1, frame1.num_channels_);
for (size_t i = 0; i < (kNumChannelsMono + 1) * (kSamplesPerChannel + 1);
++i) {
EXPECT_EQ(samples2[i], frame1.data()[i]);
}
EXPECT_FALSE(frame1.absolute_capture_timestamp_ms());
EXPECT_EQ(frame2_channel_layout, frame1.channel_layout());
EXPECT_EQ(kTimestamp, frame2.timestamp_);
ASSERT_EQ(kSamplesPerChannel, frame2.samples_per_channel_);
EXPECT_EQ(kSampleRateHz, frame2.sample_rate_hz_);
EXPECT_EQ(AudioFrame::kPLC, frame2.speech_type_);
EXPECT_EQ(AudioFrame::kVadActive, frame2.vad_activity_);
ASSERT_EQ(kNumChannelsMono, frame2.num_channels_);
for (size_t i = 0; i < kNumChannelsMono * kSamplesPerChannel; ++i) {
EXPECT_EQ(samples1[i], frame2.data()[i]);
}
EXPECT_EQ(12345678, frame2.absolute_capture_timestamp_ms());
EXPECT_EQ(frame1_channel_layout, frame2.channel_layout());
}
} // namespace webrtc

View File

@ -38,6 +38,8 @@ rtc_library("audio_codecs_api") {
"../../rtc_base:sanitizer",
"../../rtc_base/system:rtc_export",
"../units:time_delta",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]

View File

@ -25,6 +25,8 @@ rtc_library("audio_encoder_L16") {
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base:safe_minmax",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -42,6 +44,8 @@ rtc_library("audio_decoder_L16") {
"../../../modules/audio_coding:pcm16b",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]

View File

@ -25,6 +25,8 @@ rtc_library("audio_encoder_g711") {
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base:safe_minmax",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -42,6 +44,8 @@ rtc_library("audio_decoder_g711") {
"../../../modules/audio_coding:g711",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]

View File

@ -31,6 +31,8 @@ rtc_library("audio_encoder_g722") {
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base:safe_minmax",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -48,6 +50,8 @@ rtc_library("audio_decoder_g722") {
"../../../modules/audio_coding:g722",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]

View File

@ -30,6 +30,8 @@ rtc_library("audio_encoder_ilbc") {
"../../../modules/audio_coding:ilbc",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base:safe_minmax",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -46,6 +48,8 @@ rtc_library("audio_decoder_ilbc") {
"..:audio_codecs_api",
"../../../modules/audio_coding:ilbc",
"../../../rtc_base:rtc_base_approved",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]

View File

@ -68,6 +68,8 @@ rtc_library("audio_encoder_isac_fix") {
"../../../modules/audio_coding:isac_fix",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -85,6 +87,8 @@ rtc_library("audio_decoder_isac_fix") {
"../../../modules/audio_coding:isac_fix",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -102,6 +106,8 @@ rtc_library("audio_encoder_isac_float") {
"../../../modules/audio_coding:isac",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -119,6 +125,8 @@ rtc_library("audio_decoder_isac_float") {
"../../../modules/audio_coding:isac",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]

View File

@ -23,8 +23,8 @@ rtc_library("audio_encoder_opus_config") {
deps = [
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
defines = []
if (rtc_opus_variable_complexity) {
defines += [ "WEBRTC_OPUS_VARIABLE_COMPLEXITY=1" ]
@ -49,6 +49,8 @@ rtc_library("audio_encoder_opus") {
"../../../modules/audio_coding:webrtc_opus",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -66,6 +68,8 @@ rtc_library("audio_decoder_opus") {
"../../../modules/audio_coding:webrtc_opus",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -82,8 +86,8 @@ rtc_library("audio_encoder_multiopus") {
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
"../opus:audio_encoder_opus_config",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("audio_decoder_multiopus") {
@ -99,6 +103,8 @@ rtc_library("audio_decoder_multiopus") {
"../../../modules/audio_coding:webrtc_multiopus",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",

View File

@ -75,6 +75,8 @@ struct RTC_EXPORT AudioOptions {
// and check if any other AudioOptions members are unused.
absl::optional<bool> combined_audio_video_bwe;
// Enable audio network adaptor.
// TODO(webrtc:11717): Remove this API in favor of adaptivePtime in
// RtpEncodingParameters.
absl::optional<bool> audio_network_adaptor;
// Config string for audio network adaptor.
absl::optional<std::string> audio_network_adaptor_config;

View File

@ -20,6 +20,7 @@
#include <string>
#include "absl/types/optional.h"
#include "api/priority.h"
#include "api/rtc_error.h"
#include "rtc_base/checks.h"
#include "rtc_base/copy_on_write_buffer.h"
@ -61,6 +62,9 @@ struct DataChannelInit {
// The stream id, or SID, for SCTP data channels. -1 if unset (see above).
int id = -1;
// https://w3c.github.io/webrtc-priority/#new-rtcdatachannelinit-member
absl::optional<Priority> priority;
};
// At the JavaScript level, data can be passed in as a string or a blob, so
@ -154,6 +158,7 @@ class RTC_EXPORT DataChannelInterface : public rtc::RefCountInterface {
// If negotiated in-band, this ID will be populated once the DTLS role is
// determined, and until then this will return -1.
virtual int id() const = 0;
virtual Priority priority() const { return Priority::kLow; }
virtual DataState state() const = 0;
// When state is kClosed, and the DataChannel was not closed using
// the closing procedure, returns the error information about the closing.

View File

@ -16,6 +16,7 @@
#include "api/scoped_refptr.h"
#include "api/video/encoded_frame.h"
#include "api/video/video_frame_metadata.h"
#include "rtc_base/ref_count.h"
namespace webrtc {
@ -48,6 +49,8 @@ class TransformableVideoFrameInterface : public TransformableFrameInterface {
// TODO(bugs.webrtc.org/11380) remove from interface once
// webrtc::RtpDescriptorAuthentication is exposed in api/.
virtual std::vector<uint8_t> GetAdditionalData() const = 0;
virtual const VideoFrameMetadata& GetMetadata() const = 0;
};
// Extends the TransformableFrameInterface to expose audio-specific information.

View File

@ -23,8 +23,8 @@ rtc_source_set("neteq_api") {
"../../rtc_base:rtc_base_approved",
"../../system_wrappers:system_wrappers",
"../audio_codecs:audio_codecs_api",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("custom_neteq_factory") {
@ -56,8 +56,8 @@ rtc_source_set("neteq_controller_api") {
":tick_timer",
"../../rtc_base:rtc_base_approved",
"../../system_wrappers:system_wrappers",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("default_neteq_controller_factory") {

View File

@ -30,7 +30,8 @@ std::string NetEq::Config::ToString() const {
<< ", min_delay_ms=" << min_delay_ms << ", enable_fast_accelerate="
<< (enable_fast_accelerate ? "true" : "false")
<< ", enable_muted_state=" << (enable_muted_state ? "true" : "false")
<< ", enable_rtx_handling=" << (enable_rtx_handling ? "true" : "false");
<< ", enable_rtx_handling=" << (enable_rtx_handling ? "true" : "false")
<< ", extra_output_delay_ms=" << extra_output_delay_ms;
return ss.str();
}

View File

@ -138,6 +138,10 @@ class NetEq {
bool enable_rtx_handling = false;
absl::optional<AudioCodecPairId> codec_pair_id;
bool for_test_no_time_stretching = false; // Use only for testing.
// Adds extra delay to the output of NetEq, without affecting jitter or
// loss behavior. This is mainly for testing. Value must be a non-negative
// multiple of 10 ms.
int extra_output_delay_ms = 0;
};
enum ReturnCodes { kOK = 0, kFail = -1 };

View File

@ -53,27 +53,6 @@ RTCError PeerConnectionInterface::SetConfiguration(
return RTCError();
}
RTCError PeerConnectionInterface::SetBitrate(const BitrateSettings& bitrate) {
BitrateParameters bitrate_parameters;
bitrate_parameters.min_bitrate_bps = bitrate.min_bitrate_bps;
bitrate_parameters.current_bitrate_bps = bitrate.start_bitrate_bps;
bitrate_parameters.max_bitrate_bps = bitrate.max_bitrate_bps;
return SetBitrate(bitrate_parameters);
}
RTCError PeerConnectionInterface::SetBitrate(
const BitrateParameters& bitrate_parameters) {
BitrateSettings bitrate;
bitrate.min_bitrate_bps = bitrate_parameters.min_bitrate_bps;
bitrate.start_bitrate_bps = bitrate_parameters.current_bitrate_bps;
bitrate.max_bitrate_bps = bitrate_parameters.max_bitrate_bps;
return SetBitrate(bitrate);
}
PeerConnectionInterface::BitrateParameters::BitrateParameters() = default;
PeerConnectionInterface::BitrateParameters::~BitrateParameters() = default;
PeerConnectionDependencies::PeerConnectionDependencies(
PeerConnectionObserver* observer_in)
: observer(observer_in) {}

View File

@ -73,6 +73,7 @@
#include <string>
#include <vector>
#include "api/adaptation/resource.h"
#include "api/async_resolver_factory.h"
#include "api/audio/audio_mixer.h"
#include "api/audio_codecs/audio_decoder_factory.h"
@ -102,7 +103,6 @@
#include "api/task_queue/task_queue_factory.h"
#include "api/transport/bitrate_settings.h"
#include "api/transport/enums.h"
#include "api/transport/media/media_transport_interface.h"
#include "api/transport/network_control.h"
#include "api/transport/webrtc_key_value_config.h"
#include "api/turn_customizer.h"
@ -613,34 +613,6 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface {
// correctly. This flag will be deprecated soon. Do not rely on it.
bool active_reset_srtp_params = false;
// DEPRECATED. Do not use. This option is ignored by peer connection.
// TODO(webrtc:9719): Delete this option.
bool use_media_transport = false;
// DEPRECATED. Do not use. This option is ignored by peer connection.
// TODO(webrtc:9719): Delete this option.
bool use_media_transport_for_data_channels = false;
// If MediaTransportFactory is provided in PeerConnectionFactory, this flag
// informs PeerConnection that it should use the DatagramTransportInterface
// for packets instead DTLS. It's invalid to set it to |true| if the
// MediaTransportFactory wasn't provided.
absl::optional<bool> use_datagram_transport;
// If MediaTransportFactory is provided in PeerConnectionFactory, this flag
// informs PeerConnection that it should use the DatagramTransport's
// implementation of DataChannelTransportInterface for data channels instead
// of SCTP-DTLS.
absl::optional<bool> use_datagram_transport_for_data_channels;
// If true, this PeerConnection will only use datagram transport for data
// channels when receiving an incoming offer that includes datagram
// transport parameters. It will not request use of a datagram transport
// when it creates the initial, outgoing offer.
// This setting only applies when |use_datagram_transport_for_data_channels|
// is true.
absl::optional<bool> use_datagram_transport_for_data_channels_receive_only;
// Defines advanced optional cryptographic settings related to SRTP and
// frame encryption for native WebRTC. Setting this will overwrite any
// settings set in PeerConnectionFactory (which is deprecated).
@ -666,8 +638,6 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface {
// Whether network condition based codec switching is allowed.
absl::optional<bool> allow_codec_switching;
bool enable_simulcast_stats = true;
//
// Don't forget to update operator== if adding something.
//
@ -1045,28 +1015,13 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface {
virtual bool RemoveIceCandidates(
const std::vector<cricket::Candidate>& candidates) = 0;
// 0 <= min <= current <= max should hold for set parameters.
struct BitrateParameters {
BitrateParameters();
~BitrateParameters();
absl::optional<int> min_bitrate_bps;
absl::optional<int> current_bitrate_bps;
absl::optional<int> max_bitrate_bps;
};
// SetBitrate limits the bandwidth allocated for all RTP streams sent by
// this PeerConnection. Other limitations might affect these limits and
// are respected (for example "b=AS" in SDP).
//
// Setting |current_bitrate_bps| will reset the current bitrate estimate
// to the provided value.
virtual RTCError SetBitrate(const BitrateSettings& bitrate);
// TODO(nisse): Deprecated - use version above. These two default
// implementations require subclasses to implement one or the other
// of the methods.
virtual RTCError SetBitrate(const BitrateParameters& bitrate_parameters);
virtual RTCError SetBitrate(const BitrateSettings& bitrate) = 0;
// Enable/disable playout of received audio streams. Enabled by default. Note
// that even if playout is enabled, streams will only be played out if the
@ -1118,6 +1073,14 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface {
return absl::nullopt;
}
// When a resource is overused, the PeerConnection will try to reduce the load
// on the sysem, for example by reducing the resolution or frame rate of
// encoded streams. The Resource API allows injecting platform-specific usage
// measurements. The conditions to trigger kOveruse or kUnderuse are up to the
// implementation.
// TODO(hbos): Make pure virtual when implemented by downstream projects.
virtual void AddAdaptationResource(rtc::scoped_refptr<Resource> resource) {}
// Start RtcEventLog using an existing output-sink. Takes ownership of
// |output| and passes it on to Call, which will take the ownership. If the
// operation fails the output will be closed and deallocated. The event log
@ -1332,7 +1295,6 @@ struct RTC_EXPORT PeerConnectionFactoryDependencies final {
std::unique_ptr<NetworkStatePredictorFactoryInterface>
network_state_predictor_factory;
std::unique_ptr<NetworkControllerFactoryInterface> network_controller_factory;
std::unique_ptr<MediaTransportFactory> media_transport_factory;
std::unique_ptr<NetEqFactory> neteq_factory;
std::unique_ptr<WebRtcKeyValueConfig> trials;
};

View File

@ -132,6 +132,7 @@ PROXY_METHOD0(IceConnectionState, standardized_ice_connection_state)
PROXY_METHOD0(PeerConnectionState, peer_connection_state)
PROXY_METHOD0(IceGatheringState, ice_gathering_state)
PROXY_METHOD0(absl::optional<bool>, can_trickle_ice_candidates)
PROXY_METHOD1(void, AddAdaptationResource, rtc::scoped_refptr<Resource>)
PROXY_METHOD2(bool,
StartRtcEventLog,
std::unique_ptr<RtcEventLogOutput>,

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
* Copyright 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
@ -8,16 +8,19 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#include <stddef.h>
#include <stdint.h>
#include "api/array_view.h"
#include "call/rtp_rtcp_demuxer_helper.h"
#ifndef API_PRIORITY_H_
#define API_PRIORITY_H_
namespace webrtc {
void FuzzOneInput(const uint8_t* data, size_t size) {
ParseRtcpPacketSenderSsrc(rtc::MakeArrayView(data, size));
}
// GENERATED_JAVA_ENUM_PACKAGE: org.webrtc
enum class Priority {
kVeryLow,
kLow,
kMedium,
kHigh,
};
} // namespace webrtc
#endif // API_PRIORITY_H_

View File

@ -55,6 +55,7 @@
#include <memory>
#include <string>
#include <tuple>
#include <type_traits>
#include <utility>
#include "api/scoped_refptr.h"
@ -396,6 +397,16 @@ class ConstMethodCall : public rtc::Message, public rtc::MessageHandler {
return call.Marshal(RTC_FROM_HERE, worker_thread_); \
}
// For use when returning purely const state (set during construction).
// Use with caution. This method should only be used when the return value will
// always be the same.
#define BYPASS_PROXY_CONSTMETHOD0(r, method) \
r method() const override { \
static_assert(!std::is_pointer<r>::value, "Type is a pointer"); \
static_assert(!std::is_reference<r>::value, "Type is a reference"); \
return c_->method(); \
}
} // namespace webrtc
#endif // API_PROXY_H_

View File

@ -141,14 +141,16 @@ TEST_F(RtcEventLogOutputFileTest, AllowReasonableFileSizeLimits) {
}
#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
TEST_F(RtcEventLogOutputFileTest, WritingToInactiveFileForbidden) {
class RtcEventLogOutputFileDeathTest : public RtcEventLogOutputFileTest {};
TEST_F(RtcEventLogOutputFileDeathTest, WritingToInactiveFileForbidden) {
RtcEventLogOutputFile output_file(output_file_name_, 2);
ASSERT_FALSE(output_file.Write("abc"));
ASSERT_FALSE(output_file.IsActive());
EXPECT_DEATH(output_file.Write("abc"), "");
}
TEST_F(RtcEventLogOutputFileTest, DisallowUnreasonableFileSizeLimits) {
TEST_F(RtcEventLogOutputFileDeathTest, DisallowUnreasonableFileSizeLimits) {
// Keeping in a temporary unique_ptr to make it clearer that the death is
// triggered by construction, not destruction.
std::unique_ptr<RtcEventLogOutputFile> output_file;

View File

@ -26,9 +26,7 @@ RTPHeaderExtension::RTPHeaderExtension()
videoRotation(kVideoRotation_0),
hasVideoContentType(false),
videoContentType(VideoContentType::UNSPECIFIED),
has_video_timing(false),
has_frame_marking(false),
frame_marking({false, false, false, false, false, 0xFF, 0, 0}) {}
has_video_timing(false) {}
RTPHeaderExtension::RTPHeaderExtension(const RTPHeaderExtension& other) =
default;

View File

@ -21,10 +21,9 @@
#include "api/units/timestamp.h"
#include "api/video/color_space.h"
#include "api/video/video_content_type.h"
#include "api/video/video_frame_marking.h"
#include "api/video/video_rotation.h"
#include "api/video/video_timing.h"
#include "common_types.h" // NOLINT(build/include)
#include "common_types.h" // NOLINT (build/include)
namespace webrtc {
@ -143,9 +142,6 @@ struct RTPHeaderExtension {
bool has_video_timing;
VideoSendTiming video_timing;
bool has_frame_marking;
FrameMarking frame_marking;
PlayoutDelay playout_delay = {-1, -1};
// For identification of a stream when ssrc is not signaled. See

View File

@ -18,6 +18,20 @@
namespace webrtc {
const char* DegradationPreferenceToString(
DegradationPreference degradation_preference) {
switch (degradation_preference) {
case DegradationPreference::DISABLED:
return "disabled";
case DegradationPreference::MAINTAIN_FRAMERATE:
return "maintain-framerate";
case DegradationPreference::MAINTAIN_RESOLUTION:
return "maintain-resolution";
case DegradationPreference::BALANCED:
return "balanced";
}
}
const double kDefaultBitratePriority = 1.0;
RtcpFeedback::RtcpFeedback() = default;
@ -105,7 +119,6 @@ constexpr char RtpExtension::kAbsoluteCaptureTimeUri[];
constexpr char RtpExtension::kVideoRotationUri[];
constexpr char RtpExtension::kVideoContentTypeUri[];
constexpr char RtpExtension::kVideoTimingUri[];
constexpr char RtpExtension::kFrameMarkingUri[];
constexpr char RtpExtension::kGenericFrameDescriptorUri00[];
constexpr char RtpExtension::kDependencyDescriptorUri[];
constexpr char RtpExtension::kTransportSequenceNumberUri[];
@ -144,7 +157,6 @@ bool RtpExtension::IsSupportedForVideo(absl::string_view uri) {
uri == webrtc::RtpExtension::kVideoContentTypeUri ||
uri == webrtc::RtpExtension::kVideoTimingUri ||
uri == webrtc::RtpExtension::kMidUri ||
uri == webrtc::RtpExtension::kFrameMarkingUri ||
uri == webrtc::RtpExtension::kGenericFrameDescriptorUri00 ||
uri == webrtc::RtpExtension::kDependencyDescriptorUri ||
uri == webrtc::RtpExtension::kColorSpaceUri ||

View File

@ -20,6 +20,7 @@
#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
#include "api/media_types.h"
#include "api/priority.h"
#include "api/rtp_transceiver_direction.h"
#include "rtc_base/system/rtc_export.h"
@ -91,15 +92,10 @@ enum class DegradationPreference {
BALANCED,
};
RTC_EXPORT extern const double kDefaultBitratePriority;
RTC_EXPORT const char* DegradationPreferenceToString(
DegradationPreference degradation_preference);
// GENERATED_JAVA_ENUM_PACKAGE: org.webrtc
enum class Priority {
kVeryLow,
kLow,
kMedium,
kHigh,
};
RTC_EXPORT extern const double kDefaultBitratePriority;
struct RTC_EXPORT RtcpFeedback {
RtcpFeedbackType type = RtcpFeedbackType::CCM;
@ -226,7 +222,7 @@ struct RTC_EXPORT RtpHeaderExtensionCapability {
bool preferred_encrypt = false;
// The direction of the extension. The kStopped value is only used with
// RtpTransceiverInterface::header_extensions_offered() and
// RtpTransceiverInterface::HeaderExtensionsToOffer() and
// SetOfferedRtpHeaderExtensions().
RtpTransceiverDirection direction = RtpTransceiverDirection::kSendRecv;
@ -314,10 +310,6 @@ struct RTC_EXPORT RtpExtension {
static constexpr char kVideoTimingUri[] =
"http://www.webrtc.org/experiments/rtp-hdrext/video-timing";
// Header extension for video frame marking.
static constexpr char kFrameMarkingUri[] =
"http://tools.ietf.org/html/draft-ietf-avtext-framemarking-07";
// Experimental codec agnostic frame descriptor.
static constexpr char kGenericFrameDescriptorUri00[] =
"http://www.webrtc.org/experiments/rtp-hdrext/"
@ -481,6 +473,10 @@ struct RTC_EXPORT RtpEncodingParameters {
// Called "encodingId" in ORTC.
std::string rid;
// Allow dynamic frame length changes for audio:
// https://w3c.github.io/webrtc-extensions/#dom-rtcrtpencodingparameters-adaptiveptime
bool adaptive_ptime = false;
bool operator==(const RtpEncodingParameters& o) const {
return ssrc == o.ssrc && bitrate_priority == o.bitrate_priority &&
network_priority == o.network_priority &&
@ -489,7 +485,8 @@ struct RTC_EXPORT RtpEncodingParameters {
max_framerate == o.max_framerate &&
num_temporal_layers == o.num_temporal_layers &&
scale_resolution_down_by == o.scale_resolution_down_by &&
active == o.active && rid == o.rid;
active == o.active && rid == o.rid &&
adaptive_ptime == o.adaptive_ptime;
}
bool operator!=(const RtpEncodingParameters& o) const {
return !(*this == o);

View File

@ -41,4 +41,10 @@ RtpTransceiverInterface::HeaderExtensionsToOffer() const {
return {};
}
webrtc::RTCError RtpTransceiverInterface::SetOfferedRtpHeaderExtensions(
rtc::ArrayView<const RtpHeaderExtensionCapability>
header_extensions_to_offer) {
return webrtc::RTCError(webrtc::RTCErrorType::UNSUPPORTED_OPERATION);
}
} // namespace webrtc

View File

@ -133,6 +133,13 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface {
virtual std::vector<RtpHeaderExtensionCapability> HeaderExtensionsToOffer()
const;
// The SetOfferedRtpHeaderExtensions method modifies the next SDP negotiation
// so that it negotiates use of header extensions which are not kStopped.
// https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface
virtual webrtc::RTCError SetOfferedRtpHeaderExtensions(
rtc::ArrayView<const RtpHeaderExtensionCapability>
header_extensions_to_offer);
protected:
~RtpTransceiverInterface() override = default;
};

View File

@ -319,6 +319,14 @@ class RTCStatsMember : public RTCStatsMemberInterface {
std::string ValueToString() const override;
std::string ValueToJson() const override;
template <typename U>
inline T ValueOrDefault(U default_value) const {
if (is_defined()) {
return *(*this);
}
return default_value;
}
// Assignment operators.
T& operator=(const T& value) {
value_ = value;

View File

@ -134,7 +134,7 @@ class RTC_EXPORT RTCDataChannelStats final : public RTCStats {
RTCStatsMember<std::string> label;
RTCStatsMember<std::string> protocol;
RTCStatsMember<int32_t> datachannelid;
RTCStatsMember<int32_t> data_channel_identifier;
// TODO(hbos): Support enum types? "RTCStatsMember<RTCDataChannelState>"?
RTCStatsMember<std::string> state;
RTCStatsMember<uint32_t> messages_sent;
@ -419,6 +419,18 @@ class RTC_EXPORT RTCInboundRTPStreamStats final : public RTCRTPStreamStats {
// TODO(hbos): Collect and populate this value for both "audio" and "video",
// currently not collected for "video". https://bugs.webrtc.org/7065
RTCStatsMember<double> jitter;
RTCStatsMember<double> jitter_buffer_delay;
RTCStatsMember<uint64_t> jitter_buffer_emitted_count;
RTCStatsMember<uint64_t> total_samples_received;
RTCStatsMember<uint64_t> concealed_samples;
RTCStatsMember<uint64_t> silent_concealed_samples;
RTCStatsMember<uint64_t> concealment_events;
RTCStatsMember<uint64_t> inserted_samples_for_deceleration;
RTCStatsMember<uint64_t> removed_samples_for_acceleration;
RTCStatsMember<double> audio_level;
RTCStatsMember<double> total_audio_energy;
RTCStatsMember<double> total_samples_duration;
RTCStatsMember<int32_t> frames_received;
// TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065
RTCStatsMember<double> round_trip_time;
// TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065
@ -441,8 +453,13 @@ class RTC_EXPORT RTCInboundRTPStreamStats final : public RTCRTPStreamStats {
RTCStatsMember<double> gap_loss_rate;
// TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065
RTCStatsMember<double> gap_discard_rate;
RTCStatsMember<uint32_t> frame_width;
RTCStatsMember<uint32_t> frame_height;
RTCStatsMember<uint32_t> frame_bit_depth;
RTCStatsMember<double> frames_per_second;
RTCStatsMember<uint32_t> frames_decoded;
RTCStatsMember<uint32_t> key_frames_decoded;
RTCStatsMember<uint32_t> frames_dropped;
RTCStatsMember<double> total_decode_time;
RTCStatsMember<double> total_inter_frame_delay;
RTCStatsMember<double> total_squared_inter_frame_delay;
@ -602,7 +619,9 @@ class RTC_EXPORT RTCTransportStats final : public RTCStats {
~RTCTransportStats() override;
RTCStatsMember<uint64_t> bytes_sent;
RTCStatsMember<uint64_t> packets_sent;
RTCStatsMember<uint64_t> bytes_received;
RTCStatsMember<uint64_t> packets_received;
RTCStatsMember<std::string> rtcp_transport_stats_id;
// TODO(hbos): Support enum types? "RTCStatsMember<RTCDtlsTransportState>"?
RTCStatsMember<std::string> dtls_state;

View File

@ -21,6 +21,8 @@ rtc_library("task_queue") {
"../../rtc_base:checks",
"../../rtc_base:macromagic",
"../../rtc_base/system:rtc_export",
]
absl_deps = [
"//third_party/abseil-cpp/absl/base:config",
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/strings",
@ -51,6 +53,8 @@ rtc_library("task_queue_test") {
deps = [
"../../../webrtc_overrides:webrtc_component",
"../../test:test_support",
]
absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
]
@ -62,6 +66,8 @@ rtc_library("task_queue_test") {
"../../rtc_base:timeutils",
"../../rtc_base/task_utils:to_queued_task",
"../../test:test_support",
]
absl_deps = [
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/strings",
]

View File

@ -37,9 +37,11 @@ TEST_P(TaskQueueTest, PostAndCheckCurrent) {
rtc::Event event;
auto queue = CreateTaskQueue(factory, "PostAndCheckCurrent");
// We're not running a task, so there shouldn't be a current queue.
// We're not running a task, so |queue| shouldn't be current.
// Note that because rtc::Thread also supports the TQ interface and
// TestMainImpl::Init wraps the main test thread (bugs.webrtc.org/9714), that
// means that TaskQueueBase::Current() will still return a valid value.
EXPECT_FALSE(queue->IsCurrent());
EXPECT_FALSE(TaskQueueBase::Current());
queue->PostTask(ToQueuedTask([&event, &queue] {
EXPECT_TRUE(queue->IsCurrent());
@ -269,5 +271,10 @@ TEST_P(TaskQueueTest, PostTwoWithSharedUnprotectedState) {
EXPECT_TRUE(done.Wait(1000));
}
// TaskQueueTest is a set of tests for any implementation of the TaskQueueBase.
// Tests are instantiated next to the concrete implementation(s).
// https://github.com/google/googletest/blob/master/googletest/docs/advanced.md#creating-value-parameterized-abstract-tests
GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(TaskQueueTest);
} // namespace
} // namespace webrtc

View File

@ -13,7 +13,6 @@ specific_include_rules = {
],
"loopback_media_transport\.h": [
"+rtc_base/async_invoker.h",
"+rtc_base/critical_section.h",
"+rtc_base/thread.h",
"+rtc_base/thread_checker.h",
],

View File

@ -14,7 +14,7 @@
#include <string>
#include "api/test/stats_observer_interface.h"
#include "api/test/track_id_stream_label_map.h"
#include "api/test/track_id_stream_info_map.h"
namespace webrtc {
namespace webrtc_pc_e2e {
@ -31,7 +31,7 @@ class AudioQualityAnalyzerInterface : public StatsObserverInterface {
// stream_id matching. The caller is responsible for ensuring the
// AnalyzerHelper outlives the instance of the AudioQualityAnalyzerInterface.
virtual void Start(std::string test_case_name,
TrackIdStreamLabelMap* analyzer_helper) = 0;
TrackIdStreamInfoMap* analyzer_helper) = 0;
// Will be called by the framework at the end of the test. The analyzer
// has to finalize all its stats and it should report them.

View File

@ -17,6 +17,12 @@
namespace webrtc {
namespace test {
int AudioprocFloat(rtc::scoped_refptr<AudioProcessing> audio_processing,
int argc,
char* argv[]) {
return AudioprocFloatImpl(std::move(audio_processing), argc, argv);
}
int AudioprocFloat(std::unique_ptr<AudioProcessingBuilder> ap_builder,
int argc,
char* argv[]) {

View File

@ -19,6 +19,22 @@
namespace webrtc {
namespace test {
// This is an interface for the audio processing simulation utility. This
// utility can be used to simulate the audioprocessing module using a recording
// (either an AEC dump or wav files), and generate the output as a wav file.
// Any audio_processing object specified in the input is used for the
// simulation. The optional |audio_processing| object provides the
// AudioProcessing instance that is used during the simulation. Note that when
// the audio_processing object is specified all functionality that relies on
// using the AudioProcessingBuilder is deactivated, since the AudioProcessing
// object is already created and the builder is not used in the simulation. It
// is needed to pass the command line flags as |argc| and |argv|, so these can
// be interpreted properly by the utility. To see a list of all supported
// command line flags, run the executable with the '--help' flag.
int AudioprocFloat(rtc::scoped_refptr<AudioProcessing> audio_processing,
int argc,
char* argv[]);
// This is an interface for the audio processing simulation utility. This
// utility can be used to simulate the audioprocessing module using a recording
// (either an AEC dump or wav files), and generate the output as a wav file.

View File

@ -27,16 +27,17 @@
// "api/test/videocodec_test_fixture.h"
// "api/test/videocodec_test_stats.h"
#include "api/test/dummy_peer_connection.h"
#include "api/test/fake_frame_decryptor.h"
#include "api/test/fake_frame_encryptor.h"
#include "api/test/fake_media_transport.h"
#include "api/test/loopback_media_transport.h"
#include "api/test/mock_audio_mixer.h"
#include "api/test/mock_frame_decryptor.h"
#include "api/test/mock_frame_encryptor.h"
#include "api/test/mock_peer_connection_factory_interface.h"
#include "api/test/mock_peerconnectioninterface.h"
#include "api/test/mock_rtpreceiver.h"
#include "api/test/mock_rtpsender.h"
#include "api/test/mock_transformable_video_frame.h"
#include "api/test/mock_video_bitrate_allocator.h"
#include "api/test/mock_video_bitrate_allocator_factory.h"
#include "api/test/mock_video_decoder.h"

View File

@ -1,4 +1,3 @@
/*
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
*
@ -18,6 +17,7 @@
namespace webrtc {
// Returns a non-null NetworkEmulationManager instance.
std::unique_ptr<NetworkEmulationManager> CreateNetworkEmulationManager(
TimeMode mode = TimeMode::kRealTime);

View File

@ -13,6 +13,7 @@
#include <memory>
#include <utility>
#include "api/test/time_controller.h"
#include "test/pc/e2e/peer_connection_quality_test.h"
namespace webrtc {
@ -21,11 +22,12 @@ namespace webrtc_pc_e2e {
std::unique_ptr<PeerConnectionE2EQualityTestFixture>
CreatePeerConnectionE2EQualityTestFixture(
std::string test_case_name,
TimeController& time_controller,
std::unique_ptr<AudioQualityAnalyzerInterface> audio_quality_analyzer,
std::unique_ptr<VideoQualityAnalyzerInterface> video_quality_analyzer) {
return std::make_unique<PeerConnectionE2EQualityTest>(
std::move(test_case_name), std::move(audio_quality_analyzer),
std::move(video_quality_analyzer));
std::move(test_case_name), time_controller,
std::move(audio_quality_analyzer), std::move(video_quality_analyzer));
}
} // namespace webrtc_pc_e2e

View File

@ -15,19 +15,25 @@
#include "api/test/audio_quality_analyzer_interface.h"
#include "api/test/peerconnection_quality_test_fixture.h"
#include "api/test/time_controller.h"
#include "api/test/video_quality_analyzer_interface.h"
namespace webrtc {
namespace webrtc_pc_e2e {
// API is in development. Can be changed/removed without notice.
// Create test fixture to establish test call between Alice and Bob.
// During the test Alice will be caller and Bob will answer the call.
// |test_case_name| is a name of test case, that will be used for all metrics
// reporting.
// |time_controller| is used to manage all rtc::Thread's and TaskQueue
// instances. Instance of |time_controller| have to outlive created fixture.
// Returns a non-null PeerConnectionE2EQualityTestFixture instance.
std::unique_ptr<PeerConnectionE2EQualityTestFixture>
CreatePeerConnectionE2EQualityTestFixture(
std::string test_case_name,
TimeController& time_controller,
std::unique_ptr<AudioQualityAnalyzerInterface> audio_quality_analyzer,
std::unique_ptr<VideoQualityAnalyzerInterface> video_quality_analyzer);

View File

@ -35,13 +35,18 @@ std::unique_ptr<CallFactoryInterface> CreateTimeControllerBasedCallFactory(
explicit TimeControllerBasedCallFactory(TimeController* time_controller)
: time_controller_(time_controller) {}
Call* CreateCall(const Call::Config& config) override {
return Call::Create(config, time_controller_->GetClock(),
time_controller_->CreateProcessThread("CallModules"),
if (!module_thread_) {
module_thread_ = SharedModuleThread::Create(
time_controller_->CreateProcessThread("CallModules"),
[this]() { module_thread_ = nullptr; });
}
return Call::Create(config, time_controller_->GetClock(), module_thread_,
time_controller_->CreateProcessThread("Pacer"));
}
private:
TimeController* time_controller_;
rtc::scoped_refptr<SharedModuleThread> module_thread_;
};
return std::make_unique<TimeControllerBasedCallFactory>(time_controller);
}

View File

@ -194,10 +194,6 @@ class DummyPeerConnection : public PeerConnectionInterface {
return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented");
}
RTCError SetBitrate(const BitrateParameters& bitrate_parameters) override {
return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented");
}
void SetAudioPlayout(bool playout) override { FATAL() << "Not implemented"; }
void SetAudioRecording(bool recording) override {
FATAL() << "Not implemented";

View File

@ -1,121 +0,0 @@
/*
* Copyright 2019 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_FAKE_DATAGRAM_TRANSPORT_H_
#define API_TEST_FAKE_DATAGRAM_TRANSPORT_H_
#include <cstddef>
#include <string>
#include "api/transport/datagram_transport_interface.h"
#include "api/transport/media/media_transport_interface.h"
namespace webrtc {
// Maxmum size of datagrams sent by |FakeDatagramTransport|.
constexpr size_t kMaxFakeDatagramSize = 1000;
// Fake datagram transport. Does not support making an actual connection
// or sending data. Only used for tests that need to stub out a transport.
class FakeDatagramTransport : public DatagramTransportInterface {
public:
FakeDatagramTransport(
const MediaTransportSettings& settings,
std::string transport_parameters,
const std::function<bool(absl::string_view, absl::string_view)>&
are_parameters_compatible)
: settings_(settings),
transport_parameters_(transport_parameters),
are_parameters_compatible_(are_parameters_compatible) {}
~FakeDatagramTransport() override { RTC_DCHECK(!state_callback_); }
void Connect(rtc::PacketTransportInternal* packet_transport) override {
packet_transport_ = packet_transport;
}
CongestionControlInterface* congestion_control() override {
return nullptr; // Datagram interface doesn't provide this yet.
}
void SetTransportStateCallback(
MediaTransportStateCallback* callback) override {
state_callback_ = callback;
}
RTCError SendDatagram(rtc::ArrayView<const uint8_t> data,
DatagramId datagram_id) override {
return RTCError::OK();
}
size_t GetLargestDatagramSize() const override {
return kMaxFakeDatagramSize;
}
void SetDatagramSink(DatagramSinkInterface* sink) override {}
std::string GetTransportParameters() const override {
if (settings_.remote_transport_parameters) {
return *settings_.remote_transport_parameters;
}
return transport_parameters_;
}
RTCError SetRemoteTransportParameters(
absl::string_view remote_parameters) override {
if (are_parameters_compatible_(GetTransportParameters(),
remote_parameters)) {
return RTCError::OK();
}
return RTCError(RTCErrorType::UNSUPPORTED_PARAMETER,
"Incompatible remote transport parameters");
}
RTCError OpenChannel(int channel_id) override {
return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
}
RTCError SendData(int channel_id,
const SendDataParams& params,
const rtc::CopyOnWriteBuffer& buffer) override {
return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
}
RTCError CloseChannel(int channel_id) override {
return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
}
void SetDataSink(DataChannelSink* /*sink*/) override {}
bool IsReadyToSend() const override { return false; }
rtc::PacketTransportInternal* packet_transport() { return packet_transport_; }
void set_state(webrtc::MediaTransportState state) {
if (state_callback_) {
state_callback_->OnStateChanged(state);
}
}
const MediaTransportSettings& settings() { return settings_; }
private:
const MediaTransportSettings settings_;
const std::string transport_parameters_;
const std::function<bool(absl::string_view, absl::string_view)>
are_parameters_compatible_;
rtc::PacketTransportInternal* packet_transport_ = nullptr;
MediaTransportStateCallback* state_callback_ = nullptr;
};
} // namespace webrtc
#endif // API_TEST_FAKE_DATAGRAM_TRANSPORT_H_

View File

@ -1,74 +0,0 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_FAKE_MEDIA_TRANSPORT_H_
#define API_TEST_FAKE_MEDIA_TRANSPORT_H_
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "absl/algorithm/container.h"
#include "api/test/fake_datagram_transport.h"
#include "api/transport/media/media_transport_interface.h"
namespace webrtc {
// Fake media transport factory creates fake media transport.
// Also creates fake datagram transport, since both media and datagram
// transports are created by |MediaTransportFactory|.
class FakeMediaTransportFactory : public MediaTransportFactory {
public:
explicit FakeMediaTransportFactory(
const absl::optional<std::string>& transport_offer = "")
: transport_offer_(transport_offer) {}
~FakeMediaTransportFactory() = default;
std::string GetTransportName() const override { return "fake"; }
RTCErrorOr<std::unique_ptr<MediaTransportInterface>> CreateMediaTransport(
rtc::PacketTransportInternal* packet_transport,
rtc::Thread* network_thread,
const MediaTransportSettings& settings) override {
return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
}
RTCErrorOr<std::unique_ptr<MediaTransportInterface>> CreateMediaTransport(
rtc::Thread* network_thread,
const MediaTransportSettings& settings) override {
return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
}
RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
CreateDatagramTransport(rtc::Thread* network_thread,
const MediaTransportSettings& settings) override {
return std::unique_ptr<DatagramTransportInterface>(
new FakeDatagramTransport(settings, transport_offer_.value_or(""),
transport_parameters_comparison_));
}
void set_transport_parameters_comparison(
std::function<bool(absl::string_view, absl::string_view)> comparison) {
transport_parameters_comparison_ = std::move(comparison);
}
private:
const absl::optional<std::string> transport_offer_;
std::function<bool(absl::string_view, absl::string_view)>
transport_parameters_comparison_ =
[](absl::string_view local, absl::string_view remote) {
return local == remote;
};
};
} // namespace webrtc
#endif // API_TEST_FAKE_MEDIA_TRANSPORT_H_

View File

@ -1,373 +0,0 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/loopback_media_transport.h"
#include <memory>
#include "absl/algorithm/container.h"
#include "rtc_base/time_utils.h"
namespace webrtc {
namespace {
constexpr size_t kLoopbackMaxDatagramSize = 1200;
class WrapperDatagramTransport : public DatagramTransportInterface {
public:
explicit WrapperDatagramTransport(DatagramTransportInterface* wrapped)
: wrapped_(wrapped) {}
// Datagram transport overrides.
void Connect(rtc::PacketTransportInternal* packet_transport) override {
return wrapped_->Connect(packet_transport);
}
CongestionControlInterface* congestion_control() override {
return wrapped_->congestion_control();
}
void SetTransportStateCallback(
MediaTransportStateCallback* callback) override {
return wrapped_->SetTransportStateCallback(callback);
}
RTCError SendDatagram(rtc::ArrayView<const uint8_t> data,
DatagramId datagram_id) override {
return wrapped_->SendDatagram(data, datagram_id);
}
size_t GetLargestDatagramSize() const override {
return wrapped_->GetLargestDatagramSize();
}
void SetDatagramSink(DatagramSinkInterface* sink) override {
return wrapped_->SetDatagramSink(sink);
}
std::string GetTransportParameters() const override {
return wrapped_->GetTransportParameters();
}
RTCError SetRemoteTransportParameters(absl::string_view parameters) override {
return wrapped_->SetRemoteTransportParameters(parameters);
}
// Data channel overrides.
RTCError OpenChannel(int channel_id) override {
return wrapped_->OpenChannel(channel_id);
}
RTCError SendData(int channel_id,
const SendDataParams& params,
const rtc::CopyOnWriteBuffer& buffer) override {
return wrapped_->SendData(channel_id, params, buffer);
}
RTCError CloseChannel(int channel_id) override {
return wrapped_->CloseChannel(channel_id);
}
void SetDataSink(DataChannelSink* sink) override {
wrapped_->SetDataSink(sink);
}
bool IsReadyToSend() const override { return wrapped_->IsReadyToSend(); }
private:
DatagramTransportInterface* wrapped_;
};
} // namespace
WrapperMediaTransportFactory::WrapperMediaTransportFactory(
DatagramTransportInterface* wrapped_datagram_transport)
: wrapped_datagram_transport_(wrapped_datagram_transport) {}
WrapperMediaTransportFactory::WrapperMediaTransportFactory(
MediaTransportFactory* wrapped)
: wrapped_factory_(wrapped) {}
RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
WrapperMediaTransportFactory::CreateMediaTransport(
rtc::PacketTransportInternal* packet_transport,
rtc::Thread* network_thread,
const MediaTransportSettings& settings) {
return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
}
RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
WrapperMediaTransportFactory::CreateDatagramTransport(
rtc::Thread* network_thread,
const MediaTransportSettings& settings) {
created_transport_count_++;
if (wrapped_factory_) {
return wrapped_factory_->CreateDatagramTransport(network_thread, settings);
}
return {
std::make_unique<WrapperDatagramTransport>(wrapped_datagram_transport_)};
}
std::string WrapperMediaTransportFactory::GetTransportName() const {
if (wrapped_factory_) {
return wrapped_factory_->GetTransportName();
}
return "wrapped-transport";
}
int WrapperMediaTransportFactory::created_transport_count() const {
return created_transport_count_;
}
RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
WrapperMediaTransportFactory::CreateMediaTransport(
rtc::Thread* network_thread,
const MediaTransportSettings& settings) {
return RTCError(RTCErrorType::UNSUPPORTED_OPERATION);
}
MediaTransportPair::MediaTransportPair(rtc::Thread* thread)
: first_datagram_transport_(thread),
second_datagram_transport_(thread),
first_factory_(&first_datagram_transport_),
second_factory_(&second_datagram_transport_) {
first_datagram_transport_.Connect(&second_datagram_transport_);
second_datagram_transport_.Connect(&first_datagram_transport_);
}
MediaTransportPair::~MediaTransportPair() = default;
MediaTransportPair::LoopbackDataChannelTransport::LoopbackDataChannelTransport(
rtc::Thread* thread)
: thread_(thread) {}
MediaTransportPair::LoopbackDataChannelTransport::
~LoopbackDataChannelTransport() {
RTC_CHECK(data_sink_ == nullptr);
}
void MediaTransportPair::LoopbackDataChannelTransport::Connect(
LoopbackDataChannelTransport* other) {
other_ = other;
}
RTCError MediaTransportPair::LoopbackDataChannelTransport::OpenChannel(
int channel_id) {
// No-op. No need to open channels for the loopback.
return RTCError::OK();
}
RTCError MediaTransportPair::LoopbackDataChannelTransport::SendData(
int channel_id,
const SendDataParams& params,
const rtc::CopyOnWriteBuffer& buffer) {
invoker_.AsyncInvoke<void>(RTC_FROM_HERE, thread_,
[this, channel_id, params, buffer] {
other_->OnData(channel_id, params.type, buffer);
});
return RTCError::OK();
}
RTCError MediaTransportPair::LoopbackDataChannelTransport::CloseChannel(
int channel_id) {
invoker_.AsyncInvoke<void>(RTC_FROM_HERE, thread_, [this, channel_id] {
other_->OnRemoteCloseChannel(channel_id);
rtc::CritScope lock(&sink_lock_);
if (data_sink_) {
data_sink_->OnChannelClosed(channel_id);
}
});
return RTCError::OK();
}
void MediaTransportPair::LoopbackDataChannelTransport::SetDataSink(
DataChannelSink* sink) {
rtc::CritScope lock(&sink_lock_);
data_sink_ = sink;
if (data_sink_ && ready_to_send_) {
data_sink_->OnReadyToSend();
}
}
bool MediaTransportPair::LoopbackDataChannelTransport::IsReadyToSend() const {
rtc::CritScope lock(&sink_lock_);
return ready_to_send_;
}
void MediaTransportPair::LoopbackDataChannelTransport::FlushAsyncInvokes() {
invoker_.Flush(thread_);
}
void MediaTransportPair::LoopbackDataChannelTransport::OnData(
int channel_id,
DataMessageType type,
const rtc::CopyOnWriteBuffer& buffer) {
rtc::CritScope lock(&sink_lock_);
if (data_sink_) {
data_sink_->OnDataReceived(channel_id, type, buffer);
}
}
void MediaTransportPair::LoopbackDataChannelTransport::OnRemoteCloseChannel(
int channel_id) {
rtc::CritScope lock(&sink_lock_);
if (data_sink_) {
data_sink_->OnChannelClosing(channel_id);
data_sink_->OnChannelClosed(channel_id);
}
}
void MediaTransportPair::LoopbackDataChannelTransport::OnReadyToSend(
bool ready_to_send) {
invoker_.AsyncInvoke<void>(RTC_FROM_HERE, thread_, [this, ready_to_send] {
rtc::CritScope lock(&sink_lock_);
ready_to_send_ = ready_to_send;
// Propagate state to data channel sink, if present.
if (data_sink_ && ready_to_send_) {
data_sink_->OnReadyToSend();
}
});
}
MediaTransportPair::LoopbackDatagramTransport::LoopbackDatagramTransport(
rtc::Thread* thread)
: thread_(thread), dc_transport_(thread) {}
void MediaTransportPair::LoopbackDatagramTransport::Connect(
LoopbackDatagramTransport* other) {
other_ = other;
dc_transport_.Connect(&other->dc_transport_);
}
void MediaTransportPair::LoopbackDatagramTransport::Connect(
rtc::PacketTransportInternal* packet_transport) {
if (state_after_connect_) {
SetState(*state_after_connect_);
}
}
CongestionControlInterface*
MediaTransportPair::LoopbackDatagramTransport::congestion_control() {
return nullptr;
}
void MediaTransportPair::LoopbackDatagramTransport::SetTransportStateCallback(
MediaTransportStateCallback* callback) {
RTC_DCHECK_RUN_ON(thread_);
state_callback_ = callback;
if (state_callback_) {
state_callback_->OnStateChanged(state_);
}
}
RTCError MediaTransportPair::LoopbackDatagramTransport::SendDatagram(
rtc::ArrayView<const uint8_t> data,
DatagramId datagram_id) {
rtc::CopyOnWriteBuffer buffer;
buffer.SetData(data.data(), data.size());
invoker_.AsyncInvoke<void>(
RTC_FROM_HERE, thread_, [this, datagram_id, buffer = std::move(buffer)] {
RTC_DCHECK_RUN_ON(thread_);
other_->DeliverDatagram(std::move(buffer));
if (sink_) {
DatagramAck ack;
ack.datagram_id = datagram_id;
ack.receive_timestamp = Timestamp::Micros(rtc::TimeMicros());
sink_->OnDatagramAcked(ack);
}
});
return RTCError::OK();
}
size_t MediaTransportPair::LoopbackDatagramTransport::GetLargestDatagramSize()
const {
return kLoopbackMaxDatagramSize;
}
void MediaTransportPair::LoopbackDatagramTransport::SetDatagramSink(
DatagramSinkInterface* sink) {
RTC_DCHECK_RUN_ON(thread_);
sink_ = sink;
}
std::string
MediaTransportPair::LoopbackDatagramTransport::GetTransportParameters() const {
return transport_parameters_;
}
RTCError
MediaTransportPair::LoopbackDatagramTransport::SetRemoteTransportParameters(
absl::string_view remote_parameters) {
RTC_DCHECK_RUN_ON(thread_);
if (transport_parameters_comparison_(GetTransportParameters(),
remote_parameters)) {
return RTCError::OK();
}
return RTCError(RTCErrorType::UNSUPPORTED_PARAMETER,
"Incompatible remote transport parameters");
}
RTCError MediaTransportPair::LoopbackDatagramTransport::OpenChannel(
int channel_id) {
return dc_transport_.OpenChannel(channel_id);
}
RTCError MediaTransportPair::LoopbackDatagramTransport::SendData(
int channel_id,
const SendDataParams& params,
const rtc::CopyOnWriteBuffer& buffer) {
return dc_transport_.SendData(channel_id, params, buffer);
}
RTCError MediaTransportPair::LoopbackDatagramTransport::CloseChannel(
int channel_id) {
return dc_transport_.CloseChannel(channel_id);
}
void MediaTransportPair::LoopbackDatagramTransport::SetDataSink(
DataChannelSink* sink) {
dc_transport_.SetDataSink(sink);
}
bool MediaTransportPair::LoopbackDatagramTransport::IsReadyToSend() const {
return dc_transport_.IsReadyToSend();
}
void MediaTransportPair::LoopbackDatagramTransport::SetState(
MediaTransportState state) {
invoker_.AsyncInvoke<void>(RTC_FROM_HERE, thread_, [this, state] {
RTC_DCHECK_RUN_ON(thread_);
state_ = state;
if (state_callback_) {
state_callback_->OnStateChanged(state_);
}
});
dc_transport_.OnReadyToSend(state == MediaTransportState::kWritable);
}
void MediaTransportPair::LoopbackDatagramTransport::SetStateAfterConnect(
MediaTransportState state) {
state_after_connect_ = state;
}
void MediaTransportPair::LoopbackDatagramTransport::FlushAsyncInvokes() {
dc_transport_.FlushAsyncInvokes();
}
void MediaTransportPair::LoopbackDatagramTransport::DeliverDatagram(
rtc::CopyOnWriteBuffer buffer) {
RTC_DCHECK_RUN_ON(thread_);
if (sink_) {
sink_->OnDatagramReceived(buffer);
}
}
} // namespace webrtc

View File

@ -1,269 +0,0 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_LOOPBACK_MEDIA_TRANSPORT_H_
#define API_TEST_LOOPBACK_MEDIA_TRANSPORT_H_
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include "api/transport/datagram_transport_interface.h"
#include "api/transport/media/media_transport_interface.h"
#include "rtc_base/async_invoker.h"
#include "rtc_base/critical_section.h"
#include "rtc_base/thread.h"
#include "rtc_base/thread_checker.h"
namespace webrtc {
// Wrapper used to hand out unique_ptrs to loopback media
// transport without ownership changes to the underlying
// transport.
// It works in two modes:
// It can either wrap a factory, or it can wrap an existing interface.
// In the former mode, it delegates the work to the wrapped factory.
// In the latter mode, it always returns static instance of the transport
// interface.
//
// Example use:
// Factory wrap_static_interface = Wrapper(media_transport_interface);
// Factory wrap_factory = Wrapper(wrap_static_interface);
// The second factory may be created multiple times, and ownership may be passed
// to the client. The first factory counts the number of invocations of
// CreateMediaTransport();
class WrapperMediaTransportFactory : public MediaTransportFactory {
public:
explicit WrapperMediaTransportFactory(
DatagramTransportInterface* wrapped_datagram_transport);
explicit WrapperMediaTransportFactory(MediaTransportFactory* wrapped);
RTCErrorOr<std::unique_ptr<MediaTransportInterface>> CreateMediaTransport(
rtc::PacketTransportInternal* packet_transport,
rtc::Thread* network_thread,
const MediaTransportSettings& settings) override;
RTCErrorOr<std::unique_ptr<MediaTransportInterface>> CreateMediaTransport(
rtc::Thread* network_thread,
const MediaTransportSettings& settings) override;
RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
CreateDatagramTransport(rtc::Thread* network_thread,
const MediaTransportSettings& settings) override;
std::string GetTransportName() const override;
int created_transport_count() const;
private:
DatagramTransportInterface* wrapped_datagram_transport_ = nullptr;
MediaTransportFactory* wrapped_factory_ = nullptr;
int created_transport_count_ = 0;
};
// Contains two MediaTransportsInterfaces that are connected to each other.
// Currently supports audio only.
class MediaTransportPair {
public:
struct Stats {
int sent_audio_frames = 0;
int received_audio_frames = 0;
int sent_video_frames = 0;
int received_video_frames = 0;
};
explicit MediaTransportPair(rtc::Thread* thread);
~MediaTransportPair();
DatagramTransportInterface* first_datagram_transport() {
return &first_datagram_transport_;
}
DatagramTransportInterface* second_datagram_transport() {
return &second_datagram_transport_;
}
std::unique_ptr<MediaTransportFactory> first_factory() {
return std::make_unique<WrapperMediaTransportFactory>(&first_factory_);
}
std::unique_ptr<MediaTransportFactory> second_factory() {
return std::make_unique<WrapperMediaTransportFactory>(&second_factory_);
}
void SetState(MediaTransportState state) {
first_datagram_transport_.SetState(state);
second_datagram_transport_.SetState(state);
}
void SetFirstState(MediaTransportState state) {
first_datagram_transport_.SetState(state);
}
void SetSecondStateAfterConnect(MediaTransportState state) {
second_datagram_transport_.SetState(state);
}
void SetFirstDatagramTransportParameters(const std::string& params) {
first_datagram_transport_.set_transport_parameters(params);
}
void SetSecondDatagramTransportParameters(const std::string& params) {
second_datagram_transport_.set_transport_parameters(params);
}
void SetFirstDatagramTransportParametersComparison(
std::function<bool(absl::string_view, absl::string_view)> comparison) {
first_datagram_transport_.set_transport_parameters_comparison(
std::move(comparison));
}
void SetSecondDatagramTransportParametersComparison(
std::function<bool(absl::string_view, absl::string_view)> comparison) {
second_datagram_transport_.set_transport_parameters_comparison(
std::move(comparison));
}
void FlushAsyncInvokes() {
first_datagram_transport_.FlushAsyncInvokes();
second_datagram_transport_.FlushAsyncInvokes();
}
int first_factory_transport_count() const {
return first_factory_.created_transport_count();
}
int second_factory_transport_count() const {
return second_factory_.created_transport_count();
}
private:
class LoopbackDataChannelTransport : public DataChannelTransportInterface {
public:
explicit LoopbackDataChannelTransport(rtc::Thread* thread);
~LoopbackDataChannelTransport() override;
void Connect(LoopbackDataChannelTransport* other);
RTCError OpenChannel(int channel_id) override;
RTCError SendData(int channel_id,
const SendDataParams& params,
const rtc::CopyOnWriteBuffer& buffer) override;
RTCError CloseChannel(int channel_id) override;
bool IsReadyToSend() const override;
void SetDataSink(DataChannelSink* sink) override;
void OnReadyToSend(bool ready_to_send);
void FlushAsyncInvokes();
private:
void OnData(int channel_id,
DataMessageType type,
const rtc::CopyOnWriteBuffer& buffer);
void OnRemoteCloseChannel(int channel_id);
rtc::Thread* const thread_;
rtc::CriticalSection sink_lock_;
DataChannelSink* data_sink_ RTC_GUARDED_BY(sink_lock_) = nullptr;
bool ready_to_send_ RTC_GUARDED_BY(sink_lock_) = false;
LoopbackDataChannelTransport* other_;
rtc::AsyncInvoker invoker_;
};
  // In-process DatagramTransportInterface implementation that loops
  // datagrams and data-channel traffic back to a paired instance. Data
  // channel functionality is delegated to the embedded
  // LoopbackDataChannelTransport.
  class LoopbackDatagramTransport : public DatagramTransportInterface {
   public:
    explicit LoopbackDatagramTransport(rtc::Thread* thread);
    // Pairs this transport with |other| so traffic flows between them.
    void Connect(LoopbackDatagramTransport* other);
    // Datagram transport overrides.
    void Connect(rtc::PacketTransportInternal* packet_transport) override;
    CongestionControlInterface* congestion_control() override;
    void SetTransportStateCallback(
        MediaTransportStateCallback* callback) override;
    RTCError SendDatagram(rtc::ArrayView<const uint8_t> data,
                          DatagramId datagram_id) override;
    size_t GetLargestDatagramSize() const override;
    void SetDatagramSink(DatagramSinkInterface* sink) override;
    std::string GetTransportParameters() const override;
    RTCError SetRemoteTransportParameters(
        absl::string_view remote_parameters) override;
    // Data channel overrides.
    RTCError OpenChannel(int channel_id) override;
    RTCError SendData(int channel_id,
                      const SendDataParams& params,
                      const rtc::CopyOnWriteBuffer& buffer) override;
    RTCError CloseChannel(int channel_id) override;
    void SetDataSink(DataChannelSink* sink) override;
    bool IsReadyToSend() const override;
    // Loopback-specific functionality.
    // Forces the transport into |state| (e.g. to simulate writability).
    void SetState(MediaTransportState state);
    // When Connect() is called, the datagram transport will enter this state.
    // This is useful for mimicking zero-RTT connectivity, for example.
    void SetStateAfterConnect(MediaTransportState state);
    // Drains pending asynchronous invocations.
    void FlushAsyncInvokes();
    // Overrides the value returned by GetTransportParameters().
    void set_transport_parameters(const std::string& value) {
      transport_parameters_ = value;
    }
    // Replaces the predicate used to compare local and remote transport
    // parameters. Installed on |thread_| to satisfy the thread annotation on
    // |transport_parameters_comparison_|.
    void set_transport_parameters_comparison(
        std::function<bool(absl::string_view, absl::string_view)> comparison) {
      thread_->Invoke<void>(
          RTC_FROM_HERE, [this, comparison = std::move(comparison)] {
            RTC_DCHECK_RUN_ON(thread_);
            transport_parameters_comparison_ = std::move(comparison);
          });
    }
   private:
    // Hands a received datagram to the registered sink, if any.
    void DeliverDatagram(rtc::CopyOnWriteBuffer buffer);
    rtc::Thread* thread_;
    // Backing implementation for the data channel part of the interface.
    LoopbackDataChannelTransport dc_transport_;
    MediaTransportState state_ RTC_GUARDED_BY(thread_) =
        MediaTransportState::kPending;
    DatagramSinkInterface* sink_ RTC_GUARDED_BY(thread_) = nullptr;
    MediaTransportStateCallback* state_callback_ RTC_GUARDED_BY(thread_) =
        nullptr;
    LoopbackDatagramTransport* other_;
    std::string transport_parameters_;
    // Defaults to exact string equality; tests may override via
    // set_transport_parameters_comparison().
    std::function<bool(absl::string_view, absl::string_view)>
        transport_parameters_comparison_ RTC_GUARDED_BY(thread_) =
            [](absl::string_view a, absl::string_view b) { return a == b; };
    absl::optional<MediaTransportState> state_after_connect_;
    rtc::AsyncInvoker invoker_;
  };
LoopbackDatagramTransport first_datagram_transport_;
LoopbackDatagramTransport second_datagram_transport_;
WrapperMediaTransportFactory first_factory_;
WrapperMediaTransportFactory second_factory_;
};
} // namespace webrtc
#endif // API_TEST_LOOPBACK_MEDIA_TRANSPORT_H_

View File

@ -1,201 +0,0 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/test/loopback_media_transport.h"
#include <algorithm>
#include <memory>
#include <vector>
#include "test/gmock.h"
namespace webrtc {
namespace {
class MockMediaTransportAudioSinkInterface
: public MediaTransportAudioSinkInterface {
public:
MOCK_METHOD2(OnData, void(uint64_t, MediaTransportEncodedAudioFrame));
};
class MockMediaTransportVideoSinkInterface
: public MediaTransportVideoSinkInterface {
public:
MOCK_METHOD2(OnData, void(uint64_t, MediaTransportEncodedVideoFrame));
};
class MockMediaTransportKeyFrameRequestCallback
: public MediaTransportKeyFrameRequestCallback {
public:
MOCK_METHOD1(OnKeyFrameRequested, void(uint64_t));
};
class MockDataChannelSink : public DataChannelSink {
public:
MOCK_METHOD3(OnDataReceived,
void(int, DataMessageType, const rtc::CopyOnWriteBuffer&));
MOCK_METHOD1(OnChannelClosing, void(int));
MOCK_METHOD1(OnChannelClosed, void(int));
MOCK_METHOD0(OnReadyToSend, void());
};
class MockStateCallback : public MediaTransportStateCallback {
public:
MOCK_METHOD1(OnStateChanged, void(MediaTransportState));
};
} // namespace
// Verifies that data sent on one side of the loopback pair is delivered to
// the sink registered on the other side with matching channel id, type and
// payload.
TEST(LoopbackMediaTransport, DataDeliveredToSink) {
  std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
  thread->Start();
  MediaTransportPair transport_pair(thread.get());
  MockDataChannelSink sink;
  transport_pair.first_datagram_transport()->SetDataSink(&sink);
  const int channel_id = 1;
  // Expectation is set before sending: delivery is asynchronous and is
  // drained by FlushAsyncInvokes() below. Payload is matched via cdata().
  EXPECT_CALL(
      sink, OnDataReceived(
                channel_id, DataMessageType::kText,
                ::testing::Property<rtc::CopyOnWriteBuffer, const char*>(
                    &rtc::CopyOnWriteBuffer::cdata, ::testing::StrEq("foo"))));
  SendDataParams params;
  params.type = DataMessageType::kText;
  rtc::CopyOnWriteBuffer buffer("foo");
  transport_pair.second_datagram_transport()->SendData(channel_id, params,
                                                       buffer);
  transport_pair.FlushAsyncInvokes();
  // Unregister the sink before it goes out of scope.
  transport_pair.first_datagram_transport()->SetDataSink(nullptr);
}
// Verifies the close handshake: closing a channel on the first transport
// notifies the remote sink (closing then closed, in order) and finally
// confirms closure back to the local sink.
TEST(LoopbackMediaTransport, CloseDeliveredToSink) {
  std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
  thread->Start();
  MediaTransportPair transport_pair(thread.get());
  MockDataChannelSink first_sink;
  transport_pair.first_datagram_transport()->SetDataSink(&first_sink);
  MockDataChannelSink second_sink;
  transport_pair.second_datagram_transport()->SetDataSink(&second_sink);
  const int channel_id = 1;
  {
    // InSequence enforces the relative ordering of the three callbacks.
    ::testing::InSequence s;
    EXPECT_CALL(second_sink, OnChannelClosing(channel_id));
    EXPECT_CALL(second_sink, OnChannelClosed(channel_id));
    EXPECT_CALL(first_sink, OnChannelClosed(channel_id));
  }
  transport_pair.first_datagram_transport()->CloseChannel(channel_id);
  transport_pair.FlushAsyncInvokes();
  // Unregister both sinks before they go out of scope.
  transport_pair.first_datagram_transport()->SetDataSink(nullptr);
  transport_pair.second_datagram_transport()->SetDataSink(nullptr);
}
// Verifies that registering a state callback immediately reports the
// transport's initial (kPending) state.
TEST(LoopbackMediaTransport, InitialStateDeliveredWhenCallbackSet) {
  std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
  thread->Start();
  MediaTransportPair transport_pair(thread.get());
  MockStateCallback state_callback;
  EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kPending));
  // SetTransportStateCallback must run on the transport's thread.
  thread->Invoke<void>(RTC_FROM_HERE, [&transport_pair, &state_callback] {
    transport_pair.first_datagram_transport()->SetTransportStateCallback(
        &state_callback);
  });
  transport_pair.FlushAsyncInvokes();
}
// Verifies that a callback registered after a state change still receives
// the current (kWritable) state, not the initial one.
TEST(LoopbackMediaTransport, ChangedStateDeliveredWhenCallbackSet) {
  std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
  thread->Start();
  MediaTransportPair transport_pair(thread.get());
  // Change state (and drain the async notification) before registering.
  transport_pair.SetState(MediaTransportState::kWritable);
  transport_pair.FlushAsyncInvokes();
  MockStateCallback state_callback;
  EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kWritable));
  // SetTransportStateCallback must run on the transport's thread.
  thread->Invoke<void>(RTC_FROM_HERE, [&transport_pair, &state_callback] {
    transport_pair.first_datagram_transport()->SetTransportStateCallback(
        &state_callback);
  });
  transport_pair.FlushAsyncInvokes();
}
// Verifies that a registered callback observes both the initial state at
// registration time and a subsequent state change.
TEST(LoopbackMediaTransport, StateChangeDeliveredToCallback) {
  std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
  thread->Start();
  MediaTransportPair transport_pair(thread.get());
  MockStateCallback state_callback;
  EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kPending));
  EXPECT_CALL(state_callback, OnStateChanged(MediaTransportState::kWritable));
  // SetTransportStateCallback must run on the transport's thread.
  thread->Invoke<void>(RTC_FROM_HERE, [&transport_pair, &state_callback] {
    transport_pair.first_datagram_transport()->SetTransportStateCallback(
        &state_callback);
  });
  transport_pair.SetState(MediaTransportState::kWritable);
  transport_pair.FlushAsyncInvokes();
}
// Verifies that registering a data sink while the transport is still
// pending does NOT trigger an OnReadyToSend() notification.
TEST(LoopbackMediaTransport, NotReadyToSendWhenDataSinkSet) {
  std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
  thread->Start();
  MediaTransportPair transport_pair(thread.get());
  MockDataChannelSink data_channel_sink;
  EXPECT_CALL(data_channel_sink, OnReadyToSend()).Times(0);
  transport_pair.first_datagram_transport()->SetDataSink(&data_channel_sink);
  transport_pair.FlushAsyncInvokes();
  // Unregister the sink before it goes out of scope.
  transport_pair.first_datagram_transport()->SetDataSink(nullptr);
}
// Verifies that a sink registered after the transport became writable is
// immediately notified via OnReadyToSend().
TEST(LoopbackMediaTransport, ReadyToSendWhenDataSinkSet) {
  std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
  thread->Start();
  MediaTransportPair transport_pair(thread.get());
  // Become writable (and drain the async notification) before registering.
  transport_pair.SetState(MediaTransportState::kWritable);
  transport_pair.FlushAsyncInvokes();
  MockDataChannelSink data_channel_sink;
  EXPECT_CALL(data_channel_sink, OnReadyToSend());
  transport_pair.first_datagram_transport()->SetDataSink(&data_channel_sink);
  transport_pair.FlushAsyncInvokes();
  // Unregister the sink before it goes out of scope.
  transport_pair.first_datagram_transport()->SetDataSink(nullptr);
}
// Verifies that an already-registered data sink is notified via
// OnReadyToSend() when the transport transitions to writable.
TEST(LoopbackMediaTransport, StateChangeDeliveredToDataSink) {
  std::unique_ptr<rtc::Thread> thread = rtc::Thread::Create();
  thread->Start();
  MediaTransportPair transport_pair(thread.get());
  MockDataChannelSink data_channel_sink;
  EXPECT_CALL(data_channel_sink, OnReadyToSend());
  transport_pair.first_datagram_transport()->SetDataSink(&data_channel_sink);
  transport_pair.SetState(MediaTransportState::kWritable);
  transport_pair.FlushAsyncInvokes();
  // Unregister the sink before it goes out of scope.
  transport_pair.first_datagram_transport()->SetDataSink(nullptr);
}
} // namespace webrtc

View File

@ -19,12 +19,9 @@ namespace test {
class MockAudioMixer : public AudioMixer {
public:
MOCK_METHOD(bool, AddSource, (Source * audio_source), (override));
MOCK_METHOD(void, RemoveSource, (Source * audio_source), (override));
MOCK_METHOD(void,
Mix,
(size_t number_of_channels, AudioFrame* audio_frame_for_mixing),
(override));
MOCK_METHOD(bool, AddSource, (Source*), (override));
MOCK_METHOD(void, RemoveSource, (Source*), (override));
MOCK_METHOD(void, Mix, (size_t number_of_channels, AudioFrame*), (override));
};
} // namespace test
} // namespace webrtc

View File

@ -0,0 +1,75 @@
/*
* Copyright 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_
#define API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_
#include <memory>
#include <string>
#include "api/peer_connection_interface.h"
#include "test/gmock.h"
namespace webrtc {
// gMock implementation of PeerConnectionFactoryInterface.
//
// The constructor is protected because the object is ref-counted and must be
// heap-allocated; instances are obtained through Create().
class MockPeerConnectionFactoryInterface final
    : public rtc::RefCountedObject<webrtc::PeerConnectionFactoryInterface> {
 public:
  // Factory method. Made static: as a non-static member it could never be
  // called to obtain the *first* instance (the constructor is protected and
  // the class is final), so instances were impossible to create. Calls via
  // an existing object (obj.Create()) remain valid for static members.
  static rtc::scoped_refptr<MockPeerConnectionFactoryInterface> Create() {
    return new MockPeerConnectionFactoryInterface();
  }

  MOCK_METHOD(void, SetOptions, (const Options&), (override));
  MOCK_METHOD(rtc::scoped_refptr<PeerConnectionInterface>,
              CreatePeerConnection,
              (const PeerConnectionInterface::RTCConfiguration&,
               PeerConnectionDependencies),
              (override));
  MOCK_METHOD(rtc::scoped_refptr<PeerConnectionInterface>,
              CreatePeerConnection,
              (const PeerConnectionInterface::RTCConfiguration&,
               std::unique_ptr<cricket::PortAllocator>,
               std::unique_ptr<rtc::RTCCertificateGeneratorInterface>,
               PeerConnectionObserver*),
              (override));
  MOCK_METHOD(RtpCapabilities,
              GetRtpSenderCapabilities,
              (cricket::MediaType),
              (const override));
  MOCK_METHOD(RtpCapabilities,
              GetRtpReceiverCapabilities,
              (cricket::MediaType),
              (const override));
  MOCK_METHOD(rtc::scoped_refptr<MediaStreamInterface>,
              CreateLocalMediaStream,
              (const std::string&),
              (override));
  MOCK_METHOD(rtc::scoped_refptr<AudioSourceInterface>,
              CreateAudioSource,
              (const cricket::AudioOptions&),
              (override));
  MOCK_METHOD(rtc::scoped_refptr<VideoTrackInterface>,
              CreateVideoTrack,
              (const std::string&, VideoTrackSourceInterface*),
              (override));
  MOCK_METHOD(rtc::scoped_refptr<AudioTrackInterface>,
              CreateAudioTrack,
              (const std::string&, AudioSourceInterface*),
              (override));
  MOCK_METHOD(bool, StartAecDump, (FILE*, int64_t), (override));
  MOCK_METHOD(void, StopAecDump, (), (override));

 protected:
  MockPeerConnectionFactoryInterface() = default;
};
} // namespace webrtc
#endif // API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_

View File

@ -167,7 +167,6 @@ class MockPeerConnectionInterface
(const std::vector<cricket::Candidate>&),
(override));
MOCK_METHOD(RTCError, SetBitrate, (const BitrateSettings&), (override));
MOCK_METHOD(RTCError, SetBitrate, (const BitrateParameters&), (override));
MOCK_METHOD(void, SetAudioPlayout, (bool), (override));
MOCK_METHOD(void, SetAudioRecording, (bool), (override));
MOCK_METHOD(rtc::scoped_refptr<DtlsTransportInterface>,

View File

@ -0,0 +1,38 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_
#define API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_
#include <vector>
#include "api/frame_transformer_interface.h"
#include "test/gmock.h"
namespace webrtc {
// gMock implementation of TransformableVideoFrameInterface, for testing
// frame-transformer integrations.
class MockTransformableVideoFrame
    : public webrtc::TransformableVideoFrameInterface {
 public:
  // Qualifier list normalized to "(const, override)" for GetData — the
  // comma-separated form used by every other method here and by the rest of
  // the codebase's MOCK_METHOD migration.
  MOCK_METHOD(rtc::ArrayView<const uint8_t>, GetData, (), (const, override));
  MOCK_METHOD(void, SetData, (rtc::ArrayView<const uint8_t> data), (override));
  MOCK_METHOD(uint32_t, GetTimestamp, (), (const, override));
  MOCK_METHOD(uint32_t, GetSsrc, (), (const, override));
  MOCK_METHOD(bool, IsKeyFrame, (), (const, override));
  MOCK_METHOD(std::vector<uint8_t>, GetAdditionalData, (), (const, override));
  MOCK_METHOD(const webrtc::VideoFrameMetadata&,
              GetMetadata,
              (),
              (const, override));
};
} // namespace webrtc
#endif // API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_

View File

@ -26,7 +26,7 @@ class MockVideoBitrateAllocatorFactory
CreateVideoBitrateAllocator,
(const VideoCodec&),
(override));
MOCK_METHOD(void, Die, (), ());
MOCK_METHOD(void, Die, ());
};
} // namespace webrtc

View File

@ -27,12 +27,12 @@ class MockVideoDecoderFactory : public webrtc::VideoDecoderFactory {
MOCK_METHOD(std::vector<webrtc::SdpVideoFormat>,
GetSupportedFormats,
(),
(const override));
(const, override));
MOCK_METHOD(std::unique_ptr<webrtc::VideoDecoder>,
CreateVideoDecoder,
(const webrtc::SdpVideoFormat&),
(override));
MOCK_METHOD(void, Die, (), ());
MOCK_METHOD(void, Die, ());
};
} // namespace webrtc

View File

@ -23,8 +23,8 @@ class MockEncodedImageCallback : public EncodedImageCallback {
MOCK_METHOD(Result,
OnEncodedImage,
(const EncodedImage& encodedImage,
const CodecSpecificInfo* codecSpecificInfo,
const RTPFragmentationHeader* fragmentation),
const CodecSpecificInfo*,
const RTPFragmentationHeader*),
(override));
MOCK_METHOD(void, OnDroppedFrame, (DropReason reason), (override));
};
@ -33,31 +33,41 @@ class MockVideoEncoder : public VideoEncoder {
public:
MOCK_METHOD(void,
SetFecControllerOverride,
(FecControllerOverride * fec_controller_override),
(FecControllerOverride*),
(override));
MOCK_METHOD(int32_t,
InitEncode,
(const VideoCodec* codecSettings,
int32_t numberOfCores,
size_t maxPayloadSize),
(const VideoCodec*, int32_t numberOfCores, size_t maxPayloadSize),
(override));
MOCK_METHOD(int32_t,
InitEncode,
(const VideoCodec*, const VideoEncoder::Settings& settings),
(override));
MOCK_METHOD2(InitEncode,
int32_t(const VideoCodec* codecSettings,
const VideoEncoder::Settings& settings));
MOCK_METHOD2(Encode,
int32_t(const VideoFrame& inputImage,
const std::vector<VideoFrameType>* frame_types));
MOCK_METHOD1(RegisterEncodeCompleteCallback,
int32_t(EncodedImageCallback* callback));
MOCK_METHOD0(Release, int32_t());
MOCK_METHOD0(Reset, int32_t());
MOCK_METHOD1(SetRates, void(const RateControlParameters& parameters));
MOCK_METHOD1(OnPacketLossRateUpdate, void(float packet_loss_rate));
MOCK_METHOD1(OnRttUpdate, void(int64_t rtt_ms));
MOCK_METHOD1(OnLossNotification,
void(const LossNotification& loss_notification));
MOCK_CONST_METHOD0(GetEncoderInfo, EncoderInfo(void));
MOCK_METHOD(int32_t,
Encode,
(const VideoFrame& inputImage,
const std::vector<VideoFrameType>*),
(override));
MOCK_METHOD(int32_t,
RegisterEncodeCompleteCallback,
(EncodedImageCallback*),
(override));
MOCK_METHOD(int32_t, Release, (), (override));
MOCK_METHOD(void,
SetRates,
(const RateControlParameters& parameters),
(override));
MOCK_METHOD(void,
OnPacketLossRateUpdate,
(float packet_loss_rate),
(override));
MOCK_METHOD(void, OnRttUpdate, (int64_t rtt_ms), (override));
MOCK_METHOD(void,
OnLossNotification,
(const LossNotification& loss_notification),
(override));
MOCK_METHOD(EncoderInfo, GetEncoderInfo, (), (const, override));
};
} // namespace webrtc

View File

@ -27,17 +27,17 @@ class MockVideoEncoderFactory : public webrtc::VideoEncoderFactory {
MOCK_METHOD(std::vector<SdpVideoFormat>,
GetSupportedFormats,
(),
(const override));
(const, override));
MOCK_METHOD(CodecInfo,
QueryVideoEncoder,
(const SdpVideoFormat&),
(const override));
(const, override));
MOCK_METHOD(std::unique_ptr<VideoEncoder>,
CreateVideoEncoder,
(const SdpVideoFormat&),
(override));
MOCK_METHOD(void, Die, (), ());
MOCK_METHOD(void, Die, ());
};
} // namespace webrtc

View File

@ -23,6 +23,6 @@ rtc_library("network_emulation") {
"../../units:data_rate",
"../../units:data_size",
"../../units:timestamp",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}

View File

@ -10,6 +10,9 @@
#ifndef API_TEST_NETWORK_EMULATION_NETWORK_EMULATION_INTERFACES_H_
#define API_TEST_NETWORK_EMULATION_NETWORK_EMULATION_INTERFACES_H_
#include <map>
#include <vector>
#include "absl/types/optional.h"
#include "api/units/data_rate.h"
#include "api/units/data_size.h"
@ -56,9 +59,7 @@ class EmulatedNetworkReceiverInterface {
virtual void OnPacketReceived(EmulatedIpPacket packet) = 0;
};
struct EmulatedNetworkStats {
int64_t packets_sent = 0;
DataSize bytes_sent = DataSize::Zero();
struct EmulatedNetworkIncomingStats {
// Total amount of packets received with or without destination.
int64_t packets_received = 0;
// Total amount of bytes in received packets.
@ -69,22 +70,122 @@ struct EmulatedNetworkStats {
DataSize bytes_dropped = DataSize::Zero();
DataSize first_received_packet_size = DataSize::Zero();
DataSize first_sent_packet_size = DataSize::Zero();
Timestamp first_packet_sent_time = Timestamp::PlusInfinity();
Timestamp last_packet_sent_time = Timestamp::PlusInfinity();
// Timestamps are initialized to different infinities to simplify
// computations. Clients have to assume the value is some infinity if it is
// unset, and must not rely on the sign of that infinity.
Timestamp first_packet_received_time = Timestamp::PlusInfinity();
Timestamp last_packet_received_time = Timestamp::PlusInfinity();
Timestamp last_packet_received_time = Timestamp::MinusInfinity();
DataRate AverageReceiveRate() const {
RTC_DCHECK_GE(packets_received, 2);
RTC_DCHECK(first_packet_received_time.IsFinite());
RTC_DCHECK(last_packet_received_time.IsFinite());
return (bytes_received - first_received_packet_size) /
(last_packet_received_time - first_packet_received_time);
}
};
struct EmulatedNetworkStats {
int64_t packets_sent = 0;
DataSize bytes_sent = DataSize::Zero();
DataSize first_sent_packet_size = DataSize::Zero();
Timestamp first_packet_sent_time = Timestamp::PlusInfinity();
Timestamp last_packet_sent_time = Timestamp::MinusInfinity();
// List of IP addresses that were used to send data considered in this stats
// object.
std::vector<rtc::IPAddress> local_addresses;
std::map<rtc::IPAddress, EmulatedNetworkIncomingStats>
incoming_stats_per_source;
DataRate AverageSendRate() const {
RTC_DCHECK_GE(packets_sent, 2);
return (bytes_sent - first_sent_packet_size) /
(last_packet_sent_time - first_packet_sent_time);
}
// Total amount of packets received regardless of the destination address.
int64_t PacketsReceived() const {
int64_t packets_received = 0;
for (const auto& incoming_stats : incoming_stats_per_source) {
packets_received += incoming_stats.second.packets_received;
}
return packets_received;
}
// Total amount of bytes in received packets.
DataSize BytesReceived() const {
DataSize bytes_received = DataSize::Zero();
for (const auto& incoming_stats : incoming_stats_per_source) {
bytes_received += incoming_stats.second.bytes_received;
}
return bytes_received;
}
// Total amount of packets that were received, but no destination was found.
int64_t PacketsDropped() const {
int64_t packets_dropped = 0;
for (const auto& incoming_stats : incoming_stats_per_source) {
packets_dropped += incoming_stats.second.packets_dropped;
}
return packets_dropped;
}
// Total amount of bytes in dropped packets.
DataSize BytesDropped() const {
DataSize bytes_dropped = DataSize::Zero();
for (const auto& incoming_stats : incoming_stats_per_source) {
bytes_dropped += incoming_stats.second.bytes_dropped;
}
return bytes_dropped;
}
DataSize FirstReceivedPacketSize() const {
Timestamp first_packet_received_time = Timestamp::PlusInfinity();
DataSize first_received_packet_size = DataSize::Zero();
for (const auto& incoming_stats : incoming_stats_per_source) {
if (first_packet_received_time >
incoming_stats.second.first_packet_received_time) {
first_packet_received_time =
incoming_stats.second.first_packet_received_time;
first_received_packet_size =
incoming_stats.second.first_received_packet_size;
}
}
return first_received_packet_size;
}
Timestamp FirstPacketReceivedTime() const {
Timestamp first_packet_received_time = Timestamp::PlusInfinity();
for (const auto& incoming_stats : incoming_stats_per_source) {
if (first_packet_received_time >
incoming_stats.second.first_packet_received_time) {
first_packet_received_time =
incoming_stats.second.first_packet_received_time;
}
}
return first_packet_received_time;
}
Timestamp LastPacketReceivedTime() const {
Timestamp last_packet_received_time = Timestamp::MinusInfinity();
for (const auto& incoming_stats : incoming_stats_per_source) {
if (last_packet_received_time <
incoming_stats.second.last_packet_received_time) {
last_packet_received_time =
incoming_stats.second.last_packet_received_time;
}
}
return last_packet_received_time;
}
DataRate AverageReceiveRate() const {
RTC_DCHECK_GE(packets_received, 2);
return (bytes_received - first_received_packet_size) /
(last_packet_received_time - first_packet_received_time);
RTC_DCHECK_GE(PacketsReceived(), 2);
return (BytesReceived() - FirstReceivedPacketSize()) /
(LastPacketReceivedTime() - FirstPacketReceivedTime());
}
};

View File

@ -26,13 +26,14 @@
#include "api/media_stream_interface.h"
#include "api/peer_connection_interface.h"
#include "api/rtc_event_log/rtc_event_log_factory_interface.h"
#include "api/rtp_parameters.h"
#include "api/task_queue/task_queue_factory.h"
#include "api/test/audio_quality_analyzer_interface.h"
#include "api/test/frame_generator_interface.h"
#include "api/test/simulated_network.h"
#include "api/test/stats_observer_interface.h"
#include "api/test/track_id_stream_info_map.h"
#include "api/test/video_quality_analyzer_interface.h"
#include "api/transport/media/media_transport_interface.h"
#include "api/transport/network_control.h"
#include "api/units/time_delta.h"
#include "api/video_codecs/video_decoder_factory.h"
@ -53,6 +54,12 @@ constexpr size_t kDefaultSlidesHeight = 1110;
// API is in development. Can be changed/removed without notice.
class PeerConnectionE2EQualityTestFixture {
public:
// The index of required capturing device in OS provided list of video
// devices. On Linux and Windows the list will be obtained via
// webrtc::VideoCaptureModule::DeviceInfo, on Mac OS via
// [RTCCameraVideoCapturer captureDevices].
enum class CapturingDeviceIndex : size_t {};
// Contains parameters for screen share scrolling.
//
// If scrolling is enabled, then it will be done by putting sliding window
@ -116,8 +123,6 @@ class PeerConnectionE2EQualityTestFixture {
std::vector<std::string> slides_yuv_file_names;
};
enum VideoGeneratorType { kDefault, kI420A, kI010 };
// Config for Vp8 simulcast or Vp9 SVC testing.
//
// SVC support is limited:
@ -160,6 +165,14 @@ class PeerConnectionE2EQualityTestFixture {
// It requires Selective Forwarding Unit (SFU) to be configured in the
// network.
absl::optional<int> target_spatial_index;
// Encoding parameters per simulcast layer. If not empty, |encoding_params|
// size have to be equal to |simulcast_streams_count|. Will be used to set
// transceiver send encoding params for simulcast layers. Applicable only
// for codecs that support simulcast (ex. Vp8) and will be ignored
// otherwise. RtpEncodingParameters::rid may be changed by fixture
// implementation to ensure signaling correctness.
std::vector<RtpEncodingParameters> encoding_params;
};
// Contains properties of single video stream.
@ -178,12 +191,6 @@ class PeerConnectionE2EQualityTestFixture {
// Will be set for current video track. If equals to kText or kDetailed -
// screencast in on.
absl::optional<VideoTrackInterface::ContentHint> content_hint;
// If specified this capturing device will be used to get input video. The
// |capturing_device_index| is the index of required capturing device in OS
// provided list of video devices. On Linux and Windows the list will be
// obtained via webrtc::VideoCaptureModule::DeviceInfo, on Mac OS via
// [RTCCameraVideoCapturer captureDevices].
absl::optional<size_t> capturing_device_index;
// If present, video will be transferred in simulcast/SVC mode depending on
// which encoder is used.
//
@ -222,8 +229,7 @@ class PeerConnectionE2EQualityTestFixture {
bool show_on_screen = false;
// If specified, determines a sync group to which this video stream belongs.
// According to bugs.webrtc.org/4762 WebRTC supports synchronization only
// for pair of single audio and single video stream. Framework won't do any
// enforcements on this field.
// for pair of single audio and single video stream.
absl::optional<std::string> sync_group;
};
@ -250,8 +256,7 @@ class PeerConnectionE2EQualityTestFixture {
int sampling_frequency_in_hz = 48000;
// If specified, determines a sync group to which this audio stream belongs.
// According to bugs.webrtc.org/4762 WebRTC supports synchronization only
// for pair of single audio and single video stream. Framework won't do any
// enforcements on this field.
// for pair of single audio and single video stream.
absl::optional<std::string> sync_group;
};
@ -280,8 +285,6 @@ class PeerConnectionE2EQualityTestFixture {
virtual PeerConfigurer* SetNetworkControllerFactory(
std::unique_ptr<NetworkControllerFactoryInterface>
network_controller_factory) = 0;
virtual PeerConfigurer* SetMediaTransportFactory(
std::unique_ptr<MediaTransportFactory> media_transport_factory) = 0;
virtual PeerConfigurer* SetVideoEncoderFactory(
std::unique_ptr<VideoEncoderFactory> video_encoder_factory) = 0;
virtual PeerConfigurer* SetVideoDecoderFactory(
@ -312,6 +315,11 @@ class PeerConnectionE2EQualityTestFixture {
virtual PeerConfigurer* AddVideoConfig(
VideoConfig config,
std::unique_ptr<test::FrameGeneratorInterface> generator) = 0;
// Add new video stream to the call that will be sent from this peer.
// Capturing device with specified index will be used to get input video.
virtual PeerConfigurer* AddVideoConfig(
VideoConfig config,
CapturingDeviceIndex capturing_device_index) = 0;
// Set the audio stream for the call from this peer. If this method won't
// be invoked, this peer will send no audio.
virtual PeerConfigurer* SetAudioConfig(AudioConfig config) = 0;
@ -325,8 +333,8 @@ class PeerConnectionE2EQualityTestFixture {
PeerConnectionInterface::RTCConfiguration configuration) = 0;
// Set bitrate parameters on PeerConnection. This constraints will be
// applied to all summed RTP streams for this peer.
virtual PeerConfigurer* SetBitrateParameters(
PeerConnectionInterface::BitrateParameters bitrate_params) = 0;
virtual PeerConfigurer* SetBitrateSettings(
BitrateSettings bitrate_settings) = 0;
};
// Contains configuration for echo emulator.
@ -400,7 +408,14 @@ class PeerConnectionE2EQualityTestFixture {
// Invoked by framework after peer connection factory and peer connection
// itself will be created but before offer/answer exchange will be started.
virtual void Start(absl::string_view test_case_name) = 0;
// |test_case_name| is name of test case, that should be used to report all
// metrics.
// |reporter_helper| is a pointer to a class that will allow track_id to
// stream_id matching. The caller is responsible for ensuring the
// TrackIdStreamInfoMap will be valid from Start() to
// StopAndReportResults().
virtual void Start(absl::string_view test_case_name,
const TrackIdStreamInfoMap* reporter_helper) = 0;
// Invoked by framework after call is ended and peer connection factory and
// peer connection are destroyed.
@ -436,6 +451,12 @@ class PeerConnectionE2EQualityTestFixture {
virtual void AddPeer(rtc::Thread* network_thread,
rtc::NetworkManager* network_manager,
rtc::FunctionView<void(PeerConfigurer*)> configurer) = 0;
// Runs the media quality test, which includes setting up the call with
// configured participants, running it according to provided |run_params| and
// terminating it properly at the end. During call duration media quality
// metrics are gathered, which are then reported to stdout and (if configured)
// to the json/protobuf output file through the WebRTC perf test results
// reporting system.
virtual void Run(RunParams run_params) = 0;
// Returns real test duration - the time of test execution measured during

View File

@ -19,7 +19,6 @@
#include <vector>
#include "absl/types/optional.h"
#include "rtc_base/critical_section.h"
#include "rtc_base/random.h"
#include "rtc_base/thread_annotations.h"

View File

@ -11,9 +11,8 @@
#ifndef API_TEST_STATS_OBSERVER_INTERFACE_H_
#define API_TEST_STATS_OBSERVER_INTERFACE_H_
#include <string>
#include "api/stats_types.h"
#include "absl/strings/string_view.h"
#include "api/stats/rtc_stats_report.h"
namespace webrtc {
namespace webrtc_pc_e2e {
@ -25,8 +24,9 @@ class StatsObserverInterface {
// Method called when stats reports are available for the PeerConnection
// identified by |pc_label|.
virtual void OnStatsReports(const std::string& pc_label,
const StatsReports& reports) = 0;
virtual void OnStatsReports(
absl::string_view pc_label,
const rtc::scoped_refptr<const RTCStatsReport>& report) = 0;
};
} // namespace webrtc_pc_e2e

View File

@ -14,22 +14,24 @@
#include <utility>
#include "rtc_base/checks.h"
#include "rtc_base/thread_checker.h"
#include "rtc_base/platform_thread_types.h"
namespace webrtc {
namespace {
// This checks everything in this file gets called on the same thread. It's
// static because it needs to look at the static methods too.
rtc::ThreadChecker* GetThreadChecker() {
static rtc::ThreadChecker checker;
return &checker;
bool IsValidTestDependencyFactoryThread() {
const rtc::PlatformThreadRef main_thread = rtc::CurrentThreadRef();
return rtc::IsThreadRefEqual(main_thread, rtc::CurrentThreadRef());
}
} // namespace
std::unique_ptr<TestDependencyFactory> TestDependencyFactory::instance_ =
nullptr;
const TestDependencyFactory& TestDependencyFactory::GetInstance() {
RTC_DCHECK(GetThreadChecker()->IsCurrent());
RTC_DCHECK(IsValidTestDependencyFactoryThread());
if (instance_ == nullptr) {
instance_ = std::make_unique<TestDependencyFactory>();
}
@ -38,14 +40,14 @@ const TestDependencyFactory& TestDependencyFactory::GetInstance() {
void TestDependencyFactory::SetInstance(
std::unique_ptr<TestDependencyFactory> instance) {
RTC_DCHECK(GetThreadChecker()->IsCurrent());
RTC_DCHECK(IsValidTestDependencyFactoryThread());
RTC_CHECK(instance_ == nullptr);
instance_ = std::move(instance);
}
std::unique_ptr<VideoQualityTestFixtureInterface::InjectionComponents>
TestDependencyFactory::CreateComponents() const {
RTC_DCHECK(GetThreadChecker()->IsCurrent());
RTC_DCHECK(IsValidTestDependencyFactoryThread());
return nullptr;
}

View File

@ -46,6 +46,7 @@ class TimeController {
const char* thread_name) = 0;
// Creates an rtc::Thread instance. If |socket_server| is nullptr, a default
// noop socket server is created.
// Returned thread is not null and started.
virtual std::unique_ptr<rtc::Thread> CreateThread(
const std::string& name,
std::unique_ptr<rtc::SocketServer> socket_server = nullptr) = 0;
@ -59,6 +60,8 @@ class TimeController {
// Waits until condition() == true, polling condition() in small time
// intervals.
// Returns true if condition() was evaluated to true before |max_duration|
// elapsed and false otherwise.
bool Wait(const std::function<bool()>& condition,
TimeDelta max_duration = TimeDelta::Seconds(5));
};

View File

@ -0,0 +1,42 @@
/*
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_TRACK_ID_STREAM_INFO_MAP_H_
#define API_TEST_TRACK_ID_STREAM_INFO_MAP_H_
#include "absl/strings/string_view.h"
namespace webrtc {
namespace webrtc_pc_e2e {
// Instances of |TrackIdStreamInfoMap| provide bookkeeping capabilities that
// are useful to associate stats reports track_ids to the remote stream info.
class TrackIdStreamInfoMap {
public:
virtual ~TrackIdStreamInfoMap() = default;
// These methods must be called on the same thread where
// StatsObserverInterface::OnStatsReports is invoked.
// Returns a reference to a stream label owned by the TrackIdStreamInfoMap.
// Precondition: |track_id| must be already mapped to stream label.
virtual absl::string_view GetStreamLabelFromTrackId(
absl::string_view track_id) const = 0;
// Returns a reference to a sync group name owned by the TrackIdStreamInfoMap.
// Precondition: |track_id| must be already mapped to sync group.
virtual absl::string_view GetSyncGroupLabelFromTrackId(
absl::string_view track_id) const = 0;
};
} // namespace webrtc_pc_e2e
} // namespace webrtc
#endif // API_TEST_TRACK_ID_STREAM_INFO_MAP_H_

View File

@ -1,36 +0,0 @@
/*
* Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TEST_TRACK_ID_STREAM_LABEL_MAP_H_
#define API_TEST_TRACK_ID_STREAM_LABEL_MAP_H_
#include <string>
namespace webrtc {
namespace webrtc_pc_e2e {
// Instances of |TrackIdStreamLabelMap| provide bookkeeping capabilities that
// are useful to associate stats reports track_ids to the remote stream_id.
class TrackIdStreamLabelMap {
public:
virtual ~TrackIdStreamLabelMap() = default;
// This method must be called on the same thread where
// StatsObserverInterface::OnStatsReports is invoked.
// Returns a reference to a stream label owned by the TrackIdStreamLabelMap.
// Precondition: |track_id| must be already mapped to a stream_label.
virtual const std::string& GetStreamLabelFromTrackId(
const std::string& track_id) const = 0;
};
} // namespace webrtc_pc_e2e
} // namespace webrtc
#endif // API_TEST_TRACK_ID_STREAM_LABEL_MAP_H_

View File

@ -14,7 +14,9 @@
#include <memory>
#include <string>
#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "api/test/stats_observer_interface.h"
#include "api/video/encoded_image.h"
#include "api/video/video_frame.h"
@ -76,42 +78,65 @@ class VideoQualityAnalyzerInterface : public StatsObserverInterface {
// calculations. Analyzer can perform simple calculations on the calling
// thread in each method, but should remember, that it is the same thread,
// that is used in video pipeline.
virtual void Start(std::string test_case_name, int max_threads_count) {}
virtual void Start(std::string test_case_name,
rtc::ArrayView<const std::string> peer_names,
int max_threads_count) {}
// Will be called when frame was generated from the input stream.
// |peer_name| is name of the peer on which side frame was captured.
// Returns frame id, that will be set by framework to the frame.
virtual uint16_t OnFrameCaptured(const std::string& stream_label,
virtual uint16_t OnFrameCaptured(absl::string_view peer_name,
const std::string& stream_label,
const VideoFrame& frame) = 0;
// Will be called before calling the encoder.
virtual void OnFramePreEncode(const VideoFrame& frame) {}
// |peer_name| is name of the peer on which side frame came to encoder.
virtual void OnFramePreEncode(absl::string_view peer_name,
const VideoFrame& frame) {}
// Will be called for each EncodedImage received from encoder. Single
// VideoFrame can produce multiple EncodedImages. Each encoded image will
// have id from VideoFrame.
virtual void OnFrameEncoded(uint16_t frame_id,
// |peer_name| is name of the peer on which side frame was encoded.
virtual void OnFrameEncoded(absl::string_view peer_name,
uint16_t frame_id,
const EncodedImage& encoded_image,
const EncoderStats& stats) {}
// Will be called for each frame dropped by encoder.
virtual void OnFrameDropped(EncodedImageCallback::DropReason reason) {}
// |peer_name| is name of the peer on which side frame drop was detected.
virtual void OnFrameDropped(absl::string_view peer_name,
EncodedImageCallback::DropReason reason) {}
// Will be called before calling the decoder.
virtual void OnFramePreDecode(uint16_t frame_id,
// |peer_name| is name of the peer on which side frame was received.
virtual void OnFramePreDecode(absl::string_view peer_name,
uint16_t frame_id,
const EncodedImage& encoded_image) {}
// Will be called after decoding the frame.
virtual void OnFrameDecoded(const VideoFrame& frame,
// |peer_name| is name of the peer on which side frame was decoded.
virtual void OnFrameDecoded(absl::string_view peer_name,
const VideoFrame& frame,
const DecoderStats& stats) {}
// Will be called when frame will be obtained from PeerConnection stack.
virtual void OnFrameRendered(const VideoFrame& frame) {}
// |peer_name| is name of the peer on which side frame was rendered.
virtual void OnFrameRendered(absl::string_view peer_name,
const VideoFrame& frame) {}
// Will be called if encoder return not WEBRTC_VIDEO_CODEC_OK.
// All available codes are listed in
// modules/video_coding/include/video_error_codes.h
virtual void OnEncoderError(const VideoFrame& frame, int32_t error_code) {}
// |peer_name| is name of the peer on which side error acquired.
virtual void OnEncoderError(absl::string_view peer_name,
const VideoFrame& frame,
int32_t error_code) {}
// Will be called if decoder return not WEBRTC_VIDEO_CODEC_OK.
// All available codes are listed in
// modules/video_coding/include/video_error_codes.h
virtual void OnDecoderError(uint16_t frame_id, int32_t error_code) {}
// |peer_name| is name of the peer on which side error acquired.
virtual void OnDecoderError(absl::string_view peer_name,
uint16_t frame_id,
int32_t error_code) {}
// Will be called every time new stats reports are available for the
// Peer Connection identified by |pc_label|.
void OnStatsReports(const std::string& pc_label,
const StatsReports& stats_reports) override {}
void OnStatsReports(
absl::string_view pc_label,
const rtc::scoped_refptr<const RTCStatsReport>& report) override {}
// Tells analyzer that analysis complete and it should calculate final
// statistics.

View File

@ -22,6 +22,7 @@
#include "api/test/simulated_network.h"
#include "api/transport/bitrate_settings.h"
#include "api/transport/network_control.h"
#include "api/video_codecs/sdp_video_format.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder_config.h"
#include "api/video_codecs/video_encoder_factory.h"
@ -31,60 +32,56 @@ namespace webrtc {
class VideoQualityTestFixtureInterface {
public:
// Parameters are grouped into smaller structs to make it easier to set
// the desired elements and skip unused, using aggregate initialization.
// Unfortunately, C++11 (as opposed to C11) doesn't support unnamed structs,
// which makes the implementation of VideoQualityTest a bit uglier.
// the desired elements and skip unused.
struct Params {
Params();
~Params();
struct CallConfig {
bool send_side_bwe;
bool generic_descriptor;
bool send_side_bwe = false;
bool generic_descriptor = false;
BitrateConstraints call_bitrate_config;
int num_thumbnails;
int num_thumbnails = 0;
// Indicates if secondary_(video|ss|screenshare) structures are used.
bool dual_video;
bool dual_video = false;
} call;
struct Video {
bool enabled;
size_t width;
size_t height;
int32_t fps;
int min_bitrate_bps;
int target_bitrate_bps;
int max_bitrate_bps;
bool suspend_below_min_bitrate;
std::string codec;
int num_temporal_layers;
int selected_tl;
int min_transmit_bps;
bool ulpfec;
bool flexfec;
bool automatic_scaling;
bool enabled = false;
size_t width = 640;
size_t height = 480;
int32_t fps = 30;
int min_bitrate_bps = 50;
int target_bitrate_bps = 800;
int max_bitrate_bps = 800;
bool suspend_below_min_bitrate = false;
std::string codec = "VP8";
int num_temporal_layers = 1;
int selected_tl = -1;
int min_transmit_bps = 0;
bool ulpfec = false;
bool flexfec = false;
bool automatic_scaling = false;
std::string clip_path; // "Generator" to generate frames instead.
size_t capture_device_index;
size_t capture_device_index = 0;
SdpVideoFormat::Parameters sdp_params;
double encoder_overshoot_factor;
double encoder_overshoot_factor = 0.0;
} video[2];
struct Audio {
bool enabled;
bool sync_video;
bool dtx;
bool use_real_adm;
bool enabled = false;
bool sync_video = false;
bool dtx = false;
bool use_real_adm = false;
absl::optional<std::string> ana_config;
} audio;
struct Screenshare {
bool enabled;
bool generate_slides;
int32_t slide_change_interval;
int32_t scroll_duration;
bool enabled = false;
bool generate_slides = false;
int32_t slide_change_interval = 10;
int32_t scroll_duration = 0;
std::vector<std::string> slides;
} screenshare[2];
struct Analyzer {
std::string test_label;
double avg_psnr_threshold; // (*)
double avg_ssim_threshold; // (*)
int test_durations_secs;
double avg_psnr_threshold = 0.0; // (*)
double avg_ssim_threshold = 0.0; // (*)
int test_durations_secs = 0;
std::string graph_data_output_filename;
std::string graph_title;
} analyzer;
@ -95,14 +92,14 @@ class VideoQualityTestFixtureInterface {
absl::optional<BuiltInNetworkBehaviorConfig> config;
struct SS { // Spatial scalability.
std::vector<VideoStream> streams; // If empty, one stream is assumed.
size_t selected_stream;
int num_spatial_layers;
int selected_sl;
InterLayerPredMode inter_layer_pred;
size_t selected_stream = 0;
int num_spatial_layers = 0;
int selected_sl = -1;
InterLayerPredMode inter_layer_pred = InterLayerPredMode::kOn;
// If empty, bitrates are generated in VP9Impl automatically.
std::vector<SpatialLayer> spatial_layers;
// If set, default parameters will be used instead of |streams|.
bool infer_streams;
bool infer_streams = false;
} ss[2];
struct Logging {
std::string rtc_event_log_name;

View File

@ -14,10 +14,8 @@ rtc_library("bitrate_settings") {
"bitrate_settings.cc",
"bitrate_settings.h",
]
deps = [
"../../rtc_base/system:rtc_export",
"//third_party/abseil-cpp/absl/types:optional",
]
deps = [ "../../rtc_base/system:rtc_export" ]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("enums") {
@ -41,6 +39,8 @@ rtc_library("network_control") {
"../units:data_size",
"../units:time_delta",
"../units:timestamp",
]
absl_deps = [
"//third_party/abseil-cpp/absl/base:core_headers",
"//third_party/abseil-cpp/absl/types:optional",
]
@ -49,10 +49,8 @@ rtc_library("network_control") {
rtc_source_set("webrtc_key_value_config") {
visibility = [ "*" ]
sources = [ "webrtc_key_value_config.h" ]
deps = [
"../../rtc_base/system:rtc_export",
"//third_party/abseil-cpp/absl/strings",
]
deps = [ "../../rtc_base/system:rtc_export" ]
absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
rtc_library("field_trial_based_config") {
@ -64,26 +62,20 @@ rtc_library("field_trial_based_config") {
deps = [
":webrtc_key_value_config",
"../../system_wrappers:field_trial",
"//third_party/abseil-cpp/absl/strings",
]
absl_deps = [ "//third_party/abseil-cpp/absl/strings" ]
}
# TODO(nisse): Rename?
rtc_source_set("datagram_transport_interface") {
visibility = [ "*" ]
sources = [
"congestion_control_interface.h",
"data_channel_transport_interface.h",
"datagram_transport_interface.h",
]
sources = [ "data_channel_transport_interface.h" ]
deps = [
":network_control",
"..:array_view",
"..:rtc_error",
"../../rtc_base:rtc_base_approved",
"../units:data_rate",
"../units:timestamp",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_library("goog_cc") {

View File

@ -1,75 +0,0 @@
/* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This is EXPERIMENTAL interface for media and datagram transports.
#ifndef API_TRANSPORT_CONGESTION_CONTROL_INTERFACE_H_
#define API_TRANSPORT_CONGESTION_CONTROL_INTERFACE_H_
#include <memory>
#include <string>
#include <utility>
#include "api/transport/network_control.h"
#include "api/units/data_rate.h"
namespace webrtc {
// TODO(nisse): Defined together with MediaTransportInterface. But we should use
// types that aren't tied to media, so that MediaTransportInterface can depend
// on CongestionControlInterface, but not the other way around.
// api/transport/network_control.h may be a reasonable place.
class MediaTransportRttObserver;
struct MediaTransportAllocatedBitrateLimits;
struct MediaTransportTargetRateConstraints;
// Defines congestion control feedback interface for media and datagram
// transports.
class CongestionControlInterface {
public:
virtual ~CongestionControlInterface() = default;
// Updates allocation limits.
virtual void SetAllocatedBitrateLimits(
const MediaTransportAllocatedBitrateLimits& limits) = 0;
// Sets starting rate.
virtual void SetTargetBitrateLimits(
const MediaTransportTargetRateConstraints& target_rate_constraints) = 0;
// Intended for receive side. AddRttObserver registers an observer to be
// called for each RTT measurement, typically once per ACK. Before media
// transport is destructed the observer must be unregistered.
//
// TODO(sukhanov): Looks like AddRttObserver and RemoveRttObserver were
// never implemented for media transport, so keeping noop implementation.
virtual void AddRttObserver(MediaTransportRttObserver* observer) {}
virtual void RemoveRttObserver(MediaTransportRttObserver* observer) {}
// Adds a target bitrate observer. Before media transport is destructed
// the observer must be unregistered (by calling
// RemoveTargetTransferRateObserver).
// A newly registered observer will be called back with the latest recorded
// target rate, if available.
virtual void AddTargetTransferRateObserver(
TargetTransferRateObserver* observer) = 0;
// Removes an existing |observer| from observers. If observer was never
// registered, an error is logged and method does nothing.
virtual void RemoveTargetTransferRateObserver(
TargetTransferRateObserver* observer) = 0;
// Returns the last known target transfer rate as reported to the above
// observers.
virtual absl::optional<TargetTransferRate> GetLatestTargetTransferRate() = 0;
};
} // namespace webrtc
#endif // API_TRANSPORT_CONGESTION_CONTROL_INTERFACE_H_

View File

@ -35,8 +35,8 @@ enum class DataMessageType {
// sent reliably and in-order, even if the data channel is configured for
// unreliable delivery.
struct SendDataParams {
SendDataParams();
SendDataParams(const SendDataParams&);
SendDataParams() = default;
SendDataParams(const SendDataParams&) = default;
DataMessageType type = DataMessageType::kText;

View File

@ -1,151 +0,0 @@
/* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This is EXPERIMENTAL interface for media and datagram transports.
#ifndef API_TRANSPORT_DATAGRAM_TRANSPORT_INTERFACE_H_
#define API_TRANSPORT_DATAGRAM_TRANSPORT_INTERFACE_H_
#include <memory>
#include <string>
#include <utility>
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "api/rtc_error.h"
#include "api/transport/congestion_control_interface.h"
#include "api/transport/data_channel_transport_interface.h"
#include "api/units/data_rate.h"
#include "api/units/timestamp.h"
namespace rtc {
class PacketTransportInternal;
} // namespace rtc
namespace webrtc {
class MediaTransportStateCallback;
typedef int64_t DatagramId;
struct DatagramAck {
// |datagram_id| is same as passed in
// DatagramTransportInterface::SendDatagram.
DatagramId datagram_id;
// The timestamp at which the remote peer received the identified datagram,
// according to that peer's clock.
Timestamp receive_timestamp = Timestamp::MinusInfinity();
};
// All sink methods are called on network thread.
class DatagramSinkInterface {
public:
virtual ~DatagramSinkInterface() {}
// Called when new packet is received.
virtual void OnDatagramReceived(rtc::ArrayView<const uint8_t> data) = 0;
// Called when datagram is actually sent (datragram can be delayed due
// to congestion control or fusing). |datagram_id| is same as passed in
// DatagramTransportInterface::SendDatagram.
virtual void OnDatagramSent(DatagramId datagram_id) = 0;
// Called when datagram is ACKed.
virtual void OnDatagramAcked(const DatagramAck& datagram_ack) = 0;
// Called when a datagram is lost.
virtual void OnDatagramLost(DatagramId datagram_id) = 0;
};
// Datagram transport allows to send and receive unreliable packets (datagrams)
// and receive feedback from congestion control (via
// CongestionControlInterface). The idea is to send RTP packets as datagrams and
// have underlying implementation of datagram transport to use QUIC datagram
// protocol.
class DatagramTransportInterface : public DataChannelTransportInterface {
public:
virtual ~DatagramTransportInterface() = default;
// Connect the datagram transport to the ICE transport.
// The implementation must be able to ignore incoming packets that don't
// belong to it.
virtual void Connect(rtc::PacketTransportInternal* packet_transport) = 0;
// Returns congestion control feedback interface or nullptr if datagram
// transport does not implement congestion control.
//
// Note that right now datagram transport is used without congestion control,
// but we plan to use it in the future.
virtual CongestionControlInterface* congestion_control() = 0;
// Sets a state observer callback. Before datagram transport is destroyed, the
// callback must be unregistered by setting it to nullptr.
// A newly registered callback will be called with the current state.
// Datagram transport does not invoke this callback concurrently.
virtual void SetTransportStateCallback(
MediaTransportStateCallback* callback) = 0;
// Start asynchronous send of datagram. The status returned by this method
// only pertains to the synchronous operations (e.g. serialization /
// packetization), not to the asynchronous operation.
//
// Datagrams larger than GetLargestDatagramSize() will fail and return error.
//
// Datagrams are sent in FIFO order.
//
// |datagram_id| is only used in ACK/LOST notifications in
// DatagramSinkInterface and does not need to be unique.
virtual RTCError SendDatagram(rtc::ArrayView<const uint8_t> data,
DatagramId datagram_id) = 0;
// Returns maximum size of datagram message, does not change.
// TODO(sukhanov): Because value may be undefined before connection setup
// is complete, consider returning error when called before connection is
// established. Currently returns hardcoded const, because integration
// prototype may call before connection is established.
virtual size_t GetLargestDatagramSize() const = 0;
// Sets packet sink. Sink must be unset by calling
// SetDataTransportSink(nullptr) before the data transport is destroyed or
// before new sink is set.
virtual void SetDatagramSink(DatagramSinkInterface* sink) = 0;
// Retrieves transport parameters for this datagram transport. May be called
// on either client- or server-perspective transports.
//
// For servers, the parameters represent what kind of connections and data the
// server is prepared to accept. This is generally a superset of acceptable
// parameters.
//
// For clients, the parameters echo the server configuration used to create
// the client, possibly removing any fields or parameters which the client
// does not understand.
virtual std::string GetTransportParameters() const = 0;
// Sets remote transport parameters. |remote_params| is a serialized string
// of opaque parameters, understood by the datagram transport implementation.
// Returns an error if |remote_params| are not compatible with this transport.
//
// TODO(mellem): Make pure virtual. The default implementation maintains
// original negotiation behavior (negotiation falls back to RTP if the
// remote datagram transport fails to echo exactly the local parameters).
virtual RTCError SetRemoteTransportParameters(
absl::string_view remote_params) {
if (remote_params == GetTransportParameters()) {
return RTCError::OK();
}
return RTCError(RTCErrorType::UNSUPPORTED_PARAMETER,
"Local and remote transport parameters do not match");
}
};
} // namespace webrtc
#endif // API_TRANSPORT_DATAGRAM_TRANSPORT_INTERFACE_H_

View File

@ -1,52 +0,0 @@
# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import("../../../webrtc.gni")
rtc_library("media_transport_interface") {
visibility = [ "*" ]
sources = [
"media_transport_config.cc",
"media_transport_config.h",
"media_transport_interface.cc",
"media_transport_interface.h",
]
deps = [
":audio_interfaces",
":video_interfaces",
"..:datagram_transport_interface",
"..:network_control",
"../..:array_view",
"../..:rtc_error",
"../../..:webrtc_common",
"../../../rtc_base",
"../../../rtc_base:checks",
"../../../rtc_base:rtc_base_approved",
"../../../rtc_base:stringutils",
"../../units:data_rate",
"//third_party/abseil-cpp/absl/types:optional",
]
}
rtc_library("audio_interfaces") {
visibility = [ "*" ]
sources = [
"audio_transport.cc",
"audio_transport.h",
]
deps = [ "../..:array_view" ]
}
rtc_library("video_interfaces") {
visibility = [ "*" ]
sources = [
"video_transport.cc",
"video_transport.h",
]
deps = [ "../../video:encoded_image" ]
}

View File

@ -1,54 +0,0 @@
/*
* Copyright 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This is EXPERIMENTAL interface for media transport.
//
// The goal is to refactor WebRTC code so that audio and video frames
// are sent / received through the media transport interface. This will
// enable different media transport implementations, including QUIC-based
// media transport.
#include "api/transport/media/audio_transport.h"
#include <utility>
namespace webrtc {
MediaTransportEncodedAudioFrame::~MediaTransportEncodedAudioFrame() {}
MediaTransportEncodedAudioFrame::MediaTransportEncodedAudioFrame(
int sampling_rate_hz,
int starting_sample_index,
int samples_per_channel,
int sequence_number,
FrameType frame_type,
int payload_type,
std::vector<uint8_t> encoded_data)
: sampling_rate_hz_(sampling_rate_hz),
starting_sample_index_(starting_sample_index),
samples_per_channel_(samples_per_channel),
sequence_number_(sequence_number),
frame_type_(frame_type),
payload_type_(payload_type),
encoded_data_(std::move(encoded_data)) {}
MediaTransportEncodedAudioFrame& MediaTransportEncodedAudioFrame::operator=(
const MediaTransportEncodedAudioFrame&) = default;
MediaTransportEncodedAudioFrame& MediaTransportEncodedAudioFrame::operator=(
MediaTransportEncodedAudioFrame&&) = default;
MediaTransportEncodedAudioFrame::MediaTransportEncodedAudioFrame(
const MediaTransportEncodedAudioFrame&) = default;
MediaTransportEncodedAudioFrame::MediaTransportEncodedAudioFrame(
MediaTransportEncodedAudioFrame&&) = default;
} // namespace webrtc

View File

@ -1,120 +0,0 @@
/* Copyright 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This is EXPERIMENTAL interface for media transport.
//
// The goal is to refactor WebRTC code so that audio and video frames
// are sent / received through the media transport interface. This will
// enable different media transport implementations, including QUIC-based
// media transport.
#ifndef API_TRANSPORT_MEDIA_AUDIO_TRANSPORT_H_
#define API_TRANSPORT_MEDIA_AUDIO_TRANSPORT_H_
#include <vector>
#include "api/array_view.h"
namespace webrtc {
// Represents encoded audio frame in any encoding (type of encoding is opaque).
// To avoid copying of encoded data use move semantics when passing by value.
class MediaTransportEncodedAudioFrame final {
public:
enum class FrameType {
// Normal audio frame (equivalent to webrtc::kAudioFrameSpeech).
kSpeech,
// DTX frame (equivalent to webrtc::kAudioFrameCN).
kDiscontinuousTransmission,
// TODO(nisse): Mis-spelled version, update users, then delete.
kDiscountinuousTransmission = kDiscontinuousTransmission,
};
MediaTransportEncodedAudioFrame(
// Audio sampling rate, for example 48000.
int sampling_rate_hz,
// Starting sample index of the frame, i.e. how many audio samples were
// before this frame since the beginning of the call or beginning of time
// in one channel (the starting point should not matter for NetEq). In
// WebRTC it is used as a timestamp of the frame.
// TODO(sukhanov): Starting_sample_index is currently adjusted on the
// receiver side in RTP path. Non-RTP implementations should preserve it.
// For NetEq initial offset should not matter so we should consider fixing
// RTP path.
int starting_sample_index,
// Number of audio samples in audio frame in 1 channel.
int samples_per_channel,
// Sequence number of the frame in the order sent, it is currently
// required by NetEq, but we can fix NetEq, because starting_sample_index
// should be enough.
int sequence_number,
// If audio frame is a speech or discontinued transmission.
FrameType frame_type,
// Opaque payload type. In RTP codepath payload type is stored in RTP
// header. In other implementations it should be simply passed through the
// wire -- it's needed for decoder.
int payload_type,
// Vector with opaque encoded data.
std::vector<uint8_t> encoded_data);
~MediaTransportEncodedAudioFrame();
MediaTransportEncodedAudioFrame(const MediaTransportEncodedAudioFrame&);
MediaTransportEncodedAudioFrame& operator=(
const MediaTransportEncodedAudioFrame& other);
MediaTransportEncodedAudioFrame& operator=(
MediaTransportEncodedAudioFrame&& other);
MediaTransportEncodedAudioFrame(MediaTransportEncodedAudioFrame&&);
// Getters.
int sampling_rate_hz() const { return sampling_rate_hz_; }
int starting_sample_index() const { return starting_sample_index_; }
int samples_per_channel() const { return samples_per_channel_; }
int sequence_number() const { return sequence_number_; }
int payload_type() const { return payload_type_; }
FrameType frame_type() const { return frame_type_; }
rtc::ArrayView<const uint8_t> encoded_data() const { return encoded_data_; }
private:
int sampling_rate_hz_;
int starting_sample_index_;
int samples_per_channel_;
// TODO(sukhanov): Refactor NetEq so we don't need sequence number.
// Having sample_index and samples_per_channel should be enough.
int sequence_number_;
FrameType frame_type_;
int payload_type_;
std::vector<uint8_t> encoded_data_;
};
// Interface for receiving encoded audio frames from MediaTransportInterface
// implementations.
class MediaTransportAudioSinkInterface {
public:
virtual ~MediaTransportAudioSinkInterface() = default;
// Called when new encoded audio frame is received.
virtual void OnData(uint64_t channel_id,
MediaTransportEncodedAudioFrame frame) = 0;
};
} // namespace webrtc
#endif // API_TRANSPORT_MEDIA_AUDIO_TRANSPORT_H_

View File

@ -1,29 +0,0 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/transport/media/media_transport_config.h"
#include "rtc_base/checks.h"
#include "rtc_base/strings/string_builder.h"
namespace webrtc {
MediaTransportConfig::MediaTransportConfig(size_t rtp_max_packet_size)
: rtp_max_packet_size(rtp_max_packet_size) {
RTC_DCHECK_GT(rtp_max_packet_size, 0);
}
std::string MediaTransportConfig::DebugString() const {
rtc::StringBuilder result;
result << "{rtp_max_packet_size: " << rtp_max_packet_size.value_or(0) << "}";
return result.Release();
}
} // namespace webrtc

View File

@ -1,38 +0,0 @@
/* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_CONFIG_H_
#define API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_CONFIG_H_
#include <memory>
#include <string>
#include <utility>
#include "absl/types/optional.h"
namespace webrtc {
// Media transport config is made available to both transport and audio / video
// layers, but access to individual interfaces should not be open without
// necessity.
struct MediaTransportConfig {
// Default constructor for no-media transport scenarios.
MediaTransportConfig() = default;
// Constructor for datagram transport scenarios.
explicit MediaTransportConfig(size_t rtp_max_packet_size);
std::string DebugString() const;
// If provided, limits RTP packet size (excludes ICE, IP or network overhead).
absl::optional<size_t> rtp_max_packet_size;
};
} // namespace webrtc
#endif // API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_CONFIG_H_

View File

@ -1,108 +0,0 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This is EXPERIMENTAL interface for media transport.
//
// The goal is to refactor WebRTC code so that audio and video frames
// are sent / received through the media transport interface. This will
// enable different media transport implementations, including QUIC-based
// media transport.
#include "api/transport/media/media_transport_interface.h"
#include <cstdint>
#include <utility>
#include "api/transport/datagram_transport_interface.h"
namespace webrtc {
MediaTransportSettings::MediaTransportSettings() = default;
MediaTransportSettings::MediaTransportSettings(const MediaTransportSettings&) =
default;
MediaTransportSettings& MediaTransportSettings::operator=(
const MediaTransportSettings&) = default;
MediaTransportSettings::~MediaTransportSettings() = default;
SendDataParams::SendDataParams() = default;
SendDataParams::SendDataParams(const SendDataParams&) = default;
RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
MediaTransportFactory::CreateMediaTransport(
rtc::PacketTransportInternal* packet_transport,
rtc::Thread* network_thread,
const MediaTransportSettings& settings) {
return std::unique_ptr<MediaTransportInterface>(nullptr);
}
RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
MediaTransportFactory::CreateMediaTransport(
rtc::Thread* network_thread,
const MediaTransportSettings& settings) {
return std::unique_ptr<MediaTransportInterface>(nullptr);
}
RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
MediaTransportFactory::CreateDatagramTransport(
rtc::Thread* network_thread,
const MediaTransportSettings& settings) {
return std::unique_ptr<DatagramTransportInterface>(nullptr);
}
std::string MediaTransportFactory::GetTransportName() const {
return "";
}
MediaTransportInterface::MediaTransportInterface() = default;
MediaTransportInterface::~MediaTransportInterface() = default;
absl::optional<std::string>
MediaTransportInterface::GetTransportParametersOffer() const {
return absl::nullopt;
}
void MediaTransportInterface::Connect(
rtc::PacketTransportInternal* packet_transport) {}
void MediaTransportInterface::SetKeyFrameRequestCallback(
MediaTransportKeyFrameRequestCallback* callback) {}
absl::optional<TargetTransferRate>
MediaTransportInterface::GetLatestTargetTransferRate() {
return absl::nullopt;
}
void MediaTransportInterface::AddNetworkChangeCallback(
MediaTransportNetworkChangeCallback* callback) {}
void MediaTransportInterface::RemoveNetworkChangeCallback(
MediaTransportNetworkChangeCallback* callback) {}
void MediaTransportInterface::SetFirstAudioPacketReceivedObserver(
AudioPacketReceivedObserver* observer) {}
void MediaTransportInterface::AddTargetTransferRateObserver(
TargetTransferRateObserver* observer) {}
void MediaTransportInterface::RemoveTargetTransferRateObserver(
TargetTransferRateObserver* observer) {}
void MediaTransportInterface::AddRttObserver(
MediaTransportRttObserver* observer) {}
void MediaTransportInterface::RemoveRttObserver(
MediaTransportRttObserver* observer) {}
size_t MediaTransportInterface::GetAudioPacketOverhead() const {
return 0;
}
void MediaTransportInterface::SetAllocatedBitrateLimits(
const MediaTransportAllocatedBitrateLimits& limits) {}
} // namespace webrtc

View File

@ -1,320 +0,0 @@
/* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This is EXPERIMENTAL interface for media transport.
//
// The goal is to refactor WebRTC code so that audio and video frames
// are sent / received through the media transport interface. This will
// enable different media transport implementations, including QUIC-based
// media transport.
#ifndef API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_INTERFACE_H_
#define API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_INTERFACE_H_
#include <memory>
#include <string>
#include <utility>
#include "absl/types/optional.h"
#include "api/array_view.h"
#include "api/rtc_error.h"
#include "api/transport/data_channel_transport_interface.h"
#include "api/transport/media/audio_transport.h"
#include "api/transport/media/video_transport.h"
#include "api/transport/network_control.h"
#include "api/units/data_rate.h"
#include "rtc_base/copy_on_write_buffer.h"
#include "rtc_base/network_route.h"
namespace rtc {
class PacketTransportInternal;
class Thread;
} // namespace rtc
namespace webrtc {
class DatagramTransportInterface;
class RtcEventLog;
class AudioPacketReceivedObserver {
public:
virtual ~AudioPacketReceivedObserver() = default;
// Invoked for the first received audio packet on a given channel id.
// It will be invoked once for each channel id.
virtual void OnFirstAudioPacketReceived(int64_t channel_id) = 0;
};
// Used to configure stream allocations.
struct MediaTransportAllocatedBitrateLimits {
DataRate min_pacing_rate = DataRate::Zero();
DataRate max_padding_bitrate = DataRate::Zero();
DataRate max_total_allocated_bitrate = DataRate::Zero();
};
// Used to configure target bitrate constraints.
// If the value is provided, the constraint is updated.
// If the value is omitted, the value is left unchanged.
struct MediaTransportTargetRateConstraints {
absl::optional<DataRate> min_bitrate;
absl::optional<DataRate> max_bitrate;
absl::optional<DataRate> starting_bitrate;
};
// A collection of settings for creation of media transport.
struct MediaTransportSettings final {
MediaTransportSettings();
MediaTransportSettings(const MediaTransportSettings&);
MediaTransportSettings& operator=(const MediaTransportSettings&);
~MediaTransportSettings();
// Group calls are not currently supported, in 1:1 call one side must set
// is_caller = true and another is_caller = false.
bool is_caller;
// Must be set if a pre-shared key is used for the call.
// TODO(bugs.webrtc.org/9944): This should become zero buffer in the distant
// future.
absl::optional<std::string> pre_shared_key;
// If present, this is a config passed from the caller to the answerer in the
// offer. Each media transport knows how to understand its own parameters.
absl::optional<std::string> remote_transport_parameters;
// If present, provides the event log that media transport should use.
// Media transport does not own it. The lifetime of |event_log| will exceed
// the lifetime of the instance of MediaTransportInterface instance.
RtcEventLog* event_log = nullptr;
};
// Callback to notify about network route changes.
class MediaTransportNetworkChangeCallback {
public:
virtual ~MediaTransportNetworkChangeCallback() = default;
// Called when the network route is changed, with the new network route.
virtual void OnNetworkRouteChanged(
const rtc::NetworkRoute& new_network_route) = 0;
};
// State of the media transport. Media transport begins in the pending state.
// It transitions to writable when it is ready to send media. It may transition
// back to pending if the connection is blocked. It may transition to closed at
// any time. Closed is terminal: a transport will never re-open once closed.
enum class MediaTransportState {
kPending,
kWritable,
kClosed,
};
// Callback invoked whenever the state of the media transport changes.
class MediaTransportStateCallback {
public:
virtual ~MediaTransportStateCallback() = default;
// Invoked whenever the state of the media transport changes.
virtual void OnStateChanged(MediaTransportState state) = 0;
};
// Callback for RTT measurements on the receive side.
// TODO(nisse): Related interfaces: CallStatsObserver and RtcpRttStats. It's
// somewhat unclear what type of measurement is needed. It's used to configure
// NACK generation and playout buffer. Either raw measurement values or recent
// maximum would make sense for this use. Need consolidation of RTT signalling.
class MediaTransportRttObserver {
public:
virtual ~MediaTransportRttObserver() = default;
// Invoked when a new RTT measurement is available, typically once per ACK.
virtual void OnRttUpdated(int64_t rtt_ms) = 0;
};
// Media transport interface for sending / receiving encoded audio/video frames
// and receiving bandwidth estimate update from congestion control.
class MediaTransportInterface : public DataChannelTransportInterface {
public:
MediaTransportInterface();
virtual ~MediaTransportInterface();
// Retrieves callers config (i.e. media transport offer) that should be passed
// to the callee, before the call is connected. Such config is opaque to SDP
// (sdp just passes it through). The config is a binary blob, so SDP may
// choose to use base64 to serialize it (or any other approach that guarantees
// that the binary blob goes through). This should only be called for the
// caller's perspective.
//
// This may return an unset optional, which means that the given media
// transport is not supported / disabled and shouldn't be reported in SDP.
//
// It may also return an empty string, in which case the media transport is
// supported, but without any extra settings.
// TODO(psla): Make abstract.
virtual absl::optional<std::string> GetTransportParametersOffer() const;
// Connect the media transport to the ICE transport.
// The implementation must be able to ignore incoming packets that don't
// belong to it.
// TODO(psla): Make abstract.
virtual void Connect(rtc::PacketTransportInternal* packet_transport);
// Start asynchronous send of audio frame. The status returned by this method
// only pertains to the synchronous operations (e.g.
// serialization/packetization), not to the asynchronous operation.
virtual RTCError SendAudioFrame(uint64_t channel_id,
MediaTransportEncodedAudioFrame frame) = 0;
// Start asynchronous send of video frame. The status returned by this method
// only pertains to the synchronous operations (e.g.
// serialization/packetization), not to the asynchronous operation.
virtual RTCError SendVideoFrame(
uint64_t channel_id,
const MediaTransportEncodedVideoFrame& frame) = 0;
// Used by video sender to be notified on key frame requests.
virtual void SetKeyFrameRequestCallback(
MediaTransportKeyFrameRequestCallback* callback);
// Requests a keyframe for the particular channel (stream). The caller should
// check that the keyframe is not present in a jitter buffer already (i.e.
// don't request a keyframe if there is one that you will get from the jitter
// buffer in a moment).
virtual RTCError RequestKeyFrame(uint64_t channel_id) = 0;
// Sets audio sink. Sink must be unset by calling SetReceiveAudioSink(nullptr)
// before the media transport is destroyed or before new sink is set.
virtual void SetReceiveAudioSink(MediaTransportAudioSinkInterface* sink) = 0;
// Registers a video sink. Before destruction of media transport, you must
// pass a nullptr.
virtual void SetReceiveVideoSink(MediaTransportVideoSinkInterface* sink) = 0;
// Adds a target bitrate observer. Before media transport is destructed
// the observer must be unregistered (by calling
// RemoveTargetTransferRateObserver).
// A newly registered observer will be called back with the latest recorded
// target rate, if available.
virtual void AddTargetTransferRateObserver(
TargetTransferRateObserver* observer);
// Removes an existing |observer| from observers. If observer was never
// registered, an error is logged and method does nothing.
virtual void RemoveTargetTransferRateObserver(
TargetTransferRateObserver* observer);
// Sets audio packets observer, which gets informed about incoming audio
// packets. Before destruction, the observer must be unregistered by setting
// nullptr.
//
// This method may be temporary, when the multiplexer is implemented (or
// multiplexer may use it to demultiplex channel ids).
virtual void SetFirstAudioPacketReceivedObserver(
AudioPacketReceivedObserver* observer);
// Intended for receive side. AddRttObserver registers an observer to be
// called for each RTT measurement, typically once per ACK. Before media
// transport is destructed the observer must be unregistered.
virtual void AddRttObserver(MediaTransportRttObserver* observer);
virtual void RemoveRttObserver(MediaTransportRttObserver* observer);
// Returns the last known target transfer rate as reported to the above
// observers.
virtual absl::optional<TargetTransferRate> GetLatestTargetTransferRate();
// Gets the audio packet overhead in bytes. Returned overhead does not include
// transport overhead (ipv4/6, turn channeldata, tcp/udp, etc.).
// If the transport is capable of fusing packets together, this overhead
// might not be a very accurate number.
// TODO(nisse): Deprecated.
virtual size_t GetAudioPacketOverhead() const;
// Corresponding observers for audio and video overhead. Before destruction,
// the observers must be unregistered by setting nullptr.
// Registers an observer for network change events. If the network route is
// already established when the callback is added, |callback| will be called
// immediately with the current network route. Before media transport is
// destroyed, the callback must be removed.
virtual void AddNetworkChangeCallback(
MediaTransportNetworkChangeCallback* callback);
virtual void RemoveNetworkChangeCallback(
MediaTransportNetworkChangeCallback* callback);
// Sets a state observer callback. Before media transport is destroyed, the
// callback must be unregistered by setting it to nullptr.
// A newly registered callback will be called with the current state.
// Media transport does not invoke this callback concurrently.
virtual void SetMediaTransportStateCallback(
MediaTransportStateCallback* callback) = 0;
// Updates allocation limits.
// TODO(psla): Make abstract when downstream implementation implement it.
virtual void SetAllocatedBitrateLimits(
const MediaTransportAllocatedBitrateLimits& limits);
// Sets starting rate.
// TODO(psla): Make abstract when downstream implementation implement it.
virtual void SetTargetBitrateLimits(
const MediaTransportTargetRateConstraints& target_rate_constraints) {}
// TODO(sukhanov): RtcEventLogs.
};
// If media transport factory is set in peer connection factory, it will be
// used to create media transport for sending/receiving encoded frames and
// this transport will be used instead of default RTP/SRTP transport.
//
// Currently Media Transport negotiation is not supported in SDP.
// If application is using media transport, it must negotiate it before
// setting media transport factory in peer connection.
class MediaTransportFactory {
public:
virtual ~MediaTransportFactory() = default;
// Creates media transport.
// - Does not take ownership of packet_transport or network_thread.
// - Does not support group calls, in 1:1 call one side must set
// is_caller = true and another is_caller = false.
virtual RTCErrorOr<std::unique_ptr<MediaTransportInterface>>
CreateMediaTransport(rtc::PacketTransportInternal* packet_transport,
rtc::Thread* network_thread,
const MediaTransportSettings& settings);
// Creates a new Media Transport in a disconnected state. If the media
// transport for the caller is created, one can then call
// MediaTransportInterface::GetTransportParametersOffer on that new instance.
// TODO(psla): Make abstract.
virtual RTCErrorOr<std::unique_ptr<webrtc::MediaTransportInterface>>
CreateMediaTransport(rtc::Thread* network_thread,
const MediaTransportSettings& settings);
// Creates a new Datagram Transport in a disconnected state. If the datagram
// transport for the caller is created, one can then call
// DatagramTransportInterface::GetTransportParametersOffer on that new
// instance.
//
// TODO(sukhanov): Consider separating media and datagram transport factories.
// TODO(sukhanov): Move factory to a separate .h file.
virtual RTCErrorOr<std::unique_ptr<DatagramTransportInterface>>
CreateDatagramTransport(rtc::Thread* network_thread,
const MediaTransportSettings& settings);
// Gets a transport name which is supported by the implementation.
// Different factories should return different transport names, and at runtime
// it will be checked that different names were used.
// For example, "rtp" or "generic" may be returned by two different
// implementations.
// The value returned by this method must never change in the lifetime of the
// factory.
// TODO(psla): Make abstract.
virtual std::string GetTransportName() const;
};
} // namespace webrtc
#endif // API_TRANSPORT_MEDIA_MEDIA_TRANSPORT_INTERFACE_H_

View File

@ -1,56 +0,0 @@
/*
* Copyright 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This is EXPERIMENTAL interface for media transport.
//
// The goal is to refactor WebRTC code so that audio and video frames
// are sent / received through the media transport interface. This will
// enable different media transport implementations, including QUIC-based
// media transport.
#include "api/transport/media/video_transport.h"
#include <utility>
namespace webrtc {
MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame() = default;
MediaTransportEncodedVideoFrame::~MediaTransportEncodedVideoFrame() = default;
MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame(
int64_t frame_id,
std::vector<int64_t> referenced_frame_ids,
int payload_type,
const webrtc::EncodedImage& encoded_image)
: payload_type_(payload_type),
encoded_image_(encoded_image),
frame_id_(frame_id),
referenced_frame_ids_(std::move(referenced_frame_ids)) {}
MediaTransportEncodedVideoFrame& MediaTransportEncodedVideoFrame::operator=(
const MediaTransportEncodedVideoFrame&) = default;
MediaTransportEncodedVideoFrame& MediaTransportEncodedVideoFrame::operator=(
MediaTransportEncodedVideoFrame&&) = default;
MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame(
const MediaTransportEncodedVideoFrame& o)
: MediaTransportEncodedVideoFrame() {
*this = o;
}
MediaTransportEncodedVideoFrame::MediaTransportEncodedVideoFrame(
MediaTransportEncodedVideoFrame&& o)
: MediaTransportEncodedVideoFrame() {
*this = std::move(o);
}
} // namespace webrtc

View File

@ -1,101 +0,0 @@
/* Copyright 2019 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This is EXPERIMENTAL interface for media transport.
//
// The goal is to refactor WebRTC code so that audio and video frames
// are sent / received through the media transport interface. This will
// enable different media transport implementations, including QUIC-based
// media transport.
#ifndef API_TRANSPORT_MEDIA_VIDEO_TRANSPORT_H_
#define API_TRANSPORT_MEDIA_VIDEO_TRANSPORT_H_
#include <vector>
#include "api/video/encoded_image.h"
namespace webrtc {
// Represents encoded video frame, along with the codec information.
class MediaTransportEncodedVideoFrame final {
public:
MediaTransportEncodedVideoFrame(int64_t frame_id,
std::vector<int64_t> referenced_frame_ids,
int payload_type,
const webrtc::EncodedImage& encoded_image);
~MediaTransportEncodedVideoFrame();
MediaTransportEncodedVideoFrame(const MediaTransportEncodedVideoFrame&);
MediaTransportEncodedVideoFrame& operator=(
const MediaTransportEncodedVideoFrame& other);
MediaTransportEncodedVideoFrame& operator=(
MediaTransportEncodedVideoFrame&& other);
MediaTransportEncodedVideoFrame(MediaTransportEncodedVideoFrame&&);
int payload_type() const { return payload_type_; }
const webrtc::EncodedImage& encoded_image() const { return encoded_image_; }
int64_t frame_id() const { return frame_id_; }
const std::vector<int64_t>& referenced_frame_ids() const {
return referenced_frame_ids_;
}
// Hack to workaround lack of ownership of the EncodedImage buffer. If we
// don't already own the underlying data, make a copy.
void Retain() { encoded_image_.Retain(); }
private:
MediaTransportEncodedVideoFrame();
int payload_type_;
// The buffer is not always owned by the encoded image. On the sender it means
// that it will need to make a copy using the Retain() method, if it wants to
// deliver it asynchronously.
webrtc::EncodedImage encoded_image_;
// Frame id uniquely identifies a frame in a stream. It needs to be unique in
// a given time window (i.e. technically unique identifier for the lifetime of
// the connection is not needed, but you need to guarantee that remote side
// got rid of the previous frame_id if you plan to reuse it).
//
// It is required by a remote jitter buffer, and is the same as
// EncodedFrame::id::picture_id.
//
// This data must be opaque to the media transport, and media transport should
// itself not make any assumptions about what it is and its uniqueness.
int64_t frame_id_;
// A single frame might depend on other frames. This is set of identifiers on
// which the current frame depends.
std::vector<int64_t> referenced_frame_ids_;
};
// Interface for receiving encoded video frames from MediaTransportInterface
// implementations.
class MediaTransportVideoSinkInterface {
public:
virtual ~MediaTransportVideoSinkInterface() = default;
// Called when new encoded video frame is received.
virtual void OnData(uint64_t channel_id,
MediaTransportEncodedVideoFrame frame) = 0;
};
// Interface for video sender to be notified of received key frame request.
class MediaTransportKeyFrameRequestCallback {
public:
virtual ~MediaTransportKeyFrameRequestCallback() = default;
// Called when a key frame request is received on the transport.
virtual void OnKeyFrameRequested(uint64_t channel_id) = 0;
};
} // namespace webrtc
#endif // API_TRANSPORT_MEDIA_VIDEO_TRANSPORT_H_

View File

@ -14,15 +14,20 @@ rtc_source_set("rtp_source") {
deps = [
"../../../api:rtp_headers",
"../../../rtc_base:checks",
"//third_party/abseil-cpp/absl/types:optional",
]
absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}
rtc_source_set("dependency_descriptor") {
visibility = [ "*" ]
sources = [ "dependency_descriptor.h" ]
deps = [
sources = [
"dependency_descriptor.cc",
"dependency_descriptor.h",
]
deps = [ "../../../rtc_base:checks" ]
absl_deps = [
"//third_party/abseil-cpp/absl/container:inlined_vector",
"//third_party/abseil-cpp/absl/strings",
"//third_party/abseil-cpp/absl/types:optional",
]
}

View File

@ -0,0 +1,54 @@
/*
* Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "api/transport/rtp/dependency_descriptor.h"
#include "absl/container/inlined_vector.h"
#include "absl/strings/string_view.h"
#include "rtc_base/checks.h"
namespace webrtc {
constexpr int DependencyDescriptor::kMaxSpatialIds;
constexpr int DependencyDescriptor::kMaxTemporalIds;
constexpr int DependencyDescriptor::kMaxTemplates;
constexpr int DependencyDescriptor::kMaxDecodeTargets;
namespace webrtc_impl {
absl::InlinedVector<DecodeTargetIndication, 10> StringToDecodeTargetIndications(
absl::string_view symbols) {
absl::InlinedVector<DecodeTargetIndication, 10> dtis;
dtis.reserve(symbols.size());
for (char symbol : symbols) {
DecodeTargetIndication indication;
switch (symbol) {
case '-':
indication = DecodeTargetIndication::kNotPresent;
break;
case 'D':
indication = DecodeTargetIndication::kDiscardable;
break;
case 'R':
indication = DecodeTargetIndication::kRequired;
break;
case 'S':
indication = DecodeTargetIndication::kSwitch;
break;
default:
RTC_NOTREACHED();
}
dtis.push_back(indication);
}
return dtis;
}
} // namespace webrtc_impl
} // namespace webrtc

View File

@ -13,10 +13,12 @@
#include <stdint.h>
#include <initializer_list>
#include <memory>
#include <vector>
#include "absl/container/inlined_vector.h"
#include "absl/strings/string_view.h"
#include "absl/types/optional.h"
namespace webrtc {
@ -52,6 +54,13 @@ enum class DecodeTargetIndication {
};
struct FrameDependencyTemplate {
// Setters are named briefly to chain them when building the template.
FrameDependencyTemplate& S(int spatial_layer);
FrameDependencyTemplate& T(int temporal_layer);
FrameDependencyTemplate& Dtis(absl::string_view dtis);
FrameDependencyTemplate& FrameDiffs(std::initializer_list<int> diffs);
FrameDependencyTemplate& ChainDiffs(std::initializer_list<int> diffs);
friend bool operator==(const FrameDependencyTemplate& lhs,
const FrameDependencyTemplate& rhs) {
return lhs.spatial_id == rhs.spatial_id &&
@ -82,14 +91,18 @@ struct FrameDependencyStructure {
int num_decode_targets = 0;
int num_chains = 0;
// If chains are used (num_chains > 0), maps decode target index into index of
// the chain protecting that target or |num_chains| value if decode target is
// not protected by a chain.
// the chain protecting that target.
absl::InlinedVector<int, 10> decode_target_protected_by_chain;
absl::InlinedVector<RenderResolution, 4> resolutions;
std::vector<FrameDependencyTemplate> templates;
};
struct DependencyDescriptor {
static constexpr int kMaxSpatialIds = 4;
static constexpr int kMaxTemporalIds = 8;
static constexpr int kMaxDecodeTargets = 32;
static constexpr int kMaxTemplates = 64;
bool first_packet_in_frame = true;
bool last_packet_in_frame = true;
int frame_number = 0;
@ -99,6 +112,37 @@ struct DependencyDescriptor {
std::unique_ptr<FrameDependencyStructure> attached_structure;
};
// Below are implementation details.
namespace webrtc_impl {
absl::InlinedVector<DecodeTargetIndication, 10> StringToDecodeTargetIndications(
absl::string_view indication_symbols);
} // namespace webrtc_impl
inline FrameDependencyTemplate& FrameDependencyTemplate::S(int spatial_layer) {
this->spatial_id = spatial_layer;
return *this;
}
inline FrameDependencyTemplate& FrameDependencyTemplate::T(int temporal_layer) {
this->temporal_id = temporal_layer;
return *this;
}
inline FrameDependencyTemplate& FrameDependencyTemplate::Dtis(
absl::string_view dtis) {
this->decode_target_indications =
webrtc_impl::StringToDecodeTargetIndications(dtis);
return *this;
}
inline FrameDependencyTemplate& FrameDependencyTemplate::FrameDiffs(
std::initializer_list<int> diffs) {
this->frame_diffs.assign(diffs.begin(), diffs.end());
return *this;
}
inline FrameDependencyTemplate& FrameDependencyTemplate::ChainDiffs(
std::initializer_list<int> diffs) {
this->chain_diffs.assign(diffs.begin(), diffs.end());
return *this;
}
} // namespace webrtc
#endif // API_TRANSPORT_RTP_DEPENDENCY_DESCRIPTOR_H_

View File

@ -18,11 +18,16 @@ namespace webrtc {
class MockNetworkStateEstimator : public NetworkStateEstimator {
public:
MOCK_METHOD0(GetCurrentEstimate, absl::optional<NetworkStateEstimate>());
MOCK_METHOD1(OnTransportPacketsFeedback,
void(const TransportPacketsFeedback&));
MOCK_METHOD1(OnReceivedPacket, void(const PacketResult&));
MOCK_METHOD1(OnRouteChange, void(const NetworkRouteChange&));
MOCK_METHOD(absl::optional<NetworkStateEstimate>,
GetCurrentEstimate,
(),
(override));
MOCK_METHOD(void,
OnTransportPacketsFeedback,
(const TransportPacketsFeedback&),
(override));
MOCK_METHOD(void, OnReceivedPacket, (const PacketResult&), (override));
MOCK_METHOD(void, OnRouteChange, (const NetworkRouteChange&), (override));
};
} // namespace webrtc

Some files were not shown because too many files have changed in this diff Show More