Remove ortc folder.

The code under ortc/ is currently unused and not actively maintained, so this
CL deletes it.

Bug: webrtc:9824
Change-Id: I20f890b1a1e5e1dbd2b3949af916ae0a6bc8a032
Reviewed-on: https://webrtc-review.googlesource.com/c/102601
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Reviewed-by: Steve Anton <steveanton@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#25082}
Authored by Mirko Bonadei on 2018-10-04 15:54:14 +02:00; committed by Commit Bot.
Commit 2558c4e938 (parent 88b68ace17)
27 changed files with 0 additions and 6125 deletions

BUILD.gn

@@ -54,7 +54,6 @@ if (!build_with_chromium) {
"modules/remote_bitrate_estimator:bwe_simulations_tests",
"modules/rtp_rtcp:test_packet_masks_metrics",
"modules/video_capture:video_capture_internal_impl",
"ortc:ortc_unittests",
"pc:peerconnection_unittests",
"pc:rtc_pc_unittests",
"rtc_base:rtc_base_tests_utils",
@@ -382,7 +381,6 @@ if (!build_with_chromium) {
"media",
"modules",
"modules/video_capture:video_capture_internal_impl",
"ortc",
"rtc_base",
"sdk",
"video",

ortc/BUILD.gn

@@ -1,108 +0,0 @@
# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
import("../webrtc.gni")
if (is_android) {
import("//build/config/android/config.gni")
import("//build/config/android/rules.gni")
}
rtc_static_library("ortc") {
defines = []
sources = [
"ortcfactory.cc",
"ortcfactory.h",
"ortcrtpreceiveradapter.cc",
"ortcrtpreceiveradapter.h",
"ortcrtpsenderadapter.cc",
"ortcrtpsenderadapter.h",
"rtptransportadapter.cc",
"rtptransportadapter.h",
"rtptransportcontrolleradapter.cc",
"rtptransportcontrolleradapter.h",
]
# TODO(deadbeef): Create a separate target for the common things ORTC and
# PeerConnection code shares, so that ortc can depend on that instead of
# libjingle_peerconnection.
deps = [
"../api:libjingle_peerconnection_api",
"../api:ortc_api",
"../api/video_codecs:builtin_video_decoder_factory",
"../api/video_codecs:builtin_video_encoder_factory",
"../call:call_interfaces",
"../call:rtp_sender",
"../logging:rtc_event_log_api",
"../logging:rtc_event_log_impl_base",
"../media:rtc_audio_video",
"../media:rtc_media",
"../media:rtc_media_base",
"../modules/audio_processing:audio_processing",
"../p2p:rtc_p2p",
"../pc:libjingle_peerconnection",
"../pc:peerconnection",
"../pc:rtc_pc",
"../pc:rtc_pc_base",
"../rtc_base:checks",
"../rtc_base:rtc_base",
"../rtc_base:rtc_base_approved",
"../rtc_base/third_party/sigslot",
"//third_party/abseil-cpp/absl/memory",
"//third_party/abseil-cpp/absl/types:optional",
]
if (!build_with_chromium && is_clang) {
# Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
}
}
if (rtc_include_tests) {
rtc_test("ortc_unittests") {
testonly = true
sources = [
"ortcfactory_integrationtest.cc",
"ortcfactory_unittest.cc",
"ortcrtpreceiver_unittest.cc",
"ortcrtpsender_unittest.cc",
"rtptransport_unittest.cc",
"rtptransportcontroller_unittest.cc",
"srtptransport_unittest.cc",
"testrtpparameters.cc",
"testrtpparameters.h",
]
deps = [
":ortc",
"../api:libjingle_peerconnection_api",
"../api:ortc_api",
"../api/audio_codecs:builtin_audio_decoder_factory",
"../api/audio_codecs:builtin_audio_encoder_factory",
"../media:rtc_media_tests_utils",
"../p2p:p2p_test_utils",
"../p2p:rtc_p2p",
"../pc:pc_test_utils",
"../pc:peerconnection",
"../rtc_base:rtc_base",
"../rtc_base:rtc_base_approved",
"../rtc_base:rtc_base_tests_main",
"../rtc_base:rtc_base_tests_utils",
"../rtc_base/system:arch",
]
if (!build_with_chromium && is_clang) {
# Suppress warnings from the Chromium Clang plugin (bugs.webrtc.org/163).
suppressed_configs += [ "//build/config/clang:find_bad_constructs" ]
}
if (is_android) {
deps += [ "//testing/android/native_test:native_test_support" ]
}
}
}

ortc/DEPS

@@ -1,17 +0,0 @@
include_rules = [
"+api",
"+call",
"+logging/rtc_event_log",
"+media",
"+modules/audio_coding",
"+modules/audio_processing",
"+p2p",
"+pc",
"+modules/rtp_rtcp",
"+system_wrappers",
"+modules/audio_device",
"+modules/video_coding",
"+modules/video_render",
]

ortc/OWNERS

@@ -1,7 +0,0 @@
pthatcher@webrtc.org
steveanton@webrtc.org
# These are for the common case of adding or renaming files. If you're doing
# structural changes, please get a review from a reviewer in this file.
per-file *.gn=*
per-file *.gni=*

ortc/ortcfactory.cc

@@ -1,563 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "ortc/ortcfactory.h"
#include <utility> // For std::move.
#include <vector>
#include "absl/memory/memory.h"
#include "api/mediastreamtrackproxy.h"
#include "api/proxy.h"
#include "api/rtcerror.h"
#include "api/video_codecs/builtin_video_decoder_factory.h"
#include "api/video_codecs/builtin_video_encoder_factory.h"
#include "api/videosourceproxy.h"
#include "logging/rtc_event_log/rtc_event_log.h"
#include "media/base/mediaconstants.h"
#include "media/base/rtpdataengine.h"
#include "modules/audio_processing/include/audio_processing.h"
#include "ortc/ortcrtpreceiveradapter.h"
#include "ortc/ortcrtpsenderadapter.h"
#include "ortc/rtptransportadapter.h"
#include "ortc/rtptransportcontrolleradapter.h"
#include "p2p/base/basicpacketsocketfactory.h"
#include "p2p/base/udptransport.h"
#include "pc/audiotrack.h"
#include "pc/channelmanager.h"
#include "pc/localaudiosource.h"
#include "pc/rtpparametersconversion.h"
#include "pc/videotrack.h"
#include "rtc_base/asyncpacketsocket.h"
#include "rtc_base/bind.h"
#include "rtc_base/checks.h"
#include "rtc_base/helpers.h"
#include "rtc_base/logging.h"
#include "rtc_base/strings/string_builder.h"
namespace {
const int kDefaultRtcpCnameLength = 16;
// Asserts that all of the built-in capabilities can be converted to
// RtpCapabilities. If they can't, something's wrong (for example, maybe a new
// feedback mechanism is supported, but an enum value wasn't added to
// rtpparameters.h).
template <typename C>
webrtc::RtpCapabilities ToRtpCapabilitiesWithAsserts(
const std::vector<C>& cricket_codecs,
const cricket::RtpHeaderExtensions& cricket_extensions) {
webrtc::RtpCapabilities capabilities =
webrtc::ToRtpCapabilities(cricket_codecs, cricket_extensions);
RTC_DCHECK_EQ(capabilities.codecs.size(), cricket_codecs.size());
for (size_t i = 0; i < capabilities.codecs.size(); ++i) {
RTC_DCHECK_EQ(capabilities.codecs[i].rtcp_feedback.size(),
cricket_codecs[i].feedback_params.params().size());
}
RTC_DCHECK_EQ(capabilities.header_extensions.size(),
cricket_extensions.size());
return capabilities;
}
} // namespace
namespace webrtc {
// Note that this proxy class uses the network thread as the "worker" thread.
BEGIN_OWNED_PROXY_MAP(OrtcFactory)
PROXY_SIGNALING_THREAD_DESTRUCTOR()
PROXY_METHOD0(RTCErrorOr<std::unique_ptr<RtpTransportControllerInterface>>,
CreateRtpTransportController)
PROXY_METHOD4(RTCErrorOr<std::unique_ptr<RtpTransportInterface>>,
CreateRtpTransport,
const RtpTransportParameters&,
PacketTransportInterface*,
PacketTransportInterface*,
RtpTransportControllerInterface*)
PROXY_METHOD4(RTCErrorOr<std::unique_ptr<SrtpTransportInterface>>,
CreateSrtpTransport,
const RtpTransportParameters&,
PacketTransportInterface*,
PacketTransportInterface*,
RtpTransportControllerInterface*)
PROXY_CONSTMETHOD1(RtpCapabilities,
GetRtpSenderCapabilities,
cricket::MediaType)
PROXY_METHOD2(RTCErrorOr<std::unique_ptr<OrtcRtpSenderInterface>>,
CreateRtpSender,
rtc::scoped_refptr<MediaStreamTrackInterface>,
RtpTransportInterface*)
PROXY_METHOD2(RTCErrorOr<std::unique_ptr<OrtcRtpSenderInterface>>,
CreateRtpSender,
cricket::MediaType,
RtpTransportInterface*)
PROXY_CONSTMETHOD1(RtpCapabilities,
GetRtpReceiverCapabilities,
cricket::MediaType)
PROXY_METHOD2(RTCErrorOr<std::unique_ptr<OrtcRtpReceiverInterface>>,
CreateRtpReceiver,
cricket::MediaType,
RtpTransportInterface*)
PROXY_WORKER_METHOD3(RTCErrorOr<std::unique_ptr<UdpTransportInterface>>,
CreateUdpTransport,
int,
uint16_t,
uint16_t)
PROXY_METHOD1(rtc::scoped_refptr<AudioSourceInterface>,
CreateAudioSource,
const cricket::AudioOptions&)
PROXY_METHOD2(rtc::scoped_refptr<VideoTrackInterface>,
CreateVideoTrack,
const std::string&,
VideoTrackSourceInterface*)
PROXY_METHOD2(rtc::scoped_refptr<AudioTrackInterface>,
CreateAudioTrack,
const std::string&,
AudioSourceInterface*)
END_PROXY_MAP()
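// For readers unfamiliar with the macros from api/proxy.h: each PROXY_METHOD*
// entry above expands, roughly, into an override that marshals the call onto
// the proxy's designated thread and blocks for the result. A simplified,
// hypothetical hand-written equivalent of the CreateRtpTransportController
// entry (not the exact generated code; |c_| stands for the wrapped
// OrtcFactory instance) might look like:
//
//   RTCErrorOr<std::unique_ptr<RtpTransportControllerInterface>>
//   CreateRtpTransportController() override {
//     return signaling_thread_->Invoke<
//         RTCErrorOr<std::unique_ptr<RtpTransportControllerInterface>>>(
//         RTC_FROM_HERE,
//         [this] { return c_->CreateRtpTransportController(); });
//   }
//
// PROXY_WORKER_METHOD* entries (such as CreateUdpTransport above) marshal to
// the "worker" thread instead, which for this proxy is the network thread, as
// noted above.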
// static
RTCErrorOr<std::unique_ptr<OrtcFactoryInterface>> OrtcFactory::Create(
rtc::Thread* network_thread,
rtc::Thread* signaling_thread,
rtc::NetworkManager* network_manager,
rtc::PacketSocketFactory* socket_factory,
AudioDeviceModule* adm,
std::unique_ptr<cricket::MediaEngineInterface> media_engine,
rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory) {
// Hop to signaling thread if needed.
if (signaling_thread && !signaling_thread->IsCurrent()) {
return signaling_thread
->Invoke<RTCErrorOr<std::unique_ptr<OrtcFactoryInterface>>>(
RTC_FROM_HERE,
rtc::Bind(&OrtcFactory::Create_s, network_thread, signaling_thread,
network_manager, socket_factory, adm,
media_engine.release(), audio_encoder_factory,
audio_decoder_factory));
}
return Create_s(network_thread, signaling_thread, network_manager,
socket_factory, adm, media_engine.release(),
audio_encoder_factory, audio_decoder_factory);
}
RTCErrorOr<std::unique_ptr<OrtcFactoryInterface>> OrtcFactoryInterface::Create(
rtc::Thread* network_thread,
rtc::Thread* signaling_thread,
rtc::NetworkManager* network_manager,
rtc::PacketSocketFactory* socket_factory,
AudioDeviceModule* adm,
rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory) {
return OrtcFactory::Create(network_thread, signaling_thread, network_manager,
socket_factory, adm, nullptr,
audio_encoder_factory, audio_decoder_factory);
}
OrtcFactory::OrtcFactory(
rtc::Thread* network_thread,
rtc::Thread* signaling_thread,
rtc::NetworkManager* network_manager,
rtc::PacketSocketFactory* socket_factory,
AudioDeviceModule* adm,
rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory)
: network_thread_(network_thread),
signaling_thread_(signaling_thread),
network_manager_(network_manager),
socket_factory_(socket_factory),
adm_(adm),
null_event_log_(RtcEventLog::CreateNull()),
audio_encoder_factory_(audio_encoder_factory),
audio_decoder_factory_(audio_decoder_factory) {
if (!rtc::CreateRandomString(kDefaultRtcpCnameLength, &default_cname_)) {
RTC_LOG(LS_ERROR) << "Failed to generate CNAME?";
RTC_NOTREACHED();
}
if (!network_thread_) {
owned_network_thread_ = rtc::Thread::CreateWithSocketServer();
owned_network_thread_->Start();
network_thread_ = owned_network_thread_.get();
}
// The worker thread is created internally because it's an implementation
// detail, and consumers of the API don't need to really know about it.
worker_thread_ = rtc::Thread::Create();
worker_thread_->SetName("ORTC-worker", this);
worker_thread_->Start();
if (signaling_thread_) {
RTC_DCHECK_RUN_ON(signaling_thread_);
} else {
signaling_thread_ = rtc::Thread::Current();
if (!signaling_thread_) {
// If this thread isn't already wrapped by an rtc::Thread, create a
// wrapper and own it in this class.
signaling_thread_ = rtc::ThreadManager::Instance()->WrapCurrentThread();
wraps_signaling_thread_ = true;
}
}
if (signaling_thread_->name().empty()) {
signaling_thread_->SetName("ORTC-signaling", this);
}
if (!network_manager_) {
owned_network_manager_.reset(new rtc::BasicNetworkManager());
network_manager_ = owned_network_manager_.get();
}
if (!socket_factory_) {
owned_socket_factory_.reset(
new rtc::BasicPacketSocketFactory(network_thread_));
socket_factory_ = owned_socket_factory_.get();
}
}
OrtcFactory::~OrtcFactory() {
RTC_DCHECK_RUN_ON(signaling_thread_);
if (wraps_signaling_thread_) {
rtc::ThreadManager::Instance()->UnwrapCurrentThread();
}
}
RTCErrorOr<std::unique_ptr<RtpTransportControllerInterface>>
OrtcFactory::CreateRtpTransportController() {
RTC_DCHECK_RUN_ON(signaling_thread_);
return RtpTransportControllerAdapter::CreateProxied(
cricket::MediaConfig(), channel_manager_.get(), null_event_log_.get(),
signaling_thread_, worker_thread_.get(), network_thread_);
}
RTCErrorOr<std::unique_ptr<RtpTransportInterface>>
OrtcFactory::CreateRtpTransport(
const RtpTransportParameters& parameters,
PacketTransportInterface* rtp,
PacketTransportInterface* rtcp,
RtpTransportControllerInterface* transport_controller) {
RTC_DCHECK_RUN_ON(signaling_thread_);
RtpTransportParameters copied_parameters = parameters;
if (copied_parameters.rtcp.cname.empty()) {
copied_parameters.rtcp.cname = default_cname_;
}
if (transport_controller) {
return transport_controller->GetInternal()->CreateProxiedRtpTransport(
copied_parameters, rtp, rtcp);
} else {
// If |transport_controller| is null, create one automatically, which the
// returned RtpTransport will own.
auto controller_result = CreateRtpTransportController();
if (!controller_result.ok()) {
return controller_result.MoveError();
}
auto controller = controller_result.MoveValue();
auto transport_result =
controller->GetInternal()->CreateProxiedRtpTransport(copied_parameters,
rtp, rtcp);
// If RtpTransport was successfully created, transfer ownership of
// |rtp_transport_controller|. Otherwise it will go out of scope and be
// deleted automatically.
if (transport_result.ok()) {
transport_result.value()
->GetInternal()
->TakeOwnershipOfRtpTransportController(std::move(controller));
}
return transport_result;
}
}
RTCErrorOr<std::unique_ptr<SrtpTransportInterface>>
OrtcFactory::CreateSrtpTransport(
const RtpTransportParameters& parameters,
PacketTransportInterface* rtp,
PacketTransportInterface* rtcp,
RtpTransportControllerInterface* transport_controller) {
RTC_DCHECK_RUN_ON(signaling_thread_);
RtpTransportParameters copied_parameters = parameters;
if (copied_parameters.rtcp.cname.empty()) {
copied_parameters.rtcp.cname = default_cname_;
}
if (transport_controller) {
return transport_controller->GetInternal()->CreateProxiedSrtpTransport(
copied_parameters, rtp, rtcp);
} else {
// If |transport_controller| is null, create one automatically, which the
// returned SrtpTransport will own.
auto controller_result = CreateRtpTransportController();
if (!controller_result.ok()) {
return controller_result.MoveError();
}
auto controller = controller_result.MoveValue();
auto transport_result =
controller->GetInternal()->CreateProxiedSrtpTransport(copied_parameters,
rtp, rtcp);
// If SrtpTransport was successfully created, transfer ownership of
// |rtp_transport_controller|. Otherwise it will go out of scope and be
// deleted automatically.
if (transport_result.ok()) {
transport_result.value()
->GetInternal()
->TakeOwnershipOfRtpTransportController(std::move(controller));
}
return transport_result;
}
}
RtpCapabilities OrtcFactory::GetRtpSenderCapabilities(
cricket::MediaType kind) const {
RTC_DCHECK_RUN_ON(signaling_thread_);
switch (kind) {
case cricket::MEDIA_TYPE_AUDIO: {
cricket::AudioCodecs cricket_codecs;
cricket::RtpHeaderExtensions cricket_extensions;
channel_manager_->GetSupportedAudioSendCodecs(&cricket_codecs);
channel_manager_->GetSupportedAudioRtpHeaderExtensions(
&cricket_extensions);
return ToRtpCapabilitiesWithAsserts(cricket_codecs, cricket_extensions);
}
case cricket::MEDIA_TYPE_VIDEO: {
cricket::VideoCodecs cricket_codecs;
cricket::RtpHeaderExtensions cricket_extensions;
channel_manager_->GetSupportedVideoCodecs(&cricket_codecs);
channel_manager_->GetSupportedVideoRtpHeaderExtensions(
&cricket_extensions);
return ToRtpCapabilitiesWithAsserts(cricket_codecs, cricket_extensions);
}
case cricket::MEDIA_TYPE_DATA:
return RtpCapabilities();
}
// Not reached; avoids compile warning.
FATAL();
}
RTCErrorOr<std::unique_ptr<OrtcRtpSenderInterface>>
OrtcFactory::CreateRtpSender(
rtc::scoped_refptr<MediaStreamTrackInterface> track,
RtpTransportInterface* transport) {
RTC_DCHECK_RUN_ON(signaling_thread_);
if (!track) {
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
"Cannot pass null track into CreateRtpSender.");
}
auto result =
CreateRtpSender(cricket::MediaTypeFromString(track->kind()), transport);
if (!result.ok()) {
return result;
}
auto err = result.value()->SetTrack(track);
if (!err.ok()) {
return std::move(err);
}
return result;
}
RTCErrorOr<std::unique_ptr<OrtcRtpSenderInterface>>
OrtcFactory::CreateRtpSender(cricket::MediaType kind,
RtpTransportInterface* transport) {
RTC_DCHECK_RUN_ON(signaling_thread_);
if (kind == cricket::MEDIA_TYPE_DATA) {
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
"Cannot create data RtpSender.");
}
if (!transport) {
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
"Cannot pass null transport into CreateRtpSender.");
}
return transport->GetInternal()
->rtp_transport_controller()
->CreateProxiedRtpSender(kind, transport);
}
RtpCapabilities OrtcFactory::GetRtpReceiverCapabilities(
cricket::MediaType kind) const {
RTC_DCHECK_RUN_ON(signaling_thread_);
switch (kind) {
case cricket::MEDIA_TYPE_AUDIO: {
cricket::AudioCodecs cricket_codecs;
cricket::RtpHeaderExtensions cricket_extensions;
channel_manager_->GetSupportedAudioReceiveCodecs(&cricket_codecs);
channel_manager_->GetSupportedAudioRtpHeaderExtensions(
&cricket_extensions);
return ToRtpCapabilitiesWithAsserts(cricket_codecs, cricket_extensions);
}
case cricket::MEDIA_TYPE_VIDEO: {
cricket::VideoCodecs cricket_codecs;
cricket::RtpHeaderExtensions cricket_extensions;
channel_manager_->GetSupportedVideoCodecs(&cricket_codecs);
channel_manager_->GetSupportedVideoRtpHeaderExtensions(
&cricket_extensions);
return ToRtpCapabilitiesWithAsserts(cricket_codecs, cricket_extensions);
}
case cricket::MEDIA_TYPE_DATA:
return RtpCapabilities();
}
// Not reached; avoids compile warning.
FATAL();
}
RTCErrorOr<std::unique_ptr<OrtcRtpReceiverInterface>>
OrtcFactory::CreateRtpReceiver(cricket::MediaType kind,
RtpTransportInterface* transport) {
RTC_DCHECK_RUN_ON(signaling_thread_);
if (kind == cricket::MEDIA_TYPE_DATA) {
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
"Cannot create data RtpReceiver.");
}
if (!transport) {
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
"Cannot pass null transport into CreateRtpReceiver.");
}
return transport->GetInternal()
->rtp_transport_controller()
->CreateProxiedRtpReceiver(kind, transport);
}
// UdpTransport expects all methods to be called on one thread, which needs to
// be the network thread, since that's where its socket can safely be used. So
// return a proxy to the created UdpTransport.
BEGIN_OWNED_PROXY_MAP(UdpTransport)
PROXY_WORKER_THREAD_DESTRUCTOR()
PROXY_WORKER_CONSTMETHOD0(rtc::SocketAddress, GetLocalAddress)
PROXY_WORKER_METHOD1(bool, SetRemoteAddress, const rtc::SocketAddress&)
PROXY_WORKER_CONSTMETHOD0(rtc::SocketAddress, GetRemoteAddress)
protected:
rtc::PacketTransportInternal* GetInternal() override {
return internal();
}
END_PROXY_MAP()
RTCErrorOr<std::unique_ptr<UdpTransportInterface>>
OrtcFactory::CreateUdpTransport(int family,
uint16_t min_port,
uint16_t max_port) {
RTC_DCHECK_RUN_ON(network_thread_);
if (family != AF_INET && family != AF_INET6) {
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
"Address family must be AF_INET or AF_INET6.");
}
if (min_port > max_port) {
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_RANGE,
"Port range invalid; minimum port must be less than "
"or equal to max port.");
}
std::unique_ptr<rtc::AsyncPacketSocket> socket(
socket_factory_->CreateUdpSocket(
rtc::SocketAddress(rtc::GetAnyIP(family), 0), min_port, max_port));
if (!socket) {
// Only log at warning level, because this method may be called with
// specific port ranges to determine if a port is available, expecting the
// possibility of an error.
LOG_AND_RETURN_ERROR_EX(RTCErrorType::RESOURCE_EXHAUSTED,
"Local socket allocation failure.", LS_WARNING);
}
RTC_LOG(LS_INFO) << "Created UDP socket with address "
<< socket->GetLocalAddress().ToSensitiveString() << ".";
// Make a unique debug name (for logging/diagnostics only).
rtc::StringBuilder oss;
static int udp_id = 0;
oss << "udp" << udp_id++;
return UdpTransportProxyWithInternal<cricket::UdpTransport>::Create(
signaling_thread_, network_thread_,
std::unique_ptr<cricket::UdpTransport>(
new cricket::UdpTransport(oss.str(), std::move(socket))));
}
rtc::scoped_refptr<AudioSourceInterface> OrtcFactory::CreateAudioSource(
const cricket::AudioOptions& options) {
RTC_DCHECK_RUN_ON(signaling_thread_);
return rtc::scoped_refptr<LocalAudioSource>(
LocalAudioSource::Create(&options));
}
rtc::scoped_refptr<VideoTrackInterface> OrtcFactory::CreateVideoTrack(
const std::string& id,
VideoTrackSourceInterface* source) {
RTC_DCHECK_RUN_ON(signaling_thread_);
rtc::scoped_refptr<VideoTrackInterface> track(
VideoTrack::Create(id, source, worker_thread_.get()));
return VideoTrackProxy::Create(signaling_thread_, worker_thread_.get(),
track);
}
rtc::scoped_refptr<AudioTrackInterface> OrtcFactory::CreateAudioTrack(
const std::string& id,
AudioSourceInterface* source) {
RTC_DCHECK_RUN_ON(signaling_thread_);
rtc::scoped_refptr<AudioTrackInterface> track(AudioTrack::Create(id, source));
return AudioTrackProxy::Create(signaling_thread_, track);
}
// static
RTCErrorOr<std::unique_ptr<OrtcFactoryInterface>> OrtcFactory::Create_s(
rtc::Thread* network_thread,
rtc::Thread* signaling_thread,
rtc::NetworkManager* network_manager,
rtc::PacketSocketFactory* socket_factory,
AudioDeviceModule* adm,
cricket::MediaEngineInterface* media_engine,
rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory) {
// Add the unique_ptr wrapper back.
std::unique_ptr<cricket::MediaEngineInterface> owned_media_engine(
media_engine);
std::unique_ptr<OrtcFactory> new_factory(new OrtcFactory(
network_thread, signaling_thread, network_manager, socket_factory, adm,
audio_encoder_factory, audio_decoder_factory));
RTCError err = new_factory->Initialize(std::move(owned_media_engine));
if (!err.ok()) {
return std::move(err);
}
// Return a proxy so that any calls on the returned object (including
// destructor) happen on the signaling thread.
rtc::Thread* signaling = new_factory->signaling_thread();
rtc::Thread* network = new_factory->network_thread();
return OrtcFactoryProxy::Create(signaling, network, std::move(new_factory));
}
RTCError OrtcFactory::Initialize(
std::unique_ptr<cricket::MediaEngineInterface> media_engine) {
RTC_DCHECK_RUN_ON(signaling_thread_);
// TODO(deadbeef): Get rid of requirement to hop to worker thread here.
if (!media_engine) {
media_engine =
worker_thread_->Invoke<std::unique_ptr<cricket::MediaEngineInterface>>(
RTC_FROM_HERE, rtc::Bind(&OrtcFactory::CreateMediaEngine_w, this));
}
channel_manager_.reset(new cricket::ChannelManager(
std::move(media_engine), absl::make_unique<cricket::RtpDataEngine>(),
worker_thread_.get(), network_thread_));
channel_manager_->SetVideoRtxEnabled(true);
if (!channel_manager_->Init()) {
LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
"Failed to initialize ChannelManager.");
}
return RTCError::OK();
}
std::unique_ptr<cricket::MediaEngineInterface>
OrtcFactory::CreateMediaEngine_w() {
RTC_DCHECK_RUN_ON(worker_thread_.get());
// The null arguments are optional factories that could be passed into the
// OrtcFactory, but aren't yet.
//
// Note that |adm_| may be null, in which case the platform-specific default
// AudioDeviceModule will be used.
return std::unique_ptr<cricket::MediaEngineInterface>(
cricket::WebRtcMediaEngineFactory::Create(
rtc::scoped_refptr<webrtc::AudioDeviceModule>(adm_),
audio_encoder_factory_, audio_decoder_factory_,
webrtc::CreateBuiltinVideoEncoderFactory(),
webrtc::CreateBuiltinVideoDecoderFactory(), nullptr,
webrtc::AudioProcessingBuilder().Create()));
}
} // namespace webrtc
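// A minimal sketch of how one endpoint typically drives the factory API
// implemented above, mirroring ortcfactory_integrationtest.cc below; error
// handling is elided, the remote address is illustrative only, and
// MakeRtcpMuxParameters()/MakeMinimalOpusParameters() are the helpers from
// ortc/testrtpparameters.h:
//
//   auto factory = OrtcFactoryInterface::Create(
//                      /*network_thread=*/nullptr, /*signaling_thread=*/nullptr,
//                      /*network_manager=*/nullptr, /*socket_factory=*/nullptr,
//                      /*adm=*/nullptr, CreateBuiltinAudioEncoderFactory(),
//                      CreateBuiltinAudioDecoderFactory())
//                      .MoveValue();
//   auto udp = factory->CreateUdpTransport(AF_INET).MoveValue();
//   udp->SetRemoteAddress(rtc::SocketAddress("192.0.2.1", 10000));
//   auto rtp = factory
//                  ->CreateRtpTransport(MakeRtcpMuxParameters(), udp.get(),
//                                       /*rtcp=*/nullptr,
//                                       /*transport_controller=*/nullptr)
//                  .MoveValue();
//   auto sender =
//       factory->CreateRtpSender(cricket::MEDIA_TYPE_AUDIO, rtp.get())
//           .MoveValue();
//   auto source = factory->CreateAudioSource(cricket::AudioOptions());
//   sender->SetTrack(factory->CreateAudioTrack("audio", source));
//   sender->Send(MakeMinimalOpusParameters());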

ortc/ortcfactory.h

@@ -1,156 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef ORTC_ORTCFACTORY_H_
#define ORTC_ORTCFACTORY_H_
#include <memory>
#include <string>
#include "api/ortc/ortcfactoryinterface.h"
#include "media/base/mediaengine.h"
#include "media/engine/webrtcmediaengine.h"
#include "pc/channelmanager.h"
#include "rtc_base/constructormagic.h"
#include "rtc_base/scoped_ref_ptr.h"
namespace webrtc {
// Implementation of OrtcFactoryInterface.
//
// See ortcfactoryinterface.h for documentation.
class OrtcFactory : public OrtcFactoryInterface {
public:
~OrtcFactory() override;
// Internal-only Create method that allows passing in a fake media engine,
// for testing.
static RTCErrorOr<std::unique_ptr<OrtcFactoryInterface>> Create(
rtc::Thread* network_thread,
rtc::Thread* signaling_thread,
rtc::NetworkManager* network_manager,
rtc::PacketSocketFactory* socket_factory,
AudioDeviceModule* adm,
std::unique_ptr<cricket::MediaEngineInterface> media_engine,
rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory);
RTCErrorOr<std::unique_ptr<RtpTransportControllerInterface>>
CreateRtpTransportController() override;
RTCErrorOr<std::unique_ptr<RtpTransportInterface>> CreateRtpTransport(
const RtpTransportParameters& parameters,
PacketTransportInterface* rtp,
PacketTransportInterface* rtcp,
RtpTransportControllerInterface* transport_controller) override;
RTCErrorOr<std::unique_ptr<SrtpTransportInterface>> CreateSrtpTransport(
const RtpTransportParameters& parameters,
PacketTransportInterface* rtp,
PacketTransportInterface* rtcp,
RtpTransportControllerInterface* transport_controller) override;
RtpCapabilities GetRtpSenderCapabilities(
cricket::MediaType kind) const override;
RTCErrorOr<std::unique_ptr<OrtcRtpSenderInterface>> CreateRtpSender(
rtc::scoped_refptr<MediaStreamTrackInterface> track,
RtpTransportInterface* transport) override;
RTCErrorOr<std::unique_ptr<OrtcRtpSenderInterface>> CreateRtpSender(
cricket::MediaType kind,
RtpTransportInterface* transport) override;
RtpCapabilities GetRtpReceiverCapabilities(
cricket::MediaType kind) const override;
RTCErrorOr<std::unique_ptr<OrtcRtpReceiverInterface>> CreateRtpReceiver(
cricket::MediaType kind,
RtpTransportInterface* transport) override;
RTCErrorOr<std::unique_ptr<UdpTransportInterface>>
CreateUdpTransport(int family, uint16_t min_port, uint16_t max_port) override;
rtc::scoped_refptr<AudioSourceInterface> CreateAudioSource(
const cricket::AudioOptions& options) override;
rtc::scoped_refptr<VideoTrackInterface> CreateVideoTrack(
const std::string& id,
VideoTrackSourceInterface* source) override;
rtc::scoped_refptr<AudioTrackInterface> CreateAudioTrack(
const std::string& id,
AudioSourceInterface* source) override;
rtc::Thread* network_thread() { return network_thread_; }
rtc::Thread* worker_thread() { return worker_thread_.get(); }
rtc::Thread* signaling_thread() { return signaling_thread_; }
private:
// Should only be called by OrtcFactoryInterface::Create.
OrtcFactory(rtc::Thread* network_thread,
rtc::Thread* signaling_thread,
rtc::NetworkManager* network_manager,
rtc::PacketSocketFactory* socket_factory,
AudioDeviceModule* adm,
rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory);
RTCErrorOr<std::unique_ptr<RtpTransportControllerInterface>>
CreateRtpTransportController(const RtpTransportParameters& parameters);
// Thread::Invoke doesn't support move-only arguments, so we need to remove
// the unique_ptr wrapper from media_engine. TODO(deadbeef): Fix this.
static RTCErrorOr<std::unique_ptr<OrtcFactoryInterface>> Create_s(
rtc::Thread* network_thread,
rtc::Thread* signaling_thread,
rtc::NetworkManager* network_manager,
rtc::PacketSocketFactory* socket_factory,
AudioDeviceModule* adm,
cricket::MediaEngineInterface* media_engine,
rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory,
rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory);
// Performs initialization that can fail. Called by factory method after
// construction, and if it fails, no object is returned.
RTCError Initialize(
std::unique_ptr<cricket::MediaEngineInterface> media_engine);
std::unique_ptr<cricket::MediaEngineInterface> CreateMediaEngine_w();
// Threads and networking objects.
rtc::Thread* network_thread_;
rtc::Thread* signaling_thread_;
rtc::NetworkManager* network_manager_;
rtc::PacketSocketFactory* socket_factory_;
AudioDeviceModule* adm_;
// If we created/own the objects above, these will be non-null and thus will
// be released automatically upon destruction.
std::unique_ptr<rtc::Thread> owned_network_thread_;
bool wraps_signaling_thread_ = false;
std::unique_ptr<rtc::NetworkManager> owned_network_manager_;
std::unique_ptr<rtc::PacketSocketFactory> owned_socket_factory_;
// We always own the worker thread.
std::unique_ptr<rtc::Thread> worker_thread_;
// Media-related objects.
std::unique_ptr<RtcEventLog> null_event_log_;
rtc::scoped_refptr<AudioEncoderFactory> audio_encoder_factory_;
rtc::scoped_refptr<AudioDecoderFactory> audio_decoder_factory_;
std::unique_ptr<cricket::ChannelManager> channel_manager_;
// Default CNAME to use for RtpTransports if none is passed in.
std::string default_cname_;
friend class OrtcFactoryInterface;
RTC_DISALLOW_COPY_AND_ASSIGN(OrtcFactory);
};
} // namespace webrtc
#endif // ORTC_ORTCFACTORY_H_

ortc/ortcfactory_integrationtest.cc

@@ -1,724 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <memory>
#include <utility> // For std::pair, std::move.
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "api/ortc/ortcfactoryinterface.h"
#include "ortc/testrtpparameters.h"
#include "p2p/base/udptransport.h"
#include "pc/test/fakeaudiocapturemodule.h"
#include "pc/test/fakeperiodicvideotracksource.h"
#include "pc/test/fakevideotrackrenderer.h"
#include "pc/videotracksource.h"
#include "rtc_base/criticalsection.h"
#include "rtc_base/fakenetwork.h"
#include "rtc_base/gunit.h"
#include "rtc_base/system/arch.h"
#include "rtc_base/timeutils.h"
#include "rtc_base/virtualsocketserver.h"
namespace {
const int kDefaultTimeout = 10000; // 10 seconds.
const int kReceivingDuration = 1000; // 1 second.
// Default number of audio/video frames to wait for before considering a test a
// success.
const int kDefaultNumFrames = 3;
const rtc::IPAddress kIPv4LocalHostAddress =
rtc::IPAddress(0x7F000001); // 127.0.0.1
static const char kTestKeyParams1[] =
"inline:WVNfX19zZW1jdGwgKskgewkyMjA7fQp9CnVubGVz";
static const char kTestKeyParams2[] =
"inline:PS1uQCVeeCFCanVmcjkpaywjNWhcYD0mXXtxaVBR";
static const char kTestKeyParams3[] =
"inline:WVNfX19zZW1jdGwgKskgewkyMjA7fQp9CnVubGVa";
static const char kTestKeyParams4[] =
"inline:WVNfX19zZW1jdGwgKskgewkyMjA7fQp9CnVubGVb";
static const cricket::CryptoParams kTestCryptoParams1(1,
"AES_CM_128_HMAC_SHA1_80",
kTestKeyParams1,
"");
static const cricket::CryptoParams kTestCryptoParams2(1,
"AES_CM_128_HMAC_SHA1_80",
kTestKeyParams2,
"");
static const cricket::CryptoParams kTestCryptoParams3(1,
"AES_CM_128_HMAC_SHA1_80",
kTestKeyParams3,
"");
static const cricket::CryptoParams kTestCryptoParams4(1,
"AES_CM_128_HMAC_SHA1_80",
kTestKeyParams4,
"");
} // namespace
namespace webrtc {
// Used to test that things work end-to-end when using the default
// implementations of threads/etc. provided by OrtcFactory, with the exception
// of using a virtual network.
//
// By default, the virtual network manager doesn't enumerate any networks, but
// sockets can still be created in this state.
class OrtcFactoryIntegrationTest : public testing::Test {
public:
OrtcFactoryIntegrationTest()
: network_thread_(&virtual_socket_server_),
fake_audio_capture_module1_(FakeAudioCaptureModule::Create()),
fake_audio_capture_module2_(FakeAudioCaptureModule::Create()) {
// Sockets are bound to the ANY address, so this is needed to tell the
// virtual network which address to use in this case.
virtual_socket_server_.SetDefaultRoute(kIPv4LocalHostAddress);
network_thread_.SetName("TestNetworkThread", this);
network_thread_.Start();
// Need to create after network thread is started.
ortc_factory1_ =
OrtcFactoryInterface::Create(
&network_thread_, nullptr, &fake_network_manager_, nullptr,
fake_audio_capture_module1_, CreateBuiltinAudioEncoderFactory(),
CreateBuiltinAudioDecoderFactory())
.MoveValue();
ortc_factory2_ =
OrtcFactoryInterface::Create(
&network_thread_, nullptr, &fake_network_manager_, nullptr,
fake_audio_capture_module2_, CreateBuiltinAudioEncoderFactory(),
CreateBuiltinAudioDecoderFactory())
.MoveValue();
}
protected:
typedef std::pair<std::unique_ptr<UdpTransportInterface>,
std::unique_ptr<UdpTransportInterface>>
UdpTransportPair;
typedef std::pair<std::unique_ptr<RtpTransportInterface>,
std::unique_ptr<RtpTransportInterface>>
RtpTransportPair;
typedef std::pair<std::unique_ptr<SrtpTransportInterface>,
std::unique_ptr<SrtpTransportInterface>>
SrtpTransportPair;
typedef std::pair<std::unique_ptr<RtpTransportControllerInterface>,
std::unique_ptr<RtpTransportControllerInterface>>
RtpTransportControllerPair;
// Helper function that creates one UDP transport each for |ortc_factory1_|
// and |ortc_factory2_|, and connects them.
UdpTransportPair CreateAndConnectUdpTransportPair() {
auto transport1 = ortc_factory1_->CreateUdpTransport(AF_INET).MoveValue();
auto transport2 = ortc_factory2_->CreateUdpTransport(AF_INET).MoveValue();
transport1->SetRemoteAddress(
rtc::SocketAddress(virtual_socket_server_.GetDefaultRoute(AF_INET),
transport2->GetLocalAddress().port()));
transport2->SetRemoteAddress(
rtc::SocketAddress(virtual_socket_server_.GetDefaultRoute(AF_INET),
transport1->GetLocalAddress().port()));
return {std::move(transport1), std::move(transport2)};
}
// Creates one transport controller each for |ortc_factory1_| and
// |ortc_factory2_|.
RtpTransportControllerPair CreateRtpTransportControllerPair() {
return {ortc_factory1_->CreateRtpTransportController().MoveValue(),
ortc_factory2_->CreateRtpTransportController().MoveValue()};
}
// Helper function that creates a pair of RtpTransports between
// |ortc_factory1_| and |ortc_factory2_|. Expected to be called with the
// result of CreateAndConnectUdpTransportPair. |rtcp_udp_transports| can be
// empty if RTCP muxing is used. |transport_controllers| can be empty if
// these transports are being created using a default transport controller.
RtpTransportPair CreateRtpTransportPair(
const RtpTransportParameters& parameters,
const UdpTransportPair& rtp_udp_transports,
const UdpTransportPair& rtcp_udp_transports,
const RtpTransportControllerPair& transport_controllers) {
auto transport_result1 = ortc_factory1_->CreateRtpTransport(
parameters, rtp_udp_transports.first.get(),
rtcp_udp_transports.first.get(), transport_controllers.first.get());
auto transport_result2 = ortc_factory2_->CreateRtpTransport(
parameters, rtp_udp_transports.second.get(),
rtcp_udp_transports.second.get(), transport_controllers.second.get());
return {transport_result1.MoveValue(), transport_result2.MoveValue()};
}
SrtpTransportPair CreateSrtpTransportPair(
const RtpTransportParameters& parameters,
const UdpTransportPair& rtp_udp_transports,
const UdpTransportPair& rtcp_udp_transports,
const RtpTransportControllerPair& transport_controllers) {
auto transport_result1 = ortc_factory1_->CreateSrtpTransport(
parameters, rtp_udp_transports.first.get(),
rtcp_udp_transports.first.get(), transport_controllers.first.get());
auto transport_result2 = ortc_factory2_->CreateSrtpTransport(
parameters, rtp_udp_transports.second.get(),
rtcp_udp_transports.second.get(), transport_controllers.second.get());
return {transport_result1.MoveValue(), transport_result2.MoveValue()};
}
// For convenience when |rtcp_udp_transports| and |transport_controllers|
// aren't needed.
RtpTransportPair CreateRtpTransportPair(
const RtpTransportParameters& parameters,
const UdpTransportPair& rtp_udp_transports) {
return CreateRtpTransportPair(parameters, rtp_udp_transports,
UdpTransportPair(),
RtpTransportControllerPair());
}
SrtpTransportPair CreateSrtpTransportPairAndSetKeys(
const RtpTransportParameters& parameters,
const UdpTransportPair& rtp_udp_transports) {
SrtpTransportPair srtp_transports = CreateSrtpTransportPair(
parameters, rtp_udp_transports, UdpTransportPair(),
RtpTransportControllerPair());
EXPECT_TRUE(srtp_transports.first->SetSrtpSendKey(kTestCryptoParams1).ok());
EXPECT_TRUE(
srtp_transports.first->SetSrtpReceiveKey(kTestCryptoParams2).ok());
EXPECT_TRUE(
srtp_transports.second->SetSrtpSendKey(kTestCryptoParams2).ok());
EXPECT_TRUE(
srtp_transports.second->SetSrtpReceiveKey(kTestCryptoParams1).ok());
return srtp_transports;
}
SrtpTransportPair CreateSrtpTransportPairAndSetMismatchingKeys(
const RtpTransportParameters& parameters,
const UdpTransportPair& rtp_udp_transports) {
SrtpTransportPair srtp_transports = CreateSrtpTransportPair(
parameters, rtp_udp_transports, UdpTransportPair(),
RtpTransportControllerPair());
EXPECT_TRUE(srtp_transports.first->SetSrtpSendKey(kTestCryptoParams1).ok());
EXPECT_TRUE(
srtp_transports.first->SetSrtpReceiveKey(kTestCryptoParams2).ok());
EXPECT_TRUE(
srtp_transports.second->SetSrtpSendKey(kTestCryptoParams1).ok());
EXPECT_TRUE(
srtp_transports.second->SetSrtpReceiveKey(kTestCryptoParams2).ok());
return srtp_transports;
}
// Ends up using fake audio capture module, which was passed into OrtcFactory
// on creation.
rtc::scoped_refptr<webrtc::AudioTrackInterface> CreateLocalAudioTrack(
const std::string& id,
OrtcFactoryInterface* ortc_factory) {
// Disable echo cancellation to make test more efficient.
cricket::AudioOptions options;
options.echo_cancellation.emplace(false);
rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
ortc_factory->CreateAudioSource(options);
return ortc_factory->CreateAudioTrack(id, source);
}
// Stores created video source in |fake_video_track_sources_|.
rtc::scoped_refptr<webrtc::VideoTrackInterface>
CreateLocalVideoTrackAndFakeSource(const std::string& id,
OrtcFactoryInterface* ortc_factory) {
FakePeriodicVideoSource::Config config;
config.timestamp_offset_ms = rtc::TimeMillis();
fake_video_track_sources_.emplace_back(
new rtc::RefCountedObject<FakePeriodicVideoTrackSource>(
config, false /* remote */));
return rtc::scoped_refptr<VideoTrackInterface>(
ortc_factory->CreateVideoTrack(id, fake_video_track_sources_.back()));
}
// Helper function used to test two way RTP senders and receivers with basic
// configurations.
// If |expect_success| is true, waits for kDefaultTimeout for
// kDefaultNumFrames frames to be received by all RtpReceivers.
// If |expect_success| is false, simply waits for |kReceivingDuration|, and
// stores the number of received frames in |received_audio_frame1_| etc.
void BasicTwoWayRtpSendersAndReceiversTest(RtpTransportPair srtp_transports,
bool expect_success) {
received_audio_frames1_ = 0;
received_audio_frames2_ = 0;
rendered_video_frames1_ = 0;
rendered_video_frames2_ = 0;
// Create all the senders and receivers (four per endpoint).
auto audio_sender_result1 = ortc_factory1_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, srtp_transports.first.get());
auto video_sender_result1 = ortc_factory1_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, srtp_transports.first.get());
auto audio_receiver_result1 = ortc_factory1_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, srtp_transports.first.get());
auto video_receiver_result1 = ortc_factory1_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, srtp_transports.first.get());
ASSERT_TRUE(audio_sender_result1.ok());
ASSERT_TRUE(video_sender_result1.ok());
ASSERT_TRUE(audio_receiver_result1.ok());
ASSERT_TRUE(video_receiver_result1.ok());
auto audio_sender1 = audio_sender_result1.MoveValue();
auto video_sender1 = video_sender_result1.MoveValue();
auto audio_receiver1 = audio_receiver_result1.MoveValue();
auto video_receiver1 = video_receiver_result1.MoveValue();
auto audio_sender_result2 = ortc_factory2_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, srtp_transports.second.get());
auto video_sender_result2 = ortc_factory2_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, srtp_transports.second.get());
auto audio_receiver_result2 = ortc_factory2_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, srtp_transports.second.get());
auto video_receiver_result2 = ortc_factory2_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, srtp_transports.second.get());
ASSERT_TRUE(audio_sender_result2.ok());
ASSERT_TRUE(video_sender_result2.ok());
ASSERT_TRUE(audio_receiver_result2.ok());
ASSERT_TRUE(video_receiver_result2.ok());
auto audio_sender2 = audio_sender_result2.MoveValue();
auto video_sender2 = video_sender_result2.MoveValue();
auto audio_receiver2 = audio_receiver_result2.MoveValue();
auto video_receiver2 = video_receiver_result2.MoveValue();
// Add fake tracks.
RTCError error = audio_sender1->SetTrack(
CreateLocalAudioTrack("audio", ortc_factory1_.get()));
EXPECT_TRUE(error.ok());
error = video_sender1->SetTrack(
CreateLocalVideoTrackAndFakeSource("video", ortc_factory1_.get()));
EXPECT_TRUE(error.ok());
error = audio_sender2->SetTrack(
CreateLocalAudioTrack("audio", ortc_factory2_.get()));
EXPECT_TRUE(error.ok());
error = video_sender2->SetTrack(
CreateLocalVideoTrackAndFakeSource("video", ortc_factory2_.get()));
EXPECT_TRUE(error.ok());
// "sent_X_parameters1" are the parameters that endpoint 1 sends with and
// endpoint 2 receives with.
RtpParameters sent_opus_parameters1 =
MakeMinimalOpusParametersWithSsrc(0xdeadbeef);
RtpParameters sent_vp8_parameters1 =
MakeMinimalVp8ParametersWithSsrc(0xbaadfeed);
RtpParameters sent_opus_parameters2 =
MakeMinimalOpusParametersWithSsrc(0x13333337);
RtpParameters sent_vp8_parameters2 =
MakeMinimalVp8ParametersWithSsrc(0x12345678);
// Configure the senders' and receivers' parameters.
EXPECT_TRUE(audio_receiver1->Receive(sent_opus_parameters2).ok());
EXPECT_TRUE(video_receiver1->Receive(sent_vp8_parameters2).ok());
EXPECT_TRUE(audio_receiver2->Receive(sent_opus_parameters1).ok());
EXPECT_TRUE(video_receiver2->Receive(sent_vp8_parameters1).ok());
EXPECT_TRUE(audio_sender1->Send(sent_opus_parameters1).ok());
EXPECT_TRUE(video_sender1->Send(sent_vp8_parameters1).ok());
EXPECT_TRUE(audio_sender2->Send(sent_opus_parameters2).ok());
EXPECT_TRUE(video_sender2->Send(sent_vp8_parameters2).ok());
FakeVideoTrackRenderer fake_video_renderer1(
static_cast<VideoTrackInterface*>(video_receiver1->GetTrack().get()));
FakeVideoTrackRenderer fake_video_renderer2(
static_cast<VideoTrackInterface*>(video_receiver2->GetTrack().get()));
if (expect_success) {
EXPECT_TRUE_WAIT(
fake_audio_capture_module1_->frames_received() > kDefaultNumFrames &&
fake_video_renderer1.num_rendered_frames() > kDefaultNumFrames &&
fake_audio_capture_module2_->frames_received() >
kDefaultNumFrames &&
fake_video_renderer2.num_rendered_frames() > kDefaultNumFrames,
kDefaultTimeout)
<< "Audio capture module 1 received "
<< fake_audio_capture_module1_->frames_received()
<< " frames, Video renderer 1 rendered "
<< fake_video_renderer1.num_rendered_frames()
<< " frames, Audio capture module 2 received "
<< fake_audio_capture_module2_->frames_received()
<< " frames, Video renderer 2 rendered "
<< fake_video_renderer2.num_rendered_frames() << " frames.";
} else {
WAIT(false, kReceivingDuration);
rendered_video_frames1_ = fake_video_renderer1.num_rendered_frames();
rendered_video_frames2_ = fake_video_renderer2.num_rendered_frames();
received_audio_frames1_ = fake_audio_capture_module1_->frames_received();
received_audio_frames2_ = fake_audio_capture_module2_->frames_received();
}
}
rtc::VirtualSocketServer virtual_socket_server_;
rtc::Thread network_thread_;
rtc::FakeNetworkManager fake_network_manager_;
rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module1_;
rtc::scoped_refptr<FakeAudioCaptureModule> fake_audio_capture_module2_;
std::unique_ptr<OrtcFactoryInterface> ortc_factory1_;
std::unique_ptr<OrtcFactoryInterface> ortc_factory2_;
std::vector<rtc::scoped_refptr<VideoTrackSource>> fake_video_track_sources_;
int received_audio_frames1_ = 0;
int received_audio_frames2_ = 0;
int rendered_video_frames1_ = 0;
int rendered_video_frames2_ = 0;
};
// Disable for TSan v2, see
// https://bugs.chromium.org/p/webrtc/issues/detail?id=7366 for details.
#if !defined(THREAD_SANITIZER)
// Very basic end-to-end test with a single pair of audio RTP sender and
// receiver.
//
// Uses muxed RTCP, and minimal parameters with a hard-coded config that's
// known to work.
TEST_F(OrtcFactoryIntegrationTest, BasicOneWayAudioRtpSenderAndReceiver) {
auto udp_transports = CreateAndConnectUdpTransportPair();
auto rtp_transports =
CreateRtpTransportPair(MakeRtcpMuxParameters(), udp_transports);
auto sender_result = ortc_factory1_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, rtp_transports.first.get());
auto receiver_result = ortc_factory2_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, rtp_transports.second.get());
ASSERT_TRUE(sender_result.ok());
ASSERT_TRUE(receiver_result.ok());
auto sender = sender_result.MoveValue();
auto receiver = receiver_result.MoveValue();
RTCError error =
sender->SetTrack(CreateLocalAudioTrack("audio", ortc_factory1_.get()));
EXPECT_TRUE(error.ok());
RtpParameters opus_parameters = MakeMinimalOpusParameters();
EXPECT_TRUE(receiver->Receive(opus_parameters).ok());
EXPECT_TRUE(sender->Send(opus_parameters).ok());
// Sender and receiver are connected and configured; audio frames should be
// able to flow at this point.
EXPECT_TRUE_WAIT(
fake_audio_capture_module2_->frames_received() > kDefaultNumFrames,
kDefaultTimeout);
}
// Very basic end-to-end test with a single pair of video RTP sender and
// receiver.
//
// Uses muxed RTCP, and minimal parameters with a hard-coded config that's
// known to work.
TEST_F(OrtcFactoryIntegrationTest, BasicOneWayVideoRtpSenderAndReceiver) {
auto udp_transports = CreateAndConnectUdpTransportPair();
auto rtp_transports =
CreateRtpTransportPair(MakeRtcpMuxParameters(), udp_transports);
auto sender_result = ortc_factory1_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, rtp_transports.first.get());
auto receiver_result = ortc_factory2_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, rtp_transports.second.get());
ASSERT_TRUE(sender_result.ok());
ASSERT_TRUE(receiver_result.ok());
auto sender = sender_result.MoveValue();
auto receiver = receiver_result.MoveValue();
RTCError error = sender->SetTrack(
CreateLocalVideoTrackAndFakeSource("video", ortc_factory1_.get()));
EXPECT_TRUE(error.ok());
RtpParameters vp8_parameters = MakeMinimalVp8Parameters();
EXPECT_TRUE(receiver->Receive(vp8_parameters).ok());
EXPECT_TRUE(sender->Send(vp8_parameters).ok());
FakeVideoTrackRenderer fake_renderer(
static_cast<VideoTrackInterface*>(receiver->GetTrack().get()));
// Sender and receiver are connected and configured; video frames should be
// able to flow at this point.
EXPECT_TRUE_WAIT(fake_renderer.num_rendered_frames() > kDefaultNumFrames,
kDefaultTimeout);
}
// Test that if the track is changed while sending, the sender seamlessly
// transitions to sending it and frames are received end-to-end.
//
// Only doing this for video, since given that audio is sourced from a single
// fake audio capture module, the audio track is just a dummy object.
// TODO(deadbeef): Change this when possible.
TEST_F(OrtcFactoryIntegrationTest, SetTrackWhileSending) {
auto udp_transports = CreateAndConnectUdpTransportPair();
auto rtp_transports =
CreateRtpTransportPair(MakeRtcpMuxParameters(), udp_transports);
auto sender_result = ortc_factory1_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, rtp_transports.first.get());
auto receiver_result = ortc_factory2_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, rtp_transports.second.get());
ASSERT_TRUE(sender_result.ok());
ASSERT_TRUE(receiver_result.ok());
auto sender = sender_result.MoveValue();
auto receiver = receiver_result.MoveValue();
RTCError error = sender->SetTrack(
CreateLocalVideoTrackAndFakeSource("video_1", ortc_factory1_.get()));
EXPECT_TRUE(error.ok());
RtpParameters vp8_parameters = MakeMinimalVp8Parameters();
EXPECT_TRUE(receiver->Receive(vp8_parameters).ok());
EXPECT_TRUE(sender->Send(vp8_parameters).ok());
FakeVideoTrackRenderer fake_renderer(
static_cast<VideoTrackInterface*>(receiver->GetTrack().get()));
// Expect for some initial number of frames to be received.
EXPECT_TRUE_WAIT(fake_renderer.num_rendered_frames() > kDefaultNumFrames,
kDefaultTimeout);
// Destroy old source, set a new track, and verify new frames are received
// from the new track. The VideoTrackSource is reference counted and may live
// a little longer, so tell it that its source is going away now.
fake_video_track_sources_[0] = nullptr;
int prev_num_frames = fake_renderer.num_rendered_frames();
error = sender->SetTrack(
CreateLocalVideoTrackAndFakeSource("video_2", ortc_factory1_.get()));
EXPECT_TRUE(error.ok());
EXPECT_TRUE_WAIT(
fake_renderer.num_rendered_frames() > kDefaultNumFrames + prev_num_frames,
kDefaultTimeout);
}
// TODO(webrtc:7915, webrtc:9184): Tests below are disabled on 64-bit iOS
// debug builds because of flakiness.
#if !(defined(WEBRTC_IOS) && defined(WEBRTC_ARCH_64_BITS) && !defined(NDEBUG))
#define MAYBE_BasicTwoWayAudioVideoRtpSendersAndReceivers \
BasicTwoWayAudioVideoRtpSendersAndReceivers
#define MAYBE_BasicTwoWayAudioVideoSrtpSendersAndReceivers \
BasicTwoWayAudioVideoSrtpSendersAndReceivers
#define MAYBE_SrtpSendersAndReceiversWithMismatchingKeys \
SrtpSendersAndReceiversWithMismatchingKeys
#define MAYBE_OneSideSrtpSenderAndReceiver OneSideSrtpSenderAndReceiver
#define MAYBE_FullTwoWayAudioVideoSrtpSendersAndReceivers \
FullTwoWayAudioVideoSrtpSendersAndReceivers
#else
#define MAYBE_BasicTwoWayAudioVideoRtpSendersAndReceivers \
DISABLED_BasicTwoWayAudioVideoRtpSendersAndReceivers
#define MAYBE_BasicTwoWayAudioVideoSrtpSendersAndReceivers \
DISABLED_BasicTwoWayAudioVideoSrtpSendersAndReceivers
#define MAYBE_SrtpSendersAndReceiversWithMismatchingKeys \
DISABLED_SrtpSendersAndReceiversWithMismatchingKeys
#define MAYBE_OneSideSrtpSenderAndReceiver DISABLED_OneSideSrtpSenderAndReceiver
#define MAYBE_FullTwoWayAudioVideoSrtpSendersAndReceivers \
DISABLED_FullTwoWayAudioVideoSrtpSendersAndReceivers
#endif
// End-to-end test with two pairs of RTP senders and receivers, for audio and
// video.
//
// Uses muxed RTCP, and minimal parameters with hard-coded configs that are
// known to work.
TEST_F(OrtcFactoryIntegrationTest,
MAYBE_BasicTwoWayAudioVideoRtpSendersAndReceivers) {
auto udp_transports = CreateAndConnectUdpTransportPair();
auto rtp_transports =
CreateRtpTransportPair(MakeRtcpMuxParameters(), udp_transports);
bool expect_success = true;
BasicTwoWayRtpSendersAndReceiversTest(std::move(rtp_transports),
expect_success);
}
TEST_F(OrtcFactoryIntegrationTest,
MAYBE_BasicTwoWayAudioVideoSrtpSendersAndReceivers) {
auto udp_transports = CreateAndConnectUdpTransportPair();
auto srtp_transports = CreateSrtpTransportPairAndSetKeys(
MakeRtcpMuxParameters(), udp_transports);
bool expect_success = true;
BasicTwoWayRtpSendersAndReceiversTest(std::move(srtp_transports),
expect_success);
}
// Tests that the packets cannot be decoded if the keys are mismatched.
// TODO(webrtc:9184): Disabled because this test is flaky.
TEST_F(OrtcFactoryIntegrationTest,
MAYBE_SrtpSendersAndReceiversWithMismatchingKeys) {
auto udp_transports = CreateAndConnectUdpTransportPair();
auto srtp_transports = CreateSrtpTransportPairAndSetMismatchingKeys(
MakeRtcpMuxParameters(), udp_transports);
bool expect_success = false;
BasicTwoWayRtpSendersAndReceiversTest(std::move(srtp_transports),
expect_success);
// No frames are expected to be decoded.
EXPECT_TRUE(received_audio_frames1_ == 0 && received_audio_frames2_ == 0 &&
rendered_video_frames1_ == 0 && rendered_video_frames2_ == 0);
}
// Tests that the frames cannot be decoded if only one side uses SRTP.
TEST_F(OrtcFactoryIntegrationTest, MAYBE_OneSideSrtpSenderAndReceiver) {
auto rtcp_parameters = MakeRtcpMuxParameters();
auto udp_transports = CreateAndConnectUdpTransportPair();
auto rtcp_udp_transports = UdpTransportPair();
auto transport_controllers = RtpTransportControllerPair();
auto transport_result1 = ortc_factory1_->CreateRtpTransport(
rtcp_parameters, udp_transports.first.get(),
rtcp_udp_transports.first.get(), transport_controllers.first.get());
auto transport_result2 = ortc_factory2_->CreateSrtpTransport(
rtcp_parameters, udp_transports.second.get(),
rtcp_udp_transports.second.get(), transport_controllers.second.get());
auto rtp_transport = transport_result1.MoveValue();
auto srtp_transport = transport_result2.MoveValue();
EXPECT_TRUE(srtp_transport->SetSrtpSendKey(kTestCryptoParams1).ok());
EXPECT_TRUE(srtp_transport->SetSrtpReceiveKey(kTestCryptoParams2).ok());
bool expect_success = false;
BasicTwoWayRtpSendersAndReceiversTest(
{std::move(rtp_transport), std::move(srtp_transport)}, expect_success);
// The SRTP side is not expected to decode any audio or video frames.
// The RTP side is not expected to decode any video frames while it is
// possible that the encrypted audio frames can be accidentally decoded which
// is why received_audio_frames1_ is not validated.
EXPECT_TRUE(received_audio_frames2_ == 0 && rendered_video_frames1_ == 0 &&
rendered_video_frames2_ == 0);
}
// End-to-end test with two pairs of RTP senders and receivers, for audio and
// video. Unlike the test above, this attempts to make the parameters as
// complex as possible. The senders and receivers use the SRTP transport with
// different keys.
//
// Uses non-muxed RTCP, with separate audio/video transports, and a full set of
// parameters, as would normally be used in a PeerConnection.
//
// TODO(deadbeef): Update this test as more audio/video features become
// supported.
TEST_F(OrtcFactoryIntegrationTest,
MAYBE_FullTwoWayAudioVideoSrtpSendersAndReceivers) {
// We want four pairs of UDP transports for this test, for audio/video and
// RTP/RTCP.
auto audio_rtp_udp_transports = CreateAndConnectUdpTransportPair();
auto audio_rtcp_udp_transports = CreateAndConnectUdpTransportPair();
auto video_rtp_udp_transports = CreateAndConnectUdpTransportPair();
auto video_rtcp_udp_transports = CreateAndConnectUdpTransportPair();
// Since we have multiple RTP transports on each side, we need an RTP
// transport controller.
auto transport_controllers = CreateRtpTransportControllerPair();
RtpTransportParameters audio_rtp_transport_parameters;
audio_rtp_transport_parameters.rtcp.mux = false;
auto audio_srtp_transports = CreateSrtpTransportPair(
audio_rtp_transport_parameters, audio_rtp_udp_transports,
audio_rtcp_udp_transports, transport_controllers);
RtpTransportParameters video_rtp_transport_parameters;
video_rtp_transport_parameters.rtcp.mux = false;
video_rtp_transport_parameters.rtcp.reduced_size = true;
auto video_srtp_transports = CreateSrtpTransportPair(
video_rtp_transport_parameters, video_rtp_udp_transports,
video_rtcp_udp_transports, transport_controllers);
// Set keys for SRTP transports.
audio_srtp_transports.first->SetSrtpSendKey(kTestCryptoParams1);
audio_srtp_transports.first->SetSrtpReceiveKey(kTestCryptoParams2);
video_srtp_transports.first->SetSrtpSendKey(kTestCryptoParams3);
video_srtp_transports.first->SetSrtpReceiveKey(kTestCryptoParams4);
audio_srtp_transports.second->SetSrtpSendKey(kTestCryptoParams2);
audio_srtp_transports.second->SetSrtpReceiveKey(kTestCryptoParams1);
video_srtp_transports.second->SetSrtpSendKey(kTestCryptoParams4);
video_srtp_transports.second->SetSrtpReceiveKey(kTestCryptoParams3);
// Create all the senders and receivers (four per endpoint).
auto audio_sender_result1 = ortc_factory1_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, audio_srtp_transports.first.get());
auto video_sender_result1 = ortc_factory1_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, video_srtp_transports.first.get());
auto audio_receiver_result1 = ortc_factory1_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, audio_srtp_transports.first.get());
auto video_receiver_result1 = ortc_factory1_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, video_srtp_transports.first.get());
ASSERT_TRUE(audio_sender_result1.ok());
ASSERT_TRUE(video_sender_result1.ok());
ASSERT_TRUE(audio_receiver_result1.ok());
ASSERT_TRUE(video_receiver_result1.ok());
auto audio_sender1 = audio_sender_result1.MoveValue();
auto video_sender1 = video_sender_result1.MoveValue();
auto audio_receiver1 = audio_receiver_result1.MoveValue();
auto video_receiver1 = video_receiver_result1.MoveValue();
auto audio_sender_result2 = ortc_factory2_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, audio_srtp_transports.second.get());
auto video_sender_result2 = ortc_factory2_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, video_srtp_transports.second.get());
auto audio_receiver_result2 = ortc_factory2_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, audio_srtp_transports.second.get());
auto video_receiver_result2 = ortc_factory2_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, video_srtp_transports.second.get());
ASSERT_TRUE(audio_sender_result2.ok());
ASSERT_TRUE(video_sender_result2.ok());
ASSERT_TRUE(audio_receiver_result2.ok());
ASSERT_TRUE(video_receiver_result2.ok());
auto audio_sender2 = audio_sender_result2.MoveValue();
auto video_sender2 = video_sender_result2.MoveValue();
auto audio_receiver2 = audio_receiver_result2.MoveValue();
auto video_receiver2 = video_receiver_result2.MoveValue();
RTCError error = audio_sender1->SetTrack(
CreateLocalAudioTrack("audio", ortc_factory1_.get()));
EXPECT_TRUE(error.ok());
error = video_sender1->SetTrack(
CreateLocalVideoTrackAndFakeSource("video", ortc_factory1_.get()));
EXPECT_TRUE(error.ok());
error = audio_sender2->SetTrack(
CreateLocalAudioTrack("audio", ortc_factory2_.get()));
EXPECT_TRUE(error.ok());
error = video_sender2->SetTrack(
CreateLocalVideoTrackAndFakeSource("video", ortc_factory2_.get()));
EXPECT_TRUE(error.ok());
// Use different codecs in different directions for extra challenge.
RtpParameters opus_send_parameters = MakeFullOpusParameters();
RtpParameters isac_send_parameters = MakeFullIsacParameters();
RtpParameters vp8_send_parameters = MakeFullVp8Parameters();
RtpParameters vp9_send_parameters = MakeFullVp9Parameters();
// Remove "payload_type" from receive parameters. Receiver will need to
// discern the payload type from packets received.
RtpParameters opus_receive_parameters = opus_send_parameters;
RtpParameters isac_receive_parameters = isac_send_parameters;
RtpParameters vp8_receive_parameters = vp8_send_parameters;
RtpParameters vp9_receive_parameters = vp9_send_parameters;
opus_receive_parameters.encodings[0].codec_payload_type.reset();
isac_receive_parameters.encodings[0].codec_payload_type.reset();
vp8_receive_parameters.encodings[0].codec_payload_type.reset();
vp9_receive_parameters.encodings[0].codec_payload_type.reset();
// Configure the senders' and receivers' parameters.
//
// Note: Intentionally, the top codec in the receive parameters does not
// match the codec sent by the other side. If "Receive" is called with a list
// of codecs, the receiver should be prepared to receive any of them, not
// just the one on top.
EXPECT_TRUE(audio_receiver1->Receive(opus_receive_parameters).ok());
EXPECT_TRUE(video_receiver1->Receive(vp8_receive_parameters).ok());
EXPECT_TRUE(audio_receiver2->Receive(isac_receive_parameters).ok());
EXPECT_TRUE(video_receiver2->Receive(vp9_receive_parameters).ok());
EXPECT_TRUE(audio_sender1->Send(opus_send_parameters).ok());
EXPECT_TRUE(video_sender1->Send(vp8_send_parameters).ok());
EXPECT_TRUE(audio_sender2->Send(isac_send_parameters).ok());
EXPECT_TRUE(video_sender2->Send(vp9_send_parameters).ok());
FakeVideoTrackRenderer fake_video_renderer1(
static_cast<VideoTrackInterface*>(video_receiver1->GetTrack().get()));
FakeVideoTrackRenderer fake_video_renderer2(
static_cast<VideoTrackInterface*>(video_receiver2->GetTrack().get()));
// Senders and receivers are connected and configured; audio and video frames
// should be able to flow at this point.
EXPECT_TRUE_WAIT(
fake_audio_capture_module1_->frames_received() > kDefaultNumFrames &&
fake_video_renderer1.num_rendered_frames() > kDefaultNumFrames &&
fake_audio_capture_module2_->frames_received() > kDefaultNumFrames &&
fake_video_renderer2.num_rendered_frames() > kDefaultNumFrames,
kDefaultTimeout);
}
// TODO(deadbeef): End-to-end test for multiple senders/receivers of the same
// media type, once that's supported. Currently, it is not because the
// BaseChannel model relies on there being a single VoiceChannel and
// VideoChannel, and these only support a single set of codecs/etc. per
// send/receive direction.
// TODO(deadbeef): End-to-end test for simulcast, once that's supported by this
// API.
#endif // if !defined(THREAD_SANITIZER)
} // namespace webrtc
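
Condensed, the happy path these integration tests exercise looks like the sketch below: create a transport controller, an SRTP transport over already-connected packet transports, then a sender with a track and parameters. This is an illustrative sketch only; |udp_rtp|, |udp_rtcp|, |send_key| and |recv_key| stand in for the fixture helpers (CreateAndConnectUdpTransportPair, kTestCryptoParams*), and the RTCErrorOr checks are collapsed into MoveValue() for brevity.

// Sketch of one endpoint's setup, mirroring the tests above (not verbatim).
auto controller = ortc_factory->CreateRtpTransportController().MoveValue();
RtpTransportParameters rtp_params;
rtp_params.rtcp.mux = false;  // Separate RTP/RTCP, as in the "Full" test.
auto srtp_transport =
    ortc_factory
        ->CreateSrtpTransport(rtp_params, udp_rtp.get(), udp_rtcp.get(),
                              controller.get())
        .MoveValue();
srtp_transport->SetSrtpSendKey(send_key);
srtp_transport->SetSrtpReceiveKey(recv_key);
auto sender =
    ortc_factory
        ->CreateRtpSender(cricket::MEDIA_TYPE_AUDIO, srtp_transport.get())
        .MoveValue();
sender->SetTrack(ortc_factory->CreateAudioTrack("audio", nullptr));
sender->Send(MakeFullOpusParameters());  // Helper from testrtpparameters.h.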

View File

@ -1,250 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <memory>
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "media/base/fakemediaengine.h"
#include "ortc/ortcfactory.h"
#include "ortc/testrtpparameters.h"
#include "p2p/base/fakepackettransport.h"
#include "rtc_base/fakenetwork.h"
#include "rtc_base/gunit.h"
#include "rtc_base/virtualsocketserver.h"
namespace webrtc {
// This test uses a virtual network and fake media engine, in order to test the
// OrtcFactory at only an API level. Any end-to-end test should go in
// ortcfactory_integrationtest.cc instead.
class OrtcFactoryTest : public testing::Test {
public:
OrtcFactoryTest()
: thread_(&virtual_socket_server_),
fake_packet_transport_("fake transport") {
ortc_factory_ =
OrtcFactory::Create(&thread_, nullptr, &fake_network_manager_, nullptr,
nullptr,
std::unique_ptr<cricket::MediaEngineInterface>(
new cricket::FakeMediaEngine()),
CreateBuiltinAudioEncoderFactory(),
CreateBuiltinAudioDecoderFactory())
.MoveValue();
}
protected:
// Uses a single pre-made FakePacketTransport, so shouldn't be called twice in
// the same test.
std::unique_ptr<RtpTransportInterface>
CreateRtpTransportWithFakePacketTransport() {
return ortc_factory_
->CreateRtpTransport(MakeRtcpMuxParameters(), &fake_packet_transport_,
nullptr, nullptr)
.MoveValue();
}
rtc::VirtualSocketServer virtual_socket_server_;
rtc::AutoSocketServerThread thread_;
rtc::FakeNetworkManager fake_network_manager_;
rtc::FakePacketTransport fake_packet_transport_;
std::unique_ptr<OrtcFactoryInterface> ortc_factory_;
};
TEST_F(OrtcFactoryTest, CanCreateMultipleRtpTransportControllers) {
auto controller_result1 = ortc_factory_->CreateRtpTransportController();
EXPECT_TRUE(controller_result1.ok());
auto controller_result2 = ortc_factory_->CreateRtpTransportController();
EXPECT_TRUE(controller_result2.ok());
}
// Simple test for the successful cases of CreateRtpTransport.
TEST_F(OrtcFactoryTest, CreateRtpTransportWithAndWithoutMux) {
rtc::FakePacketTransport rtp("rtp");
rtc::FakePacketTransport rtcp("rtcp");
// With muxed RTCP.
RtpTransportParameters parameters = MakeRtcpMuxParameters();
auto result =
ortc_factory_->CreateRtpTransport(parameters, &rtp, nullptr, nullptr);
EXPECT_TRUE(result.ok());
result.MoveValue().reset();
// With non-muxed RTCP.
parameters.rtcp.mux = false;
result = ortc_factory_->CreateRtpTransport(parameters, &rtp, &rtcp, nullptr);
EXPECT_TRUE(result.ok());
}
// Simple test for the successful cases of CreateSrtpTransport.
TEST_F(OrtcFactoryTest, CreateSrtpTransport) {
rtc::FakePacketTransport rtp("rtp");
rtc::FakePacketTransport rtcp("rtcp");
// With muxed RTCP.
RtpTransportParameters parameters = MakeRtcpMuxParameters();
auto result =
ortc_factory_->CreateSrtpTransport(parameters, &rtp, nullptr, nullptr);
EXPECT_TRUE(result.ok());
result.MoveValue().reset();
// With non-muxed RTCP.
parameters.rtcp.mux = false;
result = ortc_factory_->CreateSrtpTransport(parameters, &rtp, &rtcp, nullptr);
EXPECT_TRUE(result.ok());
}
// If no CNAME is provided, one should be generated and returned by
// GetRtpParameters.
TEST_F(OrtcFactoryTest, CreateRtpTransportGeneratesCname) {
rtc::FakePacketTransport rtp("rtp");
auto result = ortc_factory_->CreateRtpTransport(MakeRtcpMuxParameters(), &rtp,
nullptr, nullptr);
ASSERT_TRUE(result.ok());
EXPECT_FALSE(result.value()->GetParameters().rtcp.cname.empty());
}
// Extension of the above test; multiple transports created by the same factory
// should use the same generated CNAME.
TEST_F(OrtcFactoryTest, MultipleRtpTransportsUseSameGeneratedCname) {
rtc::FakePacketTransport packet_transport1("1");
rtc::FakePacketTransport packet_transport2("2");
RtpTransportParameters parameters = MakeRtcpMuxParameters();
// Sanity check.
ASSERT_TRUE(parameters.rtcp.cname.empty());
auto result = ortc_factory_->CreateRtpTransport(
parameters, &packet_transport1, nullptr, nullptr);
ASSERT_TRUE(result.ok());
auto rtp_transport1 = result.MoveValue();
result = ortc_factory_->CreateRtpTransport(parameters, &packet_transport2,
nullptr, nullptr);
ASSERT_TRUE(result.ok());
auto rtp_transport2 = result.MoveValue();
RtcpParameters params1 = rtp_transport1->GetParameters().rtcp;
RtcpParameters params2 = rtp_transport2->GetParameters().rtcp;
EXPECT_FALSE(params1.cname.empty());
EXPECT_EQ(params1.cname, params2.cname);
}
TEST_F(OrtcFactoryTest, CreateRtpTransportWithNoPacketTransport) {
auto result = ortc_factory_->CreateRtpTransport(MakeRtcpMuxParameters(),
nullptr, nullptr, nullptr);
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
}
// If the |mux| member of the RtcpParameters is false, both an RTP and RTCP
// packet transport are needed.
TEST_F(OrtcFactoryTest, CreateRtpTransportWithMissingRtcpTransport) {
rtc::FakePacketTransport rtp("rtp");
RtpTransportParameters parameters;
parameters.rtcp.mux = false;
auto result =
ortc_factory_->CreateRtpTransport(parameters, &rtp, nullptr, nullptr);
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
}
// If the |mux| member of the RtcpParameters is true, only an RTP packet
// transport is necessary. So, passing in an RTCP transport is most likely
// an accident, and thus should be treated as an error.
TEST_F(OrtcFactoryTest, CreateRtpTransportWithExtraneousRtcpTransport) {
rtc::FakePacketTransport rtp("rtp");
rtc::FakePacketTransport rtcp("rtcp");
auto result = ortc_factory_->CreateRtpTransport(MakeRtcpMuxParameters(), &rtp,
&rtcp, nullptr);
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
}
// Basic test that CreateUdpTransport works with AF_INET and AF_INET6.
TEST_F(OrtcFactoryTest, CreateUdpTransport) {
auto result = ortc_factory_->CreateUdpTransport(AF_INET);
EXPECT_TRUE(result.ok());
result = ortc_factory_->CreateUdpTransport(AF_INET6);
EXPECT_TRUE(result.ok());
}
// Test CreateUdpTransport with the |min_port| and |max_port| arguments.
TEST_F(OrtcFactoryTest, CreateUdpTransportWithPortRange) {
auto socket_result1 = ortc_factory_->CreateUdpTransport(AF_INET, 2000, 2002);
ASSERT_TRUE(socket_result1.ok());
EXPECT_EQ(2000, socket_result1.value()->GetLocalAddress().port());
auto socket_result2 = ortc_factory_->CreateUdpTransport(AF_INET, 2000, 2002);
ASSERT_TRUE(socket_result2.ok());
EXPECT_EQ(2001, socket_result2.value()->GetLocalAddress().port());
auto socket_result3 = ortc_factory_->CreateUdpTransport(AF_INET, 2000, 2002);
ASSERT_TRUE(socket_result3.ok());
EXPECT_EQ(2002, socket_result3.value()->GetLocalAddress().port());
// All sockets in the range have been exhausted, so the next call should
// fail.
auto failed_result = ortc_factory_->CreateUdpTransport(AF_INET, 2000, 2002);
EXPECT_EQ(RTCErrorType::RESOURCE_EXHAUSTED, failed_result.error().type());
// If one socket is destroyed, that port should be freed up again.
socket_result2.MoveValue().reset();
auto socket_result4 = ortc_factory_->CreateUdpTransport(AF_INET, 2000, 2002);
ASSERT_TRUE(socket_result4.ok());
EXPECT_EQ(2001, socket_result4.value()->GetLocalAddress().port());
}
// Test that CreateUdpTransport fails when given an invalid address family.
TEST_F(OrtcFactoryTest, CreateUdpTransportWithInvalidAddressFamily) {
auto result = ortc_factory_->CreateUdpTransport(12345);
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
}
TEST_F(OrtcFactoryTest, CreateUdpTransportWithInvalidPortRange) {
auto result = ortc_factory_->CreateUdpTransport(AF_INET, 3000, 2000);
EXPECT_EQ(RTCErrorType::INVALID_RANGE, result.error().type());
}
// Just sanity check that each "GetCapabilities" method returns some codecs.
TEST_F(OrtcFactoryTest, GetSenderAndReceiverCapabilities) {
RtpCapabilities audio_send_caps =
ortc_factory_->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_AUDIO);
EXPECT_GT(audio_send_caps.codecs.size(), 0u);
RtpCapabilities video_send_caps =
ortc_factory_->GetRtpSenderCapabilities(cricket::MEDIA_TYPE_VIDEO);
EXPECT_GT(video_send_caps.codecs.size(), 0u);
RtpCapabilities audio_receive_caps =
ortc_factory_->GetRtpReceiverCapabilities(cricket::MEDIA_TYPE_AUDIO);
EXPECT_GT(audio_receive_caps.codecs.size(), 0u);
RtpCapabilities video_receive_caps =
ortc_factory_->GetRtpReceiverCapabilities(cricket::MEDIA_TYPE_VIDEO);
EXPECT_GT(video_receive_caps.codecs.size(), 0u);
}
// Calling CreateRtpSender with a null track should fail, since that makes it
// impossible to know whether to create an audio or video sender. The
// application should be using the method that takes a cricket::MediaType
// instead.
TEST_F(OrtcFactoryTest, CreateSenderWithNullTrack) {
auto rtp_transport = CreateRtpTransportWithFakePacketTransport();
auto result = ortc_factory_->CreateRtpSender(nullptr, rtp_transport.get());
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, result.error().type());
}
// Calling CreateRtpSender or CreateRtpReceiver with MEDIA_TYPE_DATA should
// fail.
TEST_F(OrtcFactoryTest, CreateSenderOrReceiverWithInvalidKind) {
auto rtp_transport = CreateRtpTransportWithFakePacketTransport();
auto sender_result = ortc_factory_->CreateRtpSender(cricket::MEDIA_TYPE_DATA,
rtp_transport.get());
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, sender_result.error().type());
auto receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_DATA, rtp_transport.get());
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, receiver_result.error().type());
}
TEST_F(OrtcFactoryTest, CreateSendersOrReceiversWithNullTransport) {
auto sender_result =
ortc_factory_->CreateRtpSender(cricket::MEDIA_TYPE_AUDIO, nullptr);
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, sender_result.error().type());
auto receiver_result =
ortc_factory_->CreateRtpReceiver(cricket::MEDIA_TYPE_AUDIO, nullptr);
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, receiver_result.error().type());
}
} // namespace webrtc
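
All of these unit tests lean on the same RTCErrorOr calling convention: check ok(), then either MoveValue() the result or inspect error().type(). A minimal sketch of that pattern, assuming a factory like the fixture's |ortc_factory_|:

auto result = ortc_factory_->CreateUdpTransport(AF_INET, 2000, 2002);
if (result.ok()) {
  auto udp = result.MoveValue();              // Unique ownership moves out.
  int port = udp->GetLocalAddress().port();   // 2000-2002 on success.
  (void)port;
} else {
  // e.g. RTCErrorType::INVALID_RANGE or RESOURCE_EXHAUSTED, as tested above.
  RTCErrorType reason = result.error().type();
  (void)reason;
}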

View File

@ -1,550 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <memory>
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "media/base/fakemediaengine.h"
#include "ortc/ortcfactory.h"
#include "ortc/testrtpparameters.h"
#include "p2p/base/fakepackettransport.h"
#include "pc/test/fakevideotracksource.h"
#include "rtc_base/gunit.h"
namespace webrtc {
// This test uses an individual RtpReceiver using only the public interface,
// and verifies that it behaves as designed at an API level. Also tests that
// parameters are applied to the audio/video engines as expected. Network and
// media interfaces are faked to isolate what's being tested.
//
// This test shouldn't result in any actual media being sent. That sort of
// test should go in ortcfactory_integrationtest.cc.
class OrtcRtpReceiverTest : public testing::Test {
public:
OrtcRtpReceiverTest() : fake_packet_transport_("fake") {
fake_media_engine_ = new cricket::FakeMediaEngine();
// Note: This doesn't need to use fake network classes, since we already
// use FakePacketTransport.
auto ortc_factory_result = OrtcFactory::Create(
nullptr, nullptr, nullptr, nullptr, nullptr,
std::unique_ptr<cricket::MediaEngineInterface>(fake_media_engine_),
CreateBuiltinAudioEncoderFactory(), CreateBuiltinAudioDecoderFactory());
ortc_factory_ = ortc_factory_result.MoveValue();
RtpTransportParameters parameters;
parameters.rtcp.mux = true;
auto rtp_transport_result = ortc_factory_->CreateRtpTransport(
parameters, &fake_packet_transport_, nullptr, nullptr);
rtp_transport_ = rtp_transport_result.MoveValue();
}
protected:
// Owned by |ortc_factory_|.
cricket::FakeMediaEngine* fake_media_engine_;
rtc::FakePacketTransport fake_packet_transport_;
std::unique_ptr<OrtcFactoryInterface> ortc_factory_;
std::unique_ptr<RtpTransportInterface> rtp_transport_;
};
// See ortcrtpreceiverinterface.h for the current expectations of what GetTrack
// will return after calls to Receive.
// TODO(deadbeef): Replace this test when the non-standard behavior is fixed
// and GetTrack starts returning the same track for the lifetime of the
// receiver.
TEST_F(OrtcRtpReceiverTest, GetTrack) {
auto receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
ASSERT_TRUE(receiver_result.ok());
auto receiver = receiver_result.MoveValue();
// Track initially expected to be null.
EXPECT_EQ(nullptr, receiver->GetTrack());
EXPECT_TRUE(receiver->Receive(MakeMinimalVp8ParametersWithNoSsrc()).ok());
auto initial_track = receiver->GetTrack();
EXPECT_NE(nullptr, initial_track);
// Codec is changing but SSRC (or lack thereof) isn't; shouldn't create a new
// track.
EXPECT_TRUE(receiver->Receive(MakeMinimalVp9ParametersWithNoSsrc()).ok());
EXPECT_EQ(initial_track, receiver->GetTrack());
// Explicitly set SSRC and expect a different track.
EXPECT_TRUE(
receiver->Receive(MakeMinimalVp9ParametersWithSsrc(0xdeadbeef)).ok());
auto next_track = receiver->GetTrack();
EXPECT_NE(next_track, initial_track);
// Deactivating the encoding shouldn't change the track.
RtpParameters inactive_encoding =
MakeMinimalVp9ParametersWithSsrc(0xdeadbeef);
inactive_encoding.encodings[0].active = false;
EXPECT_TRUE(receiver->Receive(inactive_encoding).ok());
EXPECT_EQ(next_track, receiver->GetTrack());
// Removing all encodings *is* expected to clear the track.
RtpParameters no_encodings = MakeMinimalVp9ParametersWithSsrc(0xdeadbeef);
no_encodings.encodings.clear();
EXPECT_TRUE(receiver->Receive(no_encodings).ok());
EXPECT_EQ(nullptr, receiver->GetTrack());
}
// Currently SetTransport isn't supported. When it is, replace this test with a
// test/tests for it.
TEST_F(OrtcRtpReceiverTest, SetTransportFails) {
rtc::FakePacketTransport fake_packet_transport("another_transport");
RtpTransportParameters parameters;
parameters.rtcp.mux = true;
auto rtp_transport_result = ortc_factory_->CreateRtpTransport(
parameters, &fake_packet_transport, nullptr, nullptr);
auto rtp_transport = rtp_transport_result.MoveValue();
auto receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto receiver = receiver_result.MoveValue();
EXPECT_EQ(RTCErrorType::UNSUPPORTED_OPERATION,
receiver->SetTransport(rtp_transport.get()).type());
}
TEST_F(OrtcRtpReceiverTest, GetTransport) {
auto result = ortc_factory_->CreateRtpReceiver(cricket::MEDIA_TYPE_AUDIO,
rtp_transport_.get());
EXPECT_EQ(rtp_transport_.get(), result.value()->GetTransport());
}
// Test that "Receive" causes the expected parameters to be applied to the media
// engine level, for an audio receiver.
TEST_F(OrtcRtpReceiverTest, ReceiveAppliesAudioParametersToMediaEngine) {
auto audio_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto audio_receiver = audio_receiver_result.MoveValue();
// First, create parameters with all the bells and whistles.
RtpParameters parameters;
RtpCodecParameters opus_codec;
opus_codec.name = "opus";
opus_codec.kind = cricket::MEDIA_TYPE_AUDIO;
opus_codec.payload_type = 120;
opus_codec.clock_rate.emplace(48000);
opus_codec.num_channels.emplace(2);
opus_codec.parameters["minptime"] = "10";
opus_codec.rtcp_feedback.emplace_back(RtcpFeedbackType::TRANSPORT_CC);
parameters.codecs.push_back(std::move(opus_codec));
// Add two codecs, expecting the first to be used.
// TODO(deadbeef): Once "codec_payload_type" is supported, use it to select a
// codec that's not at the top of the list.
RtpCodecParameters isac_codec;
isac_codec.name = "ISAC";
isac_codec.kind = cricket::MEDIA_TYPE_AUDIO;
isac_codec.payload_type = 110;
isac_codec.clock_rate.emplace(16000);
parameters.codecs.push_back(std::move(isac_codec));
RtpEncodingParameters encoding;
encoding.ssrc.emplace(0xdeadbeef);
parameters.encodings.push_back(std::move(encoding));
parameters.header_extensions.emplace_back(
"urn:ietf:params:rtp-hdrext:ssrc-audio-level", 3);
EXPECT_TRUE(audio_receiver->Receive(parameters).ok());
// Now verify that the parameters were applied to the fake media engine layer
// that exists below BaseChannel.
cricket::FakeVoiceMediaChannel* fake_voice_channel =
fake_media_engine_->GetVoiceChannel(0);
ASSERT_NE(nullptr, fake_voice_channel);
EXPECT_TRUE(fake_voice_channel->playout());
// Verify codec parameters.
ASSERT_GT(fake_voice_channel->recv_codecs().size(), 0u);
const cricket::AudioCodec& top_codec = fake_voice_channel->recv_codecs()[0];
EXPECT_EQ("opus", top_codec.name);
EXPECT_EQ(120, top_codec.id);
EXPECT_EQ(48000, top_codec.clockrate);
EXPECT_EQ(2u, top_codec.channels);
ASSERT_NE(top_codec.params.end(), top_codec.params.find("minptime"));
EXPECT_EQ("10", top_codec.params.at("minptime"));
// Verify encoding parameters.
ASSERT_EQ(1u, fake_voice_channel->recv_streams().size());
const cricket::StreamParams& recv_stream =
fake_voice_channel->recv_streams()[0];
EXPECT_EQ(1u, recv_stream.ssrcs.size());
EXPECT_EQ(0xdeadbeef, recv_stream.first_ssrc());
// Verify header extensions.
ASSERT_EQ(1u, fake_voice_channel->recv_extensions().size());
const RtpExtension& extension = fake_voice_channel->recv_extensions()[0];
EXPECT_EQ("urn:ietf:params:rtp-hdrext:ssrc-audio-level", extension.uri);
EXPECT_EQ(3, extension.id);
}
// Test that "Receive" causes the expected parameters to be applied to the media
// engine level, for a video receiver.
TEST_F(OrtcRtpReceiverTest, ReceiveAppliesVideoParametersToMediaEngine) {
auto video_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
auto video_receiver = video_receiver_result.MoveValue();
// First, create parameters with all the bells and whistles.
RtpParameters parameters;
RtpCodecParameters vp8_codec;
vp8_codec.name = "VP8";
vp8_codec.kind = cricket::MEDIA_TYPE_VIDEO;
vp8_codec.payload_type = 99;
// Try a couple types of feedback params. "Generic NACK" is a bit of a
// special case, so test it here.
vp8_codec.rtcp_feedback.emplace_back(RtcpFeedbackType::CCM,
RtcpFeedbackMessageType::FIR);
vp8_codec.rtcp_feedback.emplace_back(RtcpFeedbackType::NACK,
RtcpFeedbackMessageType::GENERIC_NACK);
parameters.codecs.push_back(std::move(vp8_codec));
RtpCodecParameters vp8_rtx_codec;
vp8_rtx_codec.name = "rtx";
vp8_rtx_codec.kind = cricket::MEDIA_TYPE_VIDEO;
vp8_rtx_codec.payload_type = 100;
vp8_rtx_codec.parameters["apt"] = "99";
parameters.codecs.push_back(std::move(vp8_rtx_codec));
// Add two codecs, expecting the first to be used.
// TODO(deadbeef): Once "codec_payload_type" is supported, use it to select a
// codec that's not at the top of the list.
RtpCodecParameters vp9_codec;
vp9_codec.name = "VP9";
vp9_codec.kind = cricket::MEDIA_TYPE_VIDEO;
vp9_codec.payload_type = 102;
parameters.codecs.push_back(std::move(vp9_codec));
RtpCodecParameters vp9_rtx_codec;
vp9_rtx_codec.name = "rtx";
vp9_rtx_codec.kind = cricket::MEDIA_TYPE_VIDEO;
vp9_rtx_codec.payload_type = 103;
vp9_rtx_codec.parameters["apt"] = "102";
parameters.codecs.push_back(std::move(vp9_rtx_codec));
RtpEncodingParameters encoding;
encoding.ssrc.emplace(0xdeadbeef);
encoding.rtx.emplace(0xbaadfeed);
parameters.encodings.push_back(std::move(encoding));
parameters.header_extensions.emplace_back("urn:3gpp:video-orientation", 4);
parameters.header_extensions.emplace_back(
"http://www.webrtc.org/experiments/rtp-hdrext/playout-delay", 6);
EXPECT_TRUE(video_receiver->Receive(parameters).ok());
// Now verify that the parameters were applied to the fake media engine layer
// that exists below BaseChannel.
cricket::FakeVideoMediaChannel* fake_video_channel =
fake_media_engine_->GetVideoChannel(0);
ASSERT_NE(nullptr, fake_video_channel);
// Verify codec parameters.
ASSERT_GE(fake_video_channel->recv_codecs().size(), 2u);
const cricket::VideoCodec& top_codec = fake_video_channel->recv_codecs()[0];
EXPECT_EQ("VP8", top_codec.name);
EXPECT_EQ(99, top_codec.id);
EXPECT_TRUE(top_codec.feedback_params.Has({"ccm", "fir"}));
EXPECT_TRUE(top_codec.feedback_params.Has(cricket::FeedbackParam("nack")));
const cricket::VideoCodec& rtx_codec = fake_video_channel->recv_codecs()[1];
EXPECT_EQ("rtx", rtx_codec.name);
EXPECT_EQ(100, rtx_codec.id);
ASSERT_NE(rtx_codec.params.end(), rtx_codec.params.find("apt"));
EXPECT_EQ("99", rtx_codec.params.at("apt"));
// Verify encoding parameters.
ASSERT_EQ(1u, fake_video_channel->recv_streams().size());
const cricket::StreamParams& recv_stream =
fake_video_channel->recv_streams()[0];
EXPECT_EQ(2u, recv_stream.ssrcs.size());
EXPECT_EQ(0xdeadbeef, recv_stream.first_ssrc());
uint32_t rtx_ssrc = 0u;
EXPECT_TRUE(recv_stream.GetFidSsrc(recv_stream.first_ssrc(), &rtx_ssrc));
EXPECT_EQ(0xbaadfeed, rtx_ssrc);
// Verify header extensions.
ASSERT_EQ(2u, fake_video_channel->recv_extensions().size());
const RtpExtension& extension1 = fake_video_channel->recv_extensions()[0];
EXPECT_EQ("urn:3gpp:video-orientation", extension1.uri);
EXPECT_EQ(4, extension1.id);
const RtpExtension& extension2 = fake_video_channel->recv_extensions()[1];
EXPECT_EQ("http://www.webrtc.org/experiments/rtp-hdrext/playout-delay",
extension2.uri);
EXPECT_EQ(6, extension2.id);
}
// Test changing both the receive codec and SSRC at the same time, and verify
// that the new parameters are applied to the media engine level.
TEST_F(OrtcRtpReceiverTest, CallingReceiveTwiceChangesParameters) {
auto audio_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto audio_receiver = audio_receiver_result.MoveValue();
RTCError error =
audio_receiver->Receive(MakeMinimalOpusParametersWithSsrc(0x11111111));
EXPECT_TRUE(error.ok());
error =
audio_receiver->Receive(MakeMinimalIsacParametersWithSsrc(0x22222222));
EXPECT_TRUE(error.ok());
cricket::FakeVoiceMediaChannel* fake_voice_channel =
fake_media_engine_->GetVoiceChannel(0);
ASSERT_NE(nullptr, fake_voice_channel);
ASSERT_GT(fake_voice_channel->recv_codecs().size(), 0u);
EXPECT_EQ("ISAC", fake_voice_channel->recv_codecs()[0].name);
ASSERT_EQ(1u, fake_voice_channel->recv_streams().size());
EXPECT_EQ(0x22222222u, fake_voice_channel->recv_streams()[0].first_ssrc());
auto video_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
auto video_receiver = video_receiver_result.MoveValue();
error = video_receiver->Receive(MakeMinimalVp8ParametersWithSsrc(0x33333333));
EXPECT_TRUE(error.ok());
error = video_receiver->Receive(MakeMinimalVp9ParametersWithSsrc(0x44444444));
EXPECT_TRUE(error.ok());
cricket::FakeVideoMediaChannel* fake_video_channel =
fake_media_engine_->GetVideoChannel(0);
ASSERT_NE(nullptr, fake_video_channel);
ASSERT_GT(fake_video_channel->recv_codecs().size(), 0u);
EXPECT_EQ("VP9", fake_video_channel->recv_codecs()[0].name);
ASSERT_EQ(1u, fake_video_channel->recv_streams().size());
EXPECT_EQ(0x44444444u, fake_video_channel->recv_streams()[0].first_ssrc());
}
// Ensure that if the |active| flag of RtpEncodingParameters is set to false,
// playout stops at the media engine level. Note that this is only applicable
// to audio (at least currently).
TEST_F(OrtcRtpReceiverTest, DeactivatingEncodingStopsPlayout) {
auto audio_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto audio_receiver = audio_receiver_result.MoveValue();
RtpParameters parameters = MakeMinimalOpusParameters();
EXPECT_TRUE(audio_receiver->Receive(parameters).ok());
// Expect "playout" flag to initially be true.
cricket::FakeVoiceMediaChannel* fake_voice_channel =
fake_media_engine_->GetVoiceChannel(0);
ASSERT_NE(nullptr, fake_voice_channel);
EXPECT_TRUE(fake_voice_channel->playout());
// Deactivate encoding and expect it to change to false.
parameters.encodings[0].active = false;
EXPECT_TRUE(audio_receiver->Receive(parameters).ok());
EXPECT_FALSE(fake_voice_channel->playout());
}
// Ensure that calling Receive with an empty list of encodings causes receive
// streams at the media engine level to be cleared.
TEST_F(OrtcRtpReceiverTest,
CallingReceiveWithEmptyEncodingsClearsReceiveStreams) {
auto audio_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto audio_receiver = audio_receiver_result.MoveValue();
RtpParameters parameters = MakeMinimalOpusParameters();
EXPECT_TRUE(audio_receiver->Receive(parameters).ok());
parameters.encodings.clear();
EXPECT_TRUE(audio_receiver->Receive(parameters).ok());
cricket::FakeVoiceMediaChannel* fake_voice_channel =
fake_media_engine_->GetVoiceChannel(0);
ASSERT_NE(nullptr, fake_voice_channel);
EXPECT_TRUE(fake_voice_channel->recv_streams().empty());
auto video_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
auto video_receiver = video_receiver_result.MoveValue();
parameters = MakeMinimalVp8Parameters();
EXPECT_TRUE(video_receiver->Receive(parameters).ok());
parameters.encodings.clear();
EXPECT_TRUE(video_receiver->Receive(parameters).ok());
cricket::FakeVideoMediaChannel* fake_video_channel =
fake_media_engine_->GetVideoChannel(0);
ASSERT_NE(nullptr, fake_video_channel);
EXPECT_TRUE(fake_video_channel->recv_streams().empty());
}
// These errors should be covered by rtpparametersconversion_unittest.cc, but
// we should at least test that those errors are propagated from calls to
// Receive, with a few examples.
TEST_F(OrtcRtpReceiverTest, ReceiveReturnsErrorOnInvalidParameters) {
auto result = ortc_factory_->CreateRtpReceiver(cricket::MEDIA_TYPE_AUDIO,
rtp_transport_.get());
auto receiver = result.MoveValue();
// CCM feedback missing message type.
RtpParameters invalid_feedback = MakeMinimalOpusParameters();
invalid_feedback.codecs[0].rtcp_feedback.emplace_back(RtcpFeedbackType::CCM);
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER,
receiver->Receive(invalid_feedback).type());
// Payload type greater than 127.
RtpParameters invalid_pt = MakeMinimalOpusParameters();
invalid_pt.codecs[0].payload_type = 128;
EXPECT_EQ(RTCErrorType::INVALID_RANGE, receiver->Receive(invalid_pt).type());
// Duplicate header extension IDs.
RtpParameters duplicate_ids = MakeMinimalOpusParameters();
duplicate_ids.header_extensions.emplace_back("foo", 5);
duplicate_ids.header_extensions.emplace_back("bar", 5);
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER,
receiver->Receive(duplicate_ids).type());
}
// Two receivers using the same transport shouldn't be able to use the same
// payload type to refer to different codecs, same header extension IDs to
// refer to different extensions, or same SSRC.
TEST_F(OrtcRtpReceiverTest, ReceiveReturnsErrorOnIdConflicts) {
auto audio_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto video_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
auto audio_receiver = audio_receiver_result.MoveValue();
auto video_receiver = video_receiver_result.MoveValue();
// First test payload type conflict.
RtpParameters audio_parameters = MakeMinimalOpusParameters();
RtpParameters video_parameters = MakeMinimalVp8Parameters();
audio_parameters.codecs[0].payload_type = 100;
video_parameters.codecs[0].payload_type = 100;
EXPECT_TRUE(audio_receiver->Receive(audio_parameters).ok());
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER,
video_receiver->Receive(video_parameters).type());
// Test header extension ID conflict.
video_parameters.codecs[0].payload_type = 110;
audio_parameters.header_extensions.emplace_back("foo", 4);
video_parameters.header_extensions.emplace_back("bar", 4);
EXPECT_TRUE(audio_receiver->Receive(audio_parameters).ok());
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER,
video_receiver->Receive(video_parameters).type());
// Test SSRC conflict. Have an RTX SSRC that conflicts with a primary SSRC
// for extra challenge.
video_parameters.header_extensions[0].uri = "foo";
audio_parameters.encodings[0].ssrc.emplace(0xabbaabba);
audio_parameters.encodings[0].rtx.emplace(0xdeadbeef);
video_parameters.encodings[0].ssrc.emplace(0xdeadbeef);
EXPECT_TRUE(audio_receiver->Receive(audio_parameters).ok());
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER,
video_receiver->Receive(video_parameters).type());
// Sanity check that parameters can be set if the conflicts are all resolved.
video_parameters.encodings[0].ssrc.emplace(0xbaadf00d);
EXPECT_TRUE(video_receiver->Receive(video_parameters).ok());
}
// Ensure that deleting a receiver causes receive streams at the media engine
// level to be cleared.
TEST_F(OrtcRtpReceiverTest, DeletingReceiverClearsReceiveStreams) {
auto audio_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto audio_receiver = audio_receiver_result.MoveValue();
EXPECT_TRUE(audio_receiver->Receive(MakeMinimalOpusParameters()).ok());
// Also create an audio sender, to prevent the voice channel from being
// completely deleted.
auto audio_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto audio_sender = audio_sender_result.MoveValue();
EXPECT_TRUE(audio_sender->Send(MakeMinimalOpusParameters()).ok());
audio_receiver.reset(nullptr);
cricket::FakeVoiceMediaChannel* fake_voice_channel =
fake_media_engine_->GetVoiceChannel(0);
ASSERT_NE(nullptr, fake_voice_channel);
EXPECT_TRUE(fake_voice_channel->recv_streams().empty());
auto video_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
auto video_receiver = video_receiver_result.MoveValue();
EXPECT_TRUE(video_receiver->Receive(MakeMinimalVp8Parameters()).ok());
// Also create a video sender, to prevent the video channel from being
// completely deleted.
auto video_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
auto video_sender = video_sender_result.MoveValue();
EXPECT_TRUE(video_sender->Send(MakeMinimalVp8Parameters()).ok());
video_receiver.reset(nullptr);
cricket::FakeVideoMediaChannel* fake_video_channel =
fake_media_engine_->GetVideoChannel(0);
ASSERT_NE(nullptr, fake_video_channel);
EXPECT_TRUE(fake_video_channel->recv_streams().empty());
}
// If Receive hasn't been called, GetParameters should return empty parameters.
TEST_F(OrtcRtpReceiverTest, GetDefaultParameters) {
auto result = ortc_factory_->CreateRtpReceiver(cricket::MEDIA_TYPE_AUDIO,
rtp_transport_.get());
EXPECT_EQ(RtpParameters(), result.value()->GetParameters());
result = ortc_factory_->CreateRtpReceiver(cricket::MEDIA_TYPE_VIDEO,
rtp_transport_.get());
EXPECT_EQ(RtpParameters(), result.value()->GetParameters());
}
// Test that GetParameters returns the last parameters passed into Receive,
// along with the implementation-default values filled in where they were left
// unset.
TEST_F(OrtcRtpReceiverTest,
GetParametersReturnsLastSetParametersWithDefaultsFilled) {
auto audio_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto audio_receiver = audio_receiver_result.MoveValue();
RtpParameters opus_parameters = MakeMinimalOpusParameters();
EXPECT_TRUE(audio_receiver->Receive(opus_parameters).ok());
EXPECT_EQ(opus_parameters, audio_receiver->GetParameters());
RtpParameters isac_parameters = MakeMinimalIsacParameters();
// Sanity check that num_channels actually is left unset.
ASSERT_FALSE(isac_parameters.codecs[0].num_channels);
EXPECT_TRUE(audio_receiver->Receive(isac_parameters).ok());
// Should be filled with a default "num channels" of 1.
// TODO(deadbeef): This should actually default to 2 for some codecs. Update
// this test once that's implemented.
isac_parameters.codecs[0].num_channels.emplace(1);
EXPECT_EQ(isac_parameters, audio_receiver->GetParameters());
auto video_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
auto video_receiver = video_receiver_result.MoveValue();
RtpParameters vp8_parameters = MakeMinimalVp8Parameters();
// Sanity check that clock_rate actually is left unset.
ASSERT_FALSE(vp8_parameters.codecs[0].clock_rate);
EXPECT_TRUE(video_receiver->Receive(vp8_parameters).ok());
// Should be filled with a default clock rate of 90000.
vp8_parameters.codecs[0].clock_rate.emplace(90000);
EXPECT_EQ(vp8_parameters, video_receiver->GetParameters());
RtpParameters vp9_parameters = MakeMinimalVp9Parameters();
// Sanity check that clock_rate actually is left unset.
ASSERT_FALSE(vp9_parameters.codecs[0].clock_rate);
EXPECT_TRUE(video_receiver->Receive(vp9_parameters).ok());
// Should be filled with a default clock rate of 90000.
vp9_parameters.codecs[0].clock_rate.emplace(90000);
EXPECT_EQ(vp9_parameters, video_receiver->GetParameters());
}
TEST_F(OrtcRtpReceiverTest, GetKind) {
auto audio_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto video_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
auto audio_receiver = audio_receiver_result.MoveValue();
auto video_receiver = video_receiver_result.MoveValue();
EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, audio_receiver->GetKind());
EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, video_receiver->GetKind());
}
} // namespace webrtc
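
The RtpParameters plumbing these tests verify can be built up directly; a small sketch with illustrative values (the fields match those used in ReceiveAppliesAudioParametersToMediaEngine, and |receiver| is assumed to come from OrtcFactory::CreateRtpReceiver):

RtpParameters parameters;
RtpCodecParameters opus;
opus.name = "opus";
opus.kind = cricket::MEDIA_TYPE_AUDIO;
opus.payload_type = 111;            // Illustrative payload type.
opus.clock_rate.emplace(48000);
opus.num_channels.emplace(2);
parameters.codecs.push_back(std::move(opus));
RtpEncodingParameters encoding;
encoding.ssrc.emplace(0x12345678);  // Illustrative SSRC.
parameters.encodings.push_back(std::move(encoding));
RTCError error = receiver->Receive(parameters);
// On success, GetParameters() returns these values with defaults filled in.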

View File

@ -1,181 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "ortc/ortcrtpreceiveradapter.h"
#include <string>
#include <utility>
#include <vector>
#include "media/base/mediaconstants.h"
#include "ortc/rtptransportadapter.h"
#include "rtc_base/checks.h"
#include "rtc_base/helpers.h" // For "CreateRandomX".
namespace {
void FillAudioReceiverParameters(webrtc::RtpParameters* parameters) {
for (webrtc::RtpCodecParameters& codec : parameters->codecs) {
if (!codec.num_channels) {
codec.num_channels = 1;
}
}
}
void FillVideoReceiverParameters(webrtc::RtpParameters* parameters) {
for (webrtc::RtpCodecParameters& codec : parameters->codecs) {
if (!codec.clock_rate) {
codec.clock_rate = cricket::kVideoCodecClockrate;
}
}
}
} // namespace
namespace webrtc {
BEGIN_OWNED_PROXY_MAP(OrtcRtpReceiver)
PROXY_SIGNALING_THREAD_DESTRUCTOR()
PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, GetTrack)
PROXY_METHOD1(RTCError, SetTransport, RtpTransportInterface*)
PROXY_CONSTMETHOD0(RtpTransportInterface*, GetTransport)
PROXY_METHOD1(RTCError, Receive, const RtpParameters&)
PROXY_CONSTMETHOD0(RtpParameters, GetParameters)
PROXY_CONSTMETHOD0(cricket::MediaType, GetKind)
END_PROXY_MAP()
// static
std::unique_ptr<OrtcRtpReceiverInterface> OrtcRtpReceiverAdapter::CreateProxy(
std::unique_ptr<OrtcRtpReceiverAdapter> wrapped_receiver) {
RTC_DCHECK(wrapped_receiver);
rtc::Thread* signaling =
wrapped_receiver->rtp_transport_controller_->signaling_thread();
rtc::Thread* worker =
wrapped_receiver->rtp_transport_controller_->worker_thread();
return OrtcRtpReceiverProxy::Create(signaling, worker,
std::move(wrapped_receiver));
}
OrtcRtpReceiverAdapter::~OrtcRtpReceiverAdapter() {
internal_receiver_ = nullptr;
SignalDestroyed();
}
rtc::scoped_refptr<MediaStreamTrackInterface> OrtcRtpReceiverAdapter::GetTrack()
const {
return internal_receiver_ ? internal_receiver_->track() : nullptr;
}
RTCError OrtcRtpReceiverAdapter::SetTransport(
RtpTransportInterface* transport) {
LOG_AND_RETURN_ERROR(
RTCErrorType::UNSUPPORTED_OPERATION,
"Changing the transport of an RtpReceiver is not yet supported.");
}
RtpTransportInterface* OrtcRtpReceiverAdapter::GetTransport() const {
return transport_;
}
RTCError OrtcRtpReceiverAdapter::Receive(const RtpParameters& parameters) {
RtpParameters filled_parameters = parameters;
RTCError err;
switch (kind_) {
case cricket::MEDIA_TYPE_AUDIO:
FillAudioReceiverParameters(&filled_parameters);
err = rtp_transport_controller_->ValidateAndApplyAudioReceiverParameters(
filled_parameters);
if (!err.ok()) {
return err;
}
break;
case cricket::MEDIA_TYPE_VIDEO:
FillVideoReceiverParameters(&filled_parameters);
err = rtp_transport_controller_->ValidateAndApplyVideoReceiverParameters(
filled_parameters);
if (!err.ok()) {
return err;
}
break;
case cricket::MEDIA_TYPE_DATA:
RTC_NOTREACHED();
return webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR);
}
last_applied_parameters_ = filled_parameters;
// Now that parameters were applied, can create (or recreate) the internal
// receiver.
//
// This is analogous to a PeerConnection creating a receiver after
// SetRemoteDescription is successful.
MaybeRecreateInternalReceiver();
return RTCError::OK();
}
RtpParameters OrtcRtpReceiverAdapter::GetParameters() const {
return last_applied_parameters_;
}
cricket::MediaType OrtcRtpReceiverAdapter::GetKind() const {
return kind_;
}
OrtcRtpReceiverAdapter::OrtcRtpReceiverAdapter(
cricket::MediaType kind,
RtpTransportInterface* transport,
RtpTransportControllerAdapter* rtp_transport_controller)
: kind_(kind),
transport_(transport),
rtp_transport_controller_(rtp_transport_controller) {}
void OrtcRtpReceiverAdapter::MaybeRecreateInternalReceiver() {
if (last_applied_parameters_.encodings.empty()) {
internal_receiver_ = nullptr;
return;
}
// An SSRC of 0 is valid; this is used to identify "the default SSRC" (which
// is the first one seen by the underlying media engine).
uint32_t ssrc = 0;
if (last_applied_parameters_.encodings[0].ssrc) {
ssrc = *last_applied_parameters_.encodings[0].ssrc;
}
if (internal_receiver_ && ssrc == internal_receiver_->ssrc()) {
// SSRC not changing; nothing to do.
return;
}
internal_receiver_ = nullptr;
switch (kind_) {
case cricket::MEDIA_TYPE_AUDIO: {
auto* audio_receiver = new AudioRtpReceiver(
rtp_transport_controller_->worker_thread(), rtc::CreateRandomUuid(),
std::vector<std::string>({}));
auto* voice_channel = rtp_transport_controller_->voice_channel();
RTC_DCHECK(voice_channel);
audio_receiver->SetVoiceMediaChannel(voice_channel->media_channel());
internal_receiver_ = audio_receiver;
break;
}
case cricket::MEDIA_TYPE_VIDEO: {
auto* video_receiver = new VideoRtpReceiver(
rtp_transport_controller_->worker_thread(), rtc::CreateRandomUuid(),
std::vector<std::string>({}));
auto* video_channel = rtp_transport_controller_->video_channel();
RTC_DCHECK(video_channel);
video_receiver->SetVideoMediaChannel(video_channel->media_channel());
internal_receiver_ = video_receiver;
break;
}
case cricket::MEDIA_TYPE_DATA:
RTC_NOTREACHED();
}
internal_receiver_->SetupMediaChannel(ssrc);
}
} // namespace webrtc

View File

@ -1,79 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef ORTC_ORTCRTPRECEIVERADAPTER_H_
#define ORTC_ORTCRTPRECEIVERADAPTER_H_
#include <memory>
#include "api/ortc/ortcrtpreceiverinterface.h"
#include "api/rtcerror.h"
#include "api/rtpparameters.h"
#include "ortc/rtptransportcontrolleradapter.h"
#include "pc/rtpreceiver.h"
#include "rtc_base/constructormagic.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "rtc_base/thread.h"
namespace webrtc {
// Implementation of OrtcRtpReceiverInterface that works with
// RtpTransportAdapter, and wraps a VideoRtpReceiver/AudioRtpReceiver that's
// normally used with the PeerConnection.
//
// TODO(deadbeef): When BaseChannel is split apart into separate
// "RtpReceiver"/"RtpTransceiver"/"RtpReceiver"/"RtpReceiver" objects, this
// adapter object can be removed.
class OrtcRtpReceiverAdapter : public OrtcRtpReceiverInterface {
public:
// Wraps |wrapped_receiver| in a proxy that will safely call methods on the
// correct thread.
static std::unique_ptr<OrtcRtpReceiverInterface> CreateProxy(
std::unique_ptr<OrtcRtpReceiverAdapter> wrapped_receiver);
// Should only be called by RtpTransportControllerAdapter.
OrtcRtpReceiverAdapter(
cricket::MediaType kind,
RtpTransportInterface* transport,
RtpTransportControllerAdapter* rtp_transport_controller);
~OrtcRtpReceiverAdapter() override;
// OrtcRtpReceiverInterface implementation.
rtc::scoped_refptr<MediaStreamTrackInterface> GetTrack() const override;
RTCError SetTransport(RtpTransportInterface* transport) override;
RtpTransportInterface* GetTransport() const override;
RTCError Receive(const RtpParameters& parameters) override;
RtpParameters GetParameters() const override;
cricket::MediaType GetKind() const override;
// Used so that the RtpTransportControllerAdapter knows when it can
// deallocate resources allocated for this object.
sigslot::signal0<> SignalDestroyed;
private:
void MaybeRecreateInternalReceiver();
cricket::MediaType kind_;
RtpTransportInterface* transport_;
RtpTransportControllerAdapter* rtp_transport_controller_;
// Scoped refptr due to ref-counted interface, but we should be the only
// reference holder.
rtc::scoped_refptr<RtpReceiverInternal> internal_receiver_;
RtpParameters last_applied_parameters_;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(OrtcRtpReceiverAdapter);
};
} // namespace webrtc
#endif // ORTC_ORTCRTPRECEIVERADAPTER_H_
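
As the comments above note, only RtpTransportControllerAdapter is expected to construct this class and then wrap it in the thread-safe proxy. A hedged sketch of that sequence (the |transport| pointer and the use of |this| as the controller are assumptions standing in for the caller's members):

// Sketch, from the controller's point of view (not verbatim source):
std::unique_ptr<OrtcRtpReceiverAdapter> adapter(new OrtcRtpReceiverAdapter(
    cricket::MEDIA_TYPE_AUDIO, transport, /*rtp_transport_controller=*/this));
std::unique_ptr<OrtcRtpReceiverInterface> receiver_proxy =
    OrtcRtpReceiverAdapter::CreateProxy(std::move(adapter));
// |receiver_proxy| marshals calls onto the signaling and worker threads.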

View File

@ -1,670 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <memory>
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "media/base/fakemediaengine.h"
#include "ortc/ortcfactory.h"
#include "ortc/testrtpparameters.h"
#include "p2p/base/fakepackettransport.h"
#include "pc/test/fakevideotracksource.h"
#include "rtc_base/gunit.h"
namespace webrtc {
// This test uses an individual RtpSender using only the public interface, and
// verifies that it behaves as designed at an API level. Also tests that
// parameters are applied to the audio/video engines as expected. Network and
// media interfaces are faked to isolate what's being tested.
//
// This test shouldn't result in any actual media being sent. That sort of
// test should go in ortcfactory_integrationtest.cc.
class OrtcRtpSenderTest : public testing::Test {
public:
OrtcRtpSenderTest() : fake_packet_transport_("fake") {
// Need to set the fake packet transport to writable, in order to test that
// the "send" flag is applied to the media engine based on the encoding
// |active| flag.
fake_packet_transport_.SetWritable(true);
fake_media_engine_ = new cricket::FakeMediaEngine();
// Note: This doesn't need to use fake network classes, since we already
// use FakePacketTransport.
auto ortc_factory_result = OrtcFactory::Create(
nullptr, nullptr, nullptr, nullptr, nullptr,
std::unique_ptr<cricket::MediaEngineInterface>(fake_media_engine_),
CreateBuiltinAudioEncoderFactory(), CreateBuiltinAudioDecoderFactory());
ortc_factory_ = ortc_factory_result.MoveValue();
RtpTransportParameters parameters;
parameters.rtcp.mux = true;
auto rtp_transport_result = ortc_factory_->CreateRtpTransport(
parameters, &fake_packet_transport_, nullptr, nullptr);
rtp_transport_ = rtp_transport_result.MoveValue();
}
protected:
rtc::scoped_refptr<AudioTrackInterface> CreateAudioTrack(
const std::string& id) {
return ortc_factory_->CreateAudioTrack(id, nullptr);
}
rtc::scoped_refptr<VideoTrackInterface> CreateVideoTrack(
const std::string& id) {
return rtc::scoped_refptr<webrtc::VideoTrackInterface>(
ortc_factory_->CreateVideoTrack(id, FakeVideoTrackSource::Create()));
}
// Owned by |ortc_factory_|.
cricket::FakeMediaEngine* fake_media_engine_;
rtc::FakePacketTransport fake_packet_transport_;
std::unique_ptr<OrtcFactoryInterface> ortc_factory_;
std::unique_ptr<RtpTransportInterface> rtp_transport_;
};
TEST_F(OrtcRtpSenderTest, GetAndSetTrack) {
// Test GetTrack with a sender constructed with a track.
auto audio_track = CreateAudioTrack("audio");
auto audio_sender_result =
ortc_factory_->CreateRtpSender(audio_track, rtp_transport_.get());
auto audio_sender = audio_sender_result.MoveValue();
EXPECT_EQ(audio_track, audio_sender->GetTrack());
// Test GetTrack after SetTrack.
auto video_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
auto video_sender = video_sender_result.MoveValue();
auto video_track = CreateVideoTrack("video1");
EXPECT_TRUE(video_sender->SetTrack(video_track).ok());
EXPECT_EQ(video_track, video_sender->GetTrack());
video_track = CreateVideoTrack("video2");
EXPECT_TRUE(video_sender->SetTrack(video_track).ok());
EXPECT_EQ(video_track, video_sender->GetTrack());
}
// Test that track can be set when previously unset, even after Send has been
// called.
TEST_F(OrtcRtpSenderTest, SetTrackWhileSending) {
auto audio_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto audio_sender = audio_sender_result.MoveValue();
EXPECT_TRUE(audio_sender->Send(MakeMinimalOpusParameters()).ok());
EXPECT_TRUE(audio_sender->SetTrack(CreateAudioTrack("audio")).ok());
auto video_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
auto video_sender = video_sender_result.MoveValue();
EXPECT_TRUE(video_sender->Send(MakeMinimalVp8Parameters()).ok());
EXPECT_TRUE(video_sender->SetTrack(CreateVideoTrack("video")).ok());
}
// Test that track can be changed mid-sending. Differs from the above test in
// that the track is set and being changed, rather than unset and being set for
// the first time.
TEST_F(OrtcRtpSenderTest, ChangeTrackWhileSending) {
auto audio_sender_result = ortc_factory_->CreateRtpSender(
CreateAudioTrack("audio1"), rtp_transport_.get());
auto audio_sender = audio_sender_result.MoveValue();
EXPECT_TRUE(audio_sender->Send(MakeMinimalOpusParameters()).ok());
EXPECT_TRUE(audio_sender->SetTrack(CreateAudioTrack("audio2")).ok());
auto video_sender_result = ortc_factory_->CreateRtpSender(
CreateVideoTrack("video1"), rtp_transport_.get());
auto video_sender = video_sender_result.MoveValue();
EXPECT_TRUE(video_sender->Send(MakeMinimalVp8Parameters()).ok());
EXPECT_TRUE(video_sender->SetTrack(CreateVideoTrack("video2")).ok());
}
// Test that track can be set to null while sending.
TEST_F(OrtcRtpSenderTest, UnsetTrackWhileSending) {
auto audio_sender_result = ortc_factory_->CreateRtpSender(
CreateAudioTrack("audio"), rtp_transport_.get());
auto audio_sender = audio_sender_result.MoveValue();
EXPECT_TRUE(audio_sender->Send(MakeMinimalOpusParameters()).ok());
EXPECT_TRUE(audio_sender->SetTrack(nullptr).ok());
auto video_sender_result = ortc_factory_->CreateRtpSender(
CreateVideoTrack("video"), rtp_transport_.get());
auto video_sender = video_sender_result.MoveValue();
EXPECT_TRUE(video_sender->Send(MakeMinimalVp8Parameters()).ok());
EXPECT_TRUE(video_sender->SetTrack(nullptr).ok());
}
// Shouldn't be able to set an audio track on a video sender or vice versa.
TEST_F(OrtcRtpSenderTest, SetTrackOfWrongKindFails) {
auto audio_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto audio_sender = audio_sender_result.MoveValue();
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER,
audio_sender->SetTrack(CreateVideoTrack("video")).type());
auto video_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
auto video_sender = video_sender_result.MoveValue();
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER,
video_sender->SetTrack(CreateAudioTrack("audio")).type());
}
// Currently SetTransport isn't supported. When it is, replace this test with a
// test/tests for it.
TEST_F(OrtcRtpSenderTest, SetTransportFails) {
rtc::FakePacketTransport fake_packet_transport("another_transport");
RtpTransportParameters parameters;
parameters.rtcp.mux = true;
auto rtp_transport_result = ortc_factory_->CreateRtpTransport(
parameters, &fake_packet_transport, nullptr, nullptr);
auto rtp_transport = rtp_transport_result.MoveValue();
auto sender_result = ortc_factory_->CreateRtpSender(cricket::MEDIA_TYPE_AUDIO,
rtp_transport_.get());
auto sender = sender_result.MoveValue();
EXPECT_EQ(RTCErrorType::UNSUPPORTED_OPERATION,
sender->SetTransport(rtp_transport.get()).type());
}
TEST_F(OrtcRtpSenderTest, GetTransport) {
auto result = ortc_factory_->CreateRtpSender(cricket::MEDIA_TYPE_AUDIO,
rtp_transport_.get());
EXPECT_EQ(rtp_transport_.get(), result.value()->GetTransport());
}
// Test that "Send" causes the expected parameters to be applied to the media
// engine level, for an audio sender.
TEST_F(OrtcRtpSenderTest, SendAppliesAudioParametersToMediaEngine) {
auto audio_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto audio_sender = audio_sender_result.MoveValue();
// First, create parameters with all the bells and whistles.
RtpParameters parameters;
RtpCodecParameters opus_codec;
opus_codec.name = "opus";
opus_codec.kind = cricket::MEDIA_TYPE_AUDIO;
opus_codec.payload_type = 120;
opus_codec.clock_rate.emplace(48000);
opus_codec.num_channels.emplace(2);
opus_codec.parameters["minptime"] = "10";
opus_codec.rtcp_feedback.emplace_back(RtcpFeedbackType::TRANSPORT_CC);
parameters.codecs.push_back(std::move(opus_codec));
// Add two codecs, expecting the first to be used.
// TODO(deadbeef): Once "codec_payload_type" is supported, use it to select a
// codec that's not at the top of the list.
RtpCodecParameters isac_codec;
isac_codec.name = "ISAC";
isac_codec.kind = cricket::MEDIA_TYPE_AUDIO;
isac_codec.payload_type = 110;
isac_codec.clock_rate.emplace(16000);
parameters.codecs.push_back(std::move(isac_codec));
RtpEncodingParameters encoding;
encoding.ssrc.emplace(0xdeadbeef);
encoding.max_bitrate_bps.emplace(20000);
parameters.encodings.push_back(std::move(encoding));
parameters.header_extensions.emplace_back(
"urn:ietf:params:rtp-hdrext:ssrc-audio-level", 3);
EXPECT_TRUE(audio_sender->Send(parameters).ok());
// Now verify that the parameters were applied to the fake media engine layer
// that exists below BaseChannel.
cricket::FakeVoiceMediaChannel* fake_voice_channel =
fake_media_engine_->GetVoiceChannel(0);
ASSERT_NE(nullptr, fake_voice_channel);
EXPECT_TRUE(fake_voice_channel->sending());
// Verify codec parameters.
ASSERT_GT(fake_voice_channel->send_codecs().size(), 0u);
const cricket::AudioCodec& top_codec = fake_voice_channel->send_codecs()[0];
EXPECT_EQ("opus", top_codec.name);
EXPECT_EQ(120, top_codec.id);
EXPECT_EQ(48000, top_codec.clockrate);
EXPECT_EQ(2u, top_codec.channels);
ASSERT_NE(top_codec.params.end(), top_codec.params.find("minptime"));
EXPECT_EQ("10", top_codec.params.at("minptime"));
// Verify encoding parameters.
EXPECT_EQ(20000, fake_voice_channel->max_bps());
ASSERT_EQ(1u, fake_voice_channel->send_streams().size());
const cricket::StreamParams& send_stream =
fake_voice_channel->send_streams()[0];
EXPECT_EQ(1u, send_stream.ssrcs.size());
EXPECT_EQ(0xdeadbeef, send_stream.first_ssrc());
// Verify header extensions.
ASSERT_EQ(1u, fake_voice_channel->send_extensions().size());
const RtpExtension& extension = fake_voice_channel->send_extensions()[0];
EXPECT_EQ("urn:ietf:params:rtp-hdrext:ssrc-audio-level", extension.uri);
EXPECT_EQ(3, extension.id);
}
// Test that "Send" causes the expected parameters to be applied to the media
// engine level, for a video sender.
TEST_F(OrtcRtpSenderTest, SendAppliesVideoParametersToMediaEngine) {
auto video_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
auto video_sender = video_sender_result.MoveValue();
// First, create parameters with all the bells and whistles.
RtpParameters parameters;
RtpCodecParameters vp8_codec;
vp8_codec.name = "VP8";
vp8_codec.kind = cricket::MEDIA_TYPE_VIDEO;
vp8_codec.payload_type = 99;
// Try a couple types of feedback params. "Generic NACK" is a bit of a
// special case, so test it here.
vp8_codec.rtcp_feedback.emplace_back(RtcpFeedbackType::CCM,
RtcpFeedbackMessageType::FIR);
vp8_codec.rtcp_feedback.emplace_back(RtcpFeedbackType::NACK,
RtcpFeedbackMessageType::GENERIC_NACK);
parameters.codecs.push_back(std::move(vp8_codec));
RtpCodecParameters vp8_rtx_codec;
vp8_rtx_codec.name = "rtx";
vp8_rtx_codec.kind = cricket::MEDIA_TYPE_VIDEO;
vp8_rtx_codec.payload_type = 100;
vp8_rtx_codec.parameters["apt"] = "99";
parameters.codecs.push_back(std::move(vp8_rtx_codec));
// Add two codecs, expecting the first to be used.
// TODO(deadbeef): Once "codec_payload_type" is supported, use it to select a
// codec that's not at the top of the list.
RtpCodecParameters vp9_codec;
vp9_codec.name = "VP9";
vp9_codec.kind = cricket::MEDIA_TYPE_VIDEO;
vp9_codec.payload_type = 102;
parameters.codecs.push_back(std::move(vp9_codec));
RtpCodecParameters vp9_rtx_codec;
vp9_rtx_codec.name = "rtx";
vp9_rtx_codec.kind = cricket::MEDIA_TYPE_VIDEO;
vp9_rtx_codec.payload_type = 103;
vp9_rtx_codec.parameters["apt"] = "102";
parameters.codecs.push_back(std::move(vp9_rtx_codec));
RtpEncodingParameters encoding;
encoding.ssrc.emplace(0xdeadbeef);
encoding.rtx.emplace(0xbaadfeed);
encoding.max_bitrate_bps.emplace(99999);
parameters.encodings.push_back(std::move(encoding));
parameters.header_extensions.emplace_back("urn:3gpp:video-orientation", 4);
parameters.header_extensions.emplace_back(
"http://www.webrtc.org/experiments/rtp-hdrext/playout-delay", 6);
EXPECT_TRUE(video_sender->Send(parameters).ok());
// Now verify that the parameters were applied to the fake media engine layer
// that exists below BaseChannel.
cricket::FakeVideoMediaChannel* fake_video_channel =
fake_media_engine_->GetVideoChannel(0);
ASSERT_NE(nullptr, fake_video_channel);
EXPECT_TRUE(fake_video_channel->sending());
// Verify codec parameters.
ASSERT_GE(fake_video_channel->send_codecs().size(), 2u);
const cricket::VideoCodec& top_codec = fake_video_channel->send_codecs()[0];
EXPECT_EQ("VP8", top_codec.name);
EXPECT_EQ(99, top_codec.id);
EXPECT_TRUE(top_codec.feedback_params.Has({"ccm", "fir"}));
EXPECT_TRUE(top_codec.feedback_params.Has(cricket::FeedbackParam("nack")));
const cricket::VideoCodec& rtx_codec = fake_video_channel->send_codecs()[1];
EXPECT_EQ("rtx", rtx_codec.name);
EXPECT_EQ(100, rtx_codec.id);
ASSERT_NE(rtx_codec.params.end(), rtx_codec.params.find("apt"));
EXPECT_EQ("99", rtx_codec.params.at("apt"));
// Verify encoding parameters.
EXPECT_EQ(99999, fake_video_channel->max_bps());
ASSERT_EQ(1u, fake_video_channel->send_streams().size());
const cricket::StreamParams& send_stream =
fake_video_channel->send_streams()[0];
EXPECT_EQ(2u, send_stream.ssrcs.size());
EXPECT_EQ(0xdeadbeef, send_stream.first_ssrc());
uint32_t rtx_ssrc = 0u;
EXPECT_TRUE(send_stream.GetFidSsrc(send_stream.first_ssrc(), &rtx_ssrc));
EXPECT_EQ(0xbaadfeed, rtx_ssrc);
// Verify header extensions.
ASSERT_EQ(2u, fake_video_channel->send_extensions().size());
const RtpExtension& extension1 = fake_video_channel->send_extensions()[0];
EXPECT_EQ("urn:3gpp:video-orientation", extension1.uri);
EXPECT_EQ(4, extension1.id);
const RtpExtension& extension2 = fake_video_channel->send_extensions()[1];
EXPECT_EQ("http://www.webrtc.org/experiments/rtp-hdrext/playout-delay",
extension2.uri);
EXPECT_EQ(6, extension2.id);
}
// Ensure that when primary or RTX SSRCs are left unset, they're generated
// automatically.
TEST_F(OrtcRtpSenderTest, SendGeneratesSsrcsWhenEmpty) {
auto audio_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto audio_sender = audio_sender_result.MoveValue();
RtpParameters parameters = MakeMinimalOpusParametersWithNoSsrc();
// Default RTX parameters, with no SSRC.
parameters.encodings[0].rtx.emplace();
EXPECT_TRUE(audio_sender->Send(parameters).ok());
cricket::FakeVoiceMediaChannel* fake_voice_channel =
fake_media_engine_->GetVoiceChannel(0);
ASSERT_NE(nullptr, fake_voice_channel);
ASSERT_EQ(1u, fake_voice_channel->send_streams().size());
const cricket::StreamParams& audio_send_stream =
fake_voice_channel->send_streams()[0];
EXPECT_NE(0u, audio_send_stream.first_ssrc());
uint32_t rtx_ssrc = 0u;
EXPECT_TRUE(
audio_send_stream.GetFidSsrc(audio_send_stream.first_ssrc(), &rtx_ssrc));
EXPECT_NE(0u, rtx_ssrc);
EXPECT_NE(audio_send_stream.first_ssrc(), rtx_ssrc);
auto video_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
auto video_sender = video_sender_result.MoveValue();
parameters = MakeMinimalVp8ParametersWithNoSsrc();
// Default RTX parameters, with no SSRC.
parameters.encodings[0].rtx.emplace();
EXPECT_TRUE(video_sender->Send(parameters).ok());
cricket::FakeVideoMediaChannel* fake_video_channel =
fake_media_engine_->GetVideoChannel(0);
ASSERT_NE(nullptr, fake_video_channel);
ASSERT_EQ(1u, fake_video_channel->send_streams().size());
const cricket::StreamParams& video_send_stream =
fake_video_channel->send_streams()[0];
EXPECT_NE(0u, video_send_stream.first_ssrc());
rtx_ssrc = 0u;
EXPECT_TRUE(
video_send_stream.GetFidSsrc(video_send_stream.first_ssrc(), &rtx_ssrc));
EXPECT_NE(0u, rtx_ssrc);
EXPECT_NE(video_send_stream.first_ssrc(), rtx_ssrc);
EXPECT_NE(video_send_stream.first_ssrc(), audio_send_stream.first_ssrc());
}
// Test changing both the send codec and SSRC at the same time, and verify that
// the new parameters are applied to the media engine level.
TEST_F(OrtcRtpSenderTest, CallingSendTwiceChangesParameters) {
auto audio_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto audio_sender = audio_sender_result.MoveValue();
EXPECT_TRUE(
audio_sender->Send(MakeMinimalOpusParametersWithSsrc(0x11111111)).ok());
EXPECT_TRUE(
audio_sender->Send(MakeMinimalIsacParametersWithSsrc(0x22222222)).ok());
cricket::FakeVoiceMediaChannel* fake_voice_channel =
fake_media_engine_->GetVoiceChannel(0);
ASSERT_NE(nullptr, fake_voice_channel);
ASSERT_GT(fake_voice_channel->send_codecs().size(), 0u);
EXPECT_EQ("ISAC", fake_voice_channel->send_codecs()[0].name);
ASSERT_EQ(1u, fake_voice_channel->send_streams().size());
EXPECT_EQ(0x22222222u, fake_voice_channel->send_streams()[0].first_ssrc());
auto video_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
auto video_sender = video_sender_result.MoveValue();
EXPECT_TRUE(
video_sender->Send(MakeMinimalVp8ParametersWithSsrc(0x33333333)).ok());
EXPECT_TRUE(
video_sender->Send(MakeMinimalVp9ParametersWithSsrc(0x44444444)).ok());
cricket::FakeVideoMediaChannel* fake_video_channel =
fake_media_engine_->GetVideoChannel(0);
ASSERT_NE(nullptr, fake_video_channel);
ASSERT_GT(fake_video_channel->send_codecs().size(), 0u);
EXPECT_EQ("VP9", fake_video_channel->send_codecs()[0].name);
ASSERT_EQ(1u, fake_video_channel->send_streams().size());
EXPECT_EQ(0x44444444u, fake_video_channel->send_streams()[0].first_ssrc());
}
// Ensure that if the |active| flag of RtpEncodingParameters is set to false,
// sending stops at the media engine level.
TEST_F(OrtcRtpSenderTest, DeactivatingEncodingStopsSending) {
auto audio_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto audio_sender = audio_sender_result.MoveValue();
RtpParameters parameters = MakeMinimalOpusParameters();
EXPECT_TRUE(audio_sender->Send(parameters).ok());
// Expect "sending" flag to initially be true.
cricket::FakeVoiceMediaChannel* fake_voice_channel =
fake_media_engine_->GetVoiceChannel(0);
ASSERT_NE(nullptr, fake_voice_channel);
EXPECT_TRUE(fake_voice_channel->sending());
// Deactivate encoding and expect it to change to false.
parameters.encodings[0].active = false;
EXPECT_TRUE(audio_sender->Send(parameters).ok());
EXPECT_FALSE(fake_voice_channel->sending());
// Try the same thing for video now.
auto video_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
auto video_sender = video_sender_result.MoveValue();
parameters = MakeMinimalVp8Parameters();
EXPECT_TRUE(video_sender->Send(parameters).ok());
cricket::FakeVideoMediaChannel* fake_video_channel =
fake_media_engine_->GetVideoChannel(0);
ASSERT_NE(nullptr, fake_video_channel);
EXPECT_TRUE(fake_video_channel->sending());
parameters.encodings[0].active = false;
EXPECT_TRUE(video_sender->Send(parameters).ok());
EXPECT_FALSE(fake_video_channel->sending());
}
// Ensure that calling Send with an empty list of encodings causes send streams
// at the media engine level to be cleared.
TEST_F(OrtcRtpSenderTest, CallingSendWithEmptyEncodingsClearsSendStreams) {
auto audio_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto audio_sender = audio_sender_result.MoveValue();
RtpParameters parameters = MakeMinimalOpusParameters();
EXPECT_TRUE(audio_sender->Send(parameters).ok());
parameters.encodings.clear();
EXPECT_TRUE(audio_sender->Send(parameters).ok());
cricket::FakeVoiceMediaChannel* fake_voice_channel =
fake_media_engine_->GetVoiceChannel(0);
ASSERT_NE(nullptr, fake_voice_channel);
EXPECT_TRUE(fake_voice_channel->send_streams().empty());
auto video_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
auto video_sender = video_sender_result.MoveValue();
parameters = MakeMinimalVp8Parameters();
EXPECT_TRUE(video_sender->Send(parameters).ok());
parameters.encodings.clear();
EXPECT_TRUE(video_sender->Send(parameters).ok());
cricket::FakeVideoMediaChannel* fake_video_channel =
fake_media_engine_->GetVideoChannel(0);
ASSERT_NE(nullptr, fake_video_channel);
EXPECT_TRUE(fake_video_channel->send_streams().empty());
}
// These errors should be covered by rtpparametersconversion_unittest.cc, but
// we should at least test that those errors are propagated from calls to Send,
// with a few examples.
TEST_F(OrtcRtpSenderTest, SendReturnsErrorOnInvalidParameters) {
auto result = ortc_factory_->CreateRtpSender(cricket::MEDIA_TYPE_VIDEO,
rtp_transport_.get());
auto sender = result.MoveValue();
// NACK feedback missing message type.
RtpParameters invalid_feedback = MakeMinimalVp8Parameters();
invalid_feedback.codecs[0].rtcp_feedback.emplace_back(RtcpFeedbackType::NACK);
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER,
sender->Send(invalid_feedback).type());
// Negative payload type.
RtpParameters invalid_pt = MakeMinimalVp8Parameters();
invalid_pt.codecs[0].payload_type = -1;
EXPECT_EQ(RTCErrorType::INVALID_RANGE, sender->Send(invalid_pt).type());
// Duplicate codec payload types.
RtpParameters duplicate_payload_types = MakeMinimalVp8Parameters();
duplicate_payload_types.codecs.push_back(duplicate_payload_types.codecs[0]);
duplicate_payload_types.codecs.back().name = "VP9";
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER,
sender->Send(duplicate_payload_types).type());
}
// Two senders using the same transport shouldn't be able to use the same
// payload type to refer to different codecs, the same header extension IDs to
// refer to different extensions, or the same SSRC.
TEST_F(OrtcRtpSenderTest, SendReturnsErrorOnIdConflicts) {
auto audio_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto video_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
auto audio_sender = audio_sender_result.MoveValue();
auto video_sender = video_sender_result.MoveValue();
// First test payload type conflict.
RtpParameters audio_parameters = MakeMinimalOpusParameters();
RtpParameters video_parameters = MakeMinimalVp8Parameters();
audio_parameters.codecs[0].payload_type = 100;
video_parameters.codecs[0].payload_type = 100;
EXPECT_TRUE(audio_sender->Send(audio_parameters).ok());
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER,
video_sender->Send(video_parameters).type());
// Test header extension ID conflict.
video_parameters.codecs[0].payload_type = 110;
audio_parameters.header_extensions.emplace_back("foo", 4);
video_parameters.header_extensions.emplace_back("bar", 4);
EXPECT_TRUE(audio_sender->Send(audio_parameters).ok());
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER,
video_sender->Send(video_parameters).type());
// Test SSRC conflict. Have an RTX SSRC that conflicts with a primary SSRC
// for extra challenge.
video_parameters.header_extensions[0].uri = "foo";
audio_parameters.encodings[0].ssrc.emplace(0xdeadbeef);
video_parameters.encodings[0].ssrc.emplace(0xabbaabba);
video_parameters.encodings[0].rtx.emplace(0xdeadbeef);
EXPECT_TRUE(audio_sender->Send(audio_parameters).ok());
EXPECT_EQ(RTCErrorType::INVALID_PARAMETER,
video_sender->Send(video_parameters).type());
// Sanity check that parameters can be set if the conflicts are all resolved.
video_parameters.encodings[0].rtx->ssrc.emplace(0xbaadf00d);
EXPECT_TRUE(video_sender->Send(video_parameters).ok());
}
// Ensure that deleting a sender causes send streams at the media engine level
// to be cleared.
TEST_F(OrtcRtpSenderTest, DeletingSenderClearsSendStreams) {
auto audio_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto audio_sender = audio_sender_result.MoveValue();
EXPECT_TRUE(audio_sender->Send(MakeMinimalOpusParameters()).ok());
// Also create an audio receiver, to prevent the voice channel from being
// completely deleted.
auto audio_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto audio_receiver = audio_receiver_result.MoveValue();
EXPECT_TRUE(audio_receiver->Receive(MakeMinimalOpusParameters()).ok());
audio_sender.reset(nullptr);
cricket::FakeVoiceMediaChannel* fake_voice_channel =
fake_media_engine_->GetVoiceChannel(0);
ASSERT_NE(nullptr, fake_voice_channel);
EXPECT_TRUE(fake_voice_channel->send_streams().empty());
auto video_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
auto video_sender = video_sender_result.MoveValue();
EXPECT_TRUE(video_sender->Send(MakeMinimalVp8Parameters()).ok());
// Also create a video receiver, to prevent the video channel from being
// completely deleted.
auto video_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, rtp_transport_.get());
auto video_receiver = video_receiver_result.MoveValue();
EXPECT_TRUE(video_receiver->Receive(MakeMinimalVp8Parameters()).ok());
video_sender.reset(nullptr);
cricket::FakeVideoMediaChannel* fake_video_channel =
fake_media_engine_->GetVideoChannel(0);
ASSERT_NE(nullptr, fake_video_channel);
EXPECT_TRUE(fake_video_channel->send_streams().empty());
}
// If Send hasn't been called, GetParameters should return empty parameters.
TEST_F(OrtcRtpSenderTest, GetDefaultParameters) {
auto result = ortc_factory_->CreateRtpSender(cricket::MEDIA_TYPE_AUDIO,
rtp_transport_.get());
EXPECT_EQ(RtpParameters(), result.value()->GetParameters());
result = ortc_factory_->CreateRtpSender(cricket::MEDIA_TYPE_VIDEO,
rtp_transport_.get());
EXPECT_EQ(RtpParameters(), result.value()->GetParameters());
}
// Test that GetParameters returns the last parameters passed into Send, along
// with the implementation-default values filled in where they were left unset.
TEST_F(OrtcRtpSenderTest,
GetParametersReturnsLastSetParametersWithDefaultsFilled) {
auto audio_sender_result = ortc_factory_->CreateRtpSender(
CreateAudioTrack("audio"), rtp_transport_.get());
auto audio_sender = audio_sender_result.MoveValue();
RtpParameters opus_parameters = MakeMinimalOpusParameters();
EXPECT_TRUE(audio_sender->Send(opus_parameters).ok());
EXPECT_EQ(opus_parameters, audio_sender->GetParameters());
RtpParameters isac_parameters = MakeMinimalIsacParameters();
// Sanity check that num_channels actually is left unset.
ASSERT_FALSE(isac_parameters.codecs[0].num_channels);
EXPECT_TRUE(audio_sender->Send(isac_parameters).ok());
// Should be filled with a default "num channels" of 1.
// TODO(deadbeef): This should actually default to 2 for some codecs. Update
// this test once that's implemented.
isac_parameters.codecs[0].num_channels.emplace(1);
EXPECT_EQ(isac_parameters, audio_sender->GetParameters());
auto video_sender_result = ortc_factory_->CreateRtpSender(
CreateVideoTrack("video"), rtp_transport_.get());
auto video_sender = video_sender_result.MoveValue();
RtpParameters vp8_parameters = MakeMinimalVp8Parameters();
// Sanity check that clock_rate actually is left unset.
ASSERT_FALSE(vp8_parameters.codecs[0].clock_rate);
EXPECT_TRUE(video_sender->Send(vp8_parameters).ok());
// Should be filled with a default clock rate of 90000.
vp8_parameters.codecs[0].clock_rate.emplace(90000);
EXPECT_EQ(vp8_parameters, video_sender->GetParameters());
RtpParameters vp9_parameters = MakeMinimalVp9Parameters();
// Sanity check that clock_rate actually is left unset.
ASSERT_FALSE(vp9_parameters.codecs[0].clock_rate);
EXPECT_TRUE(video_sender->Send(vp9_parameters).ok());
// Should be filled with a default clock rate of 90000.
vp9_parameters.codecs[0].clock_rate.emplace(90000);
EXPECT_EQ(vp9_parameters, video_sender->GetParameters());
}
TEST_F(OrtcRtpSenderTest, GetKind) {
// Construct one sender from the "kind" enum and another from a track.
auto audio_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, rtp_transport_.get());
auto video_sender_result = ortc_factory_->CreateRtpSender(
CreateVideoTrack("video"), rtp_transport_.get());
auto audio_sender = audio_sender_result.MoveValue();
auto video_sender = video_sender_result.MoveValue();
EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, audio_sender->GetKind());
EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, video_sender->GetKind());
}
} // namespace webrtc
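For reference, the caller-side flow these sender tests exercise is small. A minimal sketch, assuming an |ortc_factory| and |rtp_transport| created as in the fixtures above (illustrative only; error handling abbreviated, SSRC is a placeholder):

auto sender_result = ortc_factory->CreateRtpSender(cricket::MEDIA_TYPE_AUDIO,
                                                   rtp_transport.get());
if (!sender_result.ok()) {
  RTC_LOG(LS_ERROR) << "CreateRtpSender failed: "
                    << sender_result.error().message();
  return;
}
std::unique_ptr<OrtcRtpSenderInterface> sender = sender_result.MoveValue();
RtpParameters parameters;
RtpCodecParameters opus;
opus.name = "opus";
opus.kind = cricket::MEDIA_TYPE_AUDIO;
opus.payload_type = 120;
opus.clock_rate.emplace(48000);
opus.num_channels.emplace(2);
parameters.codecs.push_back(std::move(opus));
RtpEncodingParameters encoding;
encoding.ssrc.emplace(0x12345678);  // Placeholder SSRC.
parameters.encodings.push_back(std::move(encoding));
RTCError send_error = sender->Send(parameters);
RTC_DCHECK(send_error.ok());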


@ -1,188 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "ortc/ortcrtpsenderadapter.h"
#include <utility>
#include "media/base/mediaconstants.h"
#include "ortc/rtptransportadapter.h"
#include "rtc_base/checks.h"
namespace {
void FillAudioSenderParameters(webrtc::RtpParameters* parameters) {
for (webrtc::RtpCodecParameters& codec : parameters->codecs) {
if (!codec.num_channels) {
codec.num_channels = 1;
}
}
}
void FillVideoSenderParameters(webrtc::RtpParameters* parameters) {
for (webrtc::RtpCodecParameters& codec : parameters->codecs) {
if (!codec.clock_rate) {
codec.clock_rate = cricket::kVideoCodecClockrate;
}
}
}
} // namespace
namespace webrtc {
BEGIN_OWNED_PROXY_MAP(OrtcRtpSender)
PROXY_SIGNALING_THREAD_DESTRUCTOR()
PROXY_METHOD1(RTCError, SetTrack, MediaStreamTrackInterface*)
PROXY_CONSTMETHOD0(rtc::scoped_refptr<MediaStreamTrackInterface>, GetTrack)
PROXY_METHOD1(RTCError, SetTransport, RtpTransportInterface*)
PROXY_CONSTMETHOD0(RtpTransportInterface*, GetTransport)
PROXY_METHOD1(RTCError, Send, const RtpParameters&)
PROXY_CONSTMETHOD0(RtpParameters, GetParameters)
PROXY_CONSTMETHOD0(cricket::MediaType, GetKind)
END_PROXY_MAP()
// static
std::unique_ptr<OrtcRtpSenderInterface> OrtcRtpSenderAdapter::CreateProxy(
std::unique_ptr<OrtcRtpSenderAdapter> wrapped_sender) {
RTC_DCHECK(wrapped_sender);
rtc::Thread* signaling =
wrapped_sender->rtp_transport_controller_->signaling_thread();
rtc::Thread* worker =
wrapped_sender->rtp_transport_controller_->worker_thread();
return OrtcRtpSenderProxy::Create(signaling, worker,
std::move(wrapped_sender));
}
OrtcRtpSenderAdapter::~OrtcRtpSenderAdapter() {
internal_sender_ = nullptr;
SignalDestroyed();
}
RTCError OrtcRtpSenderAdapter::SetTrack(MediaStreamTrackInterface* track) {
if (track && cricket::MediaTypeFromString(track->kind()) != kind_) {
LOG_AND_RETURN_ERROR(
RTCErrorType::INVALID_PARAMETER,
"Track kind (audio/video) doesn't match the kind of this sender.");
}
if (internal_sender_ && !internal_sender_->SetTrack(track)) {
// Since we checked the track type above, this should never happen...
RTC_NOTREACHED();
LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
"Failed to set track on RtpSender.");
}
track_ = track;
return RTCError::OK();
}
rtc::scoped_refptr<MediaStreamTrackInterface> OrtcRtpSenderAdapter::GetTrack()
const {
return track_;
}
RTCError OrtcRtpSenderAdapter::SetTransport(RtpTransportInterface* transport) {
LOG_AND_RETURN_ERROR(
RTCErrorType::UNSUPPORTED_OPERATION,
"Changing the transport of an RtpSender is not yet supported.");
}
RtpTransportInterface* OrtcRtpSenderAdapter::GetTransport() const {
return transport_;
}
RTCError OrtcRtpSenderAdapter::Send(const RtpParameters& parameters) {
RtpParameters filled_parameters = parameters;
RTCError err;
uint32_t ssrc = 0;
switch (kind_) {
case cricket::MEDIA_TYPE_AUDIO:
FillAudioSenderParameters(&filled_parameters);
err = rtp_transport_controller_->ValidateAndApplyAudioSenderParameters(
filled_parameters, &ssrc);
if (!err.ok()) {
return err;
}
break;
case cricket::MEDIA_TYPE_VIDEO:
FillVideoSenderParameters(&filled_parameters);
err = rtp_transport_controller_->ValidateAndApplyVideoSenderParameters(
filled_parameters, &ssrc);
if (!err.ok()) {
return err;
}
break;
case cricket::MEDIA_TYPE_DATA:
RTC_NOTREACHED();
return webrtc::RTCError(webrtc::RTCErrorType::INTERNAL_ERROR);
}
last_applied_parameters_ = filled_parameters;
// Now that the parameters have been applied, we can call SetSsrc on the
// internal sender.
// This is analogous to a PeerConnection calling SetSsrc after
// SetLocalDescription is successful.
//
// If there were no encodings, this SSRC may be 0, which is valid.
if (!internal_sender_) {
CreateInternalSender();
}
internal_sender_->SetSsrc(ssrc);
return RTCError::OK();
}
RtpParameters OrtcRtpSenderAdapter::GetParameters() const {
return last_applied_parameters_;
}
cricket::MediaType OrtcRtpSenderAdapter::GetKind() const {
return kind_;
}
OrtcRtpSenderAdapter::OrtcRtpSenderAdapter(
cricket::MediaType kind,
RtpTransportInterface* transport,
RtpTransportControllerAdapter* rtp_transport_controller)
: kind_(kind),
transport_(transport),
rtp_transport_controller_(rtp_transport_controller) {}
void OrtcRtpSenderAdapter::CreateInternalSender() {
switch (kind_) {
case cricket::MEDIA_TYPE_AUDIO: {
auto* audio_sender = new AudioRtpSender(
rtp_transport_controller_->worker_thread(), /*id=*/"", nullptr);
auto* voice_channel = rtp_transport_controller_->voice_channel();
RTC_DCHECK(voice_channel);
audio_sender->SetVoiceMediaChannel(voice_channel->media_channel());
internal_sender_ = audio_sender;
break;
}
case cricket::MEDIA_TYPE_VIDEO: {
auto* video_sender = new VideoRtpSender(
rtp_transport_controller_->worker_thread(), /*id=*/"");
auto* video_channel = rtp_transport_controller_->video_channel();
RTC_DCHECK(video_channel);
video_sender->SetVideoMediaChannel(video_channel->media_channel());
internal_sender_ = video_sender;
break;
}
case cricket::MEDIA_TYPE_DATA:
RTC_NOTREACHED();
}
if (track_) {
if (!internal_sender_->SetTrack(track_)) {
// Since we checked the track type when it was set, this should never
// happen...
RTC_NOTREACHED();
}
}
}
} // namespace webrtc
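The FillAudioSenderParameters/FillVideoSenderParameters helpers above are what produce the defaults later observed through GetParameters. A minimal sketch of the observable effect, assuming a |sender| created through OrtcFactory as in the unit tests (illustrative only):

RtpParameters parameters = MakeMinimalIsacParameters();  // num_channels unset.
RTCError error = sender->Send(parameters);
RTC_DCHECK(error.ok());
// The adapter stores the filled-in copy, so GetParameters() reports the
// default of one channel (see FillAudioSenderParameters above).
RTC_DCHECK_EQ(1, *sender->GetParameters().codecs[0].num_channels);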


@ -1,79 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef ORTC_ORTCRTPSENDERADAPTER_H_
#define ORTC_ORTCRTPSENDERADAPTER_H_
#include <memory>
#include "api/ortc/ortcrtpsenderinterface.h"
#include "api/rtcerror.h"
#include "api/rtpparameters.h"
#include "ortc/rtptransportcontrolleradapter.h"
#include "pc/rtpsender.h"
#include "rtc_base/constructormagic.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
namespace webrtc {
// Implementation of OrtcRtpSenderInterface that works with RtpTransportAdapter,
// and wraps a VideoRtpSender/AudioRtpSender that's normally used with the
// PeerConnection.
//
// TODO(deadbeef): When BaseChannel is split apart into separate
// "RtpSender"/"RtpTransceiver"/"RtpSender"/"RtpReceiver" objects, this adapter
// object can be removed.
class OrtcRtpSenderAdapter : public OrtcRtpSenderInterface {
public:
// Wraps |wrapped_sender| in a proxy that will safely call methods on the
// correct thread.
static std::unique_ptr<OrtcRtpSenderInterface> CreateProxy(
std::unique_ptr<OrtcRtpSenderAdapter> wrapped_sender);
// Should only be called by RtpTransportControllerAdapter.
OrtcRtpSenderAdapter(cricket::MediaType kind,
RtpTransportInterface* transport,
RtpTransportControllerAdapter* rtp_transport_controller);
~OrtcRtpSenderAdapter() override;
// OrtcRtpSenderInterface implementation.
RTCError SetTrack(MediaStreamTrackInterface* track) override;
rtc::scoped_refptr<MediaStreamTrackInterface> GetTrack() const override;
RTCError SetTransport(RtpTransportInterface* transport) override;
RtpTransportInterface* GetTransport() const override;
RTCError Send(const RtpParameters& parameters) override;
RtpParameters GetParameters() const override;
cricket::MediaType GetKind() const override;
// Used so that the RtpTransportControllerAdapter knows when it can
// deallocate resources allocated for this object.
sigslot::signal0<> SignalDestroyed;
private:
void CreateInternalSender();
cricket::MediaType kind_;
RtpTransportInterface* transport_;
RtpTransportControllerAdapter* rtp_transport_controller_;
// Scoped refptr due to ref-counted interface, but we should be the only
// reference holder.
rtc::scoped_refptr<RtpSenderInternal> internal_sender_;
rtc::scoped_refptr<MediaStreamTrackInterface> track_;
RtpParameters last_applied_parameters_;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(OrtcRtpSenderAdapter);
};
} // namespace webrtc
#endif // ORTC_ORTCRTPSENDERADAPTER_H_
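As the comments above note, construction is meant to happen inside RtpTransportControllerAdapter. A minimal sketch of that path, assuming an existing |transport| and |controller| (illustrative only):

auto wrapped = absl::make_unique<OrtcRtpSenderAdapter>(
    cricket::MEDIA_TYPE_AUDIO, transport, controller);
// CreateProxy wraps the adapter so that calls are marshalled to the correct
// signaling/worker threads.
std::unique_ptr<OrtcRtpSenderInterface> proxied_sender =
    OrtcRtpSenderAdapter::CreateProxy(std::move(wrapped));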


@ -1,286 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <memory>
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "media/base/fakemediaengine.h"
#include "ortc/ortcfactory.h"
#include "ortc/testrtpparameters.h"
#include "p2p/base/fakepackettransport.h"
#include "rtc_base/gunit.h"
namespace webrtc {
// This test uses fake packet transports and a fake media engine, in order to
// test the RtpTransport at the API level only. Any end-to-end test should go
// in ortcfactory_integrationtest.cc instead.
class RtpTransportTest : public testing::Test {
public:
RtpTransportTest() {
fake_media_engine_ = new cricket::FakeMediaEngine();
// Note: This doesn't need to use fake network classes, since it uses
// FakePacketTransports.
auto result = OrtcFactory::Create(
nullptr, nullptr, nullptr, nullptr, nullptr,
std::unique_ptr<cricket::MediaEngineInterface>(fake_media_engine_),
CreateBuiltinAudioEncoderFactory(), CreateBuiltinAudioDecoderFactory());
ortc_factory_ = result.MoveValue();
}
protected:
// Owned by |ortc_factory_|.
cricket::FakeMediaEngine* fake_media_engine_;
std::unique_ptr<OrtcFactoryInterface> ortc_factory_;
};
// Test GetRtpPacketTransport and GetRtcpPacketTransport, with and without RTCP
// muxing.
TEST_F(RtpTransportTest, GetPacketTransports) {
rtc::FakePacketTransport rtp("rtp");
rtc::FakePacketTransport rtcp("rtcp");
// With muxed RTCP.
RtpTransportParameters parameters;
parameters.rtcp.mux = true;
auto result =
ortc_factory_->CreateRtpTransport(parameters, &rtp, nullptr, nullptr);
ASSERT_TRUE(result.ok());
EXPECT_EQ(&rtp, result.value()->GetRtpPacketTransport());
EXPECT_EQ(nullptr, result.value()->GetRtcpPacketTransport());
result.MoveValue().reset();
// With non-muxed RTCP.
parameters.rtcp.mux = false;
result = ortc_factory_->CreateRtpTransport(parameters, &rtp, &rtcp, nullptr);
ASSERT_TRUE(result.ok());
EXPECT_EQ(&rtp, result.value()->GetRtpPacketTransport());
EXPECT_EQ(&rtcp, result.value()->GetRtcpPacketTransport());
}
// If an RtpTransport starts out un-muxed and then starts muxing, the RTCP
// packet transport should be forgotten and GetRtcpPacketTransport should
// return null.
TEST_F(RtpTransportTest, EnablingRtcpMuxingUnsetsRtcpTransport) {
rtc::FakePacketTransport rtp("rtp");
rtc::FakePacketTransport rtcp("rtcp");
// Create non-muxed.
RtpTransportParameters parameters;
parameters.rtcp.mux = false;
auto result =
ortc_factory_->CreateRtpTransport(parameters, &rtp, &rtcp, nullptr);
ASSERT_TRUE(result.ok());
auto rtp_transport = result.MoveValue();
// Enable muxing.
parameters.rtcp.mux = true;
EXPECT_TRUE(rtp_transport->SetParameters(parameters).ok());
EXPECT_EQ(nullptr, rtp_transport->GetRtcpPacketTransport());
}
TEST_F(RtpTransportTest, GetAndSetRtcpParameters) {
rtc::FakePacketTransport rtp("rtp");
rtc::FakePacketTransport rtcp("rtcp");
// Start with non-muxed RTCP.
RtpTransportParameters parameters;
parameters.rtcp.mux = false;
parameters.rtcp.cname = "teST";
parameters.rtcp.reduced_size = false;
auto result =
ortc_factory_->CreateRtpTransport(parameters, &rtp, &rtcp, nullptr);
ASSERT_TRUE(result.ok());
auto transport = result.MoveValue();
EXPECT_EQ(parameters, transport->GetParameters());
// Changing the CNAME is currently unsupported.
parameters.rtcp.cname = "different";
EXPECT_EQ(RTCErrorType::UNSUPPORTED_OPERATION,
transport->SetParameters(parameters).type());
parameters.rtcp.cname = "teST";
// Enable RTCP muxing and reduced-size RTCP.
parameters.rtcp.mux = true;
parameters.rtcp.reduced_size = true;
EXPECT_TRUE(transport->SetParameters(parameters).ok());
EXPECT_EQ(parameters, transport->GetParameters());
// Empty CNAME should result in the existing CNAME being used.
parameters.rtcp.cname.clear();
EXPECT_TRUE(transport->SetParameters(parameters).ok());
EXPECT_EQ("teST", transport->GetParameters().rtcp.cname);
// Disabling RTCP muxing after enabling shouldn't be allowed, since enabling
// muxing should have made the RTP transport forget about the RTCP packet
// transport initially passed into it.
parameters.rtcp.mux = false;
EXPECT_EQ(RTCErrorType::INVALID_STATE,
transport->SetParameters(parameters).type());
}
// When Send or Receive is called on a sender or receiver, the RTCP parameters
// from the RtpTransport underneath the sender should be applied to the created
// media stream. The only relevant parameters (currently) are |cname| and
// |reduced_size|.
TEST_F(RtpTransportTest, SendAndReceiveApplyRtcpParametersToMediaEngine) {
// First, create video transport with reduced-size RTCP.
rtc::FakePacketTransport fake_packet_transport1("1");
RtpTransportParameters parameters;
parameters.rtcp.mux = true;
parameters.rtcp.reduced_size = true;
parameters.rtcp.cname = "foo";
auto rtp_transport_result = ortc_factory_->CreateRtpTransport(
parameters, &fake_packet_transport1, nullptr, nullptr);
auto video_transport = rtp_transport_result.MoveValue();
// Create video sender and call Send, expecting parameters to be applied.
auto sender_result = ortc_factory_->CreateRtpSender(cricket::MEDIA_TYPE_VIDEO,
video_transport.get());
auto video_sender = sender_result.MoveValue();
EXPECT_TRUE(video_sender->Send(MakeMinimalVp8Parameters()).ok());
cricket::FakeVideoMediaChannel* fake_video_channel =
fake_media_engine_->GetVideoChannel(0);
ASSERT_NE(nullptr, fake_video_channel);
EXPECT_TRUE(fake_video_channel->send_rtcp_parameters().reduced_size);
ASSERT_EQ(1u, fake_video_channel->send_streams().size());
const cricket::StreamParams& video_send_stream =
fake_video_channel->send_streams()[0];
EXPECT_EQ("foo", video_send_stream.cname);
// Create video receiver and call Receive, expecting parameters to be applied
// (minus |cname|, since that's the sent cname, not received).
auto receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, video_transport.get());
auto video_receiver = receiver_result.MoveValue();
EXPECT_TRUE(
video_receiver->Receive(MakeMinimalVp8ParametersWithSsrc(0xdeadbeef))
.ok());
EXPECT_TRUE(fake_video_channel->recv_rtcp_parameters().reduced_size);
// Create audio transport with non-reduced size RTCP.
rtc::FakePacketTransport fake_packet_transport2("2");
parameters.rtcp.reduced_size = false;
parameters.rtcp.cname = "bar";
rtp_transport_result = ortc_factory_->CreateRtpTransport(
parameters, &fake_packet_transport2, nullptr, nullptr);
auto audio_transport = rtp_transport_result.MoveValue();
// Create audio sender and call Send, expecting parameters to be applied.
sender_result = ortc_factory_->CreateRtpSender(cricket::MEDIA_TYPE_AUDIO,
audio_transport.get());
auto audio_sender = sender_result.MoveValue();
EXPECT_TRUE(audio_sender->Send(MakeMinimalIsacParameters()).ok());
cricket::FakeVoiceMediaChannel* fake_voice_channel =
fake_media_engine_->GetVoiceChannel(0);
ASSERT_NE(nullptr, fake_voice_channel);
EXPECT_FALSE(fake_voice_channel->send_rtcp_parameters().reduced_size);
ASSERT_EQ(1u, fake_voice_channel->send_streams().size());
const cricket::StreamParams& audio_send_stream =
fake_voice_channel->send_streams()[0];
EXPECT_EQ("bar", audio_send_stream.cname);
// Create audio receiver and call Receive, expecting parameters to be applied
// (minus |cname|, since that's the sent cname, not received).
receiver_result = ortc_factory_->CreateRtpReceiver(cricket::MEDIA_TYPE_AUDIO,
audio_transport.get());
auto audio_receiver = receiver_result.MoveValue();
EXPECT_TRUE(
audio_receiver->Receive(MakeMinimalOpusParametersWithSsrc(0xbaadf00d))
.ok());
EXPECT_FALSE(fake_voice_channel->recv_rtcp_parameters().reduced_size);
}
// When SetParameters is called, the modified parameters should be applied
// to the media engine.
// TODO(deadbeef): Once the implementation supports changing the CNAME,
// test that here.
TEST_F(RtpTransportTest, SetRtcpParametersAppliesParametersToMediaEngine) {
rtc::FakePacketTransport fake_packet_transport("fake");
RtpTransportParameters parameters;
parameters.rtcp.mux = true;
parameters.rtcp.reduced_size = false;
auto rtp_transport_result = ortc_factory_->CreateRtpTransport(
parameters, &fake_packet_transport, nullptr, nullptr);
auto rtp_transport = rtp_transport_result.MoveValue();
// Create video sender and call Send, applying an initial set of parameters.
auto sender_result = ortc_factory_->CreateRtpSender(cricket::MEDIA_TYPE_VIDEO,
rtp_transport.get());
auto sender = sender_result.MoveValue();
EXPECT_TRUE(sender->Send(MakeMinimalVp8Parameters()).ok());
// Modify parameters and expect them to be changed at the media engine level.
parameters.rtcp.reduced_size = true;
EXPECT_TRUE(rtp_transport->SetParameters(parameters).ok());
cricket::FakeVideoMediaChannel* fake_video_channel =
fake_media_engine_->GetVideoChannel(0);
ASSERT_NE(nullptr, fake_video_channel);
EXPECT_TRUE(fake_video_channel->send_rtcp_parameters().reduced_size);
}
// SetParameters should set keepalive for all RTP transports.
// Keepalive parameters can't be modified once any streams have been created.
// Note: This is an implementation detail of the current way of configuring the
// keep-alive. It may change in the future.
TEST_F(RtpTransportTest, CantChangeKeepAliveAfterCreatedSendStreams) {
rtc::FakePacketTransport fake_packet_transport("fake");
RtpTransportParameters parameters;
parameters.keepalive.timeout_interval_ms = 100;
auto rtp_transport_result = ortc_factory_->CreateRtpTransport(
parameters, &fake_packet_transport, nullptr, nullptr);
ASSERT_TRUE(rtp_transport_result.ok());
std::unique_ptr<RtpTransportInterface> rtp_transport =
rtp_transport_result.MoveValue();
// Updating keepalive parameters is ok, since no rtp sender created.
parameters.keepalive.timeout_interval_ms = 200;
EXPECT_TRUE(rtp_transport->SetParameters(parameters).ok());
// Create video sender. Note: |sender_result| scope must extend past the
// SetParameters() call below.
auto sender_result = ortc_factory_->CreateRtpSender(cricket::MEDIA_TYPE_VIDEO,
rtp_transport.get());
EXPECT_TRUE(sender_result.ok());
// Modify parameters second time after video send stream created.
parameters.keepalive.timeout_interval_ms = 10;
EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION,
rtp_transport->SetParameters(parameters).type());
}
// Note: This is an implementation detail of the current way of configuring the
// keep-alive. It may change in the future.
TEST_F(RtpTransportTest, KeepAliveMustBeSameAcrossTransportController) {
rtc::FakePacketTransport fake_packet_transport("fake");
RtpTransportParameters parameters;
parameters.keepalive.timeout_interval_ms = 100;
// Manually create a controller, that can be shared by multiple transports.
auto controller_result = ortc_factory_->CreateRtpTransportController();
ASSERT_TRUE(controller_result.ok());
std::unique_ptr<RtpTransportControllerInterface> controller =
controller_result.MoveValue();
// Create a first transport.
auto first_transport_result = ortc_factory_->CreateRtpTransport(
parameters, &fake_packet_transport, nullptr, controller.get());
ASSERT_TRUE(first_transport_result.ok());
// Update the parameters, and create another transport for the same
// controller.
parameters.keepalive.timeout_interval_ms = 10;
auto second_transport_result = ortc_factory_->CreateRtpTransport(
parameters, &fake_packet_transport, nullptr, controller.get());
EXPECT_EQ(RTCErrorType::INVALID_MODIFICATION,
            second_transport_result.error().type());
}
} // namespace webrtc
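A minimal sketch of how the RtpTransportParameters used throughout these tests are assembled outside of a test, assuming an existing |ortc_factory| and a |packet_transport| (illustrative only; the values shown are placeholders):

RtpTransportParameters parameters;
parameters.rtcp.mux = true;           // RTP and RTCP over one packet transport.
parameters.rtcp.reduced_size = true;  // Use reduced-size RTCP.
parameters.rtcp.cname = "example-cname";
parameters.keepalive.timeout_interval_ms = 2000;
auto transport_result = ortc_factory->CreateRtpTransport(
    parameters, packet_transport, /*rtcp=*/nullptr,
    /*transport_controller=*/nullptr);
RTC_DCHECK(transport_result.ok());
auto rtp_transport = transport_result.MoveValue();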


@ -1,230 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "ortc/rtptransportadapter.h"
#include <algorithm> // For std::find.
#include <set>
#include <utility> // For std::move.
#include "absl/memory/memory.h"
#include "api/proxy.h"
#include "rtc_base/logging.h"
namespace webrtc {
BEGIN_OWNED_PROXY_MAP(RtpTransport)
PROXY_SIGNALING_THREAD_DESTRUCTOR()
PROXY_CONSTMETHOD0(PacketTransportInterface*, GetRtpPacketTransport)
PROXY_CONSTMETHOD0(PacketTransportInterface*, GetRtcpPacketTransport)
PROXY_METHOD1(RTCError, SetParameters, const RtpTransportParameters&)
PROXY_CONSTMETHOD0(RtpTransportParameters, GetParameters)
protected:
RtpTransportAdapter* GetInternal() override {
return internal();
}
END_PROXY_MAP()
BEGIN_OWNED_PROXY_MAP(SrtpTransport)
PROXY_SIGNALING_THREAD_DESTRUCTOR()
PROXY_CONSTMETHOD0(PacketTransportInterface*, GetRtpPacketTransport)
PROXY_CONSTMETHOD0(PacketTransportInterface*, GetRtcpPacketTransport)
PROXY_METHOD1(RTCError, SetParameters, const RtpTransportParameters&)
PROXY_CONSTMETHOD0(RtpTransportParameters, GetParameters)
PROXY_METHOD1(RTCError, SetSrtpSendKey, const cricket::CryptoParams&)
PROXY_METHOD1(RTCError, SetSrtpReceiveKey, const cricket::CryptoParams&)
protected:
RtpTransportAdapter* GetInternal() override {
return internal();
}
END_PROXY_MAP()
// static
RTCErrorOr<std::unique_ptr<RtpTransportInterface>>
RtpTransportAdapter::CreateProxied(
const RtpTransportParameters& parameters,
PacketTransportInterface* rtp,
PacketTransportInterface* rtcp,
RtpTransportControllerAdapter* rtp_transport_controller) {
if (!rtp) {
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
"Must provide an RTP packet transport.");
}
if (!parameters.rtcp.mux && !rtcp) {
LOG_AND_RETURN_ERROR(
RTCErrorType::INVALID_PARAMETER,
"Must provide an RTCP packet transport when RTCP muxing is not used.");
}
if (parameters.rtcp.mux && rtcp) {
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
"Creating an RtpTransport with RTCP muxing enabled, "
"with a separate RTCP packet transport?");
}
if (!rtp_transport_controller) {
// Since OrtcFactory::CreateRtpTransport creates an RtpTransportController
// automatically when one isn't passed in, this should never be reached.
RTC_NOTREACHED();
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
"Must provide an RTP transport controller.");
}
std::unique_ptr<RtpTransportAdapter> transport_adapter(
new RtpTransportAdapter(parameters.rtcp, rtp, rtcp,
rtp_transport_controller,
false /*is_srtp_transport*/));
RTCError params_result = transport_adapter->SetParameters(parameters);
if (!params_result.ok()) {
return std::move(params_result);
}
return RtpTransportProxyWithInternal<RtpTransportAdapter>::Create(
rtp_transport_controller->signaling_thread(),
rtp_transport_controller->worker_thread(), std::move(transport_adapter));
}
RTCErrorOr<std::unique_ptr<SrtpTransportInterface>>
RtpTransportAdapter::CreateSrtpProxied(
const RtpTransportParameters& parameters,
PacketTransportInterface* rtp,
PacketTransportInterface* rtcp,
RtpTransportControllerAdapter* rtp_transport_controller) {
if (!rtp) {
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
"Must provide an RTP packet transport.");
}
if (!parameters.rtcp.mux && !rtcp) {
LOG_AND_RETURN_ERROR(
RTCErrorType::INVALID_PARAMETER,
"Must provide an RTCP packet transport when RTCP muxing is not used.");
}
if (parameters.rtcp.mux && rtcp) {
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
"Creating an RtpTransport with RTCP muxing enabled, "
"with a separate RTCP packet transport?");
}
if (!rtp_transport_controller) {
// Since OrtcFactory::CreateRtpTransport creates an RtpTransportController
// automatically when one isn't passed in, this should never be reached.
RTC_NOTREACHED();
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER,
"Must provide an RTP transport controller.");
}
std::unique_ptr<RtpTransportAdapter> transport_adapter;
transport_adapter.reset(new RtpTransportAdapter(parameters.rtcp, rtp, rtcp,
rtp_transport_controller,
true /*is_srtp_transport*/));
RTCError params_result = transport_adapter->SetParameters(parameters);
if (!params_result.ok()) {
return std::move(params_result);
}
return SrtpTransportProxyWithInternal<RtpTransportAdapter>::Create(
rtp_transport_controller->signaling_thread(),
rtp_transport_controller->worker_thread(), std::move(transport_adapter));
}
void RtpTransportAdapter::TakeOwnershipOfRtpTransportController(
std::unique_ptr<RtpTransportControllerInterface> controller) {
RTC_DCHECK_EQ(rtp_transport_controller_, controller->GetInternal());
RTC_DCHECK(owned_rtp_transport_controller_.get() == nullptr);
owned_rtp_transport_controller_ = std::move(controller);
}
RtpTransportAdapter::RtpTransportAdapter(
const RtcpParameters& rtcp_params,
PacketTransportInterface* rtp,
PacketTransportInterface* rtcp,
RtpTransportControllerAdapter* rtp_transport_controller,
bool is_srtp_transport)
: rtp_packet_transport_(rtp),
rtcp_packet_transport_(rtcp),
rtp_transport_controller_(rtp_transport_controller),
network_thread_(rtp_transport_controller_->network_thread()) {
parameters_.rtcp = rtcp_params;
// CNAME should have been filled by OrtcFactory if empty.
RTC_DCHECK(!parameters_.rtcp.cname.empty());
RTC_DCHECK(rtp_transport_controller);
if (is_srtp_transport) {
srtp_transport_ = absl::make_unique<SrtpTransport>(rtcp == nullptr);
transport_ = srtp_transport_.get();
} else {
unencrypted_rtp_transport_ =
absl::make_unique<RtpTransport>(rtcp == nullptr);
transport_ = unencrypted_rtp_transport_.get();
}
RTC_DCHECK(transport_);
network_thread_->Invoke<void>(RTC_FROM_HERE, [=] {
SetRtpPacketTransport(rtp->GetInternal());
if (rtcp) {
SetRtcpPacketTransport(rtcp->GetInternal());
}
});
transport_->SignalReadyToSend.connect(this,
&RtpTransportAdapter::OnReadyToSend);
transport_->SignalRtcpPacketReceived.connect(
this, &RtpTransportAdapter::OnRtcpPacketReceived);
transport_->SignalWritableState.connect(
this, &RtpTransportAdapter::OnWritableState);
}
RtpTransportAdapter::~RtpTransportAdapter() {
SignalDestroyed(this);
}
RTCError RtpTransportAdapter::SetParameters(
const RtpTransportParameters& parameters) {
if (!parameters.rtcp.mux && parameters_.rtcp.mux) {
LOG_AND_RETURN_ERROR(webrtc::RTCErrorType::INVALID_STATE,
"Can't disable RTCP muxing after enabling.");
}
if (!parameters.rtcp.cname.empty() &&
parameters.rtcp.cname != parameters_.rtcp.cname) {
LOG_AND_RETURN_ERROR(webrtc::RTCErrorType::UNSUPPORTED_OPERATION,
"Changing the RTCP CNAME is currently unsupported.");
}
// If the CNAME is empty, use the existing one.
RtpTransportParameters copy = parameters;
if (copy.rtcp.cname.empty()) {
copy.rtcp.cname = parameters_.rtcp.cname;
}
RTCError err =
rtp_transport_controller_->SetRtpTransportParameters(copy, this);
if (!err.ok()) {
return err;
}
parameters_ = copy;
if (parameters_.rtcp.mux) {
rtcp_packet_transport_ = nullptr;
}
return RTCError::OK();
}
RTCError RtpTransportAdapter::SetSrtpSendKey(
const cricket::CryptoParams& params) {
if (!network_thread_->IsCurrent()) {
return network_thread_->Invoke<RTCError>(
RTC_FROM_HERE, [&] { return SetSrtpSendKey(params); });
}
return transport_->SetSrtpSendKey(params);
}
RTCError RtpTransportAdapter::SetSrtpReceiveKey(
const cricket::CryptoParams& params) {
if (!network_thread_->IsCurrent()) {
return network_thread_->Invoke<RTCError>(
RTC_FROM_HERE, [&] { return SetSrtpReceiveKey(params); });
}
return transport_->SetSrtpReceiveKey(params);
}
} // namespace webrtc
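For the SRTP variant created through CreateSrtpProxied, keys are installed with SetSrtpSendKey/SetSrtpReceiveKey as shown above. A minimal sketch, assuming an |srtp_transport| of type SrtpTransportInterface obtained from the factory, and assuming the usual cricket::CryptoParams fields (the cipher suite and base64 key below are placeholders):

cricket::CryptoParams send_params;
send_params.cipher_suite = "AES_CM_128_HMAC_SHA1_80";
send_params.key_params = "inline:PS1uQCVeeCFCanVmcjkpPywjNWhcYD0mXXtxaVBR";
RTCError send_err = srtp_transport->SetSrtpSendKey(send_params);
RTC_DCHECK(send_err.ok());
// The remote side's key, exchanged out of band, is installed the same way.
cricket::CryptoParams recv_params = send_params;  // Placeholder only.
RTC_DCHECK(srtp_transport->SetSrtpReceiveKey(recv_params).ok());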


@ -1,121 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef ORTC_RTPTRANSPORTADAPTER_H_
#define ORTC_RTPTRANSPORTADAPTER_H_
#include <memory>
#include <vector>
#include "api/rtcerror.h"
#include "media/base/streamparams.h"
#include "ortc/rtptransportcontrolleradapter.h"
#include "pc/channel.h"
#include "pc/rtptransportinternaladapter.h"
#include "pc/srtptransport.h"
#include "rtc_base/constructormagic.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
namespace webrtc {
// This class is a wrapper over an RtpTransport or an SrtpTransport. The base
// class RtpTransportInternalAdapter keeps a raw pointer, |transport_|, of the
// transport object and implements both the public SrtpTransportInterface and
// RtpTransport internal interface by calling the |transport_| underneath.
//
// This adapter can be used as an unencrypted RTP transport or an SrtpTransport
// with RtpSenderAdapter, RtpReceiverAdapter, and RtpTransportControllerAdapter.
//
// TODO(deadbeef): When BaseChannel is split apart into separate
// "RtpTransport"/"RtpTransceiver"/"RtpSender"/"RtpReceiver" objects, this
// adapter object can be removed.
class RtpTransportAdapter : public RtpTransportInternalAdapter {
public:
// |rtp| can't be null. |rtcp| can if RTCP muxing is used immediately (meaning
// |rtcp_parameters.mux| is also true).
static RTCErrorOr<std::unique_ptr<RtpTransportInterface>> CreateProxied(
const RtpTransportParameters& rtcp_parameters,
PacketTransportInterface* rtp,
PacketTransportInterface* rtcp,
RtpTransportControllerAdapter* rtp_transport_controller);
static RTCErrorOr<std::unique_ptr<SrtpTransportInterface>> CreateSrtpProxied(
const RtpTransportParameters& rtcp_parameters,
PacketTransportInterface* rtp,
PacketTransportInterface* rtcp,
RtpTransportControllerAdapter* rtp_transport_controller);
~RtpTransportAdapter() override;
// RtpTransportInterface implementation.
PacketTransportInterface* GetRtpPacketTransport() const override {
return rtp_packet_transport_;
}
PacketTransportInterface* GetRtcpPacketTransport() const override {
return rtcp_packet_transport_;
}
RTCError SetParameters(const RtpTransportParameters& parameters) override;
RtpTransportParameters GetParameters() const override { return parameters_; }
// SRTP specific implementation.
RTCError SetSrtpSendKey(const cricket::CryptoParams& params) override;
RTCError SetSrtpReceiveKey(const cricket::CryptoParams& params) override;
// Methods used internally by OrtcFactory.
RtpTransportControllerAdapter* rtp_transport_controller() {
return rtp_transport_controller_;
}
void TakeOwnershipOfRtpTransportController(
std::unique_ptr<RtpTransportControllerInterface> controller);
// Used by RtpTransportControllerAdapter to tell when it should stop
// returning this transport from GetTransports().
sigslot::signal1<RtpTransportAdapter*> SignalDestroyed;
bool IsSrtpActive() const override { return transport_->IsSrtpActive(); }
protected:
RtpTransportAdapter* GetInternal() override { return this; }
private:
RtpTransportAdapter(const RtcpParameters& rtcp_params,
PacketTransportInterface* rtp,
PacketTransportInterface* rtcp,
RtpTransportControllerAdapter* rtp_transport_controller,
bool is_srtp_transport);
void OnReadyToSend(bool ready) { SignalReadyToSend(ready); }
void OnRtcpPacketReceived(rtc::CopyOnWriteBuffer* packet,
const rtc::PacketTime& time) {
SignalRtcpPacketReceived(packet, time);
}
void OnWritableState(bool writable) { SignalWritableState(writable); }
PacketTransportInterface* rtp_packet_transport_ = nullptr;
PacketTransportInterface* rtcp_packet_transport_ = nullptr;
RtpTransportControllerAdapter* const rtp_transport_controller_ = nullptr;
// Non-null if this class owns the transport controller.
std::unique_ptr<RtpTransportControllerInterface>
owned_rtp_transport_controller_;
RtpTransportParameters parameters_;
// Only one of these is non-null.
std::unique_ptr<RtpTransport> unencrypted_rtp_transport_;
std::unique_ptr<SrtpTransport> srtp_transport_;
rtc::Thread* network_thread_ = nullptr;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RtpTransportAdapter);
};
} // namespace webrtc
#endif // ORTC_RTPTRANSPORTADAPTER_H_


@ -1,199 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <memory>
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "media/base/fakemediaengine.h"
#include "ortc/ortcfactory.h"
#include "ortc/testrtpparameters.h"
#include "p2p/base/fakepackettransport.h"
#include "rtc_base/gunit.h"
namespace webrtc {
// This test uses fake packet transports and a fake media engine, in order to
// test the RtpTransportController at the API level only. Any end-to-end test
// should go in ortcfactory_integrationtest.cc instead.
//
// Currently, this test mainly focuses on the limitations of the "adapter"
// RtpTransportController implementation: only one sender/receiver of each
// media type can be created, and the sender and receiver of the same media
// type must use the same transport.
class RtpTransportControllerTest : public testing::Test {
public:
RtpTransportControllerTest() {
// Note: This doesn't need to use fake network classes, since it uses
// FakePacketTransports.
auto result = OrtcFactory::Create(
nullptr, nullptr, nullptr, nullptr, nullptr,
std::unique_ptr<cricket::MediaEngineInterface>(
new cricket::FakeMediaEngine()),
CreateBuiltinAudioEncoderFactory(), CreateBuiltinAudioDecoderFactory());
ortc_factory_ = result.MoveValue();
rtp_transport_controller_ =
ortc_factory_->CreateRtpTransportController().MoveValue();
}
protected:
std::unique_ptr<OrtcFactoryInterface> ortc_factory_;
std::unique_ptr<RtpTransportControllerInterface> rtp_transport_controller_;
};
TEST_F(RtpTransportControllerTest, GetTransports) {
rtc::FakePacketTransport packet_transport1("one");
rtc::FakePacketTransport packet_transport2("two");
auto rtp_transport_result1 = ortc_factory_->CreateRtpTransport(
MakeRtcpMuxParameters(), &packet_transport1, nullptr,
rtp_transport_controller_.get());
ASSERT_TRUE(rtp_transport_result1.ok());
auto rtp_transport_result2 = ortc_factory_->CreateRtpTransport(
MakeRtcpMuxParameters(), &packet_transport2, nullptr,
rtp_transport_controller_.get());
ASSERT_TRUE(rtp_transport_result2.ok());
auto returned_transports = rtp_transport_controller_->GetTransports();
ASSERT_EQ(2u, returned_transports.size());
EXPECT_EQ(rtp_transport_result1.value().get(), returned_transports[0]);
EXPECT_EQ(rtp_transport_result2.value().get(), returned_transports[1]);
// If a transport is deleted, it shouldn't be returned any more.
rtp_transport_result1.MoveValue().reset();
returned_transports = rtp_transport_controller_->GetTransports();
ASSERT_EQ(1u, returned_transports.size());
EXPECT_EQ(rtp_transport_result2.value().get(), returned_transports[0]);
}
// Create RtpSenders and RtpReceivers on top of RtpTransports controlled by the
// same RtpTransportController. Currently only one each of audio/video is
// supported.
TEST_F(RtpTransportControllerTest, AttachMultipleSendersAndReceivers) {
rtc::FakePacketTransport audio_packet_transport("audio");
rtc::FakePacketTransport video_packet_transport("video");
auto audio_rtp_transport_result = ortc_factory_->CreateRtpTransport(
MakeRtcpMuxParameters(), &audio_packet_transport, nullptr,
rtp_transport_controller_.get());
ASSERT_TRUE(audio_rtp_transport_result.ok());
auto audio_rtp_transport = audio_rtp_transport_result.MoveValue();
auto video_rtp_transport_result = ortc_factory_->CreateRtpTransport(
MakeRtcpMuxParameters(), &video_packet_transport, nullptr,
rtp_transport_controller_.get());
ASSERT_TRUE(video_rtp_transport_result.ok());
auto video_rtp_transport = video_rtp_transport_result.MoveValue();
auto audio_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, audio_rtp_transport.get());
EXPECT_TRUE(audio_sender_result.ok());
auto audio_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, audio_rtp_transport.get());
EXPECT_TRUE(audio_receiver_result.ok());
auto video_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, video_rtp_transport.get());
EXPECT_TRUE(video_sender_result.ok());
auto video_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, video_rtp_transport.get());
EXPECT_TRUE(video_receiver_result.ok());
// Now that we have one each of audio/video senders/receivers, trying to
// create more on top of the same controller is expected to fail.
// TODO(deadbeef): Update this test once multiple senders/receivers on top of
// the same controller is supported.
auto failed_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, audio_rtp_transport.get());
EXPECT_EQ(RTCErrorType::UNSUPPORTED_OPERATION,
failed_sender_result.error().type());
auto failed_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, audio_rtp_transport.get());
EXPECT_EQ(RTCErrorType::UNSUPPORTED_OPERATION,
failed_receiver_result.error().type());
failed_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, video_rtp_transport.get());
EXPECT_EQ(RTCErrorType::UNSUPPORTED_OPERATION,
failed_sender_result.error().type());
failed_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, video_rtp_transport.get());
EXPECT_EQ(RTCErrorType::UNSUPPORTED_OPERATION,
failed_receiver_result.error().type());
// If we destroy the existing sender/receiver using a transport controller,
// we should be able to make a new one, despite the above limitation.
audio_sender_result.MoveValue().reset();
audio_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, audio_rtp_transport.get());
EXPECT_TRUE(audio_sender_result.ok());
audio_receiver_result.MoveValue().reset();
audio_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, audio_rtp_transport.get());
EXPECT_TRUE(audio_receiver_result.ok());
video_sender_result.MoveValue().reset();
video_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, video_rtp_transport.get());
EXPECT_TRUE(video_sender_result.ok());
video_receiver_result.MoveValue().reset();
video_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, video_rtp_transport.get());
EXPECT_TRUE(video_receiver_result.ok());
}
// Given the current limitations of the BaseChannel-based implementation, it's
// not possible for an audio sender and receiver to use different RtpTransports.
// TODO(deadbeef): Once this is supported, update/replace this test.
TEST_F(RtpTransportControllerTest,
SenderAndReceiverUsingDifferentTransportsUnsupported) {
rtc::FakePacketTransport packet_transport1("one");
rtc::FakePacketTransport packet_transport2("two");
auto rtp_transport_result1 = ortc_factory_->CreateRtpTransport(
MakeRtcpMuxParameters(), &packet_transport1, nullptr,
rtp_transport_controller_.get());
ASSERT_TRUE(rtp_transport_result1.ok());
auto rtp_transport1 = rtp_transport_result1.MoveValue();
auto rtp_transport_result2 = ortc_factory_->CreateRtpTransport(
MakeRtcpMuxParameters(), &packet_transport2, nullptr,
rtp_transport_controller_.get());
ASSERT_TRUE(rtp_transport_result2.ok());
auto rtp_transport2 = rtp_transport_result2.MoveValue();
// Create an audio sender on transport 1, then try to create a receiver on 2.
auto audio_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_AUDIO, rtp_transport1.get());
EXPECT_TRUE(audio_sender_result.ok());
auto audio_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, rtp_transport2.get());
EXPECT_EQ(RTCErrorType::UNSUPPORTED_OPERATION,
audio_receiver_result.error().type());
// Delete the sender; now we should be ok to create the receiver on 2.
audio_sender_result.MoveValue().reset();
audio_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, rtp_transport2.get());
EXPECT_TRUE(audio_receiver_result.ok());
// Do the same thing for video, reversing 1 and 2 (for variety).
auto video_sender_result = ortc_factory_->CreateRtpSender(
cricket::MEDIA_TYPE_VIDEO, rtp_transport2.get());
EXPECT_TRUE(video_sender_result.ok());
auto video_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, rtp_transport1.get());
EXPECT_EQ(RTCErrorType::UNSUPPORTED_OPERATION,
video_receiver_result.error().type());
video_sender_result.MoveValue().reset();
video_receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_VIDEO, rtp_transport1.get());
EXPECT_TRUE(video_receiver_result.ok());
}
} // namespace webrtc
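
For reference, the wiring pattern these tests exercise can be condensed into a short sketch. The snippet below is hypothetical, uses a fake packet transport like the fixtures above, and assumes a default OrtcFactoryInterface::Create() overload; errors are ignored for brevity.

// Hypothetical usage sketch (not part of the deleted sources). Assumes a
// no-argument OrtcFactoryInterface::Create() overload and ignores errors.
#include "api/ortc/ortcfactoryinterface.h"
#include "p2p/base/fakepackettransport.h"

void SketchSingleAudioPair() {
  auto factory = webrtc::OrtcFactoryInterface::Create().MoveValue();
  auto controller = factory->CreateRtpTransportController().MoveValue();
  rtc::FakePacketTransport packet_transport("audio");
  webrtc::RtpTransportParameters parameters;
  parameters.rtcp.mux = true;  // RTCP muxing, as in MakeRtcpMuxParameters().
  auto transport = factory
                       ->CreateRtpTransport(parameters, &packet_transport,
                                            /*rtcp=*/nullptr, controller.get())
                       .MoveValue();
  auto sender =
      factory->CreateRtpSender(cricket::MEDIA_TYPE_AUDIO, transport.get())
          .MoveValue();
  auto receiver =
      factory->CreateRtpReceiver(cricket::MEDIA_TYPE_AUDIO, transport.get())
          .MoveValue();
  // A second audio sender or receiver on the same controller would fail with
  // RTCErrorType::UNSUPPORTED_OPERATION, as the tests above verify.
}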

View File

@ -1,971 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "ortc/rtptransportcontrolleradapter.h"
#include <algorithm> // For "remove", "find".
#include <set>
#include <unordered_map>
#include <utility> // For std::move.
#include "absl/memory/memory.h"
#include "api/proxy.h"
#include "media/base/mediaconstants.h"
#include "ortc/ortcrtpreceiveradapter.h"
#include "ortc/ortcrtpsenderadapter.h"
#include "ortc/rtptransportadapter.h"
#include "pc/rtpmediautils.h"
#include "pc/rtpparametersconversion.h"
#include "rtc_base/checks.h"
#include "rtc_base/strings/string_builder.h"
namespace webrtc {
// Note: It's assumed that each individual list doesn't have conflicts, since
// they should have been detected already by rtpparametersconversion.cc. This
// only needs to detect conflicts *between* A and B.
template <typename C1, typename C2>
static RTCError CheckForIdConflicts(
const std::vector<C1>& codecs_a,
const cricket::RtpHeaderExtensions& extensions_a,
const cricket::StreamParamsVec& streams_a,
const std::vector<C2>& codecs_b,
const cricket::RtpHeaderExtensions& extensions_b,
const cricket::StreamParamsVec& streams_b) {
rtc::StringBuilder oss;
// Since it's assumed that C1 and C2 are different types, codecs_a and
// codecs_b should never contain the same payload type, and thus we can just
// use a set.
std::set<int> seen_payload_types;
for (const C1& codec : codecs_a) {
seen_payload_types.insert(codec.id);
}
for (const C2& codec : codecs_b) {
if (!seen_payload_types.insert(codec.id).second) {
oss << "Same payload type used for audio and video codecs: " << codec.id;
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, oss.str());
}
}
// Audio and video *may* use the same header extensions, so use a map.
std::unordered_map<int, std::string> seen_extensions;
for (const webrtc::RtpExtension& extension : extensions_a) {
seen_extensions[extension.id] = extension.uri;
}
for (const webrtc::RtpExtension& extension : extensions_b) {
if (seen_extensions.find(extension.id) != seen_extensions.end() &&
seen_extensions.at(extension.id) != extension.uri) {
oss << "Same ID used for different RTP header extensions: "
<< extension.id;
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, oss.str());
}
}
std::set<uint32_t> seen_ssrcs;
for (const cricket::StreamParams& stream : streams_a) {
seen_ssrcs.insert(stream.ssrcs.begin(), stream.ssrcs.end());
}
for (const cricket::StreamParams& stream : streams_b) {
for (uint32_t ssrc : stream.ssrcs) {
if (!seen_ssrcs.insert(ssrc).second) {
oss << "Same SSRC used for audio and video senders: " << ssrc;
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, oss.str());
}
}
}
return RTCError::OK();
}
BEGIN_OWNED_PROXY_MAP(RtpTransportController)
PROXY_SIGNALING_THREAD_DESTRUCTOR()
PROXY_CONSTMETHOD0(std::vector<RtpTransportInterface*>, GetTransports)
protected:
RtpTransportControllerAdapter* GetInternal() override {
return internal();
}
END_PROXY_MAP()
// static
std::unique_ptr<RtpTransportControllerInterface>
RtpTransportControllerAdapter::CreateProxied(
const cricket::MediaConfig& config,
cricket::ChannelManager* channel_manager,
webrtc::RtcEventLog* event_log,
rtc::Thread* signaling_thread,
rtc::Thread* worker_thread,
rtc::Thread* network_thread) {
std::unique_ptr<RtpTransportControllerAdapter> wrapped(
new RtpTransportControllerAdapter(config, channel_manager, event_log,
signaling_thread, worker_thread,
network_thread));
return RtpTransportControllerProxyWithInternal<
RtpTransportControllerAdapter>::Create(signaling_thread, worker_thread,
std::move(wrapped));
}
RtpTransportControllerAdapter::~RtpTransportControllerAdapter() {
RTC_DCHECK_RUN_ON(signaling_thread_);
if (!transport_proxies_.empty()) {
RTC_LOG(LS_ERROR)
<< "Destroying RtpTransportControllerAdapter while RtpTransports "
"are still using it; this is unsafe.";
}
if (voice_channel_) {
// This would mean audio RTP senders/receivers that are using us haven't
// been destroyed. This isn't safe (see error log above).
DestroyVoiceChannel();
}
if (video_channel_) {
// This would mean video RTP senders/receivers that are using us haven't
// been destroyed. This isn't safe (see error log above).
DestroyVideoChannel();
}
// Call must be destroyed on the worker thread.
worker_thread_->Invoke<void>(
RTC_FROM_HERE, rtc::Bind(&RtpTransportControllerAdapter::Close_w, this));
}
RTCErrorOr<std::unique_ptr<RtpTransportInterface>>
RtpTransportControllerAdapter::CreateProxiedRtpTransport(
const RtpTransportParameters& parameters,
PacketTransportInterface* rtp,
PacketTransportInterface* rtcp) {
if (!transport_proxies_.empty() && (parameters.keepalive != keepalive_)) {
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION,
"Cannot create RtpTransport with different keep-alive "
"from the RtpTransports already associated with this "
"transport controller.");
}
auto result = RtpTransportAdapter::CreateProxied(parameters, rtp, rtcp, this);
if (result.ok()) {
transport_proxies_.push_back(result.value().get());
transport_proxies_.back()->GetInternal()->SignalDestroyed.connect(
this, &RtpTransportControllerAdapter::OnRtpTransportDestroyed);
}
return result;
}
RTCErrorOr<std::unique_ptr<SrtpTransportInterface>>
RtpTransportControllerAdapter::CreateProxiedSrtpTransport(
const RtpTransportParameters& parameters,
PacketTransportInterface* rtp,
PacketTransportInterface* rtcp) {
auto result =
RtpTransportAdapter::CreateSrtpProxied(parameters, rtp, rtcp, this);
if (result.ok()) {
transport_proxies_.push_back(result.value().get());
transport_proxies_.back()->GetInternal()->SignalDestroyed.connect(
this, &RtpTransportControllerAdapter::OnRtpTransportDestroyed);
}
return result;
}
RTCErrorOr<std::unique_ptr<OrtcRtpSenderInterface>>
RtpTransportControllerAdapter::CreateProxiedRtpSender(
cricket::MediaType kind,
RtpTransportInterface* transport_proxy) {
RTC_DCHECK(transport_proxy);
RTC_DCHECK(std::find(transport_proxies_.begin(), transport_proxies_.end(),
transport_proxy) != transport_proxies_.end());
std::unique_ptr<OrtcRtpSenderAdapter> new_sender(
new OrtcRtpSenderAdapter(kind, transport_proxy, this));
RTCError err;
switch (kind) {
case cricket::MEDIA_TYPE_AUDIO:
err = AttachAudioSender(new_sender.get(), transport_proxy->GetInternal());
break;
case cricket::MEDIA_TYPE_VIDEO:
err = AttachVideoSender(new_sender.get(), transport_proxy->GetInternal());
break;
case cricket::MEDIA_TYPE_DATA:
RTC_NOTREACHED();
}
if (!err.ok()) {
return std::move(err);
}
return OrtcRtpSenderAdapter::CreateProxy(std::move(new_sender));
}
RTCErrorOr<std::unique_ptr<OrtcRtpReceiverInterface>>
RtpTransportControllerAdapter::CreateProxiedRtpReceiver(
cricket::MediaType kind,
RtpTransportInterface* transport_proxy) {
RTC_DCHECK(transport_proxy);
RTC_DCHECK(std::find(transport_proxies_.begin(), transport_proxies_.end(),
transport_proxy) != transport_proxies_.end());
std::unique_ptr<OrtcRtpReceiverAdapter> new_receiver(
new OrtcRtpReceiverAdapter(kind, transport_proxy, this));
RTCError err;
switch (kind) {
case cricket::MEDIA_TYPE_AUDIO:
err = AttachAudioReceiver(new_receiver.get(),
transport_proxy->GetInternal());
break;
case cricket::MEDIA_TYPE_VIDEO:
err = AttachVideoReceiver(new_receiver.get(),
transport_proxy->GetInternal());
break;
case cricket::MEDIA_TYPE_DATA:
RTC_NOTREACHED();
}
if (!err.ok()) {
return std::move(err);
}
return OrtcRtpReceiverAdapter::CreateProxy(std::move(new_receiver));
}
std::vector<RtpTransportInterface*>
RtpTransportControllerAdapter::GetTransports() const {
RTC_DCHECK_RUN_ON(signaling_thread_);
return transport_proxies_;
}
RTCError RtpTransportControllerAdapter::SetRtpTransportParameters(
const RtpTransportParameters& parameters,
RtpTransportInterface* inner_transport) {
if ((video_channel_ != nullptr || voice_channel_ != nullptr) &&
(parameters.keepalive != keepalive_)) {
LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_MODIFICATION,
"Cannot change keep-alive settings after creating "
"media streams or additional transports for the same "
"transport controller.");
}
// Call must be configured on the worker thread.
worker_thread_->Invoke<void>(
RTC_FROM_HERE,
rtc::Bind(&RtpTransportControllerAdapter::SetRtpTransportParameters_w,
this, parameters));
do {
if (inner_transport == inner_audio_transport_) {
CopyRtcpParametersToDescriptions(parameters.rtcp,
&local_audio_description_,
&remote_audio_description_);
if (!voice_channel_->SetLocalContent(&local_audio_description_,
SdpType::kOffer, nullptr)) {
break;
}
if (!voice_channel_->SetRemoteContent(&remote_audio_description_,
SdpType::kAnswer, nullptr)) {
break;
}
} else if (inner_transport == inner_video_transport_) {
CopyRtcpParametersToDescriptions(parameters.rtcp,
&local_video_description_,
&remote_video_description_);
if (!video_channel_->SetLocalContent(&local_video_description_,
SdpType::kOffer, nullptr)) {
break;
}
if (!video_channel_->SetRemoteContent(&remote_video_description_,
SdpType::kAnswer, nullptr)) {
break;
}
}
return RTCError::OK();
} while (false);
LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
"Failed to apply new RTCP parameters.");
}
void RtpTransportControllerAdapter::SetRtpTransportParameters_w(
const RtpTransportParameters& parameters) {
call_send_rtp_transport_controller_->SetKeepAliveConfig(parameters.keepalive);
}
RTCError RtpTransportControllerAdapter::ValidateAndApplyAudioSenderParameters(
const RtpParameters& parameters,
uint32_t* primary_ssrc) {
RTC_DCHECK(voice_channel_);
RTC_DCHECK(have_audio_sender_);
auto codecs_result = ToCricketCodecs<cricket::AudioCodec>(parameters.codecs);
if (!codecs_result.ok()) {
return codecs_result.MoveError();
}
auto extensions_result =
ToCricketRtpHeaderExtensions(parameters.header_extensions);
if (!extensions_result.ok()) {
return extensions_result.MoveError();
}
auto stream_params_result = MakeSendStreamParamsVec(
parameters.encodings, inner_audio_transport_->GetParameters().rtcp.cname,
local_audio_description_);
if (!stream_params_result.ok()) {
return stream_params_result.MoveError();
}
// Check that the audio/video senders aren't using the same IDs to refer to
// different things, if they share the same transport.
if (inner_audio_transport_ == inner_video_transport_) {
RTCError err = CheckForIdConflicts(
codecs_result.value(), extensions_result.value(),
stream_params_result.value(), remote_video_description_.codecs(),
remote_video_description_.rtp_header_extensions(),
local_video_description_.streams());
if (!err.ok()) {
return err;
}
}
bool local_send = false;
int bandwidth = cricket::kAutoBandwidth;
if (parameters.encodings.size() == 1u) {
if (parameters.encodings[0].max_bitrate_bps) {
bandwidth = *parameters.encodings[0].max_bitrate_bps;
}
local_send = parameters.encodings[0].active;
}
const bool local_recv =
RtpTransceiverDirectionHasRecv(local_audio_description_.direction());
const auto local_direction =
RtpTransceiverDirectionFromSendRecv(local_send, local_recv);
if (primary_ssrc && !stream_params_result.value().empty()) {
*primary_ssrc = stream_params_result.value()[0].first_ssrc();
}
// Validation is done, so we can attempt applying the descriptions. Sent
// codecs and header extensions go in remote description, streams go in
// local.
//
// If there are no codecs or encodings, just leave the previous set of
// codecs. The media engine doesn't like an empty set of codecs.
if (!local_audio_description_.streams().empty() ||
    !remote_audio_description_.codecs().empty()) {
  remote_audio_description_.set_codecs(codecs_result.MoveValue());
}
remote_audio_description_.set_rtp_header_extensions(
extensions_result.MoveValue());
remote_audio_description_.set_bandwidth(bandwidth);
local_audio_description_.mutable_streams() = stream_params_result.MoveValue();
// Direction set based on encoding "active" flag.
local_audio_description_.set_direction(local_direction);
remote_audio_description_.set_direction(
RtpTransceiverDirectionReversed(local_direction));
// Set remote content first, to ensure the stream is created with the correct
// codec.
if (!voice_channel_->SetRemoteContent(&remote_audio_description_,
SdpType::kOffer, nullptr)) {
LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
"Failed to apply remote parameters to media channel.");
}
if (!voice_channel_->SetLocalContent(&local_audio_description_,
SdpType::kAnswer, nullptr)) {
LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
"Failed to apply local parameters to media channel.");
}
return RTCError::OK();
}
RTCError RtpTransportControllerAdapter::ValidateAndApplyVideoSenderParameters(
const RtpParameters& parameters,
uint32_t* primary_ssrc) {
RTC_DCHECK(video_channel_);
RTC_DCHECK(have_video_sender_);
auto codecs_result = ToCricketCodecs<cricket::VideoCodec>(parameters.codecs);
if (!codecs_result.ok()) {
return codecs_result.MoveError();
}
auto extensions_result =
ToCricketRtpHeaderExtensions(parameters.header_extensions);
if (!extensions_result.ok()) {
return extensions_result.MoveError();
}
auto stream_params_result = MakeSendStreamParamsVec(
parameters.encodings, inner_video_transport_->GetParameters().rtcp.cname,
local_video_description_);
if (!stream_params_result.ok()) {
return stream_params_result.MoveError();
}
// Check that the audio/video senders aren't using the same IDs to refer to
// different things, if they share the same transport.
if (inner_audio_transport_ == inner_video_transport_) {
RTCError err = CheckForIdConflicts(
codecs_result.value(), extensions_result.value(),
stream_params_result.value(), remote_audio_description_.codecs(),
remote_audio_description_.rtp_header_extensions(),
local_audio_description_.streams());
if (!err.ok()) {
return err;
}
}
bool local_send = false;
int bandwidth = cricket::kAutoBandwidth;
if (parameters.encodings.size() == 1u) {
if (parameters.encodings[0].max_bitrate_bps) {
bandwidth = *parameters.encodings[0].max_bitrate_bps;
}
local_send = parameters.encodings[0].active;
}
const bool local_recv =
RtpTransceiverDirectionHasRecv(local_video_description_.direction());
const auto local_direction =
RtpTransceiverDirectionFromSendRecv(local_send, local_recv);
if (primary_ssrc && !stream_params_result.value().empty()) {
*primary_ssrc = stream_params_result.value()[0].first_ssrc();
}
// Validation is done, so we can attempt applying the descriptions. Sent
// codecs and header extensions go in remote description, streams go in
// local.
//
// If there are no codecs or encodings, just leave the previous set of
// codecs. The media engine doesn't like an empty set of codecs.
if (!local_video_description_.streams().empty() ||
    !remote_video_description_.codecs().empty()) {
  remote_video_description_.set_codecs(codecs_result.MoveValue());
}
remote_video_description_.set_rtp_header_extensions(
extensions_result.MoveValue());
remote_video_description_.set_bandwidth(bandwidth);
local_video_description_.mutable_streams() = stream_params_result.MoveValue();
// Direction set based on encoding "active" flag.
local_video_description_.set_direction(local_direction);
remote_video_description_.set_direction(
RtpTransceiverDirectionReversed(local_direction));
// Set remote content first, to ensure the stream is created with the correct
// codec.
if (!video_channel_->SetRemoteContent(&remote_video_description_,
SdpType::kOffer, nullptr)) {
LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
"Failed to apply remote parameters to media channel.");
}
if (!video_channel_->SetLocalContent(&local_video_description_,
SdpType::kAnswer, nullptr)) {
LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
"Failed to apply local parameters to media channel.");
}
return RTCError::OK();
}
RTCError RtpTransportControllerAdapter::ValidateAndApplyAudioReceiverParameters(
const RtpParameters& parameters) {
RTC_DCHECK(voice_channel_);
RTC_DCHECK(have_audio_receiver_);
auto codecs_result = ToCricketCodecs<cricket::AudioCodec>(parameters.codecs);
if (!codecs_result.ok()) {
return codecs_result.MoveError();
}
auto extensions_result =
ToCricketRtpHeaderExtensions(parameters.header_extensions);
if (!extensions_result.ok()) {
return extensions_result.MoveError();
}
auto stream_params_result = ToCricketStreamParamsVec(parameters.encodings);
if (!stream_params_result.ok()) {
return stream_params_result.MoveError();
}
// Check that the audio/video receivers aren't using the same IDs to refer to
// different things, if they share the same transport.
if (inner_audio_transport_ == inner_video_transport_) {
RTCError err = CheckForIdConflicts(
codecs_result.value(), extensions_result.value(),
stream_params_result.value(), local_video_description_.codecs(),
local_video_description_.rtp_header_extensions(),
remote_video_description_.streams());
if (!err.ok()) {
return err;
}
}
const bool local_send =
RtpTransceiverDirectionHasSend(local_audio_description_.direction());
const bool local_recv =
!parameters.encodings.empty() && parameters.encodings[0].active;
const auto local_direction =
RtpTransceiverDirectionFromSendRecv(local_send, local_recv);
// Validation is done, so we can attempt applying the descriptions. Received
// codecs and header extensions go in local description, streams go in
// remote.
//
// If there are no codecs or encodings, just leave the previous set of
// codecs. The media engine doesn't like an empty set of codecs.
if (!remote_audio_description_.streams().empty() ||
    !local_audio_description_.codecs().empty()) {
  local_audio_description_.set_codecs(codecs_result.MoveValue());
}
local_audio_description_.set_rtp_header_extensions(
extensions_result.MoveValue());
remote_audio_description_.mutable_streams() =
stream_params_result.MoveValue();
// Direction set based on encoding "active" flag.
local_audio_description_.set_direction(local_direction);
remote_audio_description_.set_direction(
RtpTransceiverDirectionReversed(local_direction));
if (!voice_channel_->SetLocalContent(&local_audio_description_,
SdpType::kOffer, nullptr)) {
LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
"Failed to apply local parameters to media channel.");
}
if (!voice_channel_->SetRemoteContent(&remote_audio_description_,
SdpType::kAnswer, nullptr)) {
LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
"Failed to apply remote parameters to media channel.");
}
return RTCError::OK();
}
RTCError RtpTransportControllerAdapter::ValidateAndApplyVideoReceiverParameters(
const RtpParameters& parameters) {
RTC_DCHECK(video_channel_);
RTC_DCHECK(have_video_receiver_);
auto codecs_result = ToCricketCodecs<cricket::VideoCodec>(parameters.codecs);
if (!codecs_result.ok()) {
return codecs_result.MoveError();
}
auto extensions_result =
ToCricketRtpHeaderExtensions(parameters.header_extensions);
if (!extensions_result.ok()) {
return extensions_result.MoveError();
}
int bandwidth = cricket::kAutoBandwidth;
auto stream_params_result = ToCricketStreamParamsVec(parameters.encodings);
if (!stream_params_result.ok()) {
return stream_params_result.MoveError();
}
// Check that the audio/video receivers aren't using the same IDs to refer to
// different things, if they share the same transport.
if (inner_audio_transport_ == inner_video_transport_) {
RTCError err = CheckForIdConflicts(
codecs_result.value(), extensions_result.value(),
stream_params_result.value(), local_audio_description_.codecs(),
local_audio_description_.rtp_header_extensions(),
remote_audio_description_.streams());
if (!err.ok()) {
return err;
}
}
const bool local_send =
RtpTransceiverDirectionHasSend(local_video_description_.direction());
const bool local_recv =
!parameters.encodings.empty() && parameters.encodings[0].active;
const auto local_direction =
RtpTransceiverDirectionFromSendRecv(local_send, local_recv);
// Validation is done, so we can attempt applying the descriptions. Received
// codecs and header extensions go in local description, streams go in
// remote.
//
// If there are no codecs or encodings, just leave the previous set of
// codecs. The media engine doesn't like an empty set of codecs.
if (!remote_video_description_.streams().empty() ||
    !local_video_description_.codecs().empty()) {
  local_video_description_.set_codecs(codecs_result.MoveValue());
}
local_video_description_.set_rtp_header_extensions(
extensions_result.MoveValue());
local_video_description_.set_bandwidth(bandwidth);
remote_video_description_.mutable_streams() =
stream_params_result.MoveValue();
// Direction set based on encoding "active" flag.
local_video_description_.set_direction(local_direction);
remote_video_description_.set_direction(
RtpTransceiverDirectionReversed(local_direction));
if (!video_channel_->SetLocalContent(&local_video_description_,
SdpType::kOffer, nullptr)) {
LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
"Failed to apply local parameters to media channel.");
}
if (!video_channel_->SetRemoteContent(&remote_video_description_,
SdpType::kAnswer, nullptr)) {
LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR,
"Failed to apply remote parameters to media channel.");
}
return RTCError::OK();
}
RtpTransportControllerAdapter::RtpTransportControllerAdapter(
const cricket::MediaConfig& config,
cricket::ChannelManager* channel_manager,
webrtc::RtcEventLog* event_log,
rtc::Thread* signaling_thread,
rtc::Thread* worker_thread,
rtc::Thread* network_thread)
: signaling_thread_(signaling_thread),
worker_thread_(worker_thread),
network_thread_(network_thread),
media_config_(config),
channel_manager_(channel_manager),
event_log_(event_log),
call_send_rtp_transport_controller_(nullptr) {
RTC_DCHECK_RUN_ON(signaling_thread_);
RTC_DCHECK(channel_manager_);
// Add "dummy" codecs to the descriptions, because the media engines
// currently reject empty lists of codecs. Note that these codecs will never
// actually be used, because when parameters are set, the dummy codecs will
// be replaced by actual codecs before any send/receive streams are created.
const cricket::AudioCodec dummy_audio(0, cricket::kPcmuCodecName, 8000, 0, 1);
const cricket::VideoCodec dummy_video(96, cricket::kVp8CodecName);
local_audio_description_.AddCodec(dummy_audio);
remote_audio_description_.AddCodec(dummy_audio);
local_video_description_.AddCodec(dummy_video);
remote_video_description_.AddCodec(dummy_video);
worker_thread_->Invoke<void>(
RTC_FROM_HERE, rtc::Bind(&RtpTransportControllerAdapter::Init_w, this));
}
// TODO(nisse): Duplicates corresponding method in PeerConnection (used
// to be in MediaController).
void RtpTransportControllerAdapter::Init_w() {
RTC_DCHECK(worker_thread_->IsCurrent());
RTC_DCHECK(!call_);
const int kMinBandwidthBps = 30000;
const int kStartBandwidthBps = 300000;
const int kMaxBandwidthBps = 2000000;
webrtc::Call::Config call_config(event_log_);
call_config.audio_state = channel_manager_->media_engine()->GetAudioState();
call_config.bitrate_config.min_bitrate_bps = kMinBandwidthBps;
call_config.bitrate_config.start_bitrate_bps = kStartBandwidthBps;
call_config.bitrate_config.max_bitrate_bps = kMaxBandwidthBps;
std::unique_ptr<RtpTransportControllerSend> controller_send =
absl::make_unique<RtpTransportControllerSend>(
Clock::GetRealTimeClock(), event_log_,
call_config.network_controller_factory, call_config.bitrate_config);
call_send_rtp_transport_controller_ = controller_send.get();
call_.reset(webrtc::Call::Create(call_config, std::move(controller_send)));
}
void RtpTransportControllerAdapter::Close_w() {
call_.reset();
call_send_rtp_transport_controller_ = nullptr;
}
RTCError RtpTransportControllerAdapter::AttachAudioSender(
OrtcRtpSenderAdapter* sender,
RtpTransportInterface* inner_transport) {
if (have_audio_sender_) {
LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION,
"Using two audio RtpSenders with the same "
"RtpTransportControllerAdapter is not currently "
"supported.");
}
if (inner_audio_transport_ && inner_audio_transport_ != inner_transport) {
LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION,
"Using different transports for the audio "
"RtpSender and RtpReceiver is not currently "
"supported.");
}
// If setting new transport, extract its RTCP parameters and create voice
// channel.
if (!inner_audio_transport_) {
CopyRtcpParametersToDescriptions(inner_transport->GetParameters().rtcp,
&local_audio_description_,
&remote_audio_description_);
inner_audio_transport_ = inner_transport;
CreateVoiceChannel();
}
have_audio_sender_ = true;
sender->SignalDestroyed.connect(
this, &RtpTransportControllerAdapter::OnAudioSenderDestroyed);
return RTCError::OK();
}
RTCError RtpTransportControllerAdapter::AttachVideoSender(
OrtcRtpSenderAdapter* sender,
RtpTransportInterface* inner_transport) {
if (have_video_sender_) {
LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION,
"Using two video RtpSenders with the same "
"RtpTransportControllerAdapter is not currently "
"supported.");
}
if (inner_video_transport_ && inner_video_transport_ != inner_transport) {
LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION,
"Using different transports for the video "
"RtpSender and RtpReceiver is not currently "
"supported.");
}
// If setting new transport, extract its RTCP parameters and create video
// channel.
if (!inner_video_transport_) {
CopyRtcpParametersToDescriptions(inner_transport->GetParameters().rtcp,
&local_video_description_,
&remote_video_description_);
inner_video_transport_ = inner_transport;
CreateVideoChannel();
}
have_video_sender_ = true;
sender->SignalDestroyed.connect(
this, &RtpTransportControllerAdapter::OnVideoSenderDestroyed);
return RTCError::OK();
}
RTCError RtpTransportControllerAdapter::AttachAudioReceiver(
OrtcRtpReceiverAdapter* receiver,
RtpTransportInterface* inner_transport) {
if (have_audio_receiver_) {
LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION,
"Using two audio RtpReceivers with the same "
"RtpTransportControllerAdapter is not currently "
"supported.");
}
if (inner_audio_transport_ && inner_audio_transport_ != inner_transport) {
LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION,
"Using different transports for the audio "
"RtpReceiver and RtpReceiver is not currently "
"supported.");
}
// If setting new transport, extract its RTCP parameters and create voice
// channel.
if (!inner_audio_transport_) {
CopyRtcpParametersToDescriptions(inner_transport->GetParameters().rtcp,
&local_audio_description_,
&remote_audio_description_);
inner_audio_transport_ = inner_transport;
CreateVoiceChannel();
}
have_audio_receiver_ = true;
receiver->SignalDestroyed.connect(
this, &RtpTransportControllerAdapter::OnAudioReceiverDestroyed);
return RTCError::OK();
}
RTCError RtpTransportControllerAdapter::AttachVideoReceiver(
OrtcRtpReceiverAdapter* receiver,
RtpTransportInterface* inner_transport) {
if (have_video_receiver_) {
LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION,
"Using two video RtpReceivers with the same "
"RtpTransportControllerAdapter is not currently "
"supported.");
}
if (inner_video_transport_ && inner_video_transport_ != inner_transport) {
LOG_AND_RETURN_ERROR(RTCErrorType::UNSUPPORTED_OPERATION,
"Using different transports for the video "
"RtpReceiver and RtpReceiver is not currently "
"supported.");
}
// If setting new transport, extract its RTCP parameters and create video
// channel.
if (!inner_video_transport_) {
CopyRtcpParametersToDescriptions(inner_transport->GetParameters().rtcp,
&local_video_description_,
&remote_video_description_);
inner_video_transport_ = inner_transport;
CreateVideoChannel();
}
have_video_receiver_ = true;
receiver->SignalDestroyed.connect(
this, &RtpTransportControllerAdapter::OnVideoReceiverDestroyed);
return RTCError::OK();
}
void RtpTransportControllerAdapter::OnRtpTransportDestroyed(
RtpTransportAdapter* transport) {
RTC_DCHECK_RUN_ON(signaling_thread_);
auto it = std::find_if(transport_proxies_.begin(), transport_proxies_.end(),
[transport](RtpTransportInterface* proxy) {
return proxy->GetInternal() == transport;
});
if (it == transport_proxies_.end()) {
RTC_NOTREACHED();
return;
}
transport_proxies_.erase(it);
}
void RtpTransportControllerAdapter::OnAudioSenderDestroyed() {
if (!have_audio_sender_) {
RTC_NOTREACHED();
return;
}
// Empty parameters should result in sending being stopped.
RTCError err =
ValidateAndApplyAudioSenderParameters(RtpParameters(), nullptr);
RTC_DCHECK(err.ok());
have_audio_sender_ = false;
if (!have_audio_receiver_) {
DestroyVoiceChannel();
}
}
void RtpTransportControllerAdapter::OnVideoSenderDestroyed() {
if (!have_video_sender_) {
RTC_NOTREACHED();
return;
}
// Empty parameters should result in sending being stopped.
RTCError err =
ValidateAndApplyVideoSenderParameters(RtpParameters(), nullptr);
RTC_DCHECK(err.ok());
have_video_sender_ = false;
if (!have_video_receiver_) {
DestroyVideoChannel();
}
}
void RtpTransportControllerAdapter::OnAudioReceiverDestroyed() {
if (!have_audio_receiver_) {
RTC_NOTREACHED();
return;
}
// Empty parameters should result in receiving being stopped.
RTCError err = ValidateAndApplyAudioReceiverParameters(RtpParameters());
RTC_DCHECK(err.ok());
have_audio_receiver_ = false;
if (!have_audio_sender_) {
DestroyVoiceChannel();
}
}
void RtpTransportControllerAdapter::OnVideoReceiverDestroyed() {
if (!have_video_receiver_) {
RTC_NOTREACHED();
return;
}
// Empty parameters should result in receiving being stopped.
RTCError err = ValidateAndApplyVideoReceiverParameters(RtpParameters());
RTC_DCHECK(err.ok());
have_video_receiver_ = false;
if (!have_video_sender_) {
DestroyVideoChannel();
}
}
void RtpTransportControllerAdapter::CreateVoiceChannel() {
voice_channel_ = channel_manager_->CreateVoiceChannel(
call_.get(), media_config_, inner_audio_transport_->GetInternal(),
signaling_thread_, "audio", false, rtc::CryptoOptions(),
cricket::AudioOptions());
RTC_DCHECK(voice_channel_);
voice_channel_->Enable(true);
}
void RtpTransportControllerAdapter::CreateVideoChannel() {
video_channel_ = channel_manager_->CreateVideoChannel(
call_.get(), media_config_, inner_video_transport_->GetInternal(),
signaling_thread_, "video", false, rtc::CryptoOptions(),
cricket::VideoOptions());
RTC_DCHECK(video_channel_);
video_channel_->Enable(true);
}
void RtpTransportControllerAdapter::DestroyVoiceChannel() {
RTC_DCHECK(voice_channel_);
channel_manager_->DestroyVoiceChannel(voice_channel_);
voice_channel_ = nullptr;
inner_audio_transport_ = nullptr;
}
void RtpTransportControllerAdapter::DestroyVideoChannel() {
RTC_DCHECK(video_channel_);
channel_manager_->DestroyVideoChannel(video_channel_);
video_channel_ = nullptr;
inner_video_transport_ = nullptr;
}
void RtpTransportControllerAdapter::CopyRtcpParametersToDescriptions(
const RtcpParameters& params,
cricket::MediaContentDescription* local,
cricket::MediaContentDescription* remote) {
local->set_rtcp_mux(params.mux);
remote->set_rtcp_mux(params.mux);
local->set_rtcp_reduced_size(params.reduced_size);
remote->set_rtcp_reduced_size(params.reduced_size);
for (cricket::StreamParams& stream_params : local->mutable_streams()) {
stream_params.cname = params.cname;
}
}
uint32_t RtpTransportControllerAdapter::GenerateUnusedSsrc(
std::set<uint32_t>* new_ssrcs) const {
uint32_t ssrc;
do {
ssrc = rtc::CreateRandomNonZeroId();
} while (
cricket::GetStreamBySsrc(local_audio_description_.streams(), ssrc) ||
cricket::GetStreamBySsrc(remote_audio_description_.streams(), ssrc) ||
cricket::GetStreamBySsrc(local_video_description_.streams(), ssrc) ||
cricket::GetStreamBySsrc(remote_video_description_.streams(), ssrc) ||
!new_ssrcs->insert(ssrc).second);
return ssrc;
}
RTCErrorOr<cricket::StreamParamsVec>
RtpTransportControllerAdapter::MakeSendStreamParamsVec(
std::vector<RtpEncodingParameters> encodings,
const std::string& cname,
const cricket::MediaContentDescription& description) const {
if (encodings.size() > 1u) {
LOG_AND_RETURN_ERROR(webrtc::RTCErrorType::UNSUPPORTED_PARAMETER,
"ORTC API implementation doesn't currently "
"support simulcast or layered encodings.");
} else if (encodings.empty()) {
return cricket::StreamParamsVec();
}
RtpEncodingParameters& encoding = encodings[0];
std::set<uint32_t> new_ssrcs;
if (encoding.ssrc) {
new_ssrcs.insert(*encoding.ssrc);
}
if (encoding.rtx && encoding.rtx->ssrc) {
new_ssrcs.insert(*encoding.rtx->ssrc);
}
// May need to fill missing SSRCs with generated ones.
if (!encoding.ssrc) {
if (!description.streams().empty()) {
encoding.ssrc.emplace(description.streams()[0].first_ssrc());
} else {
encoding.ssrc.emplace(GenerateUnusedSsrc(&new_ssrcs));
}
}
if (encoding.rtx && !encoding.rtx->ssrc) {
uint32_t existing_rtx_ssrc;
if (!description.streams().empty() &&
description.streams()[0].GetFidSsrc(
description.streams()[0].first_ssrc(), &existing_rtx_ssrc)) {
encoding.rtx->ssrc.emplace(existing_rtx_ssrc);
} else {
encoding.rtx->ssrc.emplace(GenerateUnusedSsrc(&new_ssrcs));
}
}
auto result = ToCricketStreamParamsVec(encodings);
if (!result.ok()) {
return result.MoveError();
}
// If conversion was successful, there should be one StreamParams.
RTC_DCHECK_EQ(1u, result.value().size());
result.value()[0].cname = cname;
return result;
}
} // namespace webrtc
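
The heart of the adapter is the translation from ORTC-style RtpParameters into the cricket content descriptions that BaseChannel consumes. The snippet below is an isolated illustration of that mapping for a single active audio encoding, not the adapter's actual code path; the SSRC value is arbitrary.

// Illustrative only; mirrors what ValidateAndApplyAudioSenderParameters()
// does for a single active audio encoding.
cricket::AudioContentDescription local_desc;
cricket::AudioContentDescription remote_desc;
// The sent codec goes into the *remote* description...
remote_desc.AddCodec(cricket::AudioCodec(111, "opus", 48000, 0, 2));
// ...while the send stream (arbitrary SSRC here) goes into the *local* one.
local_desc.mutable_streams().push_back(
    cricket::StreamParams::CreateLegacy(/*ssrc=*/12345));
// The direction follows the encoding's "active" flag; the remote direction is
// simply the reverse.
local_desc.set_direction(
    webrtc::RtpTransceiverDirectionFromSendRecv(/*send=*/true, /*recv=*/false));
remote_desc.set_direction(
    webrtc::RtpTransceiverDirectionReversed(local_desc.direction()));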

View File

@ -1,221 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef ORTC_RTPTRANSPORTCONTROLLERADAPTER_H_
#define ORTC_RTPTRANSPORTCONTROLLERADAPTER_H_
#include <memory>
#include <set>
#include <string>
#include <vector>
#include "api/ortc/ortcrtpreceiverinterface.h"
#include "api/ortc/ortcrtpsenderinterface.h"
#include "api/ortc/rtptransportcontrollerinterface.h"
#include "api/ortc/srtptransportinterface.h"
#include "call/call.h"
#include "call/rtp_transport_controller_send.h"
#include "logging/rtc_event_log/rtc_event_log.h"
#include "media/base/mediachannel.h" // For MediaConfig.
#include "pc/channelmanager.h"
#include "rtc_base/constructormagic.h"
#include "rtc_base/third_party/sigslot/sigslot.h"
#include "rtc_base/thread.h"
namespace webrtc {
class RtpTransportAdapter;
class OrtcRtpSenderAdapter;
class OrtcRtpReceiverAdapter;
// Implementation of RtpTransportControllerInterface. Wraps a Call,
// a VoiceChannel and VideoChannel, and maintains a list of dependent RTP
// transports.
//
// When used along with an RtpSenderAdapter or RtpReceiverAdapter, the
// sender/receiver passes its parameters along to this class, which turns them
// into cricket:: media descriptions (the interface used by BaseChannel).
//
// Because BaseChannel has different subclasses for audio and video, the actual
// BaseChannel object is not created until an RtpSender/RtpReceiver needs one.
//
// All methods should be called on the signaling thread.
//
// TODO(deadbeef): When BaseChannel is split apart into separate
// "RtpTransceiver"/"RtpSender"/"RtpReceiver" objects, this adapter object can
// be replaced by a "real" one.
class RtpTransportControllerAdapter : public RtpTransportControllerInterface,
public sigslot::has_slots<> {
public:
// Creates a proxy that will call "public interface" methods on the correct
// thread.
//
// Doesn't take ownership of any objects passed in.
//
// |channel_manager| must not be null.
static std::unique_ptr<RtpTransportControllerInterface> CreateProxied(
const cricket::MediaConfig& config,
cricket::ChannelManager* channel_manager,
webrtc::RtcEventLog* event_log,
rtc::Thread* signaling_thread,
rtc::Thread* worker_thread,
rtc::Thread* network_thread);
~RtpTransportControllerAdapter() override;
// RtpTransportControllerInterface implementation.
std::vector<RtpTransportInterface*> GetTransports() const override;
// These methods are used by OrtcFactory to create RtpTransports, RtpSenders
// and RtpReceivers using this controller. Called "CreateProxied" because
// these methods return proxies that will safely call methods on the correct
// thread.
RTCErrorOr<std::unique_ptr<RtpTransportInterface>> CreateProxiedRtpTransport(
const RtpTransportParameters& rtcp_parameters,
PacketTransportInterface* rtp,
PacketTransportInterface* rtcp);
RTCErrorOr<std::unique_ptr<SrtpTransportInterface>>
CreateProxiedSrtpTransport(const RtpTransportParameters& rtcp_parameters,
PacketTransportInterface* rtp,
PacketTransportInterface* rtcp);
// |transport_proxy| needs to be a proxy to a transport because the
// application may call GetTransport() on the returned sender or receiver,
// and expects it to return a thread-safe transport proxy.
RTCErrorOr<std::unique_ptr<OrtcRtpSenderInterface>> CreateProxiedRtpSender(
cricket::MediaType kind,
RtpTransportInterface* transport_proxy);
RTCErrorOr<std::unique_ptr<OrtcRtpReceiverInterface>>
CreateProxiedRtpReceiver(cricket::MediaType kind,
RtpTransportInterface* transport_proxy);
// Methods used internally by other "adapter" classes.
rtc::Thread* signaling_thread() const { return signaling_thread_; }
rtc::Thread* worker_thread() const { return worker_thread_; }
rtc::Thread* network_thread() const { return network_thread_; }
// |parameters.keepalive| will be set for ALL RTP transports in the call.
RTCError SetRtpTransportParameters(const RtpTransportParameters& parameters,
RtpTransportInterface* inner_transport);
void SetRtpTransportParameters_w(const RtpTransportParameters& parameters);
cricket::VoiceChannel* voice_channel() { return voice_channel_; }
cricket::VideoChannel* video_channel() { return video_channel_; }
// |primary_ssrc| out parameter is filled with either
// |parameters.encodings[0].ssrc|, or a generated SSRC if that's left unset.
RTCError ValidateAndApplyAudioSenderParameters(
const RtpParameters& parameters,
uint32_t* primary_ssrc);
RTCError ValidateAndApplyVideoSenderParameters(
const RtpParameters& parameters,
uint32_t* primary_ssrc);
RTCError ValidateAndApplyAudioReceiverParameters(
const RtpParameters& parameters);
RTCError ValidateAndApplyVideoReceiverParameters(
const RtpParameters& parameters);
protected:
RtpTransportControllerAdapter* GetInternal() override { return this; }
private:
// Only expected to be called by RtpTransportControllerAdapter::CreateProxied.
RtpTransportControllerAdapter(const cricket::MediaConfig& config,
cricket::ChannelManager* channel_manager,
webrtc::RtcEventLog* event_log,
rtc::Thread* signaling_thread,
rtc::Thread* worker_thread,
rtc::Thread* network_thread);
void Init_w();
void Close_w();
// These return an error if another of the same type of object is already
// attached, or if |transport_proxy| can't be used with the sender/receiver
// due to the limitation that the sender/receiver of the same media type must
// use the same transport.
RTCError AttachAudioSender(OrtcRtpSenderAdapter* sender,
RtpTransportInterface* inner_transport);
RTCError AttachVideoSender(OrtcRtpSenderAdapter* sender,
RtpTransportInterface* inner_transport);
RTCError AttachAudioReceiver(OrtcRtpReceiverAdapter* receiver,
RtpTransportInterface* inner_transport);
RTCError AttachVideoReceiver(OrtcRtpReceiverAdapter* receiver,
RtpTransportInterface* inner_transport);
void OnRtpTransportDestroyed(RtpTransportAdapter* transport);
void OnAudioSenderDestroyed();
void OnVideoSenderDestroyed();
void OnAudioReceiverDestroyed();
void OnVideoReceiverDestroyed();
void CreateVoiceChannel();
void CreateVideoChannel();
void DestroyVoiceChannel();
void DestroyVideoChannel();
void CopyRtcpParametersToDescriptions(
const RtcpParameters& params,
cricket::MediaContentDescription* local,
cricket::MediaContentDescription* remote);
// Helper function to generate an SSRC that doesn't match one in any of the
// "content description" structs, or in |new_ssrcs| (which is needed since
// multiple SSRCs may be generated in one go).
uint32_t GenerateUnusedSsrc(std::set<uint32_t>* new_ssrcs) const;
// |description| is the matching description where existing SSRCs can be
// found.
//
// This is a member function because it may need to generate SSRCs that don't
// match existing ones, which is more than ToStreamParamsVec does.
RTCErrorOr<cricket::StreamParamsVec> MakeSendStreamParamsVec(
std::vector<RtpEncodingParameters> encodings,
const std::string& cname,
const cricket::MediaContentDescription& description) const;
rtc::Thread* signaling_thread_;
rtc::Thread* worker_thread_;
rtc::Thread* network_thread_;
// |transport_proxies_| and |inner_audio_transport_|/|inner_video_transport_|
// are somewhat redundant, but the latter are only set when
// RtpSenders/RtpReceivers are attached to the transport.
std::vector<RtpTransportInterface*> transport_proxies_;
RtpTransportInterface* inner_audio_transport_ = nullptr;
RtpTransportInterface* inner_video_transport_ = nullptr;
const cricket::MediaConfig media_config_;
RtpKeepAliveConfig keepalive_;
cricket::ChannelManager* channel_manager_;
webrtc::RtcEventLog* event_log_;
std::unique_ptr<Call> call_;
webrtc::RtpTransportControllerSend* call_send_rtp_transport_controller_;
// BaseChannel takes content descriptions as input, so we store them here
// such that they can be updated when a new RtpSenderAdapter/
// RtpReceiverAdapter attaches itself.
cricket::AudioContentDescription local_audio_description_;
cricket::AudioContentDescription remote_audio_description_;
cricket::VideoContentDescription local_video_description_;
cricket::VideoContentDescription remote_video_description_;
cricket::VoiceChannel* voice_channel_ = nullptr;
cricket::VideoChannel* video_channel_ = nullptr;
bool have_audio_sender_ = false;
bool have_video_sender_ = false;
bool have_audio_receiver_ = false;
bool have_video_receiver_ = false;
RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(RtpTransportControllerAdapter);
};
} // namespace webrtc
#endif // ORTC_RTPTRANSPORTCONTROLLERADAPTER_H_

View File

@ -1,125 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <memory>
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "media/base/fakemediaengine.h"
#include "ortc/ortcfactory.h"
#include "ortc/testrtpparameters.h"
#include "p2p/base/fakepackettransport.h"
#include "rtc_base/gunit.h"
namespace webrtc {
static const char kTestSha1KeyParams1[] =
"inline:WVNfX19zZW1jdGwgKCkgewkyMjA7fQp9CnVubGVz";
static const char kTestSha1KeyParams2[] =
"inline:PS1uQCVeeCFCanVmcjkpPywjNWhcYD0mXXtxaVBR";
static const char kTestGcmKeyParams3[] =
"inline:e166KFlKzJsGW0d5apX+rrI05vxbrvMJEzFI14aTDCa63IRTlLK4iH66uOI=";
static const cricket::CryptoParams kTestSha1CryptoParams1(
1,
"AES_CM_128_HMAC_SHA1_80",
kTestSha1KeyParams1,
"");
static const cricket::CryptoParams kTestSha1CryptoParams2(
1,
"AES_CM_128_HMAC_SHA1_80",
kTestSha1KeyParams2,
"");
static const cricket::CryptoParams kTestGcmCryptoParams3(1,
"AEAD_AES_256_GCM",
kTestGcmKeyParams3,
"");
// These tests use fake packet transports and a fake media engine in order to
// exercise the SrtpTransport at the API level only. Any end-to-end test should
// go in ortcfactory_integrationtest.cc instead.
class SrtpTransportTest : public testing::Test {
public:
SrtpTransportTest() {
fake_media_engine_ = new cricket::FakeMediaEngine();
// Note: This doesn't need to use fake network classes, since it uses
// FakePacketTransports.
auto result = OrtcFactory::Create(
nullptr, nullptr, nullptr, nullptr, nullptr,
std::unique_ptr<cricket::MediaEngineInterface>(fake_media_engine_),
CreateBuiltinAudioEncoderFactory(), CreateBuiltinAudioDecoderFactory());
ortc_factory_ = result.MoveValue();
rtp_transport_controller_ =
ortc_factory_->CreateRtpTransportController().MoveValue();
fake_packet_transport_.reset(new rtc::FakePacketTransport("fake"));
auto srtp_transport_result = ortc_factory_->CreateSrtpTransport(
rtp_transport_parameters_, fake_packet_transport_.get(), nullptr,
rtp_transport_controller_.get());
srtp_transport_ = srtp_transport_result.MoveValue();
}
protected:
// Owned by |ortc_factory_|.
cricket::FakeMediaEngine* fake_media_engine_;
std::unique_ptr<OrtcFactoryInterface> ortc_factory_;
std::unique_ptr<RtpTransportControllerInterface> rtp_transport_controller_;
std::unique_ptr<SrtpTransportInterface> srtp_transport_;
RtpTransportParameters rtp_transport_parameters_;
std::unique_ptr<rtc::FakePacketTransport> fake_packet_transport_;
};
// Tests that setting the SRTP send/receive key succeeds.
TEST_F(SrtpTransportTest, SetSrtpSendAndReceiveKey) {
EXPECT_TRUE(srtp_transport_->SetSrtpSendKey(kTestSha1CryptoParams1).ok());
EXPECT_TRUE(srtp_transport_->SetSrtpReceiveKey(kTestSha1CryptoParams2).ok());
auto sender_result = ortc_factory_->CreateRtpSender(cricket::MEDIA_TYPE_AUDIO,
srtp_transport_.get());
EXPECT_TRUE(sender_result.ok());
auto receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, srtp_transport_.get());
EXPECT_TRUE(receiver_result.ok());
}
// Tests that setting the SRTP send/receive key twice is not supported.
TEST_F(SrtpTransportTest, SetSrtpSendAndReceiveKeyTwice) {
EXPECT_TRUE(srtp_transport_->SetSrtpSendKey(kTestSha1CryptoParams1).ok());
EXPECT_TRUE(srtp_transport_->SetSrtpReceiveKey(kTestSha1CryptoParams2).ok());
EXPECT_EQ(RTCErrorType::UNSUPPORTED_OPERATION,
srtp_transport_->SetSrtpSendKey(kTestSha1CryptoParams2).type());
EXPECT_EQ(RTCErrorType::UNSUPPORTED_OPERATION,
srtp_transport_->SetSrtpReceiveKey(kTestSha1CryptoParams1).type());
// Ensure that the senders and receivers can be created despite the previous
// errors.
auto sender_result = ortc_factory_->CreateRtpSender(cricket::MEDIA_TYPE_AUDIO,
srtp_transport_.get());
EXPECT_TRUE(sender_result.ok());
auto receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, srtp_transport_.get());
EXPECT_TRUE(receiver_result.ok());
}
// Test that the SRTP send key and receive key must have the same cipher suite.
TEST_F(SrtpTransportTest, SetSrtpSendAndReceiveKeyDifferentCipherSuite) {
EXPECT_TRUE(srtp_transport_->SetSrtpSendKey(kTestSha1CryptoParams1).ok());
EXPECT_EQ(RTCErrorType::UNSUPPORTED_OPERATION,
srtp_transport_->SetSrtpReceiveKey(kTestGcmCryptoParams3).type());
EXPECT_TRUE(srtp_transport_->SetSrtpReceiveKey(kTestSha1CryptoParams2).ok());
// Ensure that the senders and receivers can be created despite the previous
// error.
auto sender_result = ortc_factory_->CreateRtpSender(cricket::MEDIA_TYPE_AUDIO,
srtp_transport_.get());
EXPECT_TRUE(sender_result.ok());
auto receiver_result = ortc_factory_->CreateRtpReceiver(
cricket::MEDIA_TYPE_AUDIO, srtp_transport_.get());
EXPECT_TRUE(receiver_result.ok());
}
} // namespace webrtc
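
Taken together, the contract these tests pin down is small: each key can be set at most once, and both directions must use the same cipher suite. A minimal sketch, assuming an |srtp_transport| created as in the fixture above and using placeholder key material:

// Illustrative sketch; the inline key strings are placeholders, not real keys.
cricket::CryptoParams send_key(1, "AES_CM_128_HMAC_SHA1_80",
                               "inline:<placeholder-send-key>", "");
cricket::CryptoParams receive_key(1, "AES_CM_128_HMAC_SHA1_80",
                                  "inline:<placeholder-receive-key>", "");
webrtc::RTCError send_result = srtp_transport->SetSrtpSendKey(send_key);
webrtc::RTCError receive_result = srtp_transport->SetSrtpReceiveKey(receive_key);
// Calling either setter a second time, or mixing cipher suites between the
// two directions, returns RTCErrorType::UNSUPPORTED_OPERATION.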

View File

@ -1,315 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "ortc/testrtpparameters.h"
#include <algorithm>
#include <utility>
namespace webrtc {
RtpParameters MakeMinimalOpusParameters() {
RtpParameters parameters;
RtpCodecParameters opus_codec;
opus_codec.name = "opus";
opus_codec.kind = cricket::MEDIA_TYPE_AUDIO;
opus_codec.payload_type = 111;
opus_codec.clock_rate.emplace(48000);
opus_codec.num_channels.emplace(2);
parameters.codecs.push_back(std::move(opus_codec));
RtpEncodingParameters encoding;
encoding.codec_payload_type.emplace(111);
parameters.encodings.push_back(std::move(encoding));
return parameters;
}
RtpParameters MakeMinimalIsacParameters() {
RtpParameters parameters;
RtpCodecParameters isac_codec;
isac_codec.name = "ISAC";
isac_codec.kind = cricket::MEDIA_TYPE_AUDIO;
isac_codec.payload_type = 103;
isac_codec.clock_rate.emplace(16000);
parameters.codecs.push_back(std::move(isac_codec));
RtpEncodingParameters encoding;
encoding.codec_payload_type.emplace(103);
parameters.encodings.push_back(std::move(encoding));
return parameters;
}
RtpParameters MakeMinimalOpusParametersWithSsrc(uint32_t ssrc) {
RtpParameters parameters = MakeMinimalOpusParameters();
parameters.encodings[0].ssrc.emplace(ssrc);
return parameters;
}
RtpParameters MakeMinimalIsacParametersWithSsrc(uint32_t ssrc) {
RtpParameters parameters = MakeMinimalIsacParameters();
parameters.encodings[0].ssrc.emplace(ssrc);
return parameters;
}
RtpParameters MakeMinimalVideoParameters(const char* codec_name) {
RtpParameters parameters;
RtpCodecParameters vp8_codec;
vp8_codec.name = codec_name;
vp8_codec.kind = cricket::MEDIA_TYPE_VIDEO;
vp8_codec.payload_type = 96;
parameters.codecs.push_back(std::move(vp8_codec));
RtpEncodingParameters encoding;
encoding.codec_payload_type.emplace(96);
parameters.encodings.push_back(std::move(encoding));
return parameters;
}
RtpParameters MakeMinimalVp8Parameters() {
return MakeMinimalVideoParameters("VP8");
}
RtpParameters MakeMinimalVp9Parameters() {
return MakeMinimalVideoParameters("VP9");
}
RtpParameters MakeMinimalVp8ParametersWithSsrc(uint32_t ssrc) {
RtpParameters parameters = MakeMinimalVp8Parameters();
parameters.encodings[0].ssrc.emplace(ssrc);
return parameters;
}
RtpParameters MakeMinimalVp9ParametersWithSsrc(uint32_t ssrc) {
RtpParameters parameters = MakeMinimalVp9Parameters();
parameters.encodings[0].ssrc.emplace(ssrc);
return parameters;
}
// Note: Currently, these "WithNoSsrc" methods are identical to the normal
// "MakeMinimal" methods, but with the added guarantee that they will never be
// changed to include an SSRC.
RtpParameters MakeMinimalOpusParametersWithNoSsrc() {
RtpParameters parameters = MakeMinimalOpusParameters();
RTC_DCHECK(!parameters.encodings[0].ssrc);
return parameters;
}
RtpParameters MakeMinimalIsacParametersWithNoSsrc() {
RtpParameters parameters = MakeMinimalIsacParameters();
RTC_DCHECK(!parameters.encodings[0].ssrc);
return parameters;
}
RtpParameters MakeMinimalVp8ParametersWithNoSsrc() {
RtpParameters parameters = MakeMinimalVp8Parameters();
RTC_DCHECK(!parameters.encodings[0].ssrc);
return parameters;
}
RtpParameters MakeMinimalVp9ParametersWithNoSsrc() {
RtpParameters parameters = MakeMinimalVp9Parameters();
RTC_DCHECK(!parameters.encodings[0].ssrc);
return parameters;
}
// Make audio parameters with all the available properties configured and
// features used, and with multiple codecs offered. Obtained by taking a
// snapshot of a default PeerConnection offer (and adding other things, like
// bitrate limit).
//
// See "MakeFullOpusParameters"/"MakeFullIsacParameters" below.
RtpParameters MakeFullAudioParameters(int preferred_payload_type) {
RtpParameters parameters;
RtpCodecParameters opus_codec;
opus_codec.name = "opus";
opus_codec.kind = cricket::MEDIA_TYPE_AUDIO;
opus_codec.payload_type = 111;
opus_codec.clock_rate.emplace(48000);
opus_codec.num_channels.emplace(2);
opus_codec.parameters["minptime"] = "10";
opus_codec.parameters["useinbandfec"] = "1";
opus_codec.parameters["usedtx"] = "1";
opus_codec.parameters["stereo"] = "1";
opus_codec.rtcp_feedback.emplace_back(RtcpFeedbackType::TRANSPORT_CC);
parameters.codecs.push_back(std::move(opus_codec));
RtpCodecParameters isac_codec;
isac_codec.name = "ISAC";
isac_codec.kind = cricket::MEDIA_TYPE_AUDIO;
isac_codec.payload_type = 103;
isac_codec.clock_rate.emplace(16000);
parameters.codecs.push_back(std::move(isac_codec));
RtpCodecParameters cn_codec;
cn_codec.name = "CN";
cn_codec.kind = cricket::MEDIA_TYPE_AUDIO;
cn_codec.payload_type = 106;
cn_codec.clock_rate.emplace(32000);
parameters.codecs.push_back(std::move(cn_codec));
RtpCodecParameters dtmf_codec;
dtmf_codec.name = "telephone-event";
dtmf_codec.kind = cricket::MEDIA_TYPE_AUDIO;
dtmf_codec.payload_type = 126;
dtmf_codec.clock_rate.emplace(8000);
parameters.codecs.push_back(std::move(dtmf_codec));
// "codec_payload_type" isn't implemented, so we need to reorder codecs to
// cause one to be used.
// TODO(deadbeef): Remove this when it becomes unnecessary.
auto it = std::find_if(parameters.codecs.begin(), parameters.codecs.end(),
[preferred_payload_type](const RtpCodecParameters& p) {
return p.payload_type == preferred_payload_type;
});
RtpCodecParameters preferred = *it;
parameters.codecs.erase(it);
parameters.codecs.insert(parameters.codecs.begin(), preferred);
// Intentionally leave out SSRC so one's chosen automatically.
RtpEncodingParameters encoding;
encoding.codec_payload_type.emplace(preferred_payload_type);
encoding.dtx.emplace(DtxStatus::ENABLED);
// 20 kbps.
encoding.max_bitrate_bps.emplace(20000);
parameters.encodings.push_back(std::move(encoding));
parameters.header_extensions.emplace_back(
"urn:ietf:params:rtp-hdrext:ssrc-audio-level", 1);
return parameters;
}
RtpParameters MakeFullOpusParameters() {
return MakeFullAudioParameters(111);
}
RtpParameters MakeFullIsacParameters() {
return MakeFullAudioParameters(103);
}
// Make video parameters with all the available properties configured and
// features used, and with multiple codecs offered. Obtained by taking a
// snapshot of a default PeerConnection offer (and adding other things, like
// bitrate limit).
//
// See "MakeFullVp8Parameters"/"MakeFullVp9Parameters" below.
RtpParameters MakeFullVideoParameters(int preferred_payload_type) {
RtpParameters parameters;
RtpCodecParameters vp8_codec;
vp8_codec.name = "VP8";
vp8_codec.kind = cricket::MEDIA_TYPE_VIDEO;
vp8_codec.payload_type = 100;
vp8_codec.clock_rate.emplace(90000);
vp8_codec.rtcp_feedback.emplace_back(RtcpFeedbackType::CCM,
RtcpFeedbackMessageType::FIR);
vp8_codec.rtcp_feedback.emplace_back(RtcpFeedbackType::NACK,
RtcpFeedbackMessageType::GENERIC_NACK);
vp8_codec.rtcp_feedback.emplace_back(RtcpFeedbackType::NACK,
RtcpFeedbackMessageType::PLI);
vp8_codec.rtcp_feedback.emplace_back(RtcpFeedbackType::REMB);
vp8_codec.rtcp_feedback.emplace_back(RtcpFeedbackType::TRANSPORT_CC);
parameters.codecs.push_back(std::move(vp8_codec));
RtpCodecParameters vp8_rtx_codec;
vp8_rtx_codec.name = "rtx";
vp8_rtx_codec.kind = cricket::MEDIA_TYPE_VIDEO;
vp8_rtx_codec.payload_type = 96;
vp8_rtx_codec.clock_rate.emplace(90000);
vp8_rtx_codec.parameters["apt"] = "100";
parameters.codecs.push_back(std::move(vp8_rtx_codec));
RtpCodecParameters vp9_codec;
vp9_codec.name = "VP9";
vp9_codec.kind = cricket::MEDIA_TYPE_VIDEO;
vp9_codec.payload_type = 101;
vp9_codec.clock_rate.emplace(90000);
vp9_codec.rtcp_feedback.emplace_back(RtcpFeedbackType::CCM,
RtcpFeedbackMessageType::FIR);
vp9_codec.rtcp_feedback.emplace_back(RtcpFeedbackType::NACK,
RtcpFeedbackMessageType::GENERIC_NACK);
vp9_codec.rtcp_feedback.emplace_back(RtcpFeedbackType::NACK,
RtcpFeedbackMessageType::PLI);
vp9_codec.rtcp_feedback.emplace_back(RtcpFeedbackType::REMB);
vp9_codec.rtcp_feedback.emplace_back(RtcpFeedbackType::TRANSPORT_CC);
parameters.codecs.push_back(std::move(vp9_codec));
RtpCodecParameters vp9_rtx_codec;
vp9_rtx_codec.name = "rtx";
vp9_rtx_codec.kind = cricket::MEDIA_TYPE_VIDEO;
vp9_rtx_codec.payload_type = 97;
vp9_rtx_codec.clock_rate.emplace(90000);
vp9_rtx_codec.parameters["apt"] = "101";
parameters.codecs.push_back(std::move(vp9_rtx_codec));
RtpCodecParameters red_codec;
red_codec.name = "red";
red_codec.kind = cricket::MEDIA_TYPE_VIDEO;
red_codec.payload_type = 116;
red_codec.clock_rate.emplace(90000);
parameters.codecs.push_back(std::move(red_codec));
RtpCodecParameters red_rtx_codec;
red_rtx_codec.name = "rtx";
red_rtx_codec.kind = cricket::MEDIA_TYPE_VIDEO;
red_rtx_codec.payload_type = 98;
red_rtx_codec.clock_rate.emplace(90000);
red_rtx_codec.parameters["apt"] = "116";
parameters.codecs.push_back(std::move(red_rtx_codec));
RtpCodecParameters ulpfec_codec;
ulpfec_codec.name = "ulpfec";
ulpfec_codec.kind = cricket::MEDIA_TYPE_VIDEO;
ulpfec_codec.payload_type = 117;
ulpfec_codec.clock_rate.emplace(90000);
parameters.codecs.push_back(std::move(ulpfec_codec));
// "codec_payload_type" isn't implemented, so we need to reorder codecs to
// cause one to be used.
// TODO(deadbeef): Remove this when it becomes unnecessary.
auto it = std::find_if(parameters.codecs.begin(), parameters.codecs.end(),
[preferred_payload_type](const RtpCodecParameters& p) {
return p.payload_type == preferred_payload_type;
});
RtpCodecParameters preferred = *it;
parameters.codecs.erase(it);
parameters.codecs.insert(parameters.codecs.begin(), preferred);
// Intentionally leave out SSRC so one's chosen automatically.
RtpEncodingParameters encoding;
encoding.codec_payload_type.emplace(preferred_payload_type);
encoding.fec.emplace(FecMechanism::RED_AND_ULPFEC);
// Will create default RtxParameters, with unset SSRC.
encoding.rtx.emplace();
// 100 kbps.
encoding.max_bitrate_bps.emplace(100000);
parameters.encodings.push_back(std::move(encoding));
parameters.header_extensions.emplace_back(
"urn:ietf:params:rtp-hdrext:toffset", 2);
parameters.header_extensions.emplace_back(
"http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time", 3);
parameters.header_extensions.emplace_back("urn:3gpp:video-orientation", 4);
parameters.header_extensions.emplace_back(
"http://www.ietf.org/id/"
"draft-holmer-rmcat-transport-wide-cc-extensions-01",
5);
parameters.header_extensions.emplace_back(
"http://www.webrtc.org/experiments/rtp-hdrext/playout-delay", 6);
return parameters;
}
RtpParameters MakeFullVp8Parameters() {
return MakeFullVideoParameters(100);
}
RtpParameters MakeFullVp9Parameters() {
return MakeFullVideoParameters(101);
}
} // namespace webrtc
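
A second hedged sketch, checking the encoding-level settings that
MakeFullVideoParameters configures above (RED+ULPFEC, RTX with an unset SSRC,
and the 100 kbps cap). As before, "test/gtest.h" and the test name are
assumptions; the expected values come directly from the code above.

#include "ortc/testrtpparameters.h"
#include "test/gtest.h"

namespace webrtc {

TEST(TestRtpParametersSketch, FullVideoParametersConfigureEncoding) {
  RtpParameters vp8 = MakeFullVp8Parameters();
  ASSERT_FALSE(vp8.codecs.empty());
  // VP8 (payload type 100) was moved to the front of the codec list.
  EXPECT_EQ(100, vp8.codecs[0].payload_type);

  ASSERT_EQ(1u, vp8.encodings.size());
  const RtpEncodingParameters& encoding = vp8.encodings[0];
  EXPECT_EQ(FecMechanism::RED_AND_ULPFEC, *encoding.fec);
  EXPECT_TRUE(encoding.rtx.has_value());  // Default RtxParameters, no SSRC.
  EXPECT_EQ(100000, *encoding.max_bitrate_bps);

  RtpParameters vp9 = MakeFullVp9Parameters();
  ASSERT_FALSE(vp9.codecs.empty());
  EXPECT_EQ(101, vp9.codecs[0].payload_type);
}

}  // namespace webrtc
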


@ -1,72 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef ORTC_TESTRTPPARAMETERS_H_
#define ORTC_TESTRTPPARAMETERS_H_
#include "api/ortc/rtptransportinterface.h"
#include "api/rtpparameters.h"
namespace webrtc {
// Helper methods to create RtpParameters to use for sending/receiving.
//
// "MakeMinimal" methods contain the minimal necessary information for an
// RtpSender or RtpReceiver to function. The "MakeFull" methods are the
// opposite, and include all features that would normally be offered by a
// PeerConnection, and in some cases additional ones.
//
// These methods are intended to be used for end-to-end testing (such as in
// ortcfactory_integrationtest.cc), or unit testing that doesn't care about the
// specific contents of the parameters. Tests should NOT assume that these
// methods will not change; a test that verifies a specific parameter value
// is applied properly should construct the parameters itself.
inline RtpTransportParameters MakeRtcpMuxParameters() {
RtpTransportParameters parameters;
parameters.rtcp.mux = true;
return parameters;
}
RtpParameters MakeMinimalOpusParameters();
RtpParameters MakeMinimalIsacParameters();
RtpParameters MakeMinimalOpusParametersWithSsrc(uint32_t ssrc);
RtpParameters MakeMinimalIsacParametersWithSsrc(uint32_t ssrc);
RtpParameters MakeMinimalVp8Parameters();
RtpParameters MakeMinimalVp9Parameters();
RtpParameters MakeMinimalVp8ParametersWithSsrc(uint32_t ssrc);
RtpParameters MakeMinimalVp9ParametersWithSsrc(uint32_t ssrc);
// Will create an encoding with no SSRC (meaning "match first SSRC seen" for a
// receiver, or "pick one automatically" for a sender).
RtpParameters MakeMinimalOpusParametersWithNoSsrc();
RtpParameters MakeMinimalIsacParametersWithNoSsrc();
RtpParameters MakeMinimalVp8ParametersWithNoSsrc();
RtpParameters MakeMinimalVp9ParametersWithNoSsrc();
// Make audio parameters with all the available properties configured and
// features used, and with multiple codecs offered. Obtained by taking a
// snapshot of a default PeerConnection offer (and adding other things, like
// bitrate limit).
RtpParameters MakeFullOpusParameters();
RtpParameters MakeFullIsacParameters();
// Make video parameters with all the available properties configured and
// features used, and with multiple codecs offered. Obtained by taking a
// snapshot of a default PeerConnection offer (and adding other things, like
// bitrate limit).
RtpParameters MakeFullVp8Parameters();
RtpParameters MakeFullVp9Parameters();
} // namespace webrtc
#endif // ORTC_TESTRTPPARAMETERS_H_
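
To make the intended usage of these declarations concrete, here is a minimal
sketch. It relies only on the helpers declared above plus "rtc_base/checks.h"
for RTC_DCHECK; the closing comment about handing the structs to the ORTC
sender/receiver objects is an assumption based on the description of
ortcfactory_integrationtest.cc, not a verified API reference.

#include <cstdint>

#include "ortc/testrtpparameters.h"
#include "rtc_base/checks.h"

namespace webrtc {

void SketchTestParameterUsage() {
  // RTP and RTCP multiplexed over a single transport.
  RtpTransportParameters transport_params = MakeRtcpMuxParameters();
  RTC_DCHECK(transport_params.rtcp.mux);

  // Sender side: pin an explicit SSRC for the test.
  RtpParameters send_audio = MakeMinimalOpusParametersWithSsrc(0xCAFEu);

  // Receiver side: no SSRC, so the receiver latches onto the first SSRC seen.
  RtpParameters receive_video = MakeMinimalVp8ParametersWithNoSsrc();

  // In the integration tests these structs would then be passed to the ORTC
  // sender/receiver objects created by the factory (presumably via their
  // Send()/Receive() calls), which is outside the scope of this header.
  (void)send_audio;
  (void)receive_video;
}

}  // namespace webrtc
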


@ -28,7 +28,6 @@ TESTS = [
'rtc_pc_unittests',
'rtc_media_unittests',
'peerconnection_unittests',
'ortc_unittests',
'modules_unittests',
'modules_tests',
'low_bandwidth_audio_test',


@ -27,9 +27,6 @@
{
"app": "modules_unittests"
},
{
"app": "ortc_unittests"
},
{
"app": "rtc_media_unittests"
},


@ -12,9 +12,6 @@
{
"app": "modules_unittests"
},
{
"app": "ortc_unittests"
},
{
"app": "rtc_pc_unittests"
},


@ -82,10 +82,6 @@
"label": "//modules:modules_unittests",
"type": "windowed_test_launcher",
},
"ortc_unittests": {
"label": "//ortc:ortc_unittests",
"type": "console_test_launcher",
},
"peerconnection_unittests": {
"label": "//pc:peerconnection_unittests",
"type": "console_test_launcher",