Obj-C SDK Cleanup

This CL separates the files under sdk/objc into logical directories, replacing
the previous file layout under Framework/.

A long-term goal is to have some system set up to generate the files under
sdk/objc/api (the PeerConnection API wrappers) from the C++ code. In the
shorter term, the goal is to abstract out shared concepts from these classes
in order to make them as uniform as possible.

The separation into base/, components/, and helpers/ is to differentiate
between the base layer's common protocols, the various utilities, and the
actual platform-specific components.
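
For example, a client import that pointed into the framework-style layout
would now point into the logical directories (the post-move path below is an
illustrative assumption, not taken from this CL):

  // Before (framework-style layout):
  #import "sdk/objc/Framework/Headers/WebRTC/RTCAudioSession.h"
  // After (logical layout; illustrative):
  #import "sdk/objc/components/audio/RTCAudioSession.h"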

The old directory layout, which resembled a framework's internal layout, is no
longer necessary, since the framework target generates that layout when
building.

Bug: webrtc:9627
Change-Id: Ib084fd83f050ae980649ca99e841f4fb0580bd8f
Reviewed-on: https://webrtc-review.googlesource.com/94142
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Reviewed-by: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Rasmus Brandt <brandtr@webrtc.org>
Reviewed-by: Henrik Andreassson <henrika@webrtc.org>
Commit-Queue: Anders Carlsson <andersc@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#24493}
Author:    Anders Carlsson
Date:      2018-08-30 09:30:29 +02:00
Committed: Commit Bot
Parent:    9ea5765f78
Commit:    7bca8ca4e2

470 changed files with 7255 additions and 5258 deletions

@@ -1,172 +0,0 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCAudioSession.h"
#import "WebRTC/RTCAudioSessionConfiguration.h"
#import "WebRTC/RTCLogging.h"
#import "RTCAudioSession+Private.h"
@implementation RTCAudioSession (Configuration)
- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
error:(NSError **)outError {
return [self setConfiguration:configuration
active:NO
shouldSetActive:NO
error:outError];
}
- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
active:(BOOL)active
error:(NSError **)outError {
return [self setConfiguration:configuration
active:active
shouldSetActive:YES
error:outError];
}
#pragma mark - Private
- (BOOL)setConfiguration:(RTCAudioSessionConfiguration *)configuration
active:(BOOL)active
shouldSetActive:(BOOL)shouldSetActive
error:(NSError **)outError {
NSParameterAssert(configuration);
if (outError) {
*outError = nil;
}
if (![self checkLock:outError]) {
return NO;
}
// Provide an error even if there isn't one so we can log it. We will not
// return immediately on error in this function and instead try to set
// everything we can.
NSError *error = nil;
if (self.category != configuration.category ||
self.categoryOptions != configuration.categoryOptions) {
NSError *categoryError = nil;
if (![self setCategory:configuration.category
withOptions:configuration.categoryOptions
error:&categoryError]) {
RTCLogError(@"Failed to set category: %@",
categoryError.localizedDescription);
error = categoryError;
} else {
RTCLog(@"Set category to: %@", configuration.category);
}
}
if (self.mode != configuration.mode) {
NSError *modeError = nil;
if (![self setMode:configuration.mode error:&modeError]) {
RTCLogError(@"Failed to set mode: %@",
modeError.localizedDescription);
error = modeError;
} else {
RTCLog(@"Set mode to: %@", configuration.mode);
}
}
// Sometimes category options don't stick after setting mode.
if (self.categoryOptions != configuration.categoryOptions) {
NSError *categoryError = nil;
if (![self setCategory:configuration.category
withOptions:configuration.categoryOptions
error:&categoryError]) {
RTCLogError(@"Failed to set category options: %@",
categoryError.localizedDescription);
error = categoryError;
} else {
RTCLog(@"Set category options to: %ld",
(long)configuration.categoryOptions);
}
}
if (self.preferredSampleRate != configuration.sampleRate) {
NSError *sampleRateError = nil;
if (![self setPreferredSampleRate:configuration.sampleRate
error:&sampleRateError]) {
RTCLogError(@"Failed to set preferred sample rate: %@",
sampleRateError.localizedDescription);
error = sampleRateError;
} else {
RTCLog(@"Set preferred sample rate to: %.2f",
configuration.sampleRate);
}
}
if (self.preferredIOBufferDuration != configuration.ioBufferDuration) {
NSError *bufferDurationError = nil;
if (![self setPreferredIOBufferDuration:configuration.ioBufferDuration
error:&bufferDurationError]) {
RTCLogError(@"Failed to set preferred IO buffer duration: %@",
bufferDurationError.localizedDescription);
error = bufferDurationError;
} else {
RTCLog(@"Set preferred IO buffer duration to: %f",
configuration.ioBufferDuration);
}
}
if (shouldSetActive) {
NSError *activeError = nil;
if (![self setActive:active error:&activeError]) {
RTCLogError(@"Failed to setActive to %d: %@",
active, activeError.localizedDescription);
error = activeError;
}
}
if (self.isActive &&
// TODO(tkchin): Figure out which category/mode numChannels is valid for.
[self.mode isEqualToString:AVAudioSessionModeVoiceChat]) {
// Try to set the preferred number of hardware audio channels. These calls
// must be done after setting the audio session’s category and mode and
// activating the session.
NSInteger inputNumberOfChannels = configuration.inputNumberOfChannels;
if (self.inputNumberOfChannels != inputNumberOfChannels) {
NSError *inputChannelsError = nil;
if (![self setPreferredInputNumberOfChannels:inputNumberOfChannels
error:&inputChannelsError]) {
RTCLogError(@"Failed to set preferred input number of channels: %@",
inputChannelsError.localizedDescription);
error = inputChannelsError;
} else {
RTCLog(@"Set input number of channels to: %ld",
(long)inputNumberOfChannels);
}
}
NSInteger outputNumberOfChannels = configuration.outputNumberOfChannels;
if (self.outputNumberOfChannels != outputNumberOfChannels) {
NSError *outputChannelsError = nil;
if (![self setPreferredOutputNumberOfChannels:outputNumberOfChannels
error:&outputChannelsError]) {
RTCLogError(@"Failed to set preferred output number of channels: %@",
outputChannelsError.localizedDescription);
error = outputChannelsError;
} else {
RTCLog(@"Set output number of channels to: %ld",
(long)outputNumberOfChannels);
}
}
}
if (outError) {
*outError = error;
}
return error == nil;
}
@end
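
For reference, a minimal usage sketch of the configuration category removed
above, assuming a single calling thread and the documented locking contract:

#import "WebRTC/RTCAudioSession.h"
#import "WebRTC/RTCAudioSessionConfiguration.h"
#import "WebRTC/RTCLogging.h"

RTCAudioSession *session = [RTCAudioSession sharedInstance];
RTCAudioSessionConfiguration *config =
    [RTCAudioSessionConfiguration webRTCConfiguration];
NSError *error = nil;
// Configuration calls fail with kRTCAudioSessionErrorLockRequired unless
// lockForConfiguration is held.
[session lockForConfiguration];
BOOL success = [session setConfiguration:config active:YES error:&error];
[session unlockForConfiguration];
if (!success) {
  RTCLogError(@"Failed to configure: %@", error.localizedDescription);
}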

@@ -1,95 +0,0 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "sdk/objc/Framework/Headers/WebRTC/RTCAudioSession.h"
#include <vector>
NS_ASSUME_NONNULL_BEGIN
@class RTCAudioSessionConfiguration;
@interface RTCAudioSession ()
/** Number of times setActive:YES has succeeded without a balanced call to
* setActive:NO.
*/
@property(nonatomic, readonly) int activationCount;
/** The number of times |beginWebRTCSession| was called without a balanced call
* to |endWebRTCSession|.
*/
@property(nonatomic, readonly) int webRTCSessionCount;
/** Convenience BOOL that checks useManualAudio and isAudioEnabled. */
@property(readonly) BOOL canPlayOrRecord;
/** Tracks whether we have been sent an interruption event that hasn't been matched by either an
* interruption end event or a foreground event.
*/
@property(nonatomic, assign) BOOL isInterrupted;
- (BOOL)checkLock:(NSError **)outError;
/** Adds the delegate to the list of delegates, and places it at the front of
* the list. This delegate will be notified before other delegates of
* audio events.
*/
- (void)pushDelegate:(id<RTCAudioSessionDelegate>)delegate;
/** Signals RTCAudioSession that a WebRTC session is about to begin and
* audio configuration is needed. Will configure the audio session for WebRTC
* if not already configured and if configuration is not delayed.
* Successful calls must be balanced by a call to endWebRTCSession.
*/
- (BOOL)beginWebRTCSession:(NSError **)outError;
/** Signals RTCAudioSession that a WebRTC session is about to end and audio
* unconfiguration is needed. Will unconfigure the audio session for WebRTC
* if this is the last unmatched call and if configuration is not delayed.
*/
- (BOOL)endWebRTCSession:(NSError **)outError;
/** Configure the audio session for WebRTC. This call will fail if the session
* is already configured. On other failures, we will attempt to restore the
* previously used audio session configuration.
* |lockForConfiguration| must be called first.
* Successful calls to configureWebRTCSession must be matched by calls to
* |unconfigureWebRTCSession|.
*/
- (BOOL)configureWebRTCSession:(NSError **)outError;
/** Unconfigures the session for WebRTC. This will attempt to restore the
* audio session to the settings used before |configureWebRTCSession| was
* called.
* |lockForConfiguration| must be called first.
*/
- (BOOL)unconfigureWebRTCSession:(NSError **)outError;
/** Returns a configuration error with the given description. */
- (NSError *)configurationErrorWithDescription:(NSString *)description;
// Properties and methods for tests.
@property(nonatomic, readonly) std::vector<__weak id<RTCAudioSessionDelegate> > delegates;
- (void)notifyDidBeginInterruption;
- (void)notifyDidEndInterruptionWithShouldResumeSession:(BOOL)shouldResumeSession;
- (void)notifyDidChangeRouteWithReason:(AVAudioSessionRouteChangeReason)reason
previousRoute:(AVAudioSessionRouteDescription *)previousRoute;
- (void)notifyMediaServicesWereLost;
- (void)notifyMediaServicesWereReset;
- (void)notifyDidChangeCanPlayOrRecord:(BOOL)canPlayOrRecord;
- (void)notifyDidStartPlayOrRecord;
- (void)notifyDidStopPlayOrRecord;
- (void)notifyDidDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches;
@end
NS_ASSUME_NONNULL_END
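
The begin/end calls documented above must be balanced and made under the
configuration lock; a sketch of a conforming (internal) caller:

RTCAudioSession *session = [RTCAudioSession sharedInstance];
NSError *error = nil;
[session lockForConfiguration];
if ([session beginWebRTCSession:&error]) {
  // The session is now configured for WebRTC audio (unless configuration
  // was delayed).
  [session endWebRTCSession:&error];
}
[session unlockForConfiguration];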

@@ -1,978 +0,0 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCAudioSession.h"
#import <UIKit/UIKit.h>
#include "rtc_base/atomicops.h"
#include "rtc_base/checks.h"
#include "rtc_base/criticalsection.h"
#import "WebRTC/RTCAudioSessionConfiguration.h"
#import "WebRTC/RTCLogging.h"
#import "RTCAudioSession+Private.h"
NSString * const kRTCAudioSessionErrorDomain = @"org.webrtc.RTCAudioSession";
NSInteger const kRTCAudioSessionErrorLockRequired = -1;
NSInteger const kRTCAudioSessionErrorConfiguration = -2;
NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume";
// This class needs to be thread-safe because it is accessed from many threads.
// TODO(tkchin): Consider more granular locking. We're not expecting a lot of
// lock contention so coarse locks should be fine for now.
@implementation RTCAudioSession {
rtc::CriticalSection _crit;
AVAudioSession *_session;
volatile int _activationCount;
volatile int _lockRecursionCount;
volatile int _webRTCSessionCount;
BOOL _isActive;
BOOL _useManualAudio;
BOOL _isAudioEnabled;
BOOL _canPlayOrRecord;
BOOL _isInterrupted;
}
@synthesize session = _session;
@synthesize delegates = _delegates;
+ (instancetype)sharedInstance {
static dispatch_once_t onceToken;
static RTCAudioSession *sharedInstance = nil;
dispatch_once(&onceToken, ^{
sharedInstance = [[self alloc] init];
});
return sharedInstance;
}
- (instancetype)init {
return [self initWithAudioSession:[AVAudioSession sharedInstance]];
}
/** This initializer provides a way for unit tests to inject a fake/mock audio session. */
- (instancetype)initWithAudioSession:(id)audioSession {
if (self = [super init]) {
_session = audioSession;
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
[center addObserver:self
selector:@selector(handleInterruptionNotification:)
name:AVAudioSessionInterruptionNotification
object:nil];
[center addObserver:self
selector:@selector(handleRouteChangeNotification:)
name:AVAudioSessionRouteChangeNotification
object:nil];
[center addObserver:self
selector:@selector(handleMediaServicesWereLost:)
name:AVAudioSessionMediaServicesWereLostNotification
object:nil];
[center addObserver:self
selector:@selector(handleMediaServicesWereReset:)
name:AVAudioSessionMediaServicesWereResetNotification
object:nil];
// Posted on the main thread when the primary audio from other applications
// starts and stops. Foreground applications may use this notification as a
// hint to enable or disable audio that is secondary.
[center addObserver:self
selector:@selector(handleSilenceSecondaryAudioHintNotification:)
name:AVAudioSessionSilenceSecondaryAudioHintNotification
object:nil];
// Also track foreground event in order to deal with interruption ended situation.
[center addObserver:self
selector:@selector(handleApplicationDidBecomeActive:)
name:UIApplicationDidBecomeActiveNotification
object:nil];
[_session addObserver:self
forKeyPath:kRTCAudioSessionOutputVolumeSelector
options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld
context:(__bridge void*)RTCAudioSession.class];
RTCLog(@"RTCAudioSession (%p): init.", self);
}
return self;
}
- (void)dealloc {
[[NSNotificationCenter defaultCenter] removeObserver:self];
[_session removeObserver:self
forKeyPath:kRTCAudioSessionOutputVolumeSelector
context:(__bridge void*)RTCAudioSession.class];
RTCLog(@"RTCAudioSession (%p): dealloc.", self);
}
- (NSString *)description {
NSString *format =
@"RTCAudioSession: {\n"
" category: %@\n"
" categoryOptions: %ld\n"
" mode: %@\n"
" isActive: %d\n"
" sampleRate: %.2f\n"
" IOBufferDuration: %f\n"
" outputNumberOfChannels: %ld\n"
" inputNumberOfChannels: %ld\n"
" outputLatency: %f\n"
" inputLatency: %f\n"
" outputVolume: %f\n"
"}";
NSString *description = [NSString stringWithFormat:format,
self.category, (long)self.categoryOptions, self.mode,
self.isActive, self.sampleRate, self.IOBufferDuration,
self.outputNumberOfChannels, self.inputNumberOfChannels,
self.outputLatency, self.inputLatency, self.outputVolume];
return description;
}
- (void)setIsActive:(BOOL)isActive {
@synchronized(self) {
_isActive = isActive;
}
}
- (BOOL)isActive {
@synchronized(self) {
return _isActive;
}
}
- (BOOL)isLocked {
return _lockRecursionCount > 0;
}
- (void)setUseManualAudio:(BOOL)useManualAudio {
@synchronized(self) {
if (_useManualAudio == useManualAudio) {
return;
}
_useManualAudio = useManualAudio;
}
[self updateCanPlayOrRecord];
}
- (BOOL)useManualAudio {
@synchronized(self) {
return _useManualAudio;
}
}
- (void)setIsAudioEnabled:(BOOL)isAudioEnabled {
@synchronized(self) {
if (_isAudioEnabled == isAudioEnabled) {
return;
}
_isAudioEnabled = isAudioEnabled;
}
[self updateCanPlayOrRecord];
}
- (BOOL)isAudioEnabled {
@synchronized(self) {
return _isAudioEnabled;
}
}
// TODO(tkchin): Check for duplicates.
- (void)addDelegate:(id<RTCAudioSessionDelegate>)delegate {
RTCLog(@"Adding delegate: (%p)", delegate);
if (!delegate) {
return;
}
@synchronized(self) {
_delegates.push_back(delegate);
[self removeZeroedDelegates];
}
}
- (void)removeDelegate:(id<RTCAudioSessionDelegate>)delegate {
RTCLog(@"Removing delegate: (%p)", delegate);
if (!delegate) {
return;
}
@synchronized(self) {
_delegates.erase(std::remove(_delegates.begin(),
_delegates.end(),
delegate),
_delegates.end());
[self removeZeroedDelegates];
}
}
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wthread-safety-analysis"
- (void)lockForConfiguration {
_crit.Enter();
rtc::AtomicOps::Increment(&_lockRecursionCount);
}
- (void)unlockForConfiguration {
// Don't let threads other than the one that called lockForConfiguration
// unlock.
if (_crit.TryEnter()) {
rtc::AtomicOps::Decrement(&_lockRecursionCount);
// One unlock for the tryLock, and another one to actually unlock. If this
// was called without anyone calling lock, we will hit an assertion.
_crit.Leave();
_crit.Leave();
}
}
#pragma clang diagnostic pop
#pragma mark - AVAudioSession proxy methods
- (NSString *)category {
return self.session.category;
}
- (AVAudioSessionCategoryOptions)categoryOptions {
return self.session.categoryOptions;
}
- (NSString *)mode {
return self.session.mode;
}
- (BOOL)secondaryAudioShouldBeSilencedHint {
return self.session.secondaryAudioShouldBeSilencedHint;
}
- (AVAudioSessionRouteDescription *)currentRoute {
return self.session.currentRoute;
}
- (NSInteger)maximumInputNumberOfChannels {
return self.session.maximumInputNumberOfChannels;
}
- (NSInteger)maximumOutputNumberOfChannels {
return self.session.maximumOutputNumberOfChannels;
}
- (float)inputGain {
return self.session.inputGain;
}
- (BOOL)inputGainSettable {
return self.session.inputGainSettable;
}
- (BOOL)inputAvailable {
return self.session.inputAvailable;
}
- (NSArray<AVAudioSessionDataSourceDescription *> *)inputDataSources {
return self.session.inputDataSources;
}
- (AVAudioSessionDataSourceDescription *)inputDataSource {
return self.session.inputDataSource;
}
- (NSArray<AVAudioSessionDataSourceDescription *> *)outputDataSources {
return self.session.outputDataSources;
}
- (AVAudioSessionDataSourceDescription *)outputDataSource {
return self.session.outputDataSource;
}
- (double)sampleRate {
return self.session.sampleRate;
}
- (double)preferredSampleRate {
return self.session.preferredSampleRate;
}
- (NSInteger)inputNumberOfChannels {
return self.session.inputNumberOfChannels;
}
- (NSInteger)outputNumberOfChannels {
return self.session.outputNumberOfChannels;
}
- (float)outputVolume {
return self.session.outputVolume;
}
- (NSTimeInterval)inputLatency {
return self.session.inputLatency;
}
- (NSTimeInterval)outputLatency {
return self.session.outputLatency;
}
- (NSTimeInterval)IOBufferDuration {
return self.session.IOBufferDuration;
}
- (NSTimeInterval)preferredIOBufferDuration {
return self.session.preferredIOBufferDuration;
}
// TODO(tkchin): Simplify the amount of locking happening here. Likely that we
// can just do atomic increments / decrements.
- (BOOL)setActive:(BOOL)active
error:(NSError **)outError {
if (![self checkLock:outError]) {
return NO;
}
int activationCount = _activationCount;
if (!active && activationCount == 0) {
RTCLogWarning(@"Attempting to deactivate without prior activation.");
}
[self notifyWillSetActive:active];
BOOL success = YES;
BOOL isActive = self.isActive;
// Keep a local error so we can log it.
NSError *error = nil;
BOOL shouldSetActive =
(active && !isActive) || (!active && isActive && activationCount == 1);
// Attempt to activate if we're not active.
// Attempt to deactivate if we're active and it's the last unbalanced call.
if (shouldSetActive) {
AVAudioSession *session = self.session;
// AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation is used to ensure
// that other audio sessions that were interrupted by our session can return
// to their active state. It is recommended for VoIP apps to use this
// option.
AVAudioSessionSetActiveOptions options =
active ? 0 : AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
success = [session setActive:active
withOptions:options
error:&error];
if (outError) {
*outError = error;
}
}
if (success) {
if (shouldSetActive) {
self.isActive = active;
}
if (active) {
[self incrementActivationCount];
}
[self notifyDidSetActive:active];
} else {
RTCLogError(@"Failed to setActive:%d. Error: %@",
active, error.localizedDescription);
[self notifyFailedToSetActive:active error:error];
}
// Decrement activation count on deactivation whether or not it succeeded.
if (!active) {
[self decrementActivationCount];
}
RTCLog(@"Number of current activations: %d", _activationCount);
return success;
}
- (BOOL)setCategory:(NSString *)category
withOptions:(AVAudioSessionCategoryOptions)options
error:(NSError **)outError {
if (![self checkLock:outError]) {
return NO;
}
return [self.session setCategory:category withOptions:options error:outError];
}
- (BOOL)setMode:(NSString *)mode error:(NSError **)outError {
if (![self checkLock:outError]) {
return NO;
}
return [self.session setMode:mode error:outError];
}
- (BOOL)setInputGain:(float)gain error:(NSError **)outError {
if (![self checkLock:outError]) {
return NO;
}
return [self.session setInputGain:gain error:outError];
}
- (BOOL)setPreferredSampleRate:(double)sampleRate error:(NSError **)outError {
if (![self checkLock:outError]) {
return NO;
}
return [self.session setPreferredSampleRate:sampleRate error:outError];
}
- (BOOL)setPreferredIOBufferDuration:(NSTimeInterval)duration
error:(NSError **)outError {
if (![self checkLock:outError]) {
return NO;
}
return [self.session setPreferredIOBufferDuration:duration error:outError];
}
- (BOOL)setPreferredInputNumberOfChannels:(NSInteger)count
error:(NSError **)outError {
if (![self checkLock:outError]) {
return NO;
}
return [self.session setPreferredInputNumberOfChannels:count error:outError];
}
- (BOOL)setPreferredOutputNumberOfChannels:(NSInteger)count
error:(NSError **)outError {
if (![self checkLock:outError]) {
return NO;
}
return [self.session setPreferredOutputNumberOfChannels:count error:outError];
}
- (BOOL)overrideOutputAudioPort:(AVAudioSessionPortOverride)portOverride
error:(NSError **)outError {
if (![self checkLock:outError]) {
return NO;
}
return [self.session overrideOutputAudioPort:portOverride error:outError];
}
- (BOOL)setPreferredInput:(AVAudioSessionPortDescription *)inPort
error:(NSError **)outError {
if (![self checkLock:outError]) {
return NO;
}
return [self.session setPreferredInput:inPort error:outError];
}
- (BOOL)setInputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
error:(NSError **)outError {
if (![self checkLock:outError]) {
return NO;
}
return [self.session setInputDataSource:dataSource error:outError];
}
- (BOOL)setOutputDataSource:(AVAudioSessionDataSourceDescription *)dataSource
error:(NSError **)outError {
if (![self checkLock:outError]) {
return NO;
}
return [self.session setOutputDataSource:dataSource error:outError];
}
#pragma mark - Notifications
- (void)handleInterruptionNotification:(NSNotification *)notification {
NSNumber* typeNumber =
notification.userInfo[AVAudioSessionInterruptionTypeKey];
AVAudioSessionInterruptionType type =
(AVAudioSessionInterruptionType)typeNumber.unsignedIntegerValue;
switch (type) {
case AVAudioSessionInterruptionTypeBegan:
RTCLog(@"Audio session interruption began.");
self.isActive = NO;
self.isInterrupted = YES;
[self notifyDidBeginInterruption];
break;
case AVAudioSessionInterruptionTypeEnded: {
RTCLog(@"Audio session interruption ended.");
self.isInterrupted = NO;
[self updateAudioSessionAfterEvent];
NSNumber *optionsNumber =
notification.userInfo[AVAudioSessionInterruptionOptionKey];
AVAudioSessionInterruptionOptions options =
optionsNumber.unsignedIntegerValue;
BOOL shouldResume =
options & AVAudioSessionInterruptionOptionShouldResume;
[self notifyDidEndInterruptionWithShouldResumeSession:shouldResume];
break;
}
}
}
- (void)handleRouteChangeNotification:(NSNotification *)notification {
// Get reason for current route change.
NSNumber* reasonNumber =
notification.userInfo[AVAudioSessionRouteChangeReasonKey];
AVAudioSessionRouteChangeReason reason =
(AVAudioSessionRouteChangeReason)reasonNumber.unsignedIntegerValue;
RTCLog(@"Audio route changed:");
switch (reason) {
case AVAudioSessionRouteChangeReasonUnknown:
RTCLog(@"Audio route changed: ReasonUnknown");
break;
case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
RTCLog(@"Audio route changed: NewDeviceAvailable");
break;
case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
RTCLog(@"Audio route changed: OldDeviceUnavailable");
break;
case AVAudioSessionRouteChangeReasonCategoryChange:
RTCLog(@"Audio route changed: CategoryChange to :%@",
self.session.category);
break;
case AVAudioSessionRouteChangeReasonOverride:
RTCLog(@"Audio route changed: Override");
break;
case AVAudioSessionRouteChangeReasonWakeFromSleep:
RTCLog(@"Audio route changed: WakeFromSleep");
break;
case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
RTCLog(@"Audio route changed: NoSuitableRouteForCategory");
break;
case AVAudioSessionRouteChangeReasonRouteConfigurationChange:
RTCLog(@"Audio route changed: RouteConfigurationChange");
break;
}
AVAudioSessionRouteDescription* previousRoute =
notification.userInfo[AVAudioSessionRouteChangePreviousRouteKey];
// Log previous route configuration.
RTCLog(@"Previous route: %@\nCurrent route:%@",
previousRoute, self.session.currentRoute);
[self notifyDidChangeRouteWithReason:reason previousRoute:previousRoute];
}
- (void)handleMediaServicesWereLost:(NSNotification *)notification {
RTCLog(@"Media services were lost.");
[self updateAudioSessionAfterEvent];
[self notifyMediaServicesWereLost];
}
- (void)handleMediaServicesWereReset:(NSNotification *)notification {
RTCLog(@"Media services were reset.");
[self updateAudioSessionAfterEvent];
[self notifyMediaServicesWereReset];
}
- (void)handleSilenceSecondaryAudioHintNotification:(NSNotification *)notification {
// TODO(henrika): just adding logs here for now until we know if we will ever
// see this notification and might be affected by it, or if further actions
// are required.
NSNumber *typeNumber =
notification.userInfo[AVAudioSessionSilenceSecondaryAudioHintTypeKey];
AVAudioSessionSilenceSecondaryAudioHintType type =
(AVAudioSessionSilenceSecondaryAudioHintType)typeNumber.unsignedIntegerValue;
switch (type) {
case AVAudioSessionSilenceSecondaryAudioHintTypeBegin:
RTCLog(@"Another application's primary audio has started.");
break;
case AVAudioSessionSilenceSecondaryAudioHintTypeEnd:
RTCLog(@"Another application's primary audio has stopped.");
break;
}
}
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
BOOL isInterrupted = self.isInterrupted;
RTCLog(@"Application became active after an interruption. Treating as interruption "
"end. isInterrupted changed from %d to 0.",
isInterrupted);
if (isInterrupted) {
self.isInterrupted = NO;
[self updateAudioSessionAfterEvent];
}
// Always treat application becoming active as an interruption end event.
[self notifyDidEndInterruptionWithShouldResumeSession:YES];
}
#pragma mark - Private
+ (NSError *)lockError {
NSDictionary *userInfo = @{
NSLocalizedDescriptionKey:
@"Must call lockForConfiguration before calling this method."
};
NSError *error =
[[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain
code:kRTCAudioSessionErrorLockRequired
userInfo:userInfo];
return error;
}
- (std::vector<__weak id<RTCAudioSessionDelegate> >)delegates {
@synchronized(self) {
// Note: this returns a copy.
return _delegates;
}
}
// TODO(tkchin): check for duplicates.
- (void)pushDelegate:(id<RTCAudioSessionDelegate>)delegate {
@synchronized(self) {
_delegates.insert(_delegates.begin(), delegate);
}
}
- (void)removeZeroedDelegates {
@synchronized(self) {
_delegates.erase(
std::remove_if(_delegates.begin(),
_delegates.end(),
[](id delegate) -> bool { return delegate == nil; }),
_delegates.end());
}
}
- (int)activationCount {
return _activationCount;
}
- (int)incrementActivationCount {
RTCLog(@"Incrementing activation count.");
return rtc::AtomicOps::Increment(&_activationCount);
}
- (NSInteger)decrementActivationCount {
RTCLog(@"Decrementing activation count.");
return rtc::AtomicOps::Decrement(&_activationCount);
}
- (int)webRTCSessionCount {
return _webRTCSessionCount;
}
- (BOOL)canPlayOrRecord {
return !self.useManualAudio || self.isAudioEnabled;
}
- (BOOL)isInterrupted {
@synchronized(self) {
return _isInterrupted;
}
}
- (void)setIsInterrupted:(BOOL)isInterrupted {
@synchronized(self) {
if (_isInterrupted == isInterrupted) {
return;
}
_isInterrupted = isInterrupted;
}
}
- (BOOL)checkLock:(NSError **)outError {
// Check the ivar instead of trying to acquire the lock, so that we won't
// accidentally acquire the lock if lockForConfiguration hasn't been called.
if (!self.isLocked) {
if (outError) {
*outError = [RTCAudioSession lockError];
}
return NO;
}
return YES;
}
- (BOOL)beginWebRTCSession:(NSError **)outError {
if (outError) {
*outError = nil;
}
if (![self checkLock:outError]) {
return NO;
}
rtc::AtomicOps::Increment(&_webRTCSessionCount);
[self notifyDidStartPlayOrRecord];
return YES;
}
- (BOOL)endWebRTCSession:(NSError **)outError {
if (outError) {
*outError = nil;
}
if (![self checkLock:outError]) {
return NO;
}
rtc::AtomicOps::Decrement(&_webRTCSessionCount);
[self notifyDidStopPlayOrRecord];
return YES;
}
- (BOOL)configureWebRTCSession:(NSError **)outError {
if (outError) {
*outError = nil;
}
if (![self checkLock:outError]) {
return NO;
}
RTCLog(@"Configuring audio session for WebRTC.");
// Configure the AVAudioSession and activate it.
// Provide an error even if there isn't one so we can log it.
NSError *error = nil;
RTCAudioSessionConfiguration *webRTCConfig =
[RTCAudioSessionConfiguration webRTCConfiguration];
if (![self setConfiguration:webRTCConfig active:YES error:&error]) {
RTCLogError(@"Failed to set WebRTC audio configuration: %@",
error.localizedDescription);
// Do not call setActive:NO if setActive:YES failed.
if (outError) {
*outError = error;
}
return NO;
}
// Ensure that the device currently supports audio input.
// TODO(tkchin): Figure out if this is really necessary.
if (!self.inputAvailable) {
RTCLogError(@"No audio input path is available!");
[self unconfigureWebRTCSession:nil];
if (outError) {
*outError = [self configurationErrorWithDescription:@"No input path."];
}
return NO;
}
// It can happen (e.g. in combination with BT devices) that the attempt to set
// the preferred sample rate for WebRTC (48kHz) fails. If so, make a new
// configuration attempt using the sample rate that worked using the active
// audio session. A typical case is that only 8 or 16kHz can be set, e.g. in
// combination with BT headsets. Using this "trick" seems to avoid a state
// where Core Audio asks for a different number of audio frames than what the
// session's I/O buffer duration corresponds to.
// TODO(henrika): this fix resolves bugs.webrtc.org/6004 but it has only been
// tested on a limited set of iOS devices and BT devices.
double sessionSampleRate = self.sampleRate;
double preferredSampleRate = webRTCConfig.sampleRate;
if (sessionSampleRate != preferredSampleRate) {
RTCLogWarning(
@"Current sample rate (%.2f) is not the preferred rate (%.2f)",
sessionSampleRate, preferredSampleRate);
if (![self setPreferredSampleRate:sessionSampleRate
error:&error]) {
RTCLogError(@"Failed to set preferred sample rate: %@",
error.localizedDescription);
if (outError) {
*outError = error;
}
}
}
return YES;
}
- (BOOL)unconfigureWebRTCSession:(NSError **)outError {
if (outError) {
*outError = nil;
}
if (![self checkLock:outError]) {
return NO;
}
RTCLog(@"Unconfiguring audio session for WebRTC.");
[self setActive:NO error:outError];
return YES;
}
- (NSError *)configurationErrorWithDescription:(NSString *)description {
NSDictionary* userInfo = @{
NSLocalizedDescriptionKey: description,
};
return [[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain
code:kRTCAudioSessionErrorConfiguration
userInfo:userInfo];
}
- (void)updateAudioSessionAfterEvent {
BOOL shouldActivate = self.activationCount > 0;
AVAudioSessionSetActiveOptions options = shouldActivate ?
0 : AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation;
NSError *error = nil;
if ([self.session setActive:shouldActivate
withOptions:options
error:&error]) {
self.isActive = shouldActivate;
} else {
RTCLogError(@"Failed to set session active to %d. Error:%@",
shouldActivate, error.localizedDescription);
}
}
- (void)updateCanPlayOrRecord {
BOOL canPlayOrRecord = NO;
BOOL shouldNotify = NO;
@synchronized(self) {
canPlayOrRecord = !self.useManualAudio || self.isAudioEnabled;
if (_canPlayOrRecord == canPlayOrRecord) {
return;
}
_canPlayOrRecord = canPlayOrRecord;
shouldNotify = YES;
}
if (shouldNotify) {
[self notifyDidChangeCanPlayOrRecord:canPlayOrRecord];
}
}
- (void)audioSessionDidActivate:(AVAudioSession *)session {
if (_session != session) {
RTCLogError(@"audioSessionDidActivate called on different AVAudioSession");
}
RTCLog(@"Audio session was externally activated.");
[self incrementActivationCount];
self.isActive = YES;
// When a CallKit call begins, it's possible that we receive an interruption
// begin without a corresponding end. Since we know that we have an activated
// audio session at this point, just clear any saved interruption flag since
// the app may never be foregrounded during the duration of the call.
if (self.isInterrupted) {
RTCLog(@"Clearing interrupted state due to external activation.");
self.isInterrupted = NO;
}
// Treat external audio session activation as an end interruption event.
[self notifyDidEndInterruptionWithShouldResumeSession:YES];
}
- (void)audioSessionDidDeactivate:(AVAudioSession *)session {
if (_session != session) {
RTCLogError(@"audioSessionDidDeactivate called on different AVAudioSession");
}
RTCLog(@"Audio session was externally deactivated.");
self.isActive = NO;
[self decrementActivationCount];
}
- (void)observeValueForKeyPath:(NSString *)keyPath
ofObject:(id)object
change:(NSDictionary *)change
context:(void *)context {
if (context == (__bridge void*)RTCAudioSession.class) {
if (object == _session) {
NSNumber *newVolume = change[NSKeyValueChangeNewKey];
RTCLog(@"OutputVolumeDidChange to %f", newVolume.floatValue);
[self notifyDidChangeOutputVolume:newVolume.floatValue];
}
} else {
[super observeValueForKeyPath:keyPath
ofObject:object
change:change
context:context];
}
}
- (void)notifyDidBeginInterruption {
for (auto delegate : self.delegates) {
SEL sel = @selector(audioSessionDidBeginInterruption:);
if ([delegate respondsToSelector:sel]) {
[delegate audioSessionDidBeginInterruption:self];
}
}
}
- (void)notifyDidEndInterruptionWithShouldResumeSession:
(BOOL)shouldResumeSession {
for (auto delegate : self.delegates) {
SEL sel = @selector(audioSessionDidEndInterruption:shouldResumeSession:);
if ([delegate respondsToSelector:sel]) {
[delegate audioSessionDidEndInterruption:self
shouldResumeSession:shouldResumeSession];
}
}
}
- (void)notifyDidChangeRouteWithReason:(AVAudioSessionRouteChangeReason)reason
previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
for (auto delegate : self.delegates) {
SEL sel = @selector(audioSessionDidChangeRoute:reason:previousRoute:);
if ([delegate respondsToSelector:sel]) {
[delegate audioSessionDidChangeRoute:self
reason:reason
previousRoute:previousRoute];
}
}
}
- (void)notifyMediaServicesWereLost {
for (auto delegate : self.delegates) {
SEL sel = @selector(audioSessionMediaServerTerminated:);
if ([delegate respondsToSelector:sel]) {
[delegate audioSessionMediaServerTerminated:self];
}
}
}
- (void)notifyMediaServicesWereReset {
for (auto delegate : self.delegates) {
SEL sel = @selector(audioSessionMediaServerReset:);
if ([delegate respondsToSelector:sel]) {
[delegate audioSessionMediaServerReset:self];
}
}
}
- (void)notifyDidChangeCanPlayOrRecord:(BOOL)canPlayOrRecord {
for (auto delegate : self.delegates) {
SEL sel = @selector(audioSession:didChangeCanPlayOrRecord:);
if ([delegate respondsToSelector:sel]) {
[delegate audioSession:self didChangeCanPlayOrRecord:canPlayOrRecord];
}
}
}
- (void)notifyDidStartPlayOrRecord {
for (auto delegate : self.delegates) {
SEL sel = @selector(audioSessionDidStartPlayOrRecord:);
if ([delegate respondsToSelector:sel]) {
[delegate audioSessionDidStartPlayOrRecord:self];
}
}
}
- (void)notifyDidStopPlayOrRecord {
for (auto delegate : self.delegates) {
SEL sel = @selector(audioSessionDidStopPlayOrRecord:);
if ([delegate respondsToSelector:sel]) {
[delegate audioSessionDidStopPlayOrRecord:self];
}
}
}
- (void)notifyDidChangeOutputVolume:(float)volume {
for (auto delegate : self.delegates) {
SEL sel = @selector(audioSession:didChangeOutputVolume:);
if ([delegate respondsToSelector:sel]) {
[delegate audioSession:self didChangeOutputVolume:volume];
}
}
}
- (void)notifyDidDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches {
for (auto delegate : self.delegates) {
SEL sel = @selector(audioSession:didDetectPlayoutGlitch:);
if ([delegate respondsToSelector:sel]) {
[delegate audioSession:self didDetectPlayoutGlitch:totalNumberOfGlitches];
}
}
}
- (void)notifyWillSetActive:(BOOL)active {
for (id delegate : self.delegates) {
SEL sel = @selector(audioSession:willSetActive:);
if ([delegate respondsToSelector:sel]) {
[delegate audioSession:self willSetActive:active];
}
}
}
- (void)notifyDidSetActive:(BOOL)active {
for (id delegate : self.delegates) {
SEL sel = @selector(audioSession:didSetActive:);
if ([delegate respondsToSelector:sel]) {
[delegate audioSession:self didSetActive:active];
}
}
}
- (void)notifyFailedToSetActive:(BOOL)active error:(NSError *)error {
for (id delegate : self.delegates) {
SEL sel = @selector(audioSession:failedToSetActive:error:);
if ([delegate respondsToSelector:sel]) {
[delegate audioSession:self failedToSetActive:active error:error];
}
}
}
@end
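
The notify* fan-out above only invokes the callbacks a delegate actually
implements (checked with respondsToSelector:, which suggests the protocol
methods are optional), so observers can stay minimal. A hypothetical observer;
note the session stores delegates weakly, so the caller must keep a strong
reference:

@interface MyAudioSessionObserver : NSObject <RTCAudioSessionDelegate>
@end

@implementation MyAudioSessionObserver
- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session {
  RTCLog(@"Interruption began.");
}
- (void)audioSession:(RTCAudioSession *)session
    didChangeOutputVolume:(float)outputVolume {
  RTCLog(@"Output volume changed to %f.", outputVolume);
}
@end

// Registration: [[RTCAudioSession sharedInstance] addDelegate:observer];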

@@ -1,134 +0,0 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCAudioSession.h"
#import "WebRTC/RTCAudioSessionConfiguration.h"
#import "WebRTC/RTCDispatcher.h"
#import "WebRTC/UIDevice+RTCDevice.h"
// Try to use mono to save resources. Also avoids channel format conversion
// in the I/O audio unit. Initial tests have shown that it is possible to use
// mono natively for built-in microphones and for BT headsets but not for
// wired headsets. Wired headsets only support stereo as native channel format
// but it is a low cost operation to do a format conversion to mono in the
// audio unit. Hence, we will not hit an RTC_CHECK in
// VerifyAudioParametersForActiveAudioSession() for a mismatch between the
// preferred number of channels and the actual number of channels.
const int kRTCAudioSessionPreferredNumberOfChannels = 1;
// Preferred hardware sample rate (unit is in Hertz). The client sample rate
// will be set to this value as well to avoid resampling in the audio unit's
// format converter. Note that some devices, e.g. BT headsets, only support
// 8000Hz as their native sample rate.
const double kRTCAudioSessionHighPerformanceSampleRate = 48000.0;
// A lower sample rate will be used for devices with only one core
// (e.g. iPhone 4). The goal is to reduce the CPU load of the application.
const double kRTCAudioSessionLowComplexitySampleRate = 16000.0;
// Use a hardware I/O buffer size (unit is in seconds) that matches the 10ms
// size used by WebRTC. The actual size will differ between devices.
// Example: using 48kHz on iPhone 6 results in a native buffer size of
// ~10.6667ms or 512 audio frames per buffer. The FineAudioBuffer instance will
// take care of any buffering required to convert between native buffers and
// buffers used by WebRTC. It is beneficial for performance if the native
// size is as close to an even multiple of 10ms as possible, since it results
// in a "clean" callback sequence without bursts of back-to-back callbacks.
const double kRTCAudioSessionHighPerformanceIOBufferDuration = 0.02;
// Use a larger buffer size on devices with only one core (e.g. iPhone 4).
// It will result in a lower CPU consumption at the cost of a larger latency.
// The size of 60ms is based on instrumentation that shows a significant
// reduction in CPU load compared with 10ms on low-end devices.
// TODO(henrika): monitor this size and determine if it should be modified.
const double kRTCAudioSessionLowComplexityIOBufferDuration = 0.06;
static RTCAudioSessionConfiguration *gWebRTCConfiguration = nil;
@implementation RTCAudioSessionConfiguration
@synthesize category = _category;
@synthesize categoryOptions = _categoryOptions;
@synthesize mode = _mode;
@synthesize sampleRate = _sampleRate;
@synthesize ioBufferDuration = _ioBufferDuration;
@synthesize inputNumberOfChannels = _inputNumberOfChannels;
@synthesize outputNumberOfChannels = _outputNumberOfChannels;
- (instancetype)init {
if (self = [super init]) {
// Use a category which supports simultaneous recording and playback.
// By default, using this category implies that our app's audio is
// nonmixable, hence activating the session will interrupt any other
// audio sessions which are also nonmixable.
_category = AVAudioSessionCategoryPlayAndRecord;
_categoryOptions = AVAudioSessionCategoryOptionAllowBluetooth;
// Specify mode for two-way voice communication (e.g. VoIP).
_mode = AVAudioSessionModeVoiceChat;
// Set the session's sample rate or the hardware sample rate.
// It is essential that we use the same sample rate as the stream format
// to ensure that the I/O unit does not have to do sample rate conversion.
// Set the preferred audio I/O buffer duration, in seconds.
NSUInteger processorCount = [NSProcessInfo processInfo].processorCount;
// Use best sample rate and buffer duration if the CPU has more than one
// core.
if (processorCount > 1 && [UIDevice deviceType] != RTCDeviceTypeIPhone4S) {
_sampleRate = kRTCAudioSessionHighPerformanceSampleRate;
_ioBufferDuration = kRTCAudioSessionHighPerformanceIOBufferDuration;
} else {
_sampleRate = kRTCAudioSessionLowComplexitySampleRate;
_ioBufferDuration = kRTCAudioSessionLowComplexityIOBufferDuration;
}
// We try to use mono in both directions to save resources and format
// conversions in the audio unit. Some devices only support stereo;
// e.g. a wired headset on an iPhone 6.
// TODO(henrika): add support for stereo if needed.
_inputNumberOfChannels = kRTCAudioSessionPreferredNumberOfChannels;
_outputNumberOfChannels = kRTCAudioSessionPreferredNumberOfChannels;
}
return self;
}
+ (void)initialize {
gWebRTCConfiguration = [[self alloc] init];
}
+ (instancetype)currentConfiguration {
RTCAudioSession *session = [RTCAudioSession sharedInstance];
RTCAudioSessionConfiguration *config =
[[RTCAudioSessionConfiguration alloc] init];
config.category = session.category;
config.categoryOptions = session.categoryOptions;
config.mode = session.mode;
config.sampleRate = session.sampleRate;
config.ioBufferDuration = session.IOBufferDuration;
config.inputNumberOfChannels = session.inputNumberOfChannels;
config.outputNumberOfChannels = session.outputNumberOfChannels;
return config;
}
+ (instancetype)webRTCConfiguration {
@synchronized(self) {
return (RTCAudioSessionConfiguration *)gWebRTCConfiguration;
}
}
+ (void)setWebRTCConfiguration:(RTCAudioSessionConfiguration *)configuration {
@synchronized(self) {
gWebRTCConfiguration = configuration;
}
}
@end
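
Clients can swap in their own process-wide configuration before the session is
configured for WebRTC; a sketch (the speaker option is an illustrative tweak):

RTCAudioSessionConfiguration *config =
    [[RTCAudioSessionConfiguration alloc] init];
// Start from the defaults set in -init, additionally routing to the speaker.
config.categoryOptions |= AVAudioSessionCategoryOptionDefaultToSpeaker;
[RTCAudioSessionConfiguration setWebRTCConfiguration:config];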

@@ -1,33 +0,0 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "sdk/objc/Framework/Headers/WebRTC/RTCAudioSession.h"
NS_ASSUME_NONNULL_BEGIN
namespace webrtc {
class AudioSessionObserver;
}
/** Adapter that forwards RTCAudioSessionDelegate calls to the appropriate
* methods on the AudioSessionObserver.
*/
@interface RTCNativeAudioSessionDelegateAdapter : NSObject <RTCAudioSessionDelegate>
- (instancetype)init NS_UNAVAILABLE;
/** |observer| is a raw pointer and should be kept alive
* for this object's lifetime.
*/
- (instancetype)initWithObserver:(webrtc::AudioSessionObserver *)observer NS_DESIGNATED_INITIALIZER;
@end
NS_ASSUME_NONNULL_END

@@ -1,89 +0,0 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCNativeAudioSessionDelegateAdapter.h"
#include "sdk/objc/Framework/Native/src/audio/audio_session_observer.h"
#import "WebRTC/RTCLogging.h"
@implementation RTCNativeAudioSessionDelegateAdapter {
webrtc::AudioSessionObserver *_observer;
}
- (instancetype)initWithObserver:(webrtc::AudioSessionObserver *)observer {
RTC_DCHECK(observer);
if (self = [super init]) {
_observer = observer;
}
return self;
}
#pragma mark - RTCAudioSessionDelegate
- (void)audioSessionDidBeginInterruption:(RTCAudioSession *)session {
_observer->OnInterruptionBegin();
}
- (void)audioSessionDidEndInterruption:(RTCAudioSession *)session
shouldResumeSession:(BOOL)shouldResumeSession {
_observer->OnInterruptionEnd();
}
- (void)audioSessionDidChangeRoute:(RTCAudioSession *)session
reason:(AVAudioSessionRouteChangeReason)reason
previousRoute:(AVAudioSessionRouteDescription *)previousRoute {
switch (reason) {
case AVAudioSessionRouteChangeReasonUnknown:
case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
case AVAudioSessionRouteChangeReasonCategoryChange:
// It turns out that we see a category change (at least in iOS 9.2)
// when making a switch from a BT device to e.g. Speaker using the
// iOS Control Center and that we therefore must check if the sample
// rate has changed. And if so is the case, restart the audio unit.
case AVAudioSessionRouteChangeReasonOverride:
case AVAudioSessionRouteChangeReasonWakeFromSleep:
case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
_observer->OnValidRouteChange();
break;
case AVAudioSessionRouteChangeReasonRouteConfigurationChange:
// The set of input and output ports has not changed, but their
// configuration has, e.g., a port’s selected data source has
// changed. Ignore this type of route change since we are focusing
// on detecting headset changes.
RTCLog(@"Ignoring RouteConfigurationChange");
break;
}
}
- (void)audioSessionMediaServerTerminated:(RTCAudioSession *)session {
}
- (void)audioSessionMediaServerReset:(RTCAudioSession *)session {
}
- (void)audioSession:(RTCAudioSession *)session
didChangeCanPlayOrRecord:(BOOL)canPlayOrRecord {
_observer->OnCanPlayOrRecordChange(canPlayOrRecord);
}
- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session {
}
- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session {
}
- (void)audioSession:(RTCAudioSession *)audioSession
didChangeOutputVolume:(float)outputVolume {
_observer->OnChangedOutputVolume();
}
@end
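
A sketch of how the adapter is wired up. GetNativeAudioSessionObserver() is a
hypothetical accessor for a long-lived webrtc::AudioSessionObserver, per the
lifetime note in the header:

// Hypothetical accessor; the observer must outlive the adapter.
webrtc::AudioSessionObserver *observer = GetNativeAudioSessionObserver();
RTCNativeAudioSessionDelegateAdapter *adapter =
    [[RTCNativeAudioSessionDelegateAdapter alloc] initWithObserver:observer];
[[RTCAudioSession sharedInstance] addDelegate:adapter];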

@@ -8,19 +8,4 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#import <Foundation/Foundation.h>
-#include <string>
-NS_ASSUME_NONNULL_BEGIN
-@interface NSString (StdString)
-@property(nonatomic, readonly) std::string stdString;
-+ (std::string)stdStringForString:(NSString *)nsString;
-+ (NSString *)stringForStdString:(const std::string &)stdString;
-@end
-NS_ASSUME_NONNULL_END
+#import "helpers/NSString+StdString.h"

@@ -1,33 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "NSString+StdString.h"
@implementation NSString (StdString)
- (std::string)stdString {
return [NSString stdStringForString:self];
}
+ (std::string)stdStringForString:(NSString *)nsString {
NSData *charData = [nsString dataUsingEncoding:NSUTF8StringEncoding];
return std::string(reinterpret_cast<const char *>(charData.bytes),
charData.length);
}
+ (NSString *)stringForStdString:(const std::string&)stdString {
// std::string may contain a null termination character, so we construct
// using the length.
return [[NSString alloc] initWithBytes:stdString.data()
length:stdString.length()
encoding:NSUTF8StringEncoding];
}
@end
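
Round-trip usage of the category (sketch):

#import "NSString+StdString.h"

std::string cppString = [@"WebRTC" stdString];
NSString *backAgain = [NSString stringForStdString:cppString];
// Both directions convert via UTF-8; the NSString is built from the
// std::string's data and length, so embedded NUL bytes survive.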

@@ -1,92 +0,0 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCCallbackLogger.h"
#include <memory>
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/logsinks.h"
class CallbackLogSink : public rtc::LogSink {
public:
CallbackLogSink(void (^callbackHandler)(NSString *message)) {
callback_handler_ = callbackHandler;
}
~CallbackLogSink() override { callback_handler_ = nil; }
void OnLogMessage(const std::string &message) override {
if (callback_handler_) {
callback_handler_([NSString stringWithUTF8String:message.c_str()]);
}
}
private:
void (^callback_handler_)(NSString *message);
};
@implementation RTCCallbackLogger {
BOOL _hasStarted;
std::unique_ptr<CallbackLogSink> _logSink;
}
@synthesize severity = _severity;
- (instancetype)init {
self = [super init];
if (self != nil) {
_severity = RTCLoggingSeverityInfo;
}
return self;
}
- (void)dealloc {
[self stop];
}
- (void)start:(nullable void (^)(NSString *))callback {
if (_hasStarted) {
return;
}
_logSink.reset(new CallbackLogSink(callback));
rtc::LogMessage::AddLogToStream(_logSink.get(), [self rtcSeverity]);
_hasStarted = YES;
}
- (void)stop {
if (!_hasStarted) {
return;
}
RTC_DCHECK(_logSink);
rtc::LogMessage::RemoveLogToStream(_logSink.get());
_hasStarted = NO;
_logSink.reset();
}
#pragma mark - Private
- (rtc::LoggingSeverity)rtcSeverity {
switch (_severity) {
case RTCLoggingSeverityVerbose:
return rtc::LS_VERBOSE;
case RTCLoggingSeverityInfo:
return rtc::LS_INFO;
case RTCLoggingSeverityWarning:
return rtc::LS_WARNING;
case RTCLoggingSeverityError:
return rtc::LS_ERROR;
}
}
@end
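
Typical use of the callback logger (sketch):

#import "WebRTC/RTCCallbackLogger.h"

RTCCallbackLogger *logger = [[RTCCallbackLogger alloc] init];
logger.severity = RTCLoggingSeverityWarning;
[logger start:^(NSString *message) {
  // Bridge WebRTC's native log stream into the app's own logging.
  NSLog(@"[webrtc] %@", message);
}];
// ... later; stop is also invoked from dealloc.
[logger stop];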

@@ -1,17 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCDispatcher.h"
@interface RTCDispatcher ()
+ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType;
@end

@@ -1,61 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCDispatcher+Private.h"
static dispatch_queue_t kAudioSessionQueue = nil;
static dispatch_queue_t kCaptureSessionQueue = nil;
@implementation RTCDispatcher
+ (void)initialize {
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
kAudioSessionQueue = dispatch_queue_create(
"org.webrtc.RTCDispatcherAudioSession",
DISPATCH_QUEUE_SERIAL);
kCaptureSessionQueue = dispatch_queue_create(
"org.webrtc.RTCDispatcherCaptureSession",
DISPATCH_QUEUE_SERIAL);
});
}
+ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType
block:(dispatch_block_t)block {
dispatch_queue_t queue = [self dispatchQueueForType:dispatchType];
dispatch_async(queue, block);
}
+ (BOOL)isOnQueueForType:(RTCDispatcherQueueType)dispatchType {
dispatch_queue_t targetQueue = [self dispatchQueueForType:dispatchType];
const char* targetLabel = dispatch_queue_get_label(targetQueue);
const char* currentLabel = dispatch_queue_get_label(DISPATCH_CURRENT_QUEUE_LABEL);
NSAssert(strlen(targetLabel) > 0, @"Label is required for the target queue.");
NSAssert(strlen(currentLabel) > 0, @"Label is required for the current queue.");
return strcmp(targetLabel, currentLabel) == 0;
}
#pragma mark - Private
+ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType {
switch (dispatchType) {
case RTCDispatcherTypeMain:
return dispatch_get_main_queue();
case RTCDispatcherTypeCaptureSession:
return kCaptureSessionQueue;
case RTCDispatcherTypeAudioSession:
return kAudioSessionQueue;
}
}
@end
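
Usage sketch; blocks are serialized on the queue matching the dispatch type:

#import "WebRTC/RTCDispatcher.h"

[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeAudioSession
                             block:^{
                               // Runs on org.webrtc.RTCDispatcherAudioSession.
                             }];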

@@ -1,72 +0,0 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCFieldTrials.h"
#include <memory>
#import "WebRTC/RTCLogging.h"
// Adding 'nogncheck' to disable the gn include headers check.
// We don't want to depend on 'system_wrappers:field_trial_default' because
// clients should be able to provide their own implementation.
#include "system_wrappers/include/field_trial_default.h" // nogncheck
NSString * const kRTCFieldTrialAudioSendSideBweKey = @"WebRTC-Audio-SendSideBwe";
NSString * const kRTCFieldTrialAudioSendSideBweForVideoKey = @"WebRTC-Audio-SendSideBwe-For-Video";
NSString * const kRTCFieldTrialAudioForceNoTWCCKey = @"WebRTC-Audio-ForceNoTWCC";
NSString * const kRTCFieldTrialAudioForceABWENoTWCCKey = @"WebRTC-Audio-ABWENoTWCC";
NSString * const kRTCFieldTrialSendSideBweWithOverheadKey = @"WebRTC-SendSideBwe-WithOverhead";
NSString * const kRTCFieldTrialFlexFec03AdvertisedKey = @"WebRTC-FlexFEC-03-Advertised";
NSString * const kRTCFieldTrialFlexFec03Key = @"WebRTC-FlexFEC-03";
NSString * const kRTCFieldTrialImprovedBitrateEstimateKey = @"WebRTC-ImprovedBitrateEstimate";
NSString * const kRTCFieldTrialMedianSlopeFilterKey = @"WebRTC-BweMedianSlopeFilter";
NSString * const kRTCFieldTrialTrendlineFilterKey = @"WebRTC-BweTrendlineFilter";
NSString * const kRTCFieldTrialH264HighProfileKey = @"WebRTC-H264HighProfile";
NSString * const kRTCFieldTrialMinimizeResamplingOnMobileKey =
@"WebRTC-Audio-MinimizeResamplingOnMobile";
NSString * const kRTCFieldTrialEnabledValue = @"Enabled";
static std::unique_ptr<char[]> gFieldTrialInitString;
NSString *RTCFieldTrialMedianSlopeFilterValue(
size_t windowSize, double thresholdGain) {
NSString *format = @"Enabled-%zu,%lf";
return [NSString stringWithFormat:format, windowSize, thresholdGain];
}
NSString *RTCFieldTrialTrendlineFilterValue(
size_t windowSize, double smoothingCoeff, double thresholdGain) {
NSString *format = @"Enabled-%zu,%lf,%lf";
return [NSString stringWithFormat:format, windowSize, smoothingCoeff, thresholdGain];
}
void RTCInitFieldTrialDictionary(NSDictionary<NSString *, NSString *> *fieldTrials) {
if (!fieldTrials) {
RTCLogWarning(@"No fieldTrials provided.");
return;
}
// Assemble the keys and values into the field trial string.
// We don't perform any extra format checking. That should be done by the underlying WebRTC calls.
NSMutableString *fieldTrialInitString = [NSMutableString string];
for (NSString *key in fieldTrials) {
NSString *fieldTrialEntry = [NSString stringWithFormat:@"%@/%@/", key, fieldTrials[key]];
[fieldTrialInitString appendString:fieldTrialEntry];
}
size_t len = fieldTrialInitString.length + 1;
gFieldTrialInitString.reset(new char[len]);
if (![fieldTrialInitString getCString:gFieldTrialInitString.get()
maxLength:len
encoding:NSUTF8StringEncoding]) {
RTCLogError(@"Failed to convert field trial string.");
return;
}
webrtc::field_trial::InitFieldTrialsFromString(gFieldTrialInitString.get());
}
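
Each dictionary entry is flattened into the "key/value/" format consumed by
webrtc::field_trial::InitFieldTrialsFromString; usage sketch:

RTCInitFieldTrialDictionary(@{
  kRTCFieldTrialH264HighProfileKey : kRTCFieldTrialEnabledValue,
});
// Produces the init string "WebRTC-H264HighProfile/Enabled/".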

@@ -1,175 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCFileLogger.h"
#include <memory>
#include "rtc_base/checks.h"
#include "rtc_base/filerotatingstream.h"
#include "rtc_base/logging.h"
#include "rtc_base/logsinks.h"
NSString *const kDefaultLogDirName = @"webrtc_logs";
NSUInteger const kDefaultMaxFileSize = 10 * 1024 * 1024; // 10MB.
const char *kRTCFileLoggerRotatingLogPrefix = "rotating_log";
@implementation RTCFileLogger {
BOOL _hasStarted;
NSString *_dirPath;
NSUInteger _maxFileSize;
std::unique_ptr<rtc::FileRotatingLogSink> _logSink;
}
@synthesize severity = _severity;
@synthesize rotationType = _rotationType;
@synthesize shouldDisableBuffering = _shouldDisableBuffering;
- (instancetype)init {
NSArray *paths = NSSearchPathForDirectoriesInDomains(
NSDocumentDirectory, NSUserDomainMask, YES);
NSString *documentsDirPath = [paths firstObject];
NSString *defaultDirPath =
[documentsDirPath stringByAppendingPathComponent:kDefaultLogDirName];
return [self initWithDirPath:defaultDirPath
maxFileSize:kDefaultMaxFileSize];
}
- (instancetype)initWithDirPath:(NSString *)dirPath
maxFileSize:(NSUInteger)maxFileSize {
return [self initWithDirPath:dirPath
maxFileSize:maxFileSize
rotationType:RTCFileLoggerTypeCall];
}
- (instancetype)initWithDirPath:(NSString *)dirPath
maxFileSize:(NSUInteger)maxFileSize
rotationType:(RTCFileLoggerRotationType)rotationType {
NSParameterAssert(dirPath.length);
NSParameterAssert(maxFileSize);
if (self = [super init]) {
BOOL isDir = NO;
NSFileManager *fileManager = [NSFileManager defaultManager];
if ([fileManager fileExistsAtPath:dirPath isDirectory:&isDir]) {
if (!isDir) {
// Bail if something already exists there.
return nil;
}
} else {
if (![fileManager createDirectoryAtPath:dirPath
withIntermediateDirectories:NO
attributes:nil
error:nil]) {
// Bail if we failed to create a directory.
return nil;
}
}
_dirPath = dirPath;
_maxFileSize = maxFileSize;
_severity = RTCFileLoggerSeverityInfo;
}
return self;
}
- (void)dealloc {
[self stop];
}
- (void)start {
if (_hasStarted) {
return;
}
switch (_rotationType) {
case RTCFileLoggerTypeApp:
_logSink.reset(
new rtc::FileRotatingLogSink(_dirPath.UTF8String,
kRTCFileLoggerRotatingLogPrefix,
_maxFileSize,
_maxFileSize / 10));
break;
case RTCFileLoggerTypeCall:
_logSink.reset(
new rtc::CallSessionFileRotatingLogSink(_dirPath.UTF8String,
_maxFileSize));
break;
}
if (!_logSink->Init()) {
RTC_LOG(LS_ERROR) << "Failed to open log files at path: " << _dirPath.UTF8String;
_logSink.reset();
return;
}
if (_shouldDisableBuffering) {
_logSink->DisableBuffering();
}
rtc::LogMessage::LogThreads(true);
rtc::LogMessage::LogTimestamps(true);
rtc::LogMessage::AddLogToStream(_logSink.get(), [self rtcSeverity]);
_hasStarted = YES;
}
- (void)stop {
if (!_hasStarted) {
return;
}
RTC_DCHECK(_logSink);
rtc::LogMessage::RemoveLogToStream(_logSink.get());
_hasStarted = NO;
_logSink.reset();
}
- (nullable NSData *)logData {
if (_hasStarted) {
return nil;
}
NSMutableData* logData = [NSMutableData data];
std::unique_ptr<rtc::FileRotatingStream> stream;
switch(_rotationType) {
case RTCFileLoggerTypeApp:
stream.reset(
new rtc::FileRotatingStream(_dirPath.UTF8String,
kRTCFileLoggerRotatingLogPrefix));
break;
case RTCFileLoggerTypeCall:
stream.reset(new rtc::CallSessionFileRotatingStream(_dirPath.UTF8String));
break;
}
if (!stream->Open()) {
return logData;
}
size_t bufferSize = 0;
if (!stream->GetSize(&bufferSize) || bufferSize == 0) {
return logData;
}
size_t read = 0;
  // Allocate memory using malloc so we can pass it directly to NSData without
  // copying.
std::unique_ptr<uint8_t[]> buffer(static_cast<uint8_t*>(malloc(bufferSize)));
stream->ReadAll(buffer.get(), bufferSize, &read, nullptr);
logData = [[NSMutableData alloc] initWithBytesNoCopy:buffer.release()
length:read];
return logData;
}
#pragma mark - Private
- (rtc::LoggingSeverity)rtcSeverity {
switch (_severity) {
case RTCFileLoggerSeverityVerbose:
return rtc::LS_VERBOSE;
case RTCFileLoggerSeverityInfo:
return rtc::LS_INFO;
case RTCFileLoggerSeverityWarning:
return rtc::LS_WARNING;
case RTCFileLoggerSeverityError:
return rtc::LS_ERROR;
}
}
@end
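A minimal usage sketch of the logger above (illustrative; assumes #import "WebRTC/RTCFileLogger.h"). Note that logData returns nil until the logger has been stopped:

RTCFileLogger *logger = [[RTCFileLogger alloc] init];  // Logs under Documents/webrtc_logs.
logger.severity = RTCFileLoggerSeverityWarning;
[logger start];
// ... place calls ...
[logger stop];
NSData *logs = [logger logData];  // nil while the logger is still running.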

View File

@ -1,47 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCLogging.h"
#include "rtc_base/logging.h"
rtc::LoggingSeverity RTCGetNativeLoggingSeverity(RTCLoggingSeverity severity) {
switch (severity) {
case RTCLoggingSeverityVerbose:
return rtc::LS_VERBOSE;
case RTCLoggingSeverityInfo:
return rtc::LS_INFO;
case RTCLoggingSeverityWarning:
return rtc::LS_WARNING;
case RTCLoggingSeverityError:
return rtc::LS_ERROR;
}
}
void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string) {
if (log_string.length) {
const char* utf8_string = log_string.UTF8String;
RTC_LOG_V(RTCGetNativeLoggingSeverity(severity)) << utf8_string;
}
}
void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity) {
rtc::LogMessage::LogToDebug(RTCGetNativeLoggingSeverity(severity));
}
NSString* RTCFileName(const char* file_path) {
NSString* ns_file_path =
[[NSString alloc] initWithBytesNoCopy:const_cast<char*>(file_path)
length:strlen(file_path)
encoding:NSUTF8StringEncoding
freeWhenDone:NO];
return ns_file_path.lastPathComponent;
}
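A short sketch of how these helpers compose (illustrative; assumes #import "WebRTC/RTCLogging.h"):

RTCSetMinDebugLogLevel(RTCLoggingSeverityInfo);
RTCLogEx(RTCLoggingSeverityWarning, @"Metal renderer unavailable, falling back.");
NSString *name = RTCFileName(__FILE__);  // e.g. @"RTCMTLVideoView.mm"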

View File

@ -8,18 +8,4 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#if defined(WEBRTC_IOS)
#import <Foundation/Foundation.h>
NS_EXTENSION_UNAVAILABLE_IOS("Application status not available in app extensions.")
@interface RTCUIApplicationStatusObserver : NSObject
+ (instancetype)sharedInstance;
+ (void)prepareForUse;
- (BOOL)isApplicationActive;
@end
#endif // WEBRTC_IOS
#import "helpers/RTCUIApplicationStatusObserver.h"

View File

@ -1,114 +0,0 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "RTCUIApplicationStatusObserver.h"
#if defined(WEBRTC_IOS)
#import <UIKit/UIKit.h>
#include "rtc_base/checks.h"
@interface RTCUIApplicationStatusObserver ()
@property(nonatomic, assign) BOOL initialized;
@property(nonatomic, assign) UIApplicationState state;
@end
@implementation RTCUIApplicationStatusObserver {
BOOL _initialized;
dispatch_block_t _initializeBlock;
dispatch_semaphore_t _waitForInitializeSemaphore;
UIApplicationState _state;
id<NSObject> _activeObserver;
id<NSObject> _backgroundObserver;
}
@synthesize initialized = _initialized;
@synthesize state = _state;
+ (instancetype)sharedInstance {
static id sharedInstance;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
sharedInstance = [[self alloc] init];
});
return sharedInstance;
}
// Method to make sure observers are added and the initialization block is
// scheduled to run on the main queue.
+ (void)prepareForUse {
__unused RTCUIApplicationStatusObserver *observer = [self sharedInstance];
}
- (id)init {
if (self = [super init]) {
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
__weak RTCUIApplicationStatusObserver *weakSelf = self;
_activeObserver = [center addObserverForName:UIApplicationDidBecomeActiveNotification
object:nil
queue:[NSOperationQueue mainQueue]
usingBlock:^(NSNotification *note) {
weakSelf.state =
[UIApplication sharedApplication].applicationState;
}];
_backgroundObserver = [center addObserverForName:UIApplicationDidEnterBackgroundNotification
object:nil
queue:[NSOperationQueue mainQueue]
usingBlock:^(NSNotification *note) {
weakSelf.state =
[UIApplication sharedApplication].applicationState;
}];
_waitForInitializeSemaphore = dispatch_semaphore_create(1);
_initialized = NO;
_initializeBlock = dispatch_block_create(DISPATCH_BLOCK_INHERIT_QOS_CLASS, ^{
weakSelf.state = [UIApplication sharedApplication].applicationState;
weakSelf.initialized = YES;
});
dispatch_async(dispatch_get_main_queue(), _initializeBlock);
}
return self;
}
- (void)dealloc {
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
[center removeObserver:_activeObserver];
[center removeObserver:_backgroundObserver];
}
- (BOOL)isApplicationActive {
  // NOTE: The function `dispatch_block_wait` can only legally be called once.
  // Because of this, if several threads call `isApplicationActive` before
  // `_initializeBlock` has executed, only the first may call
  // `dispatch_block_wait`; the remaining threads block on the semaphore until
  // that first waiter finishes.
if (!_initialized) {
dispatch_semaphore_wait(_waitForInitializeSemaphore, DISPATCH_TIME_FOREVER);
if (!_initialized) {
long ret = dispatch_block_wait(_initializeBlock,
dispatch_time(DISPATCH_TIME_NOW, 10.0 * NSEC_PER_SEC));
RTC_DCHECK_EQ(ret, 0);
}
dispatch_semaphore_signal(_waitForInitializeSemaphore);
}
return _state == UIApplicationStateActive;
}
@end
#endif // WEBRTC_IOS
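Sketch of the intended call pattern, based on the comments above: prepareForUse runs on the main thread early on, so that later queries from worker threads do not block on the main queue.

// On the main thread, e.g. during SDK initialization:
[RTCUIApplicationStatusObserver prepareForUse];
// Later, possibly from a capture or decoder thread:
if ([[RTCUIApplicationStatusObserver sharedInstance] isApplicationActive]) {
  // Safe to touch the camera / GPU.
}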

View File

@ -1,120 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/UIDevice+RTCDevice.h"
#include <memory>
#import <sys/utsname.h>
@implementation UIDevice (RTCDevice)
+ (RTCDeviceType)deviceType {
NSDictionary *machineNameToType = @{
@"iPhone1,1": @(RTCDeviceTypeIPhone1G),
@"iPhone1,2": @(RTCDeviceTypeIPhone3G),
@"iPhone2,1": @(RTCDeviceTypeIPhone3GS),
@"iPhone3,1": @(RTCDeviceTypeIPhone4),
@"iPhone3,2": @(RTCDeviceTypeIPhone4),
@"iPhone3,3": @(RTCDeviceTypeIPhone4Verizon),
@"iPhone4,1": @(RTCDeviceTypeIPhone4S),
@"iPhone5,1": @(RTCDeviceTypeIPhone5GSM),
@"iPhone5,2": @(RTCDeviceTypeIPhone5GSM_CDMA),
@"iPhone5,3": @(RTCDeviceTypeIPhone5CGSM),
@"iPhone5,4": @(RTCDeviceTypeIPhone5CGSM_CDMA),
@"iPhone6,1": @(RTCDeviceTypeIPhone5SGSM),
@"iPhone6,2": @(RTCDeviceTypeIPhone5SGSM_CDMA),
@"iPhone7,1": @(RTCDeviceTypeIPhone6Plus),
@"iPhone7,2": @(RTCDeviceTypeIPhone6),
@"iPhone8,1": @(RTCDeviceTypeIPhone6S),
@"iPhone8,2": @(RTCDeviceTypeIPhone6SPlus),
@"iPhone8,4": @(RTCDeviceTypeIPhoneSE),
@"iPhone9,1": @(RTCDeviceTypeIPhone7),
@"iPhone9,2": @(RTCDeviceTypeIPhone7Plus),
@"iPhone9,3": @(RTCDeviceTypeIPhone7),
@"iPhone9,4": @(RTCDeviceTypeIPhone7Plus),
@"iPhone10,1": @(RTCDeviceTypeIPhone8),
@"iPhone10,2": @(RTCDeviceTypeIPhone8Plus),
@"iPhone10,3": @(RTCDeviceTypeIPhoneX),
@"iPhone10,4": @(RTCDeviceTypeIPhone8),
@"iPhone10,5": @(RTCDeviceTypeIPhone8Plus),
@"iPhone10,6": @(RTCDeviceTypeIPhoneX),
@"iPod1,1": @(RTCDeviceTypeIPodTouch1G),
@"iPod2,1": @(RTCDeviceTypeIPodTouch2G),
@"iPod3,1": @(RTCDeviceTypeIPodTouch3G),
@"iPod4,1": @(RTCDeviceTypeIPodTouch4G),
@"iPod5,1": @(RTCDeviceTypeIPodTouch5G),
@"iPod7,1": @(RTCDeviceTypeIPodTouch6G),
@"iPad1,1": @(RTCDeviceTypeIPad),
@"iPad2,1": @(RTCDeviceTypeIPad2Wifi),
@"iPad2,2": @(RTCDeviceTypeIPad2GSM),
@"iPad2,3": @(RTCDeviceTypeIPad2CDMA),
@"iPad2,4": @(RTCDeviceTypeIPad2Wifi2),
@"iPad2,5": @(RTCDeviceTypeIPadMiniWifi),
@"iPad2,6": @(RTCDeviceTypeIPadMiniGSM),
@"iPad2,7": @(RTCDeviceTypeIPadMiniGSM_CDMA),
@"iPad3,1": @(RTCDeviceTypeIPad3Wifi),
@"iPad3,2": @(RTCDeviceTypeIPad3GSM_CDMA),
@"iPad3,3": @(RTCDeviceTypeIPad3GSM),
@"iPad3,4": @(RTCDeviceTypeIPad4Wifi),
@"iPad3,5": @(RTCDeviceTypeIPad4GSM),
@"iPad3,6": @(RTCDeviceTypeIPad4GSM_CDMA),
@"iPad4,1": @(RTCDeviceTypeIPadAirWifi),
@"iPad4,2": @(RTCDeviceTypeIPadAirCellular),
@"iPad4,3": @(RTCDeviceTypeIPadAirWifiCellular),
@"iPad4,4": @(RTCDeviceTypeIPadMini2GWifi),
@"iPad4,5": @(RTCDeviceTypeIPadMini2GCellular),
@"iPad4,6": @(RTCDeviceTypeIPadMini2GWifiCellular),
@"iPad4,7": @(RTCDeviceTypeIPadMini3),
@"iPad4,8": @(RTCDeviceTypeIPadMini3),
@"iPad4,9": @(RTCDeviceTypeIPadMini3),
@"iPad5,1": @(RTCDeviceTypeIPadMini4),
@"iPad5,2": @(RTCDeviceTypeIPadMini4),
@"iPad5,3": @(RTCDeviceTypeIPadAir2),
@"iPad5,4": @(RTCDeviceTypeIPadAir2),
@"iPad6,3": @(RTCDeviceTypeIPadPro9Inch),
@"iPad6,4": @(RTCDeviceTypeIPadPro9Inch),
@"iPad6,7": @(RTCDeviceTypeIPadPro12Inch),
@"iPad6,8": @(RTCDeviceTypeIPadPro12Inch),
@"iPad6,11": @(RTCDeviceTypeIPad5),
@"iPad6,12": @(RTCDeviceTypeIPad5),
@"iPad7,1": @(RTCDeviceTypeIPadPro12Inch2),
@"iPad7,2": @(RTCDeviceTypeIPadPro12Inch2),
@"iPad7,3": @(RTCDeviceTypeIPadPro10Inch),
@"iPad7,4": @(RTCDeviceTypeIPadPro10Inch),
@"iPad7,5": @(RTCDeviceTypeIPad6),
@"iPad7,6": @(RTCDeviceTypeIPad6),
@"i386": @(RTCDeviceTypeSimulatori386),
@"x86_64": @(RTCDeviceTypeSimulatorx86_64),
};
RTCDeviceType deviceType = RTCDeviceTypeUnknown;
NSNumber *typeNumber = machineNameToType[[self machineName]];
if (typeNumber) {
deviceType = static_cast<RTCDeviceType>(typeNumber.integerValue);
}
return deviceType;
}
+ (NSString *)machineName {
struct utsname systemInfo;
uname(&systemInfo);
return [[NSString alloc] initWithCString:systemInfo.machine
encoding:NSUTF8StringEncoding];
}
+ (double)currentDeviceSystemVersion {
return [self currentDevice].systemVersion.doubleValue;
}
+ (BOOL)isIOS11OrLater {
return [self currentDeviceSystemVersion] >= 11.0;
}
@end
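Usage sketch (illustrative):

if ([UIDevice deviceType] == RTCDeviceTypeIPhone4S) {
  // e.g. prefer a lower default capture resolution on older hardware.
}
if ([UIDevice isIOS11OrLater]) {
  // Safe to use APIs introduced in iOS 11.
}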

View File

@ -1,76 +0,0 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef SDK_OBJC_FRAMEWORK_CLASSES_COMMON_HELPERS_H_
#define SDK_OBJC_FRAMEWORK_CLASSES_COMMON_HELPERS_H_
#include <string>
namespace webrtc {
namespace ios {
bool CheckAndLogError(BOOL success, NSError* error);
NSString* NSStringFromStdString(const std::string& stdString);
std::string StdStringFromNSString(NSString* nsString);
// Return thread ID as a string.
std::string GetThreadId();
// Return thread ID as string suitable for debug logging.
std::string GetThreadInfo();
// Returns [NSThread currentThread] description as string.
// Example: <NSThread: 0x170066d80>{number = 1, name = main}
std::string GetCurrentThreadDescription();
#if defined(WEBRTC_IOS)
// Returns the current name of the operating system.
std::string GetSystemName();
// Returns the current version of the operating system as a string.
std::string GetSystemVersionAsString();
// Returns the version of the operating system in double representation.
// Uses a cached value of the system version.
double GetSystemVersion();
// Returns the device type.
// Examples: "iPhone" and "iPod touch".
std::string GetDeviceType();
#endif // defined(WEBRTC_IOS)
// Returns a more detailed device name.
// Examples: "iPhone 5s (GSM)" and "iPhone 6 Plus".
std::string GetDeviceName();
// Returns the name of the process. Does not uniquely identify the process.
std::string GetProcessName();
// Returns the identifier of the process (often called process ID).
int GetProcessID();
// Returns a string containing the version of the operating system on which the
// process is executing. The string is human readable, localized, and
// appropriate for displaying to the user.
std::string GetOSVersionString();
// Returns the number of processing cores available on the device.
int GetProcessorCount();
#if defined(WEBRTC_IOS)
// Indicates whether Low Power Mode is enabled on the iOS device.
bool GetLowPowerModeEnabled();
#endif
} // namespace ios
} // namespace webrtc
#endif // SDK_OBJC_FRAMEWORK_CLASSES_COMMON_HELPERS_H_

View File

@ -1,111 +0,0 @@
/*
* Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import <sys/sysctl.h>
#if defined(WEBRTC_IOS)
#import <UIKit/UIKit.h>
#endif
#include <memory>
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "sdk/objc/Framework/Classes/Common/helpers.h"
namespace webrtc {
namespace ios {
NSString* NSStringFromStdString(const std::string& stdString) {
  // std::string may contain a null termination character, so we construct
  // using the length.
return [[NSString alloc] initWithBytes:stdString.data()
length:stdString.length()
encoding:NSUTF8StringEncoding];
}
std::string StdStringFromNSString(NSString* nsString) {
NSData* charData = [nsString dataUsingEncoding:NSUTF8StringEncoding];
return std::string(reinterpret_cast<const char*>([charData bytes]),
[charData length]);
}
bool CheckAndLogError(BOOL success, NSError* error) {
if (!success) {
NSString* msg =
[NSString stringWithFormat:@"Error: %ld, %@, %@", (long)error.code,
error.localizedDescription,
error.localizedFailureReason];
RTC_LOG(LS_ERROR) << StdStringFromNSString(msg);
return false;
}
return true;
}
// TODO(henrika): see if it is possible to move to GetThreadName in
// platform_thread.h and base it on pthread methods instead.
std::string GetCurrentThreadDescription() {
NSString* name = [NSString stringWithFormat:@"%@", [NSThread currentThread]];
return StdStringFromNSString(name);
}
#if defined(WEBRTC_IOS)
std::string GetSystemName() {
NSString* osName = [[UIDevice currentDevice] systemName];
return StdStringFromNSString(osName);
}
std::string GetSystemVersionAsString() {
NSString* osVersion = [[UIDevice currentDevice] systemVersion];
return StdStringFromNSString(osVersion);
}
std::string GetDeviceType() {
NSString* deviceModel = [[UIDevice currentDevice] model];
return StdStringFromNSString(deviceModel);
}
bool GetLowPowerModeEnabled() {
return [NSProcessInfo processInfo].lowPowerModeEnabled;
}
#endif
std::string GetDeviceName() {
size_t size;
sysctlbyname("hw.machine", NULL, &size, NULL, 0);
std::unique_ptr<char[]> machine;
machine.reset(new char[size]);
sysctlbyname("hw.machine", machine.get(), &size, NULL, 0);
return std::string(machine.get());
}
std::string GetProcessName() {
NSString* processName = [NSProcessInfo processInfo].processName;
return StdStringFromNSString(processName);
}
int GetProcessID() {
return [NSProcessInfo processInfo].processIdentifier;
}
std::string GetOSVersionString() {
NSString* osVersion =
[NSProcessInfo processInfo].operatingSystemVersionString;
return StdStringFromNSString(osVersion);
}
int GetProcessorCount() {
return [NSProcessInfo processInfo].processorCount;
}
} // namespace ios
} // namespace webrtc
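A round-trip sketch of the string helpers (illustrative):

std::string name = webrtc::ios::StdStringFromNSString(@"front camera");
NSString *back = webrtc::ios::NSStringFromStdString(name);
RTC_DCHECK([back isEqualToString:@"front camera"]);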

View File

@ -1,13 +0,0 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
// This file is only needed to make ninja happy on some platforms.
// On some platforms it is not possible to link an rtc_static_library
// without any source file listed in the GN target.

View File

@ -9,108 +9,4 @@
*
*/
#ifndef WEBRTC_SDK_OBJC_FRAMEWORK_CLASSES_COMMON_SCOPED_CFTYPEREF_H_
#define WEBRTC_SDK_OBJC_FRAMEWORK_CLASSES_COMMON_SCOPED_CFTYPEREF_H_
#include <CoreFoundation/CoreFoundation.h>

#include "rtc_base/checks.h"  // For RTC_DCHECK in InitializeInto().
namespace rtc {
// RETAIN: ScopedTypeRef should retain the object when it takes
// ownership.
// ASSUME: Assume the object has already been retained.
// ScopedTypeRef takes over ownership.
enum class RetainPolicy { RETAIN, ASSUME };
namespace internal {
template <typename T>
struct CFTypeRefTraits {
static T InvalidValue() { return nullptr; }
static void Release(T ref) { CFRelease(ref); }
static T Retain(T ref) {
CFRetain(ref);
return ref;
}
};
template <typename T, typename Traits>
class ScopedTypeRef {
public:
ScopedTypeRef() : ptr_(Traits::InvalidValue()) {}
explicit ScopedTypeRef(T ptr) : ptr_(ptr) {}
ScopedTypeRef(T ptr, RetainPolicy policy) : ScopedTypeRef(ptr) {
if (ptr_ && policy == RetainPolicy::RETAIN)
Traits::Retain(ptr_);
}
ScopedTypeRef(const ScopedTypeRef<T, Traits>& rhs) : ptr_(rhs.ptr_) {
if (ptr_)
ptr_ = Traits::Retain(ptr_);
}
~ScopedTypeRef() {
if (ptr_) {
Traits::Release(ptr_);
}
}
T get() const { return ptr_; }
T operator->() const { return ptr_; }
explicit operator bool() const { return ptr_; }
bool operator!() const { return !ptr_; }
ScopedTypeRef& operator=(const T& rhs) {
if (ptr_)
Traits::Release(ptr_);
ptr_ = rhs;
return *this;
}
ScopedTypeRef& operator=(const ScopedTypeRef<T, Traits>& rhs) {
reset(rhs.get(), RetainPolicy::RETAIN);
return *this;
}
// This is intended to take ownership of objects that are
// created by pass-by-pointer initializers.
T* InitializeInto() {
RTC_DCHECK(!ptr_);
return &ptr_;
}
void reset(T ptr, RetainPolicy policy = RetainPolicy::ASSUME) {
if (ptr && policy == RetainPolicy::RETAIN)
Traits::Retain(ptr);
if (ptr_)
Traits::Release(ptr_);
ptr_ = ptr;
}
T release() {
T temp = ptr_;
ptr_ = Traits::InvalidValue();
return temp;
}
private:
T ptr_;
};
} // namespace internal
template <typename T>
using ScopedCFTypeRef =
internal::ScopedTypeRef<T, internal::CFTypeRefTraits<T>>;
template <typename T>
static ScopedCFTypeRef<T> AdoptCF(T cftype) {
return ScopedCFTypeRef<T>(cftype, RetainPolicy::RETAIN);
}
template <typename T>
static ScopedCFTypeRef<T> ScopedCF(T cftype) {
return ScopedCFTypeRef<T>(cftype);
}
} // namespace rtc
#endif // WEBRTC_SDK_OBJC_FRAMEWORK_CLASSES_COMMON_SCOPED_CFTYPEREF_H_
#import "helpers/scoped_cftyperef.h"

View File

@ -1,17 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import "RTCMTLRenderer.h"
NS_AVAILABLE(10_11, 9_0)
@interface RTCMTLI420Renderer : RTCMTLRenderer
@end

View File

@ -1,169 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCMTLI420Renderer.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#import <Metal/Metal.h>
#import <MetalKit/MetalKit.h>
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCVideoFrame.h"
#import "RTCMTLRenderer+Private.h"
static NSString *const shaderSource = MTL_STRINGIFY(
using namespace metal;
typedef struct {
packed_float2 position;
packed_float2 texcoord;
} Vertex;
typedef struct {
float4 position[[position]];
float2 texcoord;
} Varyings;
vertex Varyings vertexPassthrough(device Vertex * verticies[[buffer(0)]],
unsigned int vid[[vertex_id]]) {
Varyings out;
device Vertex &v = verticies[vid];
out.position = float4(float2(v.position), 0.0, 1.0);
out.texcoord = v.texcoord;
return out;
}
fragment half4 fragmentColorConversion(
Varyings in[[stage_in]], texture2d<float, access::sample> textureY[[texture(0)]],
texture2d<float, access::sample> textureU[[texture(1)]],
texture2d<float, access::sample> textureV[[texture(2)]]) {
constexpr sampler s(address::clamp_to_edge, filter::linear);
float y;
float u;
float v;
float r;
float g;
float b;
// Conversion for YUV to rgb from http://www.fourcc.org/fccyvrgb.php
y = textureY.sample(s, in.texcoord).r;
u = textureU.sample(s, in.texcoord).r;
v = textureV.sample(s, in.texcoord).r;
u = u - 0.5;
v = v - 0.5;
r = y + 1.403 * v;
g = y - 0.344 * u - 0.714 * v;
b = y + 1.770 * u;
float4 out = float4(r, g, b, 1.0);
return half4(out);
});
@implementation RTCMTLI420Renderer {
// Textures.
id<MTLTexture> _yTexture;
id<MTLTexture> _uTexture;
id<MTLTexture> _vTexture;
MTLTextureDescriptor *_descriptor;
MTLTextureDescriptor *_chromaDescriptor;
int _width;
int _height;
int _chromaWidth;
int _chromaHeight;
}
#pragma mark - Virtual
- (NSString *)shaderSource {
return shaderSource;
}
- (void)getWidth:(nonnull int *)width
height:(nonnull int *)height
cropWidth:(nonnull int *)cropWidth
cropHeight:(nonnull int *)cropHeight
cropX:(nonnull int *)cropX
cropY:(nonnull int *)cropY
ofFrame:(nonnull RTCVideoFrame *)frame {
*width = frame.width;
*height = frame.height;
*cropWidth = frame.width;
*cropHeight = frame.height;
*cropX = 0;
*cropY = 0;
}
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
if (![super setupTexturesForFrame:frame]) {
return NO;
}
id<MTLDevice> device = [self currentMetalDevice];
if (!device) {
return NO;
}
id<RTCI420Buffer> buffer = [frame.buffer toI420];
// Luma (y) texture.
  // Recreate the texture if the descriptor is missing or either dimension changed.
  if (!_descriptor || _width != frame.width || _height != frame.height) {
_width = frame.width;
_height = frame.height;
_descriptor = [MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
width:_width
height:_height
mipmapped:NO];
_descriptor.usage = MTLTextureUsageShaderRead;
_yTexture = [device newTextureWithDescriptor:_descriptor];
}
  [_yTexture replaceRegion:MTLRegionMake2D(0, 0, _width, _height)
               mipmapLevel:0
                 withBytes:buffer.dataY
               bytesPerRow:buffer.strideY];
  // Chroma (u,v) textures.
  if (!_chromaDescriptor ||
      _chromaWidth != frame.width / 2 || _chromaHeight != frame.height / 2) {
_chromaWidth = frame.width / 2;
_chromaHeight = frame.height / 2;
_chromaDescriptor =
[MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatR8Unorm
width:_chromaWidth
height:_chromaHeight
mipmapped:NO];
_chromaDescriptor.usage = MTLTextureUsageShaderRead;
_uTexture = [device newTextureWithDescriptor:_chromaDescriptor];
_vTexture = [device newTextureWithDescriptor:_chromaDescriptor];
}
[_uTexture replaceRegion:MTLRegionMake2D(0, 0, _chromaWidth, _chromaHeight)
mipmapLevel:0
withBytes:buffer.dataU
bytesPerRow:buffer.strideU];
[_vTexture replaceRegion:MTLRegionMake2D(0, 0, _chromaWidth, _chromaHeight)
mipmapLevel:0
withBytes:buffer.dataV
bytesPerRow:buffer.strideV];
return (_uTexture != nil) && (_yTexture != nil) && (_vTexture != nil);
}
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
[renderEncoder setFragmentTexture:_yTexture atIndex:0];
[renderEncoder setFragmentTexture:_uTexture atIndex:1];
[renderEncoder setFragmentTexture:_vTexture atIndex:2];
}
@end
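Note on the width / 2, height / 2 sizing above: I420 is 4:2:0 subsampled, so for a 640x480 luma plane the U and V planes are each 320x240; the shader samples all three textures with the same normalized coordinates.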

View File

@ -1,122 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCMTLNSVideoView.h"
#import <Metal/Metal.h>
#import <MetalKit/MetalKit.h>
#import "WebRTC/RTCVideoFrame.h"
#import "RTCMTLI420Renderer.h"
@interface RTCMTLNSVideoView ()<MTKViewDelegate>
@property(nonatomic) id<RTCMTLRenderer> renderer;
@property(nonatomic, strong) MTKView *metalView;
@property(atomic, strong) RTCVideoFrame *videoFrame;
@end
@implementation RTCMTLNSVideoView {
id<RTCMTLRenderer> _renderer;
}
@synthesize delegate = _delegate;
@synthesize renderer = _renderer;
@synthesize metalView = _metalView;
@synthesize videoFrame = _videoFrame;
- (instancetype)initWithFrame:(CGRect)frameRect {
self = [super initWithFrame:frameRect];
if (self) {
[self configure];
}
return self;
}
- (instancetype)initWithCoder:(NSCoder *)aCoder {
self = [super initWithCoder:aCoder];
if (self) {
[self configure];
}
return self;
}
#pragma mark - Private
+ (BOOL)isMetalAvailable {
return [MTLCopyAllDevices() count] > 0;
}
- (void)configure {
if ([[self class] isMetalAvailable]) {
_metalView = [[MTKView alloc] initWithFrame:self.bounds];
[self addSubview:_metalView];
_metalView.layerContentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFit;
_metalView.translatesAutoresizingMaskIntoConstraints = NO;
_metalView.framebufferOnly = YES;
_metalView.delegate = self;
_renderer = [[RTCMTLI420Renderer alloc] init];
if (![(RTCMTLI420Renderer *)_renderer addRenderingDestination:_metalView]) {
_renderer = nil;
    }
}
}
- (void)updateConstraints {
NSDictionary *views = NSDictionaryOfVariableBindings(_metalView);
NSArray *constraintsHorizontal =
[NSLayoutConstraint constraintsWithVisualFormat:@"H:|-0-[_metalView]-0-|"
options:0
metrics:nil
views:views];
[self addConstraints:constraintsHorizontal];
NSArray *constraintsVertical =
[NSLayoutConstraint constraintsWithVisualFormat:@"V:|-0-[_metalView]-0-|"
options:0
metrics:nil
views:views];
[self addConstraints:constraintsVertical];
[super updateConstraints];
}
#pragma mark - MTKViewDelegate methods
- (void)drawInMTKView:(nonnull MTKView *)view {
if (self.videoFrame == nil) {
return;
}
if (view == self.metalView) {
[_renderer drawFrame:self.videoFrame];
}
}
- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
}
#pragma mark - RTCVideoRenderer
- (void)setSize:(CGSize)size {
_metalView.drawableSize = size;
dispatch_async(dispatch_get_main_queue(), ^{
[self.delegate videoView:self didChangeVideoSize:size];
});
[_metalView draw];
}
- (void)renderFrame:(nullable RTCVideoFrame *)frame {
if (frame == nil) {
return;
}
self.videoFrame = [frame newI420VideoFrame];
}
@end

View File

@ -1,18 +0,0 @@
/*
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import "RTCMTLRenderer.h"
NS_AVAILABLE(10_11, 9_0)
@interface RTCMTLNV12Renderer : RTCMTLRenderer
@end

View File

@ -1,162 +0,0 @@
/*
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCMTLNV12Renderer.h"
#import <Metal/Metal.h>
#import <MetalKit/MetalKit.h>
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#import "RTCMTLRenderer+Private.h"
#include "rtc_base/checks.h"
static NSString *const shaderSource = MTL_STRINGIFY(
using namespace metal;
typedef struct {
packed_float2 position;
packed_float2 texcoord;
} Vertex;
typedef struct {
float4 position[[position]];
float2 texcoord;
} Varyings;
vertex Varyings vertexPassthrough(device Vertex * verticies[[buffer(0)]],
unsigned int vid[[vertex_id]]) {
Varyings out;
device Vertex &v = verticies[vid];
out.position = float4(float2(v.position), 0.0, 1.0);
out.texcoord = v.texcoord;
return out;
}
// Receiving YCrCb textures.
fragment half4 fragmentColorConversion(
Varyings in[[stage_in]], texture2d<float, access::sample> textureY[[texture(0)]],
texture2d<float, access::sample> textureCbCr[[texture(1)]]) {
constexpr sampler s(address::clamp_to_edge, filter::linear);
float y;
float2 uv;
y = textureY.sample(s, in.texcoord).r;
uv = textureCbCr.sample(s, in.texcoord).rg - float2(0.5, 0.5);
// Conversion for YUV to rgb from http://www.fourcc.org/fccyvrgb.php
float4 out = float4(y + 1.403 * uv.y, y - 0.344 * uv.x - 0.714 * uv.y, y + 1.770 * uv.x, 1.0);
return half4(out);
});
@implementation RTCMTLNV12Renderer {
// Textures.
CVMetalTextureCacheRef _textureCache;
id<MTLTexture> _yTexture;
id<MTLTexture> _CrCbTexture;
}
- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
if ([super addRenderingDestination:view]) {
return [self initializeTextureCache];
}
return NO;
}
- (BOOL)initializeTextureCache {
CVReturn status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, [self currentMetalDevice],
nil, &_textureCache);
if (status != kCVReturnSuccess) {
RTCLogError(@"Metal: Failed to initialize metal texture cache. Return status is %d", status);
return NO;
}
return YES;
}
- (NSString *)shaderSource {
return shaderSource;
}
- (void)getWidth:(nonnull int *)width
height:(nonnull int *)height
cropWidth:(nonnull int *)cropWidth
cropHeight:(nonnull int *)cropHeight
cropX:(nonnull int *)cropX
cropY:(nonnull int *)cropY
ofFrame:(nonnull RTCVideoFrame *)frame {
RTCCVPixelBuffer *pixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
*width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer);
*height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer);
*cropWidth = pixelBuffer.cropWidth;
*cropHeight = pixelBuffer.cropHeight;
*cropX = pixelBuffer.cropX;
*cropY = pixelBuffer.cropY;
}
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
RTC_DCHECK([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]);
if (![super setupTexturesForFrame:frame]) {
return NO;
}
CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;
id<MTLTexture> lumaTexture = nil;
id<MTLTexture> chromaTexture = nil;
CVMetalTextureRef outTexture = nullptr;
// Luma (y) texture.
int lumaWidth = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
int lumaHeight = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
int indexPlane = 0;
CVReturn result = CVMetalTextureCacheCreateTextureFromImage(
kCFAllocatorDefault, _textureCache, pixelBuffer, nil, MTLPixelFormatR8Unorm, lumaWidth,
lumaHeight, indexPlane, &outTexture);
if (result == kCVReturnSuccess) {
lumaTexture = CVMetalTextureGetTexture(outTexture);
}
// Same as CFRelease except it can be passed NULL without crashing.
CVBufferRelease(outTexture);
outTexture = nullptr;
// Chroma (CrCb) texture.
indexPlane = 1;
result = CVMetalTextureCacheCreateTextureFromImage(
kCFAllocatorDefault, _textureCache, pixelBuffer, nil, MTLPixelFormatRG8Unorm, lumaWidth / 2,
lumaHeight / 2, indexPlane, &outTexture);
if (result == kCVReturnSuccess) {
chromaTexture = CVMetalTextureGetTexture(outTexture);
}
CVBufferRelease(outTexture);
if (lumaTexture != nil && chromaTexture != nil) {
_yTexture = lumaTexture;
_CrCbTexture = chromaTexture;
return YES;
}
return NO;
}
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
[renderEncoder setFragmentTexture:_yTexture atIndex:0];
[renderEncoder setFragmentTexture:_CrCbTexture atIndex:1];
}
- (void)dealloc {
if (_textureCache) {
CFRelease(_textureCache);
}
}
@end

View File

@ -1,22 +0,0 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import "RTCMTLRenderer.h"
/** @abstract RGB/BGR renderer.
* @discussion This renderer handles both kCVPixelFormatType_32BGRA and
* kCVPixelFormatType_32ARGB.
*/
NS_AVAILABLE(10_11, 9_0)
@interface RTCMTLRGBRenderer : RTCMTLRenderer
@end

View File

@ -1,163 +0,0 @@
/*
* Copyright 2018 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCMTLRGBRenderer.h"
#import <Metal/Metal.h>
#import <MetalKit/MetalKit.h>
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#import "RTCMTLRenderer+Private.h"
#include "rtc_base/checks.h"
static NSString *const shaderSource = MTL_STRINGIFY(
using namespace metal;
typedef struct {
packed_float2 position;
packed_float2 texcoord;
} Vertex;
typedef struct {
float4 position[[position]];
float2 texcoord;
} VertexIO;
vertex VertexIO vertexPassthrough(device Vertex * verticies[[buffer(0)]],
uint vid[[vertex_id]]) {
VertexIO out;
device Vertex &v = verticies[vid];
out.position = float4(float2(v.position), 0.0, 1.0);
out.texcoord = v.texcoord;
return out;
}
fragment half4 fragmentColorConversion(
VertexIO in[[stage_in]], texture2d<half, access::sample> texture[[texture(0)]],
constant bool &isARGB[[buffer(0)]]) {
constexpr sampler s(address::clamp_to_edge, filter::linear);
half4 out = texture.sample(s, in.texcoord);
if (isARGB) {
out = half4(out.g, out.b, out.a, out.r);
}
return out;
});
@implementation RTCMTLRGBRenderer {
// Textures.
CVMetalTextureCacheRef _textureCache;
id<MTLTexture> _texture;
// Uniforms.
id<MTLBuffer> _uniformsBuffer;
}
- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
if ([super addRenderingDestination:view]) {
return [self initializeTextureCache];
}
return NO;
}
- (BOOL)initializeTextureCache {
CVReturn status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, [self currentMetalDevice],
nil, &_textureCache);
if (status != kCVReturnSuccess) {
RTCLogError(@"Metal: Failed to initialize metal texture cache. Return status is %d", status);
return NO;
}
return YES;
}
- (NSString *)shaderSource {
return shaderSource;
}
- (void)getWidth:(nonnull int *)width
height:(nonnull int *)height
cropWidth:(nonnull int *)cropWidth
cropHeight:(nonnull int *)cropHeight
cropX:(nonnull int *)cropX
cropY:(nonnull int *)cropY
ofFrame:(nonnull RTCVideoFrame *)frame {
RTCCVPixelBuffer *pixelBuffer = (RTCCVPixelBuffer *)frame.buffer;
*width = CVPixelBufferGetWidth(pixelBuffer.pixelBuffer);
*height = CVPixelBufferGetHeight(pixelBuffer.pixelBuffer);
*cropWidth = pixelBuffer.cropWidth;
*cropHeight = pixelBuffer.cropHeight;
*cropX = pixelBuffer.cropX;
*cropY = pixelBuffer.cropY;
}
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
RTC_DCHECK([frame.buffer isKindOfClass:[RTCCVPixelBuffer class]]);
if (![super setupTexturesForFrame:frame]) {
return NO;
}
CVPixelBufferRef pixelBuffer = ((RTCCVPixelBuffer *)frame.buffer).pixelBuffer;
id<MTLTexture> gpuTexture = nil;
CVMetalTextureRef textureOut = nullptr;
bool isARGB;
int width = CVPixelBufferGetWidth(pixelBuffer);
int height = CVPixelBufferGetHeight(pixelBuffer);
OSType pixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
MTLPixelFormat mtlPixelFormat;
if (pixelFormat == kCVPixelFormatType_32BGRA) {
mtlPixelFormat = MTLPixelFormatBGRA8Unorm;
isARGB = false;
} else if (pixelFormat == kCVPixelFormatType_32ARGB) {
mtlPixelFormat = MTLPixelFormatRGBA8Unorm;
isARGB = true;
} else {
RTC_NOTREACHED();
return NO;
}
CVReturn result = CVMetalTextureCacheCreateTextureFromImage(
kCFAllocatorDefault, _textureCache, pixelBuffer, nil, mtlPixelFormat,
width, height, 0, &textureOut);
if (result == kCVReturnSuccess) {
gpuTexture = CVMetalTextureGetTexture(textureOut);
}
CVBufferRelease(textureOut);
if (gpuTexture != nil) {
_texture = gpuTexture;
_uniformsBuffer =
[[self currentMetalDevice] newBufferWithBytes:&isARGB
length:sizeof(isARGB)
options:MTLResourceCPUCacheModeDefaultCache];
return YES;
}
return NO;
}
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
[renderEncoder setFragmentTexture:_texture atIndex:0];
[renderEncoder setFragmentBuffer:_uniformsBuffer offset:0 atIndex:0];
}
- (void)dealloc {
if (_textureCache) {
CFRelease(_textureCache);
}
}
@end

View File

@ -1,30 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Metal/Metal.h>
#import "RTCMTLRenderer.h"
#define MTL_STRINGIFY(s) @ #s
NS_ASSUME_NONNULL_BEGIN
@interface RTCMTLRenderer (Private)
- (nullable id<MTLDevice>)currentMetalDevice;
- (NSString *)shaderSource;
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame;
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder;
- (void)getWidth:(nonnull int *)width
height:(nonnull int *)height
cropWidth:(nonnull int *)cropWidth
cropHeight:(nonnull int *)cropHeight
cropX:(nonnull int *)cropX
cropY:(nonnull int *)cropY
ofFrame:(nonnull RTCVideoFrame *)frame;
@end
NS_ASSUME_NONNULL_END

View File

@ -1,61 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#else
#import <AppKit/AppKit.h>
#endif
#import "WebRTC/RTCVideoFrame.h"
NS_ASSUME_NONNULL_BEGIN
/**
* Protocol defining ability to render RTCVideoFrame in Metal enabled views.
*/
@protocol RTCMTLRenderer <NSObject>
/**
* Method to be implemented to perform actual rendering of the provided frame.
*
* @param frame The frame to be rendered.
*/
- (void)drawFrame:(RTCVideoFrame *)frame;
/**
* Sets the provided view as rendering destination if possible.
*
 * If not possible, the method returns NO and callers of the method are responsible for
 * performing cleanup.
*/
#if TARGET_OS_IOS
- (BOOL)addRenderingDestination:(__kindof UIView *)view;
#else
- (BOOL)addRenderingDestination:(__kindof NSView *)view;
#endif
@end
/**
* Implementation of RTCMTLRenderer protocol.
*/
NS_AVAILABLE(10_11, 9_0)
@interface RTCMTLRenderer : NSObject <RTCMTLRenderer>
/** @abstract A wrapped RTCVideoRotation, or nil.
@discussion When not nil, the rotation of the actual frame is ignored when rendering.
*/
@property(atomic, nullable) NSValue *rotationOverride;
@end
NS_ASSUME_NONNULL_END

View File

@ -1,325 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCMTLRenderer+Private.h"
#import <Metal/Metal.h>
#import <MetalKit/MetalKit.h>
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#include "api/video/video_rotation.h"
#include "rtc_base/checks.h"
// As defined in shaderSource.
static NSString *const vertexFunctionName = @"vertexPassthrough";
static NSString *const fragmentFunctionName = @"fragmentColorConversion";
static NSString *const pipelineDescriptorLabel = @"RTCPipeline";
static NSString *const commandBufferLabel = @"RTCCommandBuffer";
static NSString *const renderEncoderLabel = @"RTCEncoder";
static NSString *const renderEncoderDebugGroup = @"RTCDrawFrame";
// Computes the texture coordinates given rotation and cropping.
static inline void getCubeVertexData(int cropX,
int cropY,
int cropWidth,
int cropHeight,
size_t frameWidth,
size_t frameHeight,
RTCVideoRotation rotation,
float *buffer) {
// The computed values are the adjusted texture coordinates, in [0..1].
// For the left and top, 0.0 means no cropping and e.g. 0.2 means we're skipping 20% of the
// left/top edge.
// For the right and bottom, 1.0 means no cropping and e.g. 0.8 means we're skipping 20% of the
// right/bottom edge (i.e. render up to 80% of the width/height).
float cropLeft = cropX / (float)frameWidth;
float cropRight = (cropX + cropWidth) / (float)frameWidth;
float cropTop = cropY / (float)frameHeight;
float cropBottom = (cropY + cropHeight) / (float)frameHeight;
// These arrays map the view coordinates to texture coordinates, taking cropping and rotation
// into account. The first two columns are view coordinates, the last two are texture coordinates.
switch (rotation) {
case RTCVideoRotation_0: {
float values[16] = {-1.0, -1.0, cropLeft, cropBottom,
1.0, -1.0, cropRight, cropBottom,
-1.0, 1.0, cropLeft, cropTop,
1.0, 1.0, cropRight, cropTop};
memcpy(buffer, &values, sizeof(values));
} break;
case RTCVideoRotation_90: {
float values[16] = {-1.0, -1.0, cropRight, cropBottom,
1.0, -1.0, cropRight, cropTop,
-1.0, 1.0, cropLeft, cropBottom,
1.0, 1.0, cropLeft, cropTop};
memcpy(buffer, &values, sizeof(values));
} break;
case RTCVideoRotation_180: {
float values[16] = {-1.0, -1.0, cropRight, cropTop,
1.0, -1.0, cropLeft, cropTop,
-1.0, 1.0, cropRight, cropBottom,
1.0, 1.0, cropLeft, cropBottom};
memcpy(buffer, &values, sizeof(values));
} break;
case RTCVideoRotation_270: {
float values[16] = {-1.0, -1.0, cropLeft, cropTop,
1.0, -1.0, cropLeft, cropBottom,
-1.0, 1.0, cropRight, cropTop,
1.0, 1.0, cropRight, cropBottom};
memcpy(buffer, &values, sizeof(values));
} break;
}
}
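Worked example for the cropping fractions above: with a 640x360 frame and a centered 480x360 crop (cropX = 80, cropY = 0), cropLeft = 80/640 = 0.125, cropRight = (80 + 480)/640 = 0.875, cropTop = 0.0 and cropBottom = 1.0, so the middle 75% of each row is sampled.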
// The max number of command buffers in flight (submitted to GPU).
// For now it is set to 1.
// In the future we might use triple buffering if it improves performance.
static const NSInteger kMaxInflightBuffers = 1;
@implementation RTCMTLRenderer {
__kindof MTKView *_view;
// Controller.
dispatch_semaphore_t _inflight_semaphore;
// Renderer.
id<MTLDevice> _device;
id<MTLCommandQueue> _commandQueue;
id<MTLLibrary> _defaultLibrary;
id<MTLRenderPipelineState> _pipelineState;
// Buffers.
id<MTLBuffer> _vertexBuffer;
// Values affecting the vertex buffer. Stored for comparison to avoid unnecessary recreation.
int _oldFrameWidth;
int _oldFrameHeight;
int _oldCropWidth;
int _oldCropHeight;
int _oldCropX;
int _oldCropY;
RTCVideoRotation _oldRotation;
}
@synthesize rotationOverride = _rotationOverride;
- (instancetype)init {
if (self = [super init]) {
_inflight_semaphore = dispatch_semaphore_create(kMaxInflightBuffers);
}
return self;
}
- (BOOL)addRenderingDestination:(__kindof MTKView *)view {
return [self setupWithView:view];
}
#pragma mark - Private
- (BOOL)setupWithView:(__kindof MTKView *)view {
BOOL success = NO;
if ([self setupMetal]) {
_view = view;
view.device = _device;
view.preferredFramesPerSecond = 30;
view.autoResizeDrawable = NO;
[self loadAssets];
float vertexBufferArray[16] = {0};
_vertexBuffer = [_device newBufferWithBytes:vertexBufferArray
length:sizeof(vertexBufferArray)
options:MTLResourceCPUCacheModeWriteCombined];
success = YES;
}
return success;
}
#pragma mark - Inheritance
- (id<MTLDevice>)currentMetalDevice {
return _device;
}
- (NSString *)shaderSource {
RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
return nil;
}
- (void)uploadTexturesToRenderEncoder:(id<MTLRenderCommandEncoder>)renderEncoder {
RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
}
- (void)getWidth:(int *)width
height:(int *)height
cropWidth:(int *)cropWidth
cropHeight:(int *)cropHeight
cropX:(int *)cropX
cropY:(int *)cropY
ofFrame:(nonnull RTCVideoFrame *)frame {
RTC_NOTREACHED() << "Virtual method not implemented in subclass.";
}
- (BOOL)setupTexturesForFrame:(nonnull RTCVideoFrame *)frame {
// Apply rotation override if set.
RTCVideoRotation rotation;
NSValue *rotationOverride = self.rotationOverride;
if (rotationOverride) {
#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
if (@available(iOS 11, *)) {
[rotationOverride getValue:&rotation size:sizeof(rotation)];
} else
#endif
{
[rotationOverride getValue:&rotation];
}
} else {
rotation = frame.rotation;
}
int frameWidth, frameHeight, cropWidth, cropHeight, cropX, cropY;
[self getWidth:&frameWidth
height:&frameHeight
cropWidth:&cropWidth
cropHeight:&cropHeight
cropX:&cropX
cropY:&cropY
ofFrame:frame];
// Recompute the texture cropping and recreate vertexBuffer if necessary.
if (cropX != _oldCropX || cropY != _oldCropY || cropWidth != _oldCropWidth ||
cropHeight != _oldCropHeight || rotation != _oldRotation || frameWidth != _oldFrameWidth ||
frameHeight != _oldFrameHeight) {
getCubeVertexData(cropX,
cropY,
cropWidth,
cropHeight,
frameWidth,
frameHeight,
rotation,
(float *)_vertexBuffer.contents);
_oldCropX = cropX;
_oldCropY = cropY;
_oldCropWidth = cropWidth;
_oldCropHeight = cropHeight;
_oldRotation = rotation;
_oldFrameWidth = frameWidth;
_oldFrameHeight = frameHeight;
}
return YES;
}
#pragma mark - GPU methods
- (BOOL)setupMetal {
// Set the view to use the default device.
_device = MTLCreateSystemDefaultDevice();
if (!_device) {
return NO;
}
// Create a new command queue.
_commandQueue = [_device newCommandQueue];
// Load metal library from source.
NSError *libraryError = nil;
NSString *shaderSource = [self shaderSource];
id<MTLLibrary> sourceLibrary =
[_device newLibraryWithSource:shaderSource options:NULL error:&libraryError];
if (libraryError) {
RTCLogError(@"Metal: Library with source failed\n%@", libraryError);
return NO;
}
if (!sourceLibrary) {
RTCLogError(@"Metal: Failed to load library. %@", libraryError);
return NO;
}
_defaultLibrary = sourceLibrary;
return YES;
}
- (void)loadAssets {
id<MTLFunction> vertexFunction = [_defaultLibrary newFunctionWithName:vertexFunctionName];
id<MTLFunction> fragmentFunction = [_defaultLibrary newFunctionWithName:fragmentFunctionName];
MTLRenderPipelineDescriptor *pipelineDescriptor = [[MTLRenderPipelineDescriptor alloc] init];
pipelineDescriptor.label = pipelineDescriptorLabel;
pipelineDescriptor.vertexFunction = vertexFunction;
pipelineDescriptor.fragmentFunction = fragmentFunction;
pipelineDescriptor.colorAttachments[0].pixelFormat = _view.colorPixelFormat;
pipelineDescriptor.depthAttachmentPixelFormat = MTLPixelFormatInvalid;
NSError *error = nil;
_pipelineState = [_device newRenderPipelineStateWithDescriptor:pipelineDescriptor error:&error];
if (!_pipelineState) {
RTCLogError(@"Metal: Failed to create pipeline state. %@", error);
}
}
- (void)render {
  // Wait until the inflight (currently submitted to the GPU) command buffer
  // has completed the GPU work.
dispatch_semaphore_wait(_inflight_semaphore, DISPATCH_TIME_FOREVER);
id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
commandBuffer.label = commandBufferLabel;
__block dispatch_semaphore_t block_semaphore = _inflight_semaphore;
[commandBuffer addCompletedHandler:^(id<MTLCommandBuffer> _Nonnull) {
// GPU work completed.
dispatch_semaphore_signal(block_semaphore);
}];
MTLRenderPassDescriptor *renderPassDescriptor = _view.currentRenderPassDescriptor;
if (renderPassDescriptor) { // Valid drawable.
id<MTLRenderCommandEncoder> renderEncoder =
[commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor];
renderEncoder.label = renderEncoderLabel;
// Set context state.
[renderEncoder pushDebugGroup:renderEncoderDebugGroup];
[renderEncoder setRenderPipelineState:_pipelineState];
[renderEncoder setVertexBuffer:_vertexBuffer offset:0 atIndex:0];
[self uploadTexturesToRenderEncoder:renderEncoder];
[renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip
vertexStart:0
vertexCount:4
instanceCount:1];
[renderEncoder popDebugGroup];
[renderEncoder endEncoding];
[commandBuffer presentDrawable:_view.currentDrawable];
}
// CPU work is completed, GPU work can be started.
[commandBuffer commit];
}
#pragma mark - RTCMTLRenderer
- (void)drawFrame:(RTCVideoFrame *)frame {
@autoreleasepool {
if ([self setupTexturesForFrame:frame]) {
[self render];
}
}
}
@end

View File

@ -1,257 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCMTLVideoView.h"
#import <Metal/Metal.h>
#import <MetalKit/MetalKit.h>
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCVideoFrame.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#import "RTCMTLI420Renderer.h"
#import "RTCMTLNV12Renderer.h"
#import "RTCMTLRGBRenderer.h"
// To avoid unrecognized symbol linker errors, we're taking advantage of the Objective-C runtime.
// Linking errors occur when compiling for architectures that don't support Metal.
#define MTKViewClass NSClassFromString(@"MTKView")
#define RTCMTLNV12RendererClass NSClassFromString(@"RTCMTLNV12Renderer")
#define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer")
#define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer")
@interface RTCMTLVideoView () <MTKViewDelegate>
@property(nonatomic) RTCMTLI420Renderer *rendererI420;
@property(nonatomic) RTCMTLNV12Renderer *rendererNV12;
@property(nonatomic) RTCMTLRGBRenderer *rendererRGB;
@property(nonatomic) MTKView *metalView;
@property(atomic) RTCVideoFrame *videoFrame;
@property(nonatomic) CGSize videoFrameSize;
@property(nonatomic) int64_t lastFrameTimeNs;
@end
@implementation RTCMTLVideoView
@synthesize delegate = _delegate;
@synthesize rendererI420 = _rendererI420;
@synthesize rendererNV12 = _rendererNV12;
@synthesize rendererRGB = _rendererRGB;
@synthesize metalView = _metalView;
@synthesize videoFrame = _videoFrame;
@synthesize videoFrameSize = _videoFrameSize;
@synthesize lastFrameTimeNs = _lastFrameTimeNs;
@synthesize rotationOverride = _rotationOverride;
- (instancetype)initWithFrame:(CGRect)frameRect {
self = [super initWithFrame:frameRect];
if (self) {
[self configure];
}
return self;
}
- (instancetype)initWithCoder:(NSCoder *)aCoder {
self = [super initWithCoder:aCoder];
if (self) {
[self configure];
}
return self;
}
- (BOOL)isEnabled {
return !self.metalView.paused;
}
- (void)setEnabled:(BOOL)enabled {
self.metalView.paused = !enabled;
}
- (UIViewContentMode)videoContentMode {
return self.metalView.contentMode;
}
- (void)setVideoContentMode:(UIViewContentMode)mode {
self.metalView.contentMode = mode;
}
#pragma mark - Private
+ (BOOL)isMetalAvailable {
#if defined(RTC_SUPPORTS_METAL)
return MTLCreateSystemDefaultDevice() != nil;
#else
return NO;
#endif
}
+ (MTKView *)createMetalView:(CGRect)frame {
return [[MTKViewClass alloc] initWithFrame:frame];
}
+ (RTCMTLNV12Renderer *)createNV12Renderer {
return [[RTCMTLNV12RendererClass alloc] init];
}
+ (RTCMTLI420Renderer *)createI420Renderer {
return [[RTCMTLI420RendererClass alloc] init];
}
+ (RTCMTLRGBRenderer *)createRGBRenderer {
  return [[RTCMTLRGBRendererClass alloc] init];
}
- (void)configure {
  NSAssert([RTCMTLVideoView isMetalAvailable], @"Metal not available on this device");
self.metalView = [RTCMTLVideoView createMetalView:self.bounds];
self.metalView.delegate = self;
self.metalView.contentMode = UIViewContentModeScaleAspectFill;
[self addSubview:self.metalView];
self.videoFrameSize = CGSizeZero;
}
- (void)layoutSubviews {
[super layoutSubviews];
CGRect bounds = self.bounds;
self.metalView.frame = bounds;
if (!CGSizeEqualToSize(self.videoFrameSize, CGSizeZero)) {
self.metalView.drawableSize = [self drawableSize];
} else {
self.metalView.drawableSize = bounds.size;
}
}
#pragma mark - MTKViewDelegate methods
- (void)drawInMTKView:(nonnull MTKView *)view {
NSAssert(view == self.metalView, @"Receiving draw callbacks from foreign instance.");
RTCVideoFrame *videoFrame = self.videoFrame;
// Skip rendering if we've already rendered this frame.
if (!videoFrame || videoFrame.timeStampNs == self.lastFrameTimeNs) {
return;
}
RTCMTLRenderer *renderer;
if ([videoFrame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) {
RTCCVPixelBuffer *buffer = (RTCCVPixelBuffer*)videoFrame.buffer;
const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer);
if (pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB) {
if (!self.rendererRGB) {
self.rendererRGB = [RTCMTLVideoView createRGBRenderer];
if (![self.rendererRGB addRenderingDestination:self.metalView]) {
self.rendererRGB = nil;
RTCLogError(@"Failed to create RGB renderer");
return;
}
}
renderer = self.rendererRGB;
} else {
if (!self.rendererNV12) {
self.rendererNV12 = [RTCMTLVideoView createNV12Renderer];
if (![self.rendererNV12 addRenderingDestination:self.metalView]) {
self.rendererNV12 = nil;
RTCLogError(@"Failed to create NV12 renderer");
return;
}
}
renderer = self.rendererNV12;
}
} else {
if (!self.rendererI420) {
self.rendererI420 = [RTCMTLVideoView createI420Renderer];
if (![self.rendererI420 addRenderingDestination:self.metalView]) {
self.rendererI420 = nil;
RTCLogError(@"Failed to create I420 renderer");
return;
}
}
renderer = self.rendererI420;
}
renderer.rotationOverride = self.rotationOverride;
[renderer drawFrame:videoFrame];
self.lastFrameTimeNs = videoFrame.timeStampNs;
}
- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size {
}
#pragma mark -
- (void)setRotationOverride:(NSValue *)rotationOverride {
_rotationOverride = rotationOverride;
self.metalView.drawableSize = [self drawableSize];
[self setNeedsLayout];
}
- (RTCVideoRotation)frameRotation {
if (self.rotationOverride) {
RTCVideoRotation rotation;
if (@available(iOS 11, *)) {
[self.rotationOverride getValue:&rotation size:sizeof(rotation)];
} else {
[self.rotationOverride getValue:&rotation];
}
return rotation;
}
return self.videoFrame.rotation;
}
- (CGSize)drawableSize {
// Flip width/height if the rotations are not the same.
CGSize videoFrameSize = self.videoFrameSize;
RTCVideoRotation frameRotation = [self frameRotation];
BOOL useLandscape =
(frameRotation == RTCVideoRotation_0) || (frameRotation == RTCVideoRotation_180);
BOOL sizeIsLandscape = (self.videoFrame.rotation == RTCVideoRotation_0) ||
(self.videoFrame.rotation == RTCVideoRotation_180);
if (useLandscape == sizeIsLandscape) {
return videoFrameSize;
} else {
return CGSizeMake(videoFrameSize.height, videoFrameSize.width);
}
}
#pragma mark - RTCVideoRenderer
- (void)setSize:(CGSize)size {
__weak RTCMTLVideoView *weakSelf = self;
dispatch_async(dispatch_get_main_queue(), ^{
RTCMTLVideoView *strongSelf = weakSelf;
if (!strongSelf) {
return;
}
strongSelf.videoFrameSize = size;
CGSize drawableSize = [strongSelf drawableSize];
strongSelf.metalView.drawableSize = drawableSize;
[strongSelf setNeedsLayout];
// Use strongSelf consistently; referencing `self` inside the block would
// capture it strongly and defeat the weak/strong dance above.
[strongSelf.delegate videoView:strongSelf didChangeVideoSize:size];
});
}
- (void)renderFrame:(nullable RTCVideoFrame *)frame {
if (!self.isEnabled) {
return;
}
if (frame == nil) {
RTCLogInfo(@"Incoming frame is nil. Exiting render callback.");
return;
}
self.videoFrame = frame;
}
@end
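For reference, a minimal usage sketch of the view above. `parentView` and `remoteVideoTrack` are assumptions (a container UIView and an RTCVideoTrack obtained from a peer connection elsewhere); `addRenderer:` is the track API that ends up driving `renderFrame:` on this view.

// Sketch: embedding RTCMTLVideoView as a remote-track renderer.
RTCMTLVideoView *videoView = [[RTCMTLVideoView alloc] initWithFrame:parentView.bounds];
videoView.videoContentMode = UIViewContentModeScaleAspectFit;
[parentView addSubview:videoView];
// The view conforms to RTCVideoRenderer, so the track can feed it frames.
[remoteVideoTrack addRenderer:videoView];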

View File

@ -1,32 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCAudioSource.h"
#import "RTCMediaSource+Private.h"
@interface RTCAudioSource ()
/**
* The AudioSourceInterface object passed to this RTCAudioSource during
* construction.
*/
@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::AudioSourceInterface> nativeAudioSource;
/** Initialize an RTCAudioSource from a native AudioSourceInterface. */
- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
nativeAudioSource:(rtc::scoped_refptr<webrtc::AudioSourceInterface>)nativeAudioSource
NS_DESIGNATED_INITIALIZER;
- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
type:(RTCMediaSourceType)type NS_UNAVAILABLE;
@end

View File

@ -1,52 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCAudioSource+Private.h"
#include "rtc_base/checks.h"
@implementation RTCAudioSource
@synthesize volume = _volume;
@synthesize nativeAudioSource = _nativeAudioSource;
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
nativeAudioSource:
(rtc::scoped_refptr<webrtc::AudioSourceInterface>)nativeAudioSource {
RTC_DCHECK(factory);
RTC_DCHECK(nativeAudioSource);
if (self = [super initWithFactory:factory
nativeMediaSource:nativeAudioSource
type:RTCMediaSourceTypeAudio]) {
_nativeAudioSource = nativeAudioSource;
}
return self;
}
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
type:(RTCMediaSourceType)type {
RTC_NOTREACHED();
return nil;
}
- (NSString *)description {
NSString *stateString = [[self class] stringForState:self.state];
return [NSString stringWithFormat:@"RTCAudioSource( %p ): %@", self, stateString];
}
- (void)setVolume:(double)volume {
_volume = volume;
_nativeAudioSource->SetVolume(volume);
}
@end
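A hedged sketch of typical client usage: the designated initializer above is private, so applications would go through the factory instead. `factory` is an existing RTCPeerConnectionFactory, and `audioSourceWithConstraints:` is assumed from its public API.

// Sketch: obtaining an audio source and adjusting its volume.
RTCAudioSource *audioSource = [factory audioSourceWithConstraints:nil];
audioSource.volume = 5.0;  // Forwarded to AudioSourceInterface::SetVolume().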

View File

@ -1,30 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCAudioTrack.h"
#include "api/mediastreaminterface.h"
NS_ASSUME_NONNULL_BEGIN
@class RTCPeerConnectionFactory;
@interface RTCAudioTrack ()
/** AudioTrackInterface created or passed in at construction. */
@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::AudioTrackInterface> nativeAudioTrack;
/** Initialize an RTCAudioTrack with an id. */
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
source:(RTCAudioSource *)source
trackId:(NSString *)trackId;
@end
NS_ASSUME_NONNULL_END

View File

@ -1,68 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCAudioTrack+Private.h"
#import "NSString+StdString.h"
#import "RTCAudioSource+Private.h"
#import "RTCMediaStreamTrack+Private.h"
#import "RTCPeerConnectionFactory+Private.h"
#include "rtc_base/checks.h"
@implementation RTCAudioTrack
@synthesize source = _source;
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
source:(RTCAudioSource *)source
trackId:(NSString *)trackId {
RTC_DCHECK(factory);
RTC_DCHECK(source);
RTC_DCHECK(trackId.length);
std::string nativeId = [NSString stdStringForString:trackId];
rtc::scoped_refptr<webrtc::AudioTrackInterface> track =
factory.nativeFactory->CreateAudioTrack(nativeId, source.nativeAudioSource);
if (self = [self initWithFactory:factory nativeTrack:track type:RTCMediaStreamTrackTypeAudio]) {
_source = source;
}
return self;
}
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
type:(RTCMediaStreamTrackType)type {
NSParameterAssert(factory);
NSParameterAssert(nativeTrack);
NSParameterAssert(type == RTCMediaStreamTrackTypeAudio);
return [super initWithFactory:factory nativeTrack:nativeTrack type:type];
}
- (RTCAudioSource *)source {
if (!_source) {
rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
self.nativeAudioTrack->GetSource();
if (source) {
_source =
[[RTCAudioSource alloc] initWithFactory:self.factory nativeAudioSource:source.get()];
}
}
return _source;
}
#pragma mark - Private
- (rtc::scoped_refptr<webrtc::AudioTrackInterface>)nativeAudioTrack {
return static_cast<webrtc::AudioTrackInterface *>(self.nativeTrack.get());
}
@end
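Continuing the sketch above, a track wraps a source via the factory; `audioTrackWithSource:trackId:` is assumed from the public API and would bottom out in the `initWithFactory:source:trackId:` path shown here.

// Sketch: wrapping the audio source in a track.
RTCAudioTrack *audioTrack = [factory audioTrackWithSource:audioSource trackId:@"audio0"];
audioTrack.isEnabled = YES;  // Inherited RTCMediaStreamTrack property.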

View File

@ -1,498 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import "WebRTC/RTCCameraVideoCapturer.h"
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
#if TARGET_OS_IPHONE
#import "WebRTC/UIDevice+RTCDevice.h"
#endif
#import "AVCaptureSession+DevicePosition.h"
#import "RTCDispatcher+Private.h"
const int64_t kNanosecondsPerSecond = 1000000000;
@interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate>
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@end
@implementation RTCCameraVideoCapturer {
AVCaptureVideoDataOutput *_videoDataOutput;
AVCaptureSession *_captureSession;
AVCaptureDevice *_currentDevice;
FourCharCode _preferredOutputPixelFormat;
FourCharCode _outputPixelFormat;
BOOL _hasRetriedOnFatalError;
BOOL _isRunning;
// Will the session be running once all asynchronous operations have been completed?
BOOL _willBeRunning;
RTCVideoRotation _rotation;
#if TARGET_OS_IPHONE
UIDeviceOrientation _orientation;
#endif
}
@synthesize frameQueue = _frameQueue;
@synthesize captureSession = _captureSession;
- (instancetype)init {
return [self initWithDelegate:nil captureSession:[[AVCaptureSession alloc] init]];
}
- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
return [self initWithDelegate:delegate captureSession:[[AVCaptureSession alloc] init]];
}
// This initializer is used for testing.
- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate
captureSession:(AVCaptureSession *)captureSession {
if (self = [super initWithDelegate:delegate]) {
// Create the capture session and all relevant inputs and outputs. We need
// to do this in init because the application may want the capture session
// before the capturer is started, e.g. for an AVCaptureVideoPreviewLayer.
// All objects created here are retained until dealloc and never recreated.
if (![self setupCaptureSession:captureSession]) {
return nil;
}
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
_orientation = UIDeviceOrientationPortrait;
_rotation = RTCVideoRotation_90;
[center addObserver:self
selector:@selector(deviceOrientationDidChange:)
name:UIDeviceOrientationDidChangeNotification
object:nil];
[center addObserver:self
selector:@selector(handleCaptureSessionInterruption:)
name:AVCaptureSessionWasInterruptedNotification
object:_captureSession];
[center addObserver:self
selector:@selector(handleCaptureSessionInterruptionEnded:)
name:AVCaptureSessionInterruptionEndedNotification
object:_captureSession];
[center addObserver:self
selector:@selector(handleApplicationDidBecomeActive:)
name:UIApplicationDidBecomeActiveNotification
object:[UIApplication sharedApplication]];
#endif
[center addObserver:self
selector:@selector(handleCaptureSessionRuntimeError:)
name:AVCaptureSessionRuntimeErrorNotification
object:_captureSession];
[center addObserver:self
selector:@selector(handleCaptureSessionDidStartRunning:)
name:AVCaptureSessionDidStartRunningNotification
object:_captureSession];
[center addObserver:self
selector:@selector(handleCaptureSessionDidStopRunning:)
name:AVCaptureSessionDidStopRunningNotification
object:_captureSession];
}
return self;
}
- (void)dealloc {
NSAssert(
!_willBeRunning,
@"Session was still running in RTCCameraVideoCapturer dealloc. Forgot to call stopCapture?");
[[NSNotificationCenter defaultCenter] removeObserver:self];
}
+ (NSArray<AVCaptureDevice *> *)captureDevices {
#if defined(WEBRTC_IOS) && defined(__IPHONE_10_0) && \
__IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_10_0
AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession
discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ]
mediaType:AVMediaTypeVideo
position:AVCaptureDevicePositionUnspecified];
return session.devices;
#else
return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
#endif
}
+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {
// Support opening the device in any format. We make sure it's converted to a format we
// can handle, if needed, in the method `-setupVideoDataOutput`.
return device.formats;
}
- (FourCharCode)preferredOutputPixelFormat {
return _preferredOutputPixelFormat;
}
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
format:(AVCaptureDeviceFormat *)format
fps:(NSInteger)fps {
[self startCaptureWithDevice:device format:format fps:fps completionHandler:nil];
}
- (void)stopCapture {
[self stopCaptureWithCompletionHandler:nil];
}
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
format:(AVCaptureDeviceFormat *)format
fps:(NSInteger)fps
completionHandler:(nullable void (^)(NSError *))completionHandler {
_willBeRunning = YES;
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps);
#if TARGET_OS_IPHONE
[[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
_currentDevice = device;
NSError *error = nil;
if (![_currentDevice lockForConfiguration:&error]) {
RTCLogError(
@"Failed to lock device %@. Error: %@", _currentDevice, error.userInfo);
if (completionHandler) {
completionHandler(error);
}
_willBeRunning = NO;
return;
}
[self reconfigureCaptureSessionInput];
[self updateOrientation];
[self updateDeviceCaptureFormat:format fps:fps];
[self updateVideoDataOutputPixelFormat:format];
[_captureSession startRunning];
[_currentDevice unlockForConfiguration];
_isRunning = YES;
if (completionHandler) {
completionHandler(nil);
}
}];
}
- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
_willBeRunning = NO;
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
RTCLogInfo("Stop");
_currentDevice = nil;
for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
[_captureSession removeInput:oldInput];
}
[_captureSession stopRunning];
#if TARGET_OS_IPHONE
[[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
_isRunning = NO;
if (completionHandler) {
completionHandler();
}
}];
}
#pragma mark iOS notifications
#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
[self updateOrientation];
}];
}
#endif
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
NSParameterAssert(captureOutput == _videoDataOutput);
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
!CMSampleBufferDataIsReady(sampleBuffer)) {
return;
}
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (pixelBuffer == nil) {
return;
}
#if TARGET_OS_IPHONE
// Default to portrait orientation on iPhone.
BOOL usingFrontCamera = NO;
// Check the image's EXIF for the camera the image came from as the image could have been
// delayed as we set alwaysDiscardsLateVideoFrames to NO.
AVCaptureDevicePosition cameraPosition =
[AVCaptureSession devicePositionForSampleBuffer:sampleBuffer];
if (cameraPosition != AVCaptureDevicePositionUnspecified) {
usingFrontCamera = AVCaptureDevicePositionFront == cameraPosition;
} else {
AVCaptureDeviceInput *deviceInput =
(AVCaptureDeviceInput *)((AVCaptureInputPort *)connection.inputPorts.firstObject).input;
usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position;
}
switch (_orientation) {
case UIDeviceOrientationPortrait:
_rotation = RTCVideoRotation_90;
break;
case UIDeviceOrientationPortraitUpsideDown:
_rotation = RTCVideoRotation_270;
break;
case UIDeviceOrientationLandscapeLeft:
_rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
break;
case UIDeviceOrientationLandscapeRight:
_rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
break;
case UIDeviceOrientationFaceUp:
case UIDeviceOrientationFaceDown:
case UIDeviceOrientationUnknown:
// Ignore.
break;
}
#else
// No rotation on Mac.
_rotation = RTCVideoRotation_0;
#endif
RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
kNanosecondsPerSecond;
RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
rotation:_rotation
timeStampNs:timeStampNs];
[self.delegate capturer:self didCaptureVideoFrame:videoFrame];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
RTCLogError(@"Dropped sample buffer.");
}
#pragma mark - AVCaptureSession notifications
- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
NSString *reasonString = nil;
#if TARGET_OS_IPHONE
NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
if (reason) {
switch (reason.intValue) {
case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
reasonString = @"VideoDeviceNotAvailableInBackground";
break;
case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
reasonString = @"AudioDeviceInUseByAnotherClient";
break;
case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
reasonString = @"VideoDeviceInUseByAnotherClient";
break;
case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
break;
}
}
#endif
RTCLog(@"Capture session interrupted: %@", reasonString);
}
- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
RTCLog(@"Capture session interruption ended.");
}
- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
RTCLogError(@"Capture session runtime error: %@", error);
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
#if TARGET_OS_IPHONE
if (error.code == AVErrorMediaServicesWereReset) {
[self handleNonFatalError];
} else {
[self handleFatalError];
}
#else
[self handleFatalError];
#endif
}];
}
- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
RTCLog(@"Capture session started.");
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
// If we successfully restarted after an unknown error,
// allow future retries on fatal errors.
_hasRetriedOnFatalError = NO;
}];
}
- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
RTCLog(@"Capture session stopped.");
}
- (void)handleFatalError {
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
if (!_hasRetriedOnFatalError) {
RTCLogWarning(@"Attempting to recover from fatal capture error.");
[self handleNonFatalError];
_hasRetriedOnFatalError = YES;
} else {
RTCLogError(@"Previous fatal error recovery failed.");
}
}];
}
- (void)handleNonFatalError {
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
RTCLog(@"Restarting capture session after error.");
if (_isRunning) {
[_captureSession startRunning];
}
}];
}
#if TARGET_OS_IPHONE
#pragma mark - UIApplication notifications
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
if (_isRunning && !_captureSession.isRunning) {
RTCLog(@"Restarting capture session on active.");
[_captureSession startRunning];
}
}];
}
#endif // TARGET_OS_IPHONE
#pragma mark - Private
- (dispatch_queue_t)frameQueue {
if (!_frameQueue) {
_frameQueue =
dispatch_queue_create("org.webrtc.cameravideocapturer.video", DISPATCH_QUEUE_SERIAL);
dispatch_set_target_queue(_frameQueue,
dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
}
return _frameQueue;
}
- (BOOL)setupCaptureSession:(AVCaptureSession *)captureSession {
NSAssert(_captureSession == nil, @"Setup capture session called twice.");
_captureSession = captureSession;
#if defined(WEBRTC_IOS)
_captureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
_captureSession.usesApplicationAudioSession = NO;
#endif
[self setupVideoDataOutput];
// Add the output.
if (![_captureSession canAddOutput:_videoDataOutput]) {
RTCLogError(@"Video data output unsupported.");
return NO;
}
[_captureSession addOutput:_videoDataOutput];
return YES;
}
- (void)setupVideoDataOutput {
NSAssert(_videoDataOutput == nil, @"Setup video data output called twice.");
AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
// `videoDataOutput.availableVideoCVPixelFormatTypes` returns the pixel formats supported by the
// device with the most efficient output format first. Find the first format that we support.
NSSet<NSNumber *> *supportedPixelFormats = [RTCCVPixelBuffer supportedPixelFormats];
NSMutableOrderedSet *availablePixelFormats =
[NSMutableOrderedSet orderedSetWithArray:videoDataOutput.availableVideoCVPixelFormatTypes];
[availablePixelFormats intersectSet:supportedPixelFormats];
NSNumber *pixelFormat = availablePixelFormats.firstObject;
NSAssert(pixelFormat, @"Output device has no supported formats.");
_preferredOutputPixelFormat = [pixelFormat unsignedIntValue];
_outputPixelFormat = _preferredOutputPixelFormat;
videoDataOutput.videoSettings = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : pixelFormat};
videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
[videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
_videoDataOutput = videoDataOutput;
}
- (void)updateVideoDataOutputPixelFormat:(AVCaptureDeviceFormat *)format {
FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
if (![[RTCCVPixelBuffer supportedPixelFormats] containsObject:@(mediaSubType)]) {
mediaSubType = _preferredOutputPixelFormat;
}
if (mediaSubType != _outputPixelFormat) {
_outputPixelFormat = mediaSubType;
_videoDataOutput.videoSettings =
@{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(mediaSubType) };
}
}
#pragma mark - Private, called inside capture queue
- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {
NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
@"updateDeviceCaptureFormat must be called on the capture queue.");
@try {
_currentDevice.activeFormat = format;
_currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps);
} @catch (NSException *exception) {
RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo);
return;
}
}
- (void)reconfigureCaptureSessionInput {
NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
@"reconfigureCaptureSessionInput must be called on the capture queue.");
NSError *error = nil;
AVCaptureDeviceInput *input =
[AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
if (!input) {
RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
return;
}
[_captureSession beginConfiguration];
for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
[_captureSession removeInput:oldInput];
}
if ([_captureSession canAddInput:input]) {
[_captureSession addInput:input];
} else {
RTCLogError(@"Cannot add camera as an input to the session.");
}
[_captureSession commitConfiguration];
}
- (void)updateOrientation {
NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
@"updateOrientation must be called on the capture queue.");
#if TARGET_OS_IPHONE
_orientation = [UIDevice currentDevice].orientation;
#endif
}
@end
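A usage sketch built from the methods above: enumerate devices, pick a supported format, and start capture. The `delegate` (typically an RTCVideoSource acting as id<RTCVideoCapturerDelegate>) is an assumption.

// Sketch: starting capture on the front camera at 30 fps.
RTCCameraVideoCapturer *capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:delegate];
AVCaptureDevice *frontCamera = nil;
for (AVCaptureDevice *device in [RTCCameraVideoCapturer captureDevices]) {
  if (device.position == AVCaptureDevicePositionFront) {
    frontCamera = device;
    break;
  }
}
// Any supported format works; a real client would match a target resolution.
AVCaptureDeviceFormat *format =
    [RTCCameraVideoCapturer supportedFormatsForDevice:frontCamera].firstObject;
[capturer startCaptureWithDevice:frontCamera
                          format:format
                             fps:30
               completionHandler:^(NSError *error) {
                 if (error) {
                   RTCLogError(@"Capture failed to start: %@", error);
                 }
               }];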

View File

@ -1,70 +0,0 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCCertificate.h"
#import "WebRTC/RTCLogging.h"
#include "rtc_base/logging.h"
#include "rtc_base/rtccertificategenerator.h"
#include "rtc_base/sslidentity.h"
@implementation RTCCertificate
@synthesize private_key = _private_key;
@synthesize certificate = _certificate;
- (id)copyWithZone:(NSZone *)zone {
id copy = [[[self class] alloc] initWithPrivateKey:[self.private_key copyWithZone:zone]
certificate:[self.certificate copyWithZone:zone]];
return copy;
}
- (instancetype)initWithPrivateKey:(NSString *)private_key certificate:(NSString *)certificate {
if (self = [super init]) {
_private_key = [private_key copy];
_certificate = [certificate copy];
}
return self;
}
+ (nullable RTCCertificate *)generateCertificateWithParams:(NSDictionary *)params {
rtc::KeyType keyType = rtc::KT_ECDSA;
NSString *keyTypeString = [params valueForKey:@"name"];
if (keyTypeString && [keyTypeString isEqualToString:@"RSASSA-PKCS1-v1_5"]) {
keyType = rtc::KT_RSA;
}
NSNumber *expires = [params valueForKey:@"expires"];
rtc::scoped_refptr<rtc::RTCCertificate> cc_certificate = nullptr;
if (expires != nil) {
uint64_t expirationTimestamp = [expires unsignedLongLongValue];
cc_certificate = rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(keyType),
expirationTimestamp);
} else {
cc_certificate =
rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(keyType), absl::nullopt);
}
if (!cc_certificate) {
RTCLogError(@"Failed to generate certificate.");
return nullptr;
}
// Grab the PEM strings and create an Objective-C RTCCertificate.
rtc::RTCCertificatePEM pem = cc_certificate->ToPEM();
std::string pem_private_key = pem.private_key();
std::string pem_certificate = pem.certificate();
RTC_LOG(LS_INFO) << "CERT PEM ";
RTC_LOG(LS_INFO) << pem_certificate;
RTCCertificate *cert = [[RTCCertificate alloc] initWithPrivateKey:@(pem_private_key.c_str())
certificate:@(pem_certificate.c_str())];
return cert;
}
@end
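A short sketch of the generator above. Only the "expires" and "name" keys are consulted, and any name other than "RSASSA-PKCS1-v1_5" leaves the default ECDSA key type in place.

// Sketch: generating an ECDSA certificate valid for ~100000 seconds.
NSDictionary *params = @{@"expires" : @100000, @"name" : @"ECDSA"};
RTCCertificate *certificate = [RTCCertificate generateCertificateWithParams:params];
if (!certificate) {
  RTCLogError(@"Certificate generation failed.");
}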

View File

@ -8,21 +8,4 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCConfiguration.h"
#include "api/peerconnectioninterface.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCConfiguration ()
/** Optional TurnCustomizer.
* With this class one can modify outgoing TURN messages.
* The object passed in must remain valid until PeerConnection::Close() is
* called.
*/
@property(nonatomic, nullable) webrtc::TurnCustomizer* turnCustomizer;
@end
NS_ASSUME_NONNULL_END
#import "api/peerconnection/RTCConfiguration+Native.h"

View File

@ -1,78 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCConfiguration.h"
#include "api/peerconnectioninterface.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCConfiguration ()
+ (webrtc::PeerConnectionInterface::IceTransportsType)nativeTransportsTypeForTransportPolicy:
(RTCIceTransportPolicy)policy;
+ (RTCIceTransportPolicy)transportPolicyForTransportsType:
(webrtc::PeerConnectionInterface::IceTransportsType)nativeType;
+ (NSString *)stringForTransportPolicy:(RTCIceTransportPolicy)policy;
+ (webrtc::PeerConnectionInterface::BundlePolicy)nativeBundlePolicyForPolicy:
(RTCBundlePolicy)policy;
+ (RTCBundlePolicy)bundlePolicyForNativePolicy:
(webrtc::PeerConnectionInterface::BundlePolicy)nativePolicy;
+ (NSString *)stringForBundlePolicy:(RTCBundlePolicy)policy;
+ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeRtcpMuxPolicyForPolicy:
(RTCRtcpMuxPolicy)policy;
+ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativePolicy:
(webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativePolicy;
+ (NSString *)stringForRtcpMuxPolicy:(RTCRtcpMuxPolicy)policy;
+ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativeTcpCandidatePolicyForPolicy:
(RTCTcpCandidatePolicy)policy;
+ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativePolicy:
(webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativePolicy;
+ (NSString *)stringForTcpCandidatePolicy:(RTCTcpCandidatePolicy)policy;
+ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativeCandidateNetworkPolicyForPolicy:
(RTCCandidateNetworkPolicy)policy;
+ (RTCCandidateNetworkPolicy)candidateNetworkPolicyForNativePolicy:
(webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativePolicy;
+ (NSString *)stringForCandidateNetworkPolicy:(RTCCandidateNetworkPolicy)policy;
+ (rtc::KeyType)nativeEncryptionKeyTypeForKeyType:(RTCEncryptionKeyType)keyType;
+ (webrtc::SdpSemantics)nativeSdpSemanticsForSdpSemantics:(RTCSdpSemantics)sdpSemantics;
+ (RTCSdpSemantics)sdpSemanticsForNativeSdpSemantics:(webrtc::SdpSemantics)sdpSemantics;
+ (NSString *)stringForSdpSemantics:(RTCSdpSemantics)sdpSemantics;
/**
* RTCConfiguration struct representation of this RTCConfiguration. This is
* needed to pass to the underlying C++ APIs.
*/
- (nullable webrtc::PeerConnectionInterface::RTCConfiguration *)createNativeConfiguration;
- (instancetype)initWithNativeConfiguration:
(const webrtc::PeerConnectionInterface::RTCConfiguration &)config NS_DESIGNATED_INITIALIZER;
@end
NS_ASSUME_NONNULL_END

View File

@ -1,460 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCConfiguration+Private.h"
#include <memory>
#import "RTCConfiguration+Native.h"
#import "RTCIceServer+Private.h"
#import "RTCIntervalRange+Private.h"
#import "WebRTC/RTCLogging.h"
#include "rtc_base/rtccertificategenerator.h"
#include "rtc_base/sslidentity.h"
@implementation RTCConfiguration
@synthesize iceServers = _iceServers;
@synthesize certificate = _certificate;
@synthesize iceTransportPolicy = _iceTransportPolicy;
@synthesize bundlePolicy = _bundlePolicy;
@synthesize rtcpMuxPolicy = _rtcpMuxPolicy;
@synthesize tcpCandidatePolicy = _tcpCandidatePolicy;
@synthesize candidateNetworkPolicy = _candidateNetworkPolicy;
@synthesize continualGatheringPolicy = _continualGatheringPolicy;
@synthesize maxIPv6Networks = _maxIPv6Networks;
@synthesize disableLinkLocalNetworks = _disableLinkLocalNetworks;
@synthesize audioJitterBufferMaxPackets = _audioJitterBufferMaxPackets;
@synthesize audioJitterBufferFastAccelerate = _audioJitterBufferFastAccelerate;
@synthesize iceConnectionReceivingTimeout = _iceConnectionReceivingTimeout;
@synthesize iceBackupCandidatePairPingInterval =
_iceBackupCandidatePairPingInterval;
@synthesize keyType = _keyType;
@synthesize iceCandidatePoolSize = _iceCandidatePoolSize;
@synthesize shouldPruneTurnPorts = _shouldPruneTurnPorts;
@synthesize shouldPresumeWritableWhenFullyRelayed =
_shouldPresumeWritableWhenFullyRelayed;
@synthesize iceCheckMinInterval = _iceCheckMinInterval;
@synthesize iceRegatherIntervalRange = _iceRegatherIntervalRange;
@synthesize sdpSemantics = _sdpSemantics;
@synthesize turnCustomizer = _turnCustomizer;
@synthesize activeResetSrtpParams = _activeResetSrtpParams;
- (instancetype)init {
// Copy defaults.
webrtc::PeerConnectionInterface::RTCConfiguration config(
webrtc::PeerConnectionInterface::RTCConfigurationType::kAggressive);
return [self initWithNativeConfiguration:config];
}
- (instancetype)initWithNativeConfiguration:
(const webrtc::PeerConnectionInterface::RTCConfiguration &)config {
if (self = [super init]) {
NSMutableArray *iceServers = [NSMutableArray array];
for (const webrtc::PeerConnectionInterface::IceServer& server : config.servers) {
RTCIceServer *iceServer = [[RTCIceServer alloc] initWithNativeServer:server];
[iceServers addObject:iceServer];
}
_iceServers = iceServers;
if (!config.certificates.empty()) {
rtc::scoped_refptr<rtc::RTCCertificate> native_cert;
native_cert = config.certificates[0];
rtc::RTCCertificatePEM native_pem = native_cert->ToPEM();
_certificate =
[[RTCCertificate alloc] initWithPrivateKey:@(native_pem.private_key().c_str())
certificate:@(native_pem.certificate().c_str())];
}
_iceTransportPolicy =
[[self class] transportPolicyForTransportsType:config.type];
_bundlePolicy =
[[self class] bundlePolicyForNativePolicy:config.bundle_policy];
_rtcpMuxPolicy =
[[self class] rtcpMuxPolicyForNativePolicy:config.rtcp_mux_policy];
_tcpCandidatePolicy = [[self class] tcpCandidatePolicyForNativePolicy:
config.tcp_candidate_policy];
_candidateNetworkPolicy = [[self class]
candidateNetworkPolicyForNativePolicy:config.candidate_network_policy];
webrtc::PeerConnectionInterface::ContinualGatheringPolicy nativePolicy =
config.continual_gathering_policy;
_continualGatheringPolicy =
[[self class] continualGatheringPolicyForNativePolicy:nativePolicy];
_maxIPv6Networks = config.max_ipv6_networks;
_disableLinkLocalNetworks = config.disable_link_local_networks;
_audioJitterBufferMaxPackets = config.audio_jitter_buffer_max_packets;
_audioJitterBufferFastAccelerate = config.audio_jitter_buffer_fast_accelerate;
_iceConnectionReceivingTimeout = config.ice_connection_receiving_timeout;
_iceBackupCandidatePairPingInterval =
config.ice_backup_candidate_pair_ping_interval;
_keyType = RTCEncryptionKeyTypeECDSA;
_iceCandidatePoolSize = config.ice_candidate_pool_size;
_shouldPruneTurnPorts = config.prune_turn_ports;
_shouldPresumeWritableWhenFullyRelayed =
config.presume_writable_when_fully_relayed;
if (config.ice_check_min_interval) {
_iceCheckMinInterval =
[NSNumber numberWithInt:*config.ice_check_min_interval];
}
if (config.ice_regather_interval_range) {
const rtc::IntervalRange &nativeIntervalRange = config.ice_regather_interval_range.value();
_iceRegatherIntervalRange =
[[RTCIntervalRange alloc] initWithNativeIntervalRange:nativeIntervalRange];
}
_sdpSemantics = [[self class] sdpSemanticsForNativeSdpSemantics:config.sdp_semantics];
_turnCustomizer = config.turn_customizer;
_activeResetSrtpParams = config.active_reset_srtp_params;
}
return self;
}
- (NSString *)description {
static NSString *formatString =
@"RTCConfiguration: "
@"{\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%d\n%d\n%d\n%d\n%d\n%d\n%d\n%@\n%@\n%d\n%d\n%d\n}\n";
return [NSString
stringWithFormat:formatString,
_iceServers,
[[self class] stringForTransportPolicy:_iceTransportPolicy],
[[self class] stringForBundlePolicy:_bundlePolicy],
[[self class] stringForRtcpMuxPolicy:_rtcpMuxPolicy],
[[self class] stringForTcpCandidatePolicy:_tcpCandidatePolicy],
[[self class] stringForCandidateNetworkPolicy:_candidateNetworkPolicy],
[[self class] stringForContinualGatheringPolicy:_continualGatheringPolicy],
[[self class] stringForSdpSemantics:_sdpSemantics],
_audioJitterBufferMaxPackets,
_audioJitterBufferFastAccelerate,
_iceConnectionReceivingTimeout,
_iceBackupCandidatePairPingInterval,
_iceCandidatePoolSize,
_shouldPruneTurnPorts,
_shouldPresumeWritableWhenFullyRelayed,
_iceCheckMinInterval,
_iceRegatherIntervalRange,
_disableLinkLocalNetworks,
_maxIPv6Networks,
_activeResetSrtpParams];
}
#pragma mark - Private
- (webrtc::PeerConnectionInterface::RTCConfiguration *)
createNativeConfiguration {
std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration>
nativeConfig(new webrtc::PeerConnectionInterface::RTCConfiguration(
webrtc::PeerConnectionInterface::RTCConfigurationType::kAggressive));
for (RTCIceServer *iceServer in _iceServers) {
nativeConfig->servers.push_back(iceServer.nativeServer);
}
nativeConfig->type =
[[self class] nativeTransportsTypeForTransportPolicy:_iceTransportPolicy];
nativeConfig->bundle_policy =
[[self class] nativeBundlePolicyForPolicy:_bundlePolicy];
nativeConfig->rtcp_mux_policy =
[[self class] nativeRtcpMuxPolicyForPolicy:_rtcpMuxPolicy];
nativeConfig->tcp_candidate_policy =
[[self class] nativeTcpCandidatePolicyForPolicy:_tcpCandidatePolicy];
nativeConfig->candidate_network_policy = [[self class]
nativeCandidateNetworkPolicyForPolicy:_candidateNetworkPolicy];
nativeConfig->continual_gathering_policy = [[self class]
nativeContinualGatheringPolicyForPolicy:_continualGatheringPolicy];
nativeConfig->max_ipv6_networks = _maxIPv6Networks;
nativeConfig->disable_link_local_networks = _disableLinkLocalNetworks;
nativeConfig->audio_jitter_buffer_max_packets = _audioJitterBufferMaxPackets;
nativeConfig->audio_jitter_buffer_fast_accelerate =
_audioJitterBufferFastAccelerate ? true : false;
nativeConfig->ice_connection_receiving_timeout =
_iceConnectionReceivingTimeout;
nativeConfig->ice_backup_candidate_pair_ping_interval =
_iceBackupCandidatePairPingInterval;
rtc::KeyType keyType =
[[self class] nativeEncryptionKeyTypeForKeyType:_keyType];
if (_certificate != nullptr) {
// If a PEM certificate was configured, use it.
RTC_LOG(LS_INFO) << "Have configured cert - using it.";
std::string pem_private_key = [[_certificate private_key] UTF8String];
std::string pem_certificate = [[_certificate certificate] UTF8String];
rtc::RTCCertificatePEM pem = rtc::RTCCertificatePEM(pem_private_key, pem_certificate);
rtc::scoped_refptr<rtc::RTCCertificate> certificate = rtc::RTCCertificate::FromPEM(pem);
RTC_LOG(LS_INFO) << "Created cert from PEM strings.";
if (!certificate) {
RTC_LOG(LS_ERROR) << "Failed to generate certificate from PEM.";
return nullptr;
}
nativeConfig->certificates.push_back(certificate);
} else {
RTC_LOG(LS_INFO) << "Don't have configured cert.";
// Generate non-default certificate.
if (keyType != rtc::KT_DEFAULT) {
rtc::scoped_refptr<rtc::RTCCertificate> certificate =
rtc::RTCCertificateGenerator::GenerateCertificate(rtc::KeyParams(keyType),
absl::optional<uint64_t>());
if (!certificate) {
RTCLogError(@"Failed to generate certificate.");
return nullptr;
}
nativeConfig->certificates.push_back(certificate);
}
}
nativeConfig->ice_candidate_pool_size = _iceCandidatePoolSize;
nativeConfig->prune_turn_ports = _shouldPruneTurnPorts ? true : false;
nativeConfig->presume_writable_when_fully_relayed =
_shouldPresumeWritableWhenFullyRelayed ? true : false;
if (_iceCheckMinInterval != nil) {
nativeConfig->ice_check_min_interval = absl::optional<int>(_iceCheckMinInterval.intValue);
}
if (_iceRegatherIntervalRange != nil) {
std::unique_ptr<rtc::IntervalRange> nativeIntervalRange(
_iceRegatherIntervalRange.nativeIntervalRange);
nativeConfig->ice_regather_interval_range =
absl::optional<rtc::IntervalRange>(*nativeIntervalRange);
}
nativeConfig->sdp_semantics = [[self class] nativeSdpSemanticsForSdpSemantics:_sdpSemantics];
if (_turnCustomizer) {
nativeConfig->turn_customizer = _turnCustomizer;
}
nativeConfig->active_reset_srtp_params = _activeResetSrtpParams ? true : false;
return nativeConfig.release();
}
+ (webrtc::PeerConnectionInterface::IceTransportsType)
nativeTransportsTypeForTransportPolicy:(RTCIceTransportPolicy)policy {
switch (policy) {
case RTCIceTransportPolicyNone:
return webrtc::PeerConnectionInterface::kNone;
case RTCIceTransportPolicyRelay:
return webrtc::PeerConnectionInterface::kRelay;
case RTCIceTransportPolicyNoHost:
return webrtc::PeerConnectionInterface::kNoHost;
case RTCIceTransportPolicyAll:
return webrtc::PeerConnectionInterface::kAll;
}
}
+ (RTCIceTransportPolicy)transportPolicyForTransportsType:
(webrtc::PeerConnectionInterface::IceTransportsType)nativeType {
switch (nativeType) {
case webrtc::PeerConnectionInterface::kNone:
return RTCIceTransportPolicyNone;
case webrtc::PeerConnectionInterface::kRelay:
return RTCIceTransportPolicyRelay;
case webrtc::PeerConnectionInterface::kNoHost:
return RTCIceTransportPolicyNoHost;
case webrtc::PeerConnectionInterface::kAll:
return RTCIceTransportPolicyAll;
}
}
+ (NSString *)stringForTransportPolicy:(RTCIceTransportPolicy)policy {
switch (policy) {
case RTCIceTransportPolicyNone:
return @"NONE";
case RTCIceTransportPolicyRelay:
return @"RELAY";
case RTCIceTransportPolicyNoHost:
return @"NO_HOST";
case RTCIceTransportPolicyAll:
return @"ALL";
}
}
+ (webrtc::PeerConnectionInterface::BundlePolicy)nativeBundlePolicyForPolicy:
(RTCBundlePolicy)policy {
switch (policy) {
case RTCBundlePolicyBalanced:
return webrtc::PeerConnectionInterface::kBundlePolicyBalanced;
case RTCBundlePolicyMaxCompat:
return webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat;
case RTCBundlePolicyMaxBundle:
return webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle;
}
}
+ (RTCBundlePolicy)bundlePolicyForNativePolicy:
(webrtc::PeerConnectionInterface::BundlePolicy)nativePolicy {
switch (nativePolicy) {
case webrtc::PeerConnectionInterface::kBundlePolicyBalanced:
return RTCBundlePolicyBalanced;
case webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat:
return RTCBundlePolicyMaxCompat;
case webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle:
return RTCBundlePolicyMaxBundle;
}
}
+ (NSString *)stringForBundlePolicy:(RTCBundlePolicy)policy {
switch (policy) {
case RTCBundlePolicyBalanced:
return @"BALANCED";
case RTCBundlePolicyMaxCompat:
return @"MAX_COMPAT";
case RTCBundlePolicyMaxBundle:
return @"MAX_BUNDLE";
}
}
+ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeRtcpMuxPolicyForPolicy:
(RTCRtcpMuxPolicy)policy {
switch (policy) {
case RTCRtcpMuxPolicyNegotiate:
return webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
case RTCRtcpMuxPolicyRequire:
return webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire;
}
}
+ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativePolicy:
(webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativePolicy {
switch (nativePolicy) {
case webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate:
return RTCRtcpMuxPolicyNegotiate;
case webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire:
return RTCRtcpMuxPolicyRequire;
}
}
+ (NSString *)stringForRtcpMuxPolicy:(RTCRtcpMuxPolicy)policy {
switch (policy) {
case RTCRtcpMuxPolicyNegotiate:
return @"NEGOTIATE";
case RTCRtcpMuxPolicyRequire:
return @"REQUIRE";
}
}
+ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)
nativeTcpCandidatePolicyForPolicy:(RTCTcpCandidatePolicy)policy {
switch (policy) {
case RTCTcpCandidatePolicyEnabled:
return webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled;
case RTCTcpCandidatePolicyDisabled:
return webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled;
}
}
+ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)
nativeCandidateNetworkPolicyForPolicy:(RTCCandidateNetworkPolicy)policy {
switch (policy) {
case RTCCandidateNetworkPolicyAll:
return webrtc::PeerConnectionInterface::kCandidateNetworkPolicyAll;
case RTCCandidateNetworkPolicyLowCost:
return webrtc::PeerConnectionInterface::kCandidateNetworkPolicyLowCost;
}
}
+ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativePolicy:
(webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativePolicy {
switch (nativePolicy) {
case webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled:
return RTCTcpCandidatePolicyEnabled;
case webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled:
return RTCTcpCandidatePolicyDisabled;
}
}
+ (NSString *)stringForTcpCandidatePolicy:(RTCTcpCandidatePolicy)policy {
switch (policy) {
case RTCTcpCandidatePolicyEnabled:
return @"TCP_ENABLED";
case RTCTcpCandidatePolicyDisabled:
return @"TCP_DISABLED";
}
}
+ (RTCCandidateNetworkPolicy)candidateNetworkPolicyForNativePolicy:
(webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativePolicy {
switch (nativePolicy) {
case webrtc::PeerConnectionInterface::kCandidateNetworkPolicyAll:
return RTCCandidateNetworkPolicyAll;
case webrtc::PeerConnectionInterface::kCandidateNetworkPolicyLowCost:
return RTCCandidateNetworkPolicyLowCost;
}
}
+ (NSString *)stringForCandidateNetworkPolicy:
(RTCCandidateNetworkPolicy)policy {
switch (policy) {
case RTCCandidateNetworkPolicyAll:
return @"CANDIDATE_ALL_NETWORKS";
case RTCCandidateNetworkPolicyLowCost:
return @"CANDIDATE_LOW_COST_NETWORKS";
}
}
+ (webrtc::PeerConnectionInterface::ContinualGatheringPolicy)
nativeContinualGatheringPolicyForPolicy:
(RTCContinualGatheringPolicy)policy {
switch (policy) {
case RTCContinualGatheringPolicyGatherOnce:
return webrtc::PeerConnectionInterface::GATHER_ONCE;
case RTCContinualGatheringPolicyGatherContinually:
return webrtc::PeerConnectionInterface::GATHER_CONTINUALLY;
}
}
+ (RTCContinualGatheringPolicy)continualGatheringPolicyForNativePolicy:
(webrtc::PeerConnectionInterface::ContinualGatheringPolicy)nativePolicy {
switch (nativePolicy) {
case webrtc::PeerConnectionInterface::GATHER_ONCE:
return RTCContinualGatheringPolicyGatherOnce;
case webrtc::PeerConnectionInterface::GATHER_CONTINUALLY:
return RTCContinualGatheringPolicyGatherContinually;
}
}
+ (NSString *)stringForContinualGatheringPolicy:
(RTCContinualGatheringPolicy)policy {
switch (policy) {
case RTCContinualGatheringPolicyGatherOnce:
return @"GATHER_ONCE";
case RTCContinualGatheringPolicyGatherContinually:
return @"GATHER_CONTINUALLY";
}
}
+ (rtc::KeyType)nativeEncryptionKeyTypeForKeyType:
(RTCEncryptionKeyType)keyType {
switch (keyType) {
case RTCEncryptionKeyTypeRSA:
return rtc::KT_RSA;
case RTCEncryptionKeyTypeECDSA:
return rtc::KT_ECDSA;
}
}
+ (webrtc::SdpSemantics)nativeSdpSemanticsForSdpSemantics:(RTCSdpSemantics)sdpSemantics {
switch (sdpSemantics) {
case RTCSdpSemanticsPlanB:
return webrtc::SdpSemantics::kPlanB;
case RTCSdpSemanticsUnifiedPlan:
return webrtc::SdpSemantics::kUnifiedPlan;
}
}
+ (RTCSdpSemantics)sdpSemanticsForNativeSdpSemantics:(webrtc::SdpSemantics)sdpSemantics {
switch (sdpSemantics) {
case webrtc::SdpSemantics::kPlanB:
return RTCSdpSemanticsPlanB;
case webrtc::SdpSemantics::kUnifiedPlan:
return RTCSdpSemanticsUnifiedPlan;
}
}
+ (NSString *)stringForSdpSemantics:(RTCSdpSemantics)sdpSemantics {
switch (sdpSemantics) {
case RTCSdpSemanticsPlanB:
return @"PLAN_B";
case RTCSdpSemanticsUnifiedPlan:
return @"UNIFIED_PLAN";
}
}
@end
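For orientation, a hedged sketch of the configuration a client would build before it is converted by createNativeConfiguration; RTCIceServer's initWithURLStrings: is assumed from the public API, and the STUN URL is a placeholder.

// Sketch: a typical configuration.
RTCConfiguration *config = [[RTCConfiguration alloc] init];
config.iceServers = @[ [[RTCIceServer alloc] initWithURLStrings:@[ @"stun:stun.example.org" ]] ];
config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
config.continualGatheringPolicy = RTCContinualGatheringPolicyGatherContinually;
config.iceCandidatePoolSize = 1;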

View File

@ -1,50 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCDataChannel.h"
#include "api/datachannelinterface.h"
#include "rtc_base/scoped_ref_ptr.h"
NS_ASSUME_NONNULL_BEGIN
@class RTCPeerConnectionFactory;
@interface RTCDataBuffer ()
/**
* The native DataBuffer representation of this RTCDatabuffer object. This is
* needed to pass to the underlying C++ APIs.
*/
@property(nonatomic, readonly) const webrtc::DataBuffer *nativeDataBuffer;
/** Initialize an RTCDataBuffer from a native DataBuffer. */
- (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer &)nativeBuffer;
@end
@interface RTCDataChannel ()
/** Initialize an RTCDataChannel from a native DataChannelInterface. */
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
nativeDataChannel:(rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel
NS_DESIGNATED_INITIALIZER;
+ (webrtc::DataChannelInterface::DataState)nativeDataChannelStateForState:
(RTCDataChannelState)state;
+ (RTCDataChannelState)dataChannelStateForNativeState:
(webrtc::DataChannelInterface::DataState)nativeState;
+ (NSString *)stringForState:(RTCDataChannelState)state;
@end
NS_ASSUME_NONNULL_END

View File

@ -1,223 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCDataChannel+Private.h"
#import "NSString+StdString.h"
#include <memory>
namespace webrtc {
class DataChannelDelegateAdapter : public DataChannelObserver {
public:
DataChannelDelegateAdapter(RTCDataChannel *channel) { channel_ = channel; }
void OnStateChange() override {
[channel_.delegate dataChannelDidChangeState:channel_];
}
void OnMessage(const DataBuffer& buffer) override {
RTCDataBuffer *data_buffer =
[[RTCDataBuffer alloc] initWithNativeBuffer:buffer];
[channel_.delegate dataChannel:channel_
didReceiveMessageWithBuffer:data_buffer];
}
void OnBufferedAmountChange(uint64_t previousAmount) override {
id<RTCDataChannelDelegate> delegate = channel_.delegate;
SEL sel = @selector(dataChannel:didChangeBufferedAmount:);
if ([delegate respondsToSelector:sel]) {
[delegate dataChannel:channel_ didChangeBufferedAmount:previousAmount];
}
}
private:
__weak RTCDataChannel *channel_;
};
}  // namespace webrtc
@implementation RTCDataBuffer {
std::unique_ptr<webrtc::DataBuffer> _dataBuffer;
}
- (instancetype)initWithData:(NSData *)data isBinary:(BOOL)isBinary {
NSParameterAssert(data);
if (self = [super init]) {
rtc::CopyOnWriteBuffer buffer(
reinterpret_cast<const uint8_t*>(data.bytes), data.length);
_dataBuffer.reset(new webrtc::DataBuffer(buffer, isBinary));
}
return self;
}
- (NSData *)data {
return [NSData dataWithBytes:_dataBuffer->data.data()
length:_dataBuffer->data.size()];
}
- (BOOL)isBinary {
return _dataBuffer->binary;
}
#pragma mark - Private
- (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer&)nativeBuffer {
if (self = [super init]) {
_dataBuffer.reset(new webrtc::DataBuffer(nativeBuffer));
}
return self;
}
- (const webrtc::DataBuffer *)nativeDataBuffer {
return _dataBuffer.get();
}
@end
@implementation RTCDataChannel {
RTCPeerConnectionFactory *_factory;
rtc::scoped_refptr<webrtc::DataChannelInterface> _nativeDataChannel;
std::unique_ptr<webrtc::DataChannelDelegateAdapter> _observer;
BOOL _isObserverRegistered;
}
@synthesize delegate = _delegate;
- (void)dealloc {
// Handles unregistering the observer properly. We need to do this because
// there may still be other references to the underlying data channel.
_nativeDataChannel->UnregisterObserver();
}
- (NSString *)label {
return [NSString stringForStdString:_nativeDataChannel->label()];
}
- (BOOL)isReliable {
return _nativeDataChannel->reliable();
}
- (BOOL)isOrdered {
return _nativeDataChannel->ordered();
}
- (NSUInteger)maxRetransmitTime {
return self.maxPacketLifeTime;
}
- (uint16_t)maxPacketLifeTime {
return _nativeDataChannel->maxRetransmitTime();
}
- (uint16_t)maxRetransmits {
return _nativeDataChannel->maxRetransmits();
}
- (NSString *)protocol {
return [NSString stringForStdString:_nativeDataChannel->protocol()];
}
- (BOOL)isNegotiated {
return _nativeDataChannel->negotiated();
}
- (NSInteger)streamId {
return self.channelId;
}
- (int)channelId {
return _nativeDataChannel->id();
}
- (RTCDataChannelState)readyState {
return [[self class] dataChannelStateForNativeState:
_nativeDataChannel->state()];
}
- (uint64_t)bufferedAmount {
return _nativeDataChannel->buffered_amount();
}
- (void)close {
_nativeDataChannel->Close();
}
- (BOOL)sendData:(RTCDataBuffer *)data {
return _nativeDataChannel->Send(*data.nativeDataBuffer);
}
- (NSString *)description {
return [NSString stringWithFormat:@"RTCDataChannel:\n%ld\n%@\n%@",
(long)self.channelId,
self.label,
[[self class]
stringForState:self.readyState]];
}
#pragma mark - Private
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
nativeDataChannel:
(rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel {
NSParameterAssert(nativeDataChannel);
if (self = [super init]) {
_factory = factory;
_nativeDataChannel = nativeDataChannel;
_observer.reset(new webrtc::DataChannelDelegateAdapter(self));
_nativeDataChannel->RegisterObserver(_observer.get());
}
return self;
}
+ (webrtc::DataChannelInterface::DataState)
nativeDataChannelStateForState:(RTCDataChannelState)state {
switch (state) {
case RTCDataChannelStateConnecting:
return webrtc::DataChannelInterface::DataState::kConnecting;
case RTCDataChannelStateOpen:
return webrtc::DataChannelInterface::DataState::kOpen;
case RTCDataChannelStateClosing:
return webrtc::DataChannelInterface::DataState::kClosing;
case RTCDataChannelStateClosed:
return webrtc::DataChannelInterface::DataState::kClosed;
}
}
+ (RTCDataChannelState)dataChannelStateForNativeState:
(webrtc::DataChannelInterface::DataState)nativeState {
switch (nativeState) {
case webrtc::DataChannelInterface::DataState::kConnecting:
return RTCDataChannelStateConnecting;
case webrtc::DataChannelInterface::DataState::kOpen:
return RTCDataChannelStateOpen;
case webrtc::DataChannelInterface::DataState::kClosing:
return RTCDataChannelStateClosing;
case webrtc::DataChannelInterface::DataState::kClosed:
return RTCDataChannelStateClosed;
}
}
+ (NSString *)stringForState:(RTCDataChannelState)state {
switch (state) {
case RTCDataChannelStateConnecting:
return @"Connecting";
case RTCDataChannelStateOpen:
return @"Open";
case RTCDataChannelStateClosing:
return @"Closing";
case RTCDataChannelStateClosed:
return @"Closed";
}
}
@end
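A sketch of the send path above, assuming `channel` is an RTCDataChannel obtained from a peer connection:

// Sketch: sending a UTF-8 text message over an open channel.
NSData *payload = [@"hello" dataUsingEncoding:NSUTF8StringEncoding];
RTCDataBuffer *buffer = [[RTCDataBuffer alloc] initWithData:payload isBinary:NO];
if (channel.readyState == RTCDataChannelStateOpen) {
  // Forwards to DataChannelInterface::Send(); NO means the send was rejected.
  if (![channel sendData:buffer]) {
    RTCLogError(@"Send failed.");
  }
}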

View File

@ -1,23 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCDataChannelConfiguration.h"
#include "api/datachannelinterface.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCDataChannelConfiguration ()
@property(nonatomic, readonly) webrtc::DataChannelInit nativeDataChannelInit;
@end
NS_ASSUME_NONNULL_END

View File

@ -1,83 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCDataChannelConfiguration+Private.h"
#import "NSString+StdString.h"
@implementation RTCDataChannelConfiguration
@synthesize nativeDataChannelInit = _nativeDataChannelInit;
- (BOOL)isOrdered {
return _nativeDataChannelInit.ordered;
}
- (void)setIsOrdered:(BOOL)isOrdered {
_nativeDataChannelInit.ordered = isOrdered;
}
- (NSInteger)maxRetransmitTimeMs {
return self.maxPacketLifeTime;
}
- (void)setMaxRetransmitTimeMs:(NSInteger)maxRetransmitTimeMs {
self.maxPacketLifeTime = maxRetransmitTimeMs;
}
- (int)maxPacketLifeTime {
return _nativeDataChannelInit.maxRetransmitTime;
}
- (void)setMaxPacketLifeTime:(int)maxPacketLifeTime {
_nativeDataChannelInit.maxRetransmitTime = maxPacketLifeTime;
}
- (int)maxRetransmits {
return _nativeDataChannelInit.maxRetransmits;
}
- (void)setMaxRetransmits:(int)maxRetransmits {
_nativeDataChannelInit.maxRetransmits = maxRetransmits;
}
- (NSString *)protocol {
return [NSString stringForStdString:_nativeDataChannelInit.protocol];
}
- (void)setProtocol:(NSString *)protocol {
_nativeDataChannelInit.protocol = [NSString stdStringForString:protocol];
}
- (BOOL)isNegotiated {
return _nativeDataChannelInit.negotiated;
}
- (void)setIsNegotiated:(BOOL)isNegotiated {
_nativeDataChannelInit.negotiated = isNegotiated;
}
- (int)streamId {
return self.channelId;
}
- (void)setStreamId:(int)streamId {
self.channelId = streamId;
}
- (int)channelId {
return _nativeDataChannelInit.id;
}
- (void)setChannelId:(int)channelId {
_nativeDataChannelInit.id = channelId;
}
@end
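A sketch of a pre-negotiated channel configuration using the accessors above; note that channelId maps straight onto DataChannelInit::id.

// Sketch: ordered, application-negotiated channel on stream 1.
RTCDataChannelConfiguration *dcConfig = [[RTCDataChannelConfiguration alloc] init];
dcConfig.isOrdered = YES;
dcConfig.isNegotiated = YES;
dcConfig.channelId = 1;
dcConfig.maxRetransmits = 3;  // Partial reliability by retransmit count.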

View File

@ -1,45 +0,0 @@
/*
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCVideoCodecFactory.h"
#import "WebRTC/RTCVideoCodecH264.h"
#import "WebRTC/RTCVideoDecoderVP8.h"
#if !defined(RTC_DISABLE_VP9)
#import "WebRTC/RTCVideoDecoderVP9.h"
#endif
@implementation RTCDefaultVideoDecoderFactory
- (id<RTCVideoDecoder>)createDecoder:(RTCVideoCodecInfo *)info {
if ([info.name isEqualToString:kRTCVideoCodecH264Name]) {
return [[RTCVideoDecoderH264 alloc] init];
} else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) {
return [RTCVideoDecoderVP8 vp8Decoder];
#if !defined(RTC_DISABLE_VP9)
} else if ([info.name isEqualToString:kRTCVideoCodecVp9Name]) {
return [RTCVideoDecoderVP9 vp9Decoder];
#endif
}
return nil;
}
- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
return @[
[[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name],
[[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp8Name],
#if !defined(RTC_DISABLE_VP9)
[[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp9Name],
#endif
];
}
@end
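A usage sketch for the factory above; createDecoder: returns nil for codec names it does not recognize:

RTCDefaultVideoDecoderFactory *factory = [[RTCDefaultVideoDecoderFactory alloc] init];
RTCVideoCodecInfo *h264Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name];
id<RTCVideoDecoder> decoder = [factory createDecoder:h264Info];  // nil for unknown codecs.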

View File

@ -1,87 +0,0 @@
/*
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCVideoCodecFactory.h"
#import "WebRTC/RTCVideoCodec.h"
#import "WebRTC/RTCVideoCodecH264.h"
#import "WebRTC/RTCVideoEncoderVP8.h"
#if !defined(RTC_DISABLE_VP9)
#import "WebRTC/RTCVideoEncoderVP9.h"
#endif
@implementation RTCDefaultVideoEncoderFactory
@synthesize preferredCodec;
+ (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
NSDictionary<NSString *, NSString *> *constrainedHighParams = @{
@"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh,
@"level-asymmetry-allowed" : @"1",
@"packetization-mode" : @"1",
};
RTCVideoCodecInfo *constrainedHighInfo =
[[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedHighParams];
NSDictionary<NSString *, NSString *> *constrainedBaselineParams = @{
@"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline,
@"level-asymmetry-allowed" : @"1",
@"packetization-mode" : @"1",
};
RTCVideoCodecInfo *constrainedBaselineInfo =
[[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecH264Name
parameters:constrainedBaselineParams];
RTCVideoCodecInfo *vp8Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp8Name];
#if !defined(RTC_DISABLE_VP9)
RTCVideoCodecInfo *vp9Info = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp9Name];
#endif
return @[
constrainedHighInfo,
constrainedBaselineInfo,
vp8Info,
#if !defined(RTC_DISABLE_VP9)
vp9Info,
#endif
];
}
- (id<RTCVideoEncoder>)createEncoder:(RTCVideoCodecInfo *)info {
if ([info.name isEqualToString:kRTCVideoCodecH264Name]) {
return [[RTCVideoEncoderH264 alloc] initWithCodecInfo:info];
} else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) {
return [RTCVideoEncoderVP8 vp8Encoder];
#if !defined(RTC_DISABLE_VP9)
} else if ([info.name isEqualToString:kRTCVideoCodecVp9Name]) {
return [RTCVideoEncoderVP9 vp9Encoder];
#endif
}
return nil;
}
- (NSArray<RTCVideoCodecInfo *> *)supportedCodecs {
NSMutableArray<RTCVideoCodecInfo *> *codecs = [[[self class] supportedCodecs] mutableCopy];
NSMutableArray<RTCVideoCodecInfo *> *orderedCodecs = [NSMutableArray array];
NSUInteger index = [codecs indexOfObject:self.preferredCodec];
if (index != NSNotFound) {
[orderedCodecs addObject:[codecs objectAtIndex:index]];
[codecs removeObjectAtIndex:index];
}
[orderedCodecs addObjectsFromArray:codecs];
return [orderedCodecs copy];
}
@end
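A sketch of the preferredCodec reordering implemented by the instance method above, assuming the preferred info compares equal to one of the class-level entries:

RTCDefaultVideoEncoderFactory *factory = [[RTCDefaultVideoEncoderFactory alloc] init];
factory.preferredCodec = [[RTCVideoCodecInfo alloc] initWithName:kRTCVideoCodecVp8Name];
// VP8 is moved to the front; the remaining codecs keep their original order.
NSArray<RTCVideoCodecInfo *> *codecs = [factory supportedCodecs];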

View File

@ -1,29 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCDtmfSender.h"
#include "api/dtmfsenderinterface.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCDtmfSender : NSObject <RTCDtmfSender>
@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::DtmfSenderInterface> nativeDtmfSender;
- (instancetype)init NS_UNAVAILABLE;
/** Initialize an RTCDtmfSender with a native DtmfSenderInterface. */
- (instancetype)initWithNativeDtmfSender:
(rtc::scoped_refptr<webrtc::DtmfSenderInterface>)nativeDtmfSender NS_DESIGNATED_INITIALIZER;
@end
NS_ASSUME_NONNULL_END

View File

@ -1,74 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCDtmfSender+Private.h"
#import "NSString+StdString.h"
#import "WebRTC/RTCLogging.h"
#include "rtc_base/timeutils.h"
@implementation RTCDtmfSender {
rtc::scoped_refptr<webrtc::DtmfSenderInterface> _nativeDtmfSender;
}
- (BOOL)canInsertDtmf {
return _nativeDtmfSender->CanInsertDtmf();
}
- (BOOL)insertDtmf:(nonnull NSString *)tones
duration:(NSTimeInterval)duration
interToneGap:(NSTimeInterval)interToneGap {
RTC_DCHECK(tones != nil);
int durationMs = static_cast<int>(duration * rtc::kNumMillisecsPerSec);
int interToneGapMs = static_cast<int>(interToneGap * rtc::kNumMillisecsPerSec);
return _nativeDtmfSender->InsertDtmf(
[NSString stdStringForString:tones], durationMs, interToneGapMs);
}
- (nonnull NSString *)remainingTones {
return [NSString stringForStdString:_nativeDtmfSender->tones()];
}
- (NSTimeInterval)duration {
return static_cast<NSTimeInterval>(_nativeDtmfSender->duration()) / rtc::kNumMillisecsPerSec;
}
- (NSTimeInterval)interToneGap {
return static_cast<NSTimeInterval>(_nativeDtmfSender->inter_tone_gap()) /
rtc::kNumMillisecsPerSec;
}
- (NSString *)description {
return [NSString
stringWithFormat:
@"RTCDtmfSender {\n remainingTones: %@\n duration: %f sec\n interToneGap: %f sec\n}",
[self remainingTones],
[self duration],
[self interToneGap]];
}
#pragma mark - Private
- (rtc::scoped_refptr<webrtc::DtmfSenderInterface>)nativeDtmfSender {
return _nativeDtmfSender;
}
- (instancetype)initWithNativeDtmfSender:
(rtc::scoped_refptr<webrtc::DtmfSenderInterface>)nativeDtmfSender {
NSParameterAssert(nativeDtmfSender);
if (self = [super init]) {
_nativeDtmfSender = nativeDtmfSender;
RTCLogInfo(@"RTCDtmfSender(%p): created DTMF sender: %@", self, self.description);
}
return self;
}
@end
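A usage sketch, assuming |sender| was obtained from an audio RTCRtpSender; durations are given in seconds and converted to milliseconds internally:

if (sender.canInsertDtmf) {
  // Queue the tones "123#": 100 ms per tone, 70 ms between tones.
  [sender insertDtmf:@"123#" duration:0.1 interToneGap:0.07];
  NSLog(@"Remaining tones: %@", sender.remainingTones);
}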

View File

@ -1,83 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCVideoCodec.h"
#import "RTCVideoCodec+Private.h"
#include "rtc_base/numerics/safe_conversions.h"
@implementation RTCEncodedImage
@synthesize buffer = _buffer;
@synthesize encodedWidth = _encodedWidth;
@synthesize encodedHeight = _encodedHeight;
@synthesize timeStamp = _timeStamp;
@synthesize captureTimeMs = _captureTimeMs;
@synthesize ntpTimeMs = _ntpTimeMs;
@synthesize flags = _flags;
@synthesize encodeStartMs = _encodeStartMs;
@synthesize encodeFinishMs = _encodeFinishMs;
@synthesize frameType = _frameType;
@synthesize rotation = _rotation;
@synthesize completeFrame = _completeFrame;
@synthesize qp = _qp;
@synthesize contentType = _contentType;
- (instancetype)initWithNativeEncodedImage:(webrtc::EncodedImage)encodedImage {
if (self = [super init]) {
// Wrap the buffer in NSData without copying; ownership stays with the native image.
_buffer = [NSData dataWithBytesNoCopy:encodedImage._buffer
length:encodedImage._length
freeWhenDone:NO];
_encodedWidth = rtc::dchecked_cast<int32_t>(encodedImage._encodedWidth);
_encodedHeight = rtc::dchecked_cast<int32_t>(encodedImage._encodedHeight);
_timeStamp = encodedImage.Timestamp();
_captureTimeMs = encodedImage.capture_time_ms_;
_ntpTimeMs = encodedImage.ntp_time_ms_;
_flags = encodedImage.timing_.flags;
_encodeStartMs = encodedImage.timing_.encode_start_ms;
_encodeFinishMs = encodedImage.timing_.encode_finish_ms;
_frameType = static_cast<RTCFrameType>(encodedImage._frameType);
_rotation = static_cast<RTCVideoRotation>(encodedImage.rotation_);
_completeFrame = encodedImage._completeFrame;
_qp = @(encodedImage.qp_);
_contentType = (encodedImage.content_type_ == webrtc::VideoContentType::SCREENSHARE) ?
RTCVideoContentTypeScreenshare :
RTCVideoContentTypeUnspecified;
}
return self;
}
- (webrtc::EncodedImage)nativeEncodedImage {
// Wrap |_buffer|'s bytes without copying; the returned image must not outlive this object.
webrtc::EncodedImage encodedImage(
(uint8_t *)_buffer.bytes, (size_t)_buffer.length, (size_t)_buffer.length);
encodedImage._encodedWidth = rtc::dchecked_cast<uint32_t>(_encodedWidth);
encodedImage._encodedHeight = rtc::dchecked_cast<uint32_t>(_encodedHeight);
encodedImage.SetTimestamp(_timeStamp);
encodedImage.capture_time_ms_ = _captureTimeMs;
encodedImage.ntp_time_ms_ = _ntpTimeMs;
encodedImage.timing_.flags = _flags;
encodedImage.timing_.encode_start_ms = _encodeStartMs;
encodedImage.timing_.encode_finish_ms = _encodeFinishMs;
encodedImage._frameType = webrtc::FrameType(_frameType);
encodedImage.rotation_ = webrtc::VideoRotation(_rotation);
encodedImage._completeFrame = _completeFrame;
encodedImage.qp_ = _qp ? _qp.intValue : -1;
encodedImage.content_type_ = (_contentType == RTCVideoContentTypeScreenshare) ?
webrtc::VideoContentType::SCREENSHARE :
webrtc::VideoContentType::UNSPECIFIED;
return encodedImage;
}
@end
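A round-trip sketch (Objective-C++), assuming |nativeImage| comes from a native encoder callback; neither direction copies the payload, so both objects share one buffer and their lifetimes must be managed accordingly:

RTCEncodedImage *objcImage = [[RTCEncodedImage alloc] initWithNativeEncodedImage:nativeImage];
webrtc::EncodedImage roundTripped = [objcImage nativeEncodedImage];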

View File

@ -1,202 +0,0 @@
/**
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCFileVideoCapturer.h"
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCVideoFrameBuffer.h"
NSString *const kRTCFileVideoCapturerErrorDomain = @"org.webrtc.RTCFileVideoCapturer";
typedef NS_ENUM(NSInteger, RTCFileVideoCapturerErrorCode) {
RTCFileVideoCapturerErrorCode_CapturerRunning = 2000,
RTCFileVideoCapturerErrorCode_FileNotFound
};
typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
RTCFileVideoCapturerStatusNotInitialized,
RTCFileVideoCapturerStatusStarted,
RTCFileVideoCapturerStatusStopped
};
@implementation RTCFileVideoCapturer {
AVAssetReader *_reader;
AVAssetReaderTrackOutput *_outTrack;
RTCFileVideoCapturerStatus _status;
CMTime _lastPresentationTime;
dispatch_queue_t _frameQueue;
NSURL *_fileURL;
}
- (void)startCapturingFromFileNamed:(NSString *)nameOfFile
onError:(RTCFileVideoCapturerErrorBlock)errorBlock {
if (_status == RTCFileVideoCapturerStatusStarted) {
NSError *error =
[NSError errorWithDomain:kRTCFileVideoCapturerErrorDomain
code:RTCFileVideoCapturerErrorCode_CapturerRunning
userInfo:@{NSLocalizedDescriptionKey : @"Capturer has been started."}];
errorBlock(error);
return;
} else {
_status = RTCFileVideoCapturerStatusStarted;
}
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
NSString *pathForFile = [self pathForFileName:nameOfFile];
if (!pathForFile) {
NSString *errorString =
[NSString stringWithFormat:@"File %@ not found in bundle", nameOfFile];
NSError *error = [NSError errorWithDomain:kRTCFileVideoCapturerErrorDomain
code:RTCFileVideoCapturerErrorCode_FileNotFound
userInfo:@{NSLocalizedDescriptionKey : errorString}];
errorBlock(error);
return;
}
_lastPresentationTime = CMTimeMake(0, 1);  // A zero timescale would make the CMTime invalid.
_fileURL = [NSURL fileURLWithPath:pathForFile];
[self setupReaderOnError:errorBlock];
});
}
- (void)setupReaderOnError:(RTCFileVideoCapturerErrorBlock)errorBlock {
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:_fileURL options:nil];
NSArray *allTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
NSError *error = nil;
_reader = [[AVAssetReader alloc] initWithAsset:asset error:&error];
if (error) {
// |errorBlock| is nil when the reader is restarted to loop the file; guard before calling.
if (errorBlock) {
errorBlock(error);
}
return;
}
NSDictionary *options = @{
(NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
};
_outTrack =
[[AVAssetReaderTrackOutput alloc] initWithTrack:allTracks.firstObject outputSettings:options];
[_reader addOutput:_outTrack];
[_reader startReading];
RTCLog(@"File capturer started reading");
[self readNextBuffer];
}
- (void)stopCapture {
_status = RTCFileVideoCapturerStatusStopped;
RTCLog(@"File capturer stopped.");
}
#pragma mark - Private
- (nullable NSString *)pathForFileName:(NSString *)fileName {
NSArray *nameComponents = [fileName componentsSeparatedByString:@"."];
if (nameComponents.count != 2) {
return nil;
}
NSString *path =
[[NSBundle mainBundle] pathForResource:nameComponents[0] ofType:nameComponents[1]];
return path;
}
- (dispatch_queue_t)frameQueue {
if (!_frameQueue) {
_frameQueue = dispatch_queue_create("org.webrtc.filecapturer.video", DISPATCH_QUEUE_SERIAL);
dispatch_set_target_queue(_frameQueue,
dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0));
}
return _frameQueue;
}
- (void)readNextBuffer {
if (_status == RTCFileVideoCapturerStatusStopped) {
[_reader cancelReading];
_reader = nil;
return;
}
if (_reader.status == AVAssetReaderStatusCompleted) {
[_reader cancelReading];
_reader = nil;
[self setupReaderOnError:nil];
return;
}
CMSampleBufferRef sampleBuffer = [_outTrack copyNextSampleBuffer];
if (!sampleBuffer) {
[self readNextBuffer];
return;
}
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
!CMSampleBufferDataIsReady(sampleBuffer)) {
CFRelease(sampleBuffer);
[self readNextBuffer];
return;
}
[self publishSampleBuffer:sampleBuffer];
}
- (void)publishSampleBuffer:(CMSampleBufferRef)sampleBuffer {
CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
Float64 presentationDifference =
CMTimeGetSeconds(CMTimeSubtract(presentationTime, _lastPresentationTime));
_lastPresentationTime = presentationTime;
int64_t presentationDifferenceRound = llround(presentationDifference * NSEC_PER_SEC);
__block dispatch_source_t timer = [self createStrictTimer];
// Strict timer that will fire |presentationDifferenceRound| ns from now and never again.
dispatch_source_set_timer(timer,
dispatch_time(DISPATCH_TIME_NOW, presentationDifferenceRound),
DISPATCH_TIME_FOREVER,
0);
dispatch_source_set_event_handler(timer, ^{
dispatch_source_cancel(timer);
timer = nil;
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (!pixelBuffer) {
CFRelease(sampleBuffer);
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
[self readNextBuffer];
});
return;
}
RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
NSTimeInterval timeStampSeconds = CACurrentMediaTime();
int64_t timeStampNs = llround(timeStampSeconds * NSEC_PER_SEC);
RTCVideoFrame *videoFrame =
[[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer rotation:0 timeStampNs:timeStampNs];
CFRelease(sampleBuffer);
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
[self readNextBuffer];
});
[self.delegate capturer:self didCaptureVideoFrame:videoFrame];
});
dispatch_activate(timer);
}
- (dispatch_source_t)createStrictTimer {
dispatch_source_t timer = dispatch_source_create(
DISPATCH_SOURCE_TYPE_TIMER, 0, DISPATCH_TIMER_STRICT, [self frameQueue]);
return timer;
}
- (void)dealloc {
[self stopCapture];
}
@end
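A usage sketch, assuming |videoSource| implements RTCVideoCapturerDelegate (initWithDelegate: comes from the RTCVideoCapturer base class) and the file is bundled; pathForFileName: expects a name with exactly one dot:

RTCFileVideoCapturer *capturer = [[RTCFileVideoCapturer alloc] initWithDelegate:videoSource];
[capturer startCapturingFromFileNamed:@"foreman.mp4"
                              onError:^(NSError *error) {
                                NSLog(@"File capture failed: %@", error);
                              }];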

View File

@ -1,58 +0,0 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*
*/
#import "WebRTC/RTCVideoCodecH264.h"
#include "media/base/h264_profile_level_id.h"
@interface RTCH264ProfileLevelId ()
@property(nonatomic, assign) RTCH264Profile profile;
@property(nonatomic, assign) RTCH264Level level;
@property(nonatomic, strong) NSString *hexString;
@end
@implementation RTCH264ProfileLevelId
@synthesize profile = _profile;
@synthesize level = _level;
@synthesize hexString = _hexString;
- (instancetype)initWithHexString:(NSString *)hexString {
if (self = [super init]) {
self.hexString = hexString;
absl::optional<webrtc::H264::ProfileLevelId> profile_level_id =
webrtc::H264::ParseProfileLevelId([hexString cStringUsingEncoding:NSUTF8StringEncoding]);
if (profile_level_id.has_value()) {
self.profile = static_cast<RTCH264Profile>(profile_level_id->profile);
self.level = static_cast<RTCH264Level>(profile_level_id->level);
}
}
return self;
}
- (instancetype)initWithProfile:(RTCH264Profile)profile level:(RTCH264Level)level {
if (self = [super init]) {
self.profile = profile;
self.level = level;
absl::optional<std::string> hex_string =
webrtc::H264::ProfileLevelIdToString(webrtc::H264::ProfileLevelId(
static_cast<webrtc::H264::Profile>(profile), static_cast<webrtc::H264::Level>(level)));
self.hexString =
[NSString stringWithCString:hex_string.value_or("").c_str() encoding:NSUTF8StringEncoding];
}
return self;
}
@end
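For reference, the hex string is the RFC 6184 profile-level-id; "640c1f", for example, parses as Constrained High at level 3.1:

RTCH264ProfileLevelId *profileLevelId =
    [[RTCH264ProfileLevelId alloc] initWithHexString:@"640c1f"];
// profileLevelId.profile == RTCH264ProfileConstrainedHigh
// profileLevelId.level == RTCH264Level3_1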

View File

@ -1,35 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCIceCandidate.h"
#include <memory>
#include "api/jsep.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCIceCandidate ()
/**
* The native IceCandidateInterface representation of this RTCIceCandidate
* object. This is needed to pass to the underlying C++ APIs.
*/
@property(nonatomic, readonly) std::unique_ptr<webrtc::IceCandidateInterface> nativeCandidate;
/**
* Initialize an RTCIceCandidate from a native IceCandidateInterface. No
* ownership is taken of the native candidate.
*/
- (instancetype)initWithNativeCandidate:(const webrtc::IceCandidateInterface *)candidate;
@end
NS_ASSUME_NONNULL_END

View File

@ -1,76 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCIceCandidate+Private.h"
#include <memory>
#import "NSString+StdString.h"
#import "WebRTC/RTCLogging.h"
@implementation RTCIceCandidate
@synthesize sdpMid = _sdpMid;
@synthesize sdpMLineIndex = _sdpMLineIndex;
@synthesize sdp = _sdp;
@synthesize serverUrl = _serverUrl;
- (instancetype)initWithSdp:(NSString *)sdp
sdpMLineIndex:(int)sdpMLineIndex
sdpMid:(NSString *)sdpMid {
NSParameterAssert(sdp.length);
if (self = [super init]) {
_sdpMid = [sdpMid copy];
_sdpMLineIndex = sdpMLineIndex;
_sdp = [sdp copy];
}
return self;
}
- (NSString *)description {
return [NSString stringWithFormat:@"RTCIceCandidate:\n%@\n%d\n%@\n%@",
_sdpMid,
_sdpMLineIndex,
_sdp,
_serverUrl];
}
#pragma mark - Private
- (instancetype)initWithNativeCandidate:
(const webrtc::IceCandidateInterface *)candidate {
NSParameterAssert(candidate);
std::string sdp;
candidate->ToString(&sdp);
RTCIceCandidate *rtcCandidate =
[self initWithSdp:[NSString stringForStdString:sdp]
sdpMLineIndex:candidate->sdp_mline_index()
sdpMid:[NSString stringForStdString:candidate->sdp_mid()]];
rtcCandidate->_serverUrl = [NSString stringForStdString:candidate->server_url()];
return rtcCandidate;
}
- (std::unique_ptr<webrtc::IceCandidateInterface>)nativeCandidate {
webrtc::SdpParseError error;
webrtc::IceCandidateInterface *candidate = webrtc::CreateIceCandidate(
_sdpMid.stdString, _sdpMLineIndex, _sdp.stdString, &error);
if (!candidate) {
RTCLog(@"Failed to create ICE candidate: %s\nline: %s",
error.description.c_str(),
error.line.c_str());
}
return std::unique_ptr<webrtc::IceCandidateInterface>(candidate);
}
@end
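A construction sketch with an illustrative host candidate line (all values are placeholders):

RTCIceCandidate *candidate = [[RTCIceCandidate alloc]
    initWithSdp:@"candidate:1 1 udp 2122260223 192.168.1.2 49152 typ host"
  sdpMLineIndex:0
         sdpMid:@"audio"];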

View File

@ -1,30 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCIceServer.h"
#include "api/peerconnectioninterface.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCIceServer ()
/**
* IceServer struct representation of this RTCIceServer object's data.
* This is needed to pass to the underlying C++ APIs.
*/
@property(nonatomic, readonly) webrtc::PeerConnectionInterface::IceServer nativeServer;
/** Initialize an RTCIceServer from a native IceServer. */
- (instancetype)initWithNativeServer:(webrtc::PeerConnectionInterface::IceServer)nativeServer;
@end
NS_ASSUME_NONNULL_END

View File

@ -1,196 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCIceServer+Private.h"
#import "NSString+StdString.h"
@implementation RTCIceServer
@synthesize urlStrings = _urlStrings;
@synthesize username = _username;
@synthesize credential = _credential;
@synthesize tlsCertPolicy = _tlsCertPolicy;
@synthesize hostname = _hostname;
@synthesize tlsAlpnProtocols = _tlsAlpnProtocols;
@synthesize tlsEllipticCurves = _tlsEllipticCurves;
- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings {
return [self initWithURLStrings:urlStrings
username:nil
credential:nil];
}
- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
username:(NSString *)username
credential:(NSString *)credential {
return [self initWithURLStrings:urlStrings
username:username
credential:credential
tlsCertPolicy:RTCTlsCertPolicySecure];
}
- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
username:(NSString *)username
credential:(NSString *)credential
tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy {
return [self initWithURLStrings:urlStrings
username:username
credential:credential
tlsCertPolicy:tlsCertPolicy
hostname:nil];
}
- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
username:(NSString *)username
credential:(NSString *)credential
tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
hostname:(NSString *)hostname {
return [self initWithURLStrings:urlStrings
username:username
credential:credential
tlsCertPolicy:tlsCertPolicy
hostname:hostname
tlsAlpnProtocols:[NSArray array]];
}
- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
username:(NSString *)username
credential:(NSString *)credential
tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
hostname:(NSString *)hostname
tlsAlpnProtocols:(NSArray<NSString *> *)tlsAlpnProtocols {
return [self initWithURLStrings:urlStrings
username:username
credential:credential
tlsCertPolicy:tlsCertPolicy
hostname:hostname
tlsAlpnProtocols:tlsAlpnProtocols
tlsEllipticCurves:[NSArray array]];
}
- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
username:(NSString *)username
credential:(NSString *)credential
tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
hostname:(NSString *)hostname
tlsAlpnProtocols:(NSArray<NSString *> *)tlsAlpnProtocols
tlsEllipticCurves:(NSArray<NSString *> *)tlsEllipticCurves {
NSParameterAssert(urlStrings.count);
if (self = [super init]) {
_urlStrings = [[NSArray alloc] initWithArray:urlStrings copyItems:YES];
_username = [username copy];
_credential = [credential copy];
_tlsCertPolicy = tlsCertPolicy;
_hostname = [hostname copy];
_tlsAlpnProtocols = [[NSArray alloc] initWithArray:tlsAlpnProtocols copyItems:YES];
_tlsEllipticCurves = [[NSArray alloc] initWithArray:tlsEllipticCurves copyItems:YES];
}
return self;
}
- (NSString *)description {
return [NSString stringWithFormat:@"RTCIceServer:\n%@\n%@\n%@\n%@\n%@\n%@\n%@",
_urlStrings,
_username,
_credential,
[self stringForTlsCertPolicy:_tlsCertPolicy],
_hostname,
_tlsAlpnProtocols,
_tlsEllipticCurves];
}
#pragma mark - Private
- (NSString *)stringForTlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy {
switch (tlsCertPolicy) {
case RTCTlsCertPolicySecure:
return @"RTCTlsCertPolicySecure";
case RTCTlsCertPolicyInsecureNoCheck:
return @"RTCTlsCertPolicyInsecureNoCheck";
}
}
- (webrtc::PeerConnectionInterface::IceServer)nativeServer {
__block webrtc::PeerConnectionInterface::IceServer iceServer;
iceServer.username = [NSString stdStringForString:_username];
iceServer.password = [NSString stdStringForString:_credential];
iceServer.hostname = [NSString stdStringForString:_hostname];
[_tlsAlpnProtocols enumerateObjectsUsingBlock:^(NSString *proto, NSUInteger idx, BOOL *stop) {
iceServer.tls_alpn_protocols.push_back(proto.stdString);
}];
[_tlsEllipticCurves enumerateObjectsUsingBlock:^(NSString *curve, NSUInteger idx, BOOL *stop) {
iceServer.tls_elliptic_curves.push_back(curve.stdString);
}];
[_urlStrings enumerateObjectsUsingBlock:^(NSString *url,
NSUInteger idx,
BOOL *stop) {
iceServer.urls.push_back(url.stdString);
}];
switch (_tlsCertPolicy) {
case RTCTlsCertPolicySecure:
iceServer.tls_cert_policy =
webrtc::PeerConnectionInterface::kTlsCertPolicySecure;
break;
case RTCTlsCertPolicyInsecureNoCheck:
iceServer.tls_cert_policy =
webrtc::PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck;
break;
}
return iceServer;
}
- (instancetype)initWithNativeServer:
(webrtc::PeerConnectionInterface::IceServer)nativeServer {
NSMutableArray *urls =
[NSMutableArray arrayWithCapacity:nativeServer.urls.size()];
for (auto const &url : nativeServer.urls) {
[urls addObject:[NSString stringForStdString:url]];
}
NSString *username = [NSString stringForStdString:nativeServer.username];
NSString *credential = [NSString stringForStdString:nativeServer.password];
NSString *hostname = [NSString stringForStdString:nativeServer.hostname];
NSMutableArray *tlsAlpnProtocols =
[NSMutableArray arrayWithCapacity:nativeServer.tls_alpn_protocols.size()];
for (auto const &proto : nativeServer.tls_alpn_protocols) {
[tlsAlpnProtocols addObject:[NSString stringForStdString:proto]];
}
NSMutableArray *tlsEllipticCurves =
[NSMutableArray arrayWithCapacity:nativeServer.tls_elliptic_curves.size()];
for (auto const &curve : nativeServer.tls_elliptic_curves) {
[tlsEllipticCurves addObject:[NSString stringForStdString:curve]];
}
RTCTlsCertPolicy tlsCertPolicy;
switch (nativeServer.tls_cert_policy) {
case webrtc::PeerConnectionInterface::kTlsCertPolicySecure:
tlsCertPolicy = RTCTlsCertPolicySecure;
break;
case webrtc::PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck:
tlsCertPolicy = RTCTlsCertPolicyInsecureNoCheck;
break;
}
self = [self initWithURLStrings:urls
username:username
credential:credential
tlsCertPolicy:tlsCertPolicy
hostname:hostname
tlsAlpnProtocols:tlsAlpnProtocols
tlsEllipticCurves:tlsEllipticCurves];
return self;
}
@end
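Construction sketches with placeholder URLs and credentials; the shorter initializers above fill in secure TLS defaults:

RTCIceServer *stunServer =
    [[RTCIceServer alloc] initWithURLStrings:@[ @"stun:stun.example.org:3478" ]];
RTCIceServer *turnServer =
    [[RTCIceServer alloc] initWithURLStrings:@[ @"turn:turn.example.org:3478" ]
                                    username:@"user"
                                  credential:@"secret"];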

View File

@ -1,25 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCIntervalRange.h"
#include <memory>
#include "rtc_base/timeutils.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCIntervalRange ()
@property(nonatomic, readonly) std::unique_ptr<rtc::IntervalRange> nativeIntervalRange;
- (instancetype)initWithNativeIntervalRange:(const rtc::IntervalRange &)config;
@end
NS_ASSUME_NONNULL_END

View File

@ -1,50 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCIntervalRange+Private.h"
#include "rtc_base/checks.h"
@implementation RTCIntervalRange
@synthesize min = _min;
@synthesize max = _max;
- (instancetype)init {
return [self initWithMin:0 max:0];
}
- (instancetype)initWithMin:(NSInteger)min
max:(NSInteger)max {
RTC_DCHECK_LE(min, max);
if (self = [super init]) {
_min = min;
_max = max;
}
return self;
}
- (instancetype)initWithNativeIntervalRange:(const rtc::IntervalRange &)config {
return [self initWithMin:config.min() max:config.max()];
}
- (NSString *)description {
return [NSString stringWithFormat:@"[%ld, %ld]", (long)_min, (long)_max];
}
#pragma mark - Private
- (std::unique_ptr<rtc::IntervalRange>)nativeIntervalRange {
std::unique_ptr<rtc::IntervalRange> nativeIntervalRange(
new rtc::IntervalRange((int)_min, (int)_max));
return nativeIntervalRange;
}
@end

View File

@ -1,24 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCLegacyStatsReport.h"
#include "api/statstypes.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCLegacyStatsReport ()
/** Initialize an RTCLegacyStatsReport object from a native StatsReport. */
- (instancetype)initWithNativeReport:(const webrtc::StatsReport &)nativeReport;
@end
NS_ASSUME_NONNULL_END

View File

@ -1,60 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCLegacyStatsReport+Private.h"
#import "NSString+StdString.h"
#import "WebRTC/RTCLogging.h"
#include "rtc_base/checks.h"
@implementation RTCLegacyStatsReport
@synthesize timestamp = _timestamp;
@synthesize type = _type;
@synthesize reportId = _reportId;
@synthesize values = _values;
- (NSString *)description {
return [NSString stringWithFormat:@"RTCLegacyStatsReport:\n%@\n%@\n%f\n%@",
_reportId,
_type,
_timestamp,
_values];
}
#pragma mark - Private
- (instancetype)initWithNativeReport:(const webrtc::StatsReport &)nativeReport {
if (self = [super init]) {
_timestamp = nativeReport.timestamp();
_type = [NSString stringForStdString:nativeReport.TypeToString()];
_reportId = [NSString stringForStdString:
nativeReport.id()->ToString()];
NSUInteger capacity = nativeReport.values().size();
NSMutableDictionary *values =
[NSMutableDictionary dictionaryWithCapacity:capacity];
for (auto const &valuePair : nativeReport.values()) {
NSString *key = [NSString stringForStdString:
valuePair.second->display_name()];
NSString *value = [NSString stringForStdString:
valuePair.second->ToString()];
// Not expecting duplicate keys.
RTC_DCHECK(![values objectForKey:key]);
[values setObject:value forKey:key];
}
_values = values;
}
return self;
}
@end

View File

@ -1,51 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCMediaConstraints.h"
#include <memory>
#include "api/mediaconstraintsinterface.h"
namespace webrtc {
class MediaConstraints : public MediaConstraintsInterface {
public:
~MediaConstraints() override;
MediaConstraints();
MediaConstraints(const MediaConstraintsInterface::Constraints& mandatory,
const MediaConstraintsInterface::Constraints& optional);
const Constraints& GetMandatory() const override;
const Constraints& GetOptional() const override;
private:
MediaConstraintsInterface::Constraints mandatory_;
MediaConstraintsInterface::Constraints optional_;
};
} // namespace webrtc
NS_ASSUME_NONNULL_BEGIN
@interface RTCMediaConstraints ()
/**
* A MediaConstraints representation of this RTCMediaConstraints object. This is
* needed to pass to the underlying C++ APIs.
*/
- (std::unique_ptr<webrtc::MediaConstraints>)nativeConstraints;
/** Returns a native Constraints object representing these constraints. */
+ (webrtc::MediaConstraintsInterface::Constraints)nativeConstraintsForConstraints:
(NSDictionary<NSString*, NSString*>*)constraints;
@end
NS_ASSUME_NONNULL_END

View File

@ -1,136 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCMediaConstraints+Private.h"
#import "NSString+StdString.h"
#include <memory>
NSString * const kRTCMediaConstraintsMinAspectRatio =
@(webrtc::MediaConstraintsInterface::kMinAspectRatio);
NSString * const kRTCMediaConstraintsMaxAspectRatio =
@(webrtc::MediaConstraintsInterface::kMaxAspectRatio);
NSString * const kRTCMediaConstraintsMinWidth =
@(webrtc::MediaConstraintsInterface::kMinWidth);
NSString * const kRTCMediaConstraintsMaxWidth =
@(webrtc::MediaConstraintsInterface::kMaxWidth);
NSString * const kRTCMediaConstraintsMinHeight =
@(webrtc::MediaConstraintsInterface::kMinHeight);
NSString * const kRTCMediaConstraintsMaxHeight =
@(webrtc::MediaConstraintsInterface::kMaxHeight);
NSString * const kRTCMediaConstraintsMinFrameRate =
@(webrtc::MediaConstraintsInterface::kMinFrameRate);
NSString * const kRTCMediaConstraintsMaxFrameRate =
@(webrtc::MediaConstraintsInterface::kMaxFrameRate);
NSString * const kRTCMediaConstraintsAudioNetworkAdaptorConfig =
@(webrtc::MediaConstraintsInterface::kAudioNetworkAdaptorConfig);
NSString * const kRTCMediaConstraintsIceRestart =
@(webrtc::MediaConstraintsInterface::kIceRestart);
NSString * const kRTCMediaConstraintsOfferToReceiveAudio =
@(webrtc::MediaConstraintsInterface::kOfferToReceiveAudio);
NSString * const kRTCMediaConstraintsOfferToReceiveVideo =
@(webrtc::MediaConstraintsInterface::kOfferToReceiveVideo);
NSString * const kRTCMediaConstraintsVoiceActivityDetection =
@(webrtc::MediaConstraintsInterface::kVoiceActivityDetection);
NSString * const kRTCMediaConstraintsValueTrue =
@(webrtc::MediaConstraintsInterface::kValueTrue);
NSString * const kRTCMediaConstraintsValueFalse =
@(webrtc::MediaConstraintsInterface::kValueFalse);
namespace webrtc {
MediaConstraints::~MediaConstraints() {}
MediaConstraints::MediaConstraints() {}
MediaConstraints::MediaConstraints(
const MediaConstraintsInterface::Constraints& mandatory,
const MediaConstraintsInterface::Constraints& optional)
: mandatory_(mandatory), optional_(optional) {}
const MediaConstraintsInterface::Constraints&
MediaConstraints::GetMandatory() const {
return mandatory_;
}
const MediaConstraintsInterface::Constraints&
MediaConstraints::GetOptional() const {
return optional_;
}
} // namespace webrtc
@implementation RTCMediaConstraints {
NSDictionary<NSString *, NSString *> *_mandatory;
NSDictionary<NSString *, NSString *> *_optional;
}
- (instancetype)initWithMandatoryConstraints:
(NSDictionary<NSString *, NSString *> *)mandatory
optionalConstraints:
(NSDictionary<NSString *, NSString *> *)optional {
if (self = [super init]) {
_mandatory = [[NSDictionary alloc] initWithDictionary:mandatory
copyItems:YES];
_optional = [[NSDictionary alloc] initWithDictionary:optional
copyItems:YES];
}
return self;
}
- (NSString *)description {
return [NSString stringWithFormat:@"RTCMediaConstraints:\n%@\n%@",
_mandatory,
_optional];
}
#pragma mark - Private
- (std::unique_ptr<webrtc::MediaConstraints>)nativeConstraints {
webrtc::MediaConstraintsInterface::Constraints mandatory =
[[self class] nativeConstraintsForConstraints:_mandatory];
webrtc::MediaConstraintsInterface::Constraints optional =
[[self class] nativeConstraintsForConstraints:_optional];
webrtc::MediaConstraints *nativeConstraints =
new webrtc::MediaConstraints(mandatory, optional);
return std::unique_ptr<webrtc::MediaConstraints>(nativeConstraints);
}
+ (webrtc::MediaConstraintsInterface::Constraints)
nativeConstraintsForConstraints:
(NSDictionary<NSString *, NSString *> *)constraints {
webrtc::MediaConstraintsInterface::Constraints nativeConstraints;
for (NSString *key in constraints) {
NSAssert([key isKindOfClass:[NSString class]],
@"%@ is not an NSString.", key);
NSString *value = [constraints objectForKey:key];
NSAssert([value isKindOfClass:[NSString class]],
@"%@ is not an NSString.", value);
if ([kRTCMediaConstraintsAudioNetworkAdaptorConfig isEqualToString:key]) {
// This value is base64 encoded.
NSData *charData = [[NSData alloc] initWithBase64EncodedString:value options:0];
std::string configValue =
std::string(reinterpret_cast<const char *>(charData.bytes), charData.length);
nativeConstraints.push_back(webrtc::MediaConstraintsInterface::Constraint(
key.stdString, configValue));
} else {
nativeConstraints.push_back(webrtc::MediaConstraintsInterface::Constraint(
key.stdString, value.stdString));
}
}
return nativeConstraints;
}
@end
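A usage sketch; keys and values are the NSString constants defined at the top of this file:

RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc]
    initWithMandatoryConstraints:@{
      kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue,
      kRTCMediaConstraintsOfferToReceiveVideo : kRTCMediaConstraintsValueFalse
    }
             optionalConstraints:@{}];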

View File

@ -1,40 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCMediaSource.h"
#include "api/mediastreaminterface.h"
NS_ASSUME_NONNULL_BEGIN
@class RTCPeerConnectionFactory;
typedef NS_ENUM(NSInteger, RTCMediaSourceType) {
RTCMediaSourceTypeAudio,
RTCMediaSourceTypeVideo,
};
@interface RTCMediaSource ()
@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::MediaSourceInterface> nativeMediaSource;
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
type:(RTCMediaSourceType)type NS_DESIGNATED_INITIALIZER;
+ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:(RTCSourceState)state;
+ (RTCSourceState)sourceStateForNativeState:(webrtc::MediaSourceInterface::SourceState)nativeState;
+ (NSString *)stringForState:(RTCSourceState)state;
@end
NS_ASSUME_NONNULL_END

View File

@ -1,82 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCMediaSource+Private.h"
#include "rtc_base/checks.h"
@implementation RTCMediaSource {
RTCPeerConnectionFactory *_factory;
RTCMediaSourceType _type;
}
@synthesize nativeMediaSource = _nativeMediaSource;
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
nativeMediaSource:(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
type:(RTCMediaSourceType)type {
RTC_DCHECK(factory);
RTC_DCHECK(nativeMediaSource);
if (self = [super init]) {
_factory = factory;
_nativeMediaSource = nativeMediaSource;
_type = type;
}
return self;
}
- (RTCSourceState)state {
return [[self class] sourceStateForNativeState:_nativeMediaSource->state()];
}
#pragma mark - Private
+ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:
(RTCSourceState)state {
switch (state) {
case RTCSourceStateInitializing:
return webrtc::MediaSourceInterface::kInitializing;
case RTCSourceStateLive:
return webrtc::MediaSourceInterface::kLive;
case RTCSourceStateEnded:
return webrtc::MediaSourceInterface::kEnded;
case RTCSourceStateMuted:
return webrtc::MediaSourceInterface::kMuted;
}
}
+ (RTCSourceState)sourceStateForNativeState:
(webrtc::MediaSourceInterface::SourceState)nativeState {
switch (nativeState) {
case webrtc::MediaSourceInterface::kInitializing:
return RTCSourceStateInitializing;
case webrtc::MediaSourceInterface::kLive:
return RTCSourceStateLive;
case webrtc::MediaSourceInterface::kEnded:
return RTCSourceStateEnded;
case webrtc::MediaSourceInterface::kMuted:
return RTCSourceStateMuted;
}
}
+ (NSString *)stringForState:(RTCSourceState)state {
switch (state) {
case RTCSourceStateInitializing:
return @"Initializing";
case RTCSourceStateLive:
return @"Live";
case RTCSourceStateEnded:
return @"Ended";
case RTCSourceStateMuted:
return @"Muted";
}
}
@end

View File

@ -1,34 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCMediaStream.h"
#include "api/mediastreaminterface.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCMediaStream ()
/**
* MediaStreamInterface representation of this RTCMediaStream object. This is
* needed to pass to the underlying C++ APIs.
*/
@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::MediaStreamInterface> nativeMediaStream;
/** Initialize an RTCMediaStream with an id. */
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory streamId:(NSString *)streamId;
/** Initialize an RTCMediaStream from a native MediaStreamInterface. */
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
nativeMediaStream:(rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream;
@end
NS_ASSUME_NONNULL_END

View File

@ -1,126 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCMediaStream+Private.h"
#include <vector>
#import "NSString+StdString.h"
#import "RTCAudioTrack+Private.h"
#import "RTCMediaStreamTrack+Private.h"
#import "RTCPeerConnectionFactory+Private.h"
#import "RTCVideoTrack+Private.h"
@implementation RTCMediaStream {
RTCPeerConnectionFactory *_factory;
NSMutableArray *_audioTracks;
NSMutableArray *_videoTracks;
rtc::scoped_refptr<webrtc::MediaStreamInterface> _nativeMediaStream;
}
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
streamId:(NSString *)streamId {
NSParameterAssert(factory);
NSParameterAssert(streamId.length);
std::string nativeId = [NSString stdStringForString:streamId];
rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
factory.nativeFactory->CreateLocalMediaStream(nativeId);
return [self initWithFactory:factory nativeMediaStream:stream];
}
- (NSArray<RTCAudioTrack *> *)audioTracks {
return [_audioTracks copy];
}
- (NSArray<RTCVideoTrack *> *)videoTracks {
return [_videoTracks copy];
}
- (NSString *)streamId {
return [NSString stringForStdString:_nativeMediaStream->id()];
}
- (void)addAudioTrack:(RTCAudioTrack *)audioTrack {
if (_nativeMediaStream->AddTrack(audioTrack.nativeAudioTrack)) {
[_audioTracks addObject:audioTrack];
}
}
- (void)addVideoTrack:(RTCVideoTrack *)videoTrack {
if (_nativeMediaStream->AddTrack(videoTrack.nativeVideoTrack)) {
[_videoTracks addObject:videoTrack];
}
}
- (void)removeAudioTrack:(RTCAudioTrack *)audioTrack {
NSUInteger index = [_audioTracks indexOfObjectIdenticalTo:audioTrack];
NSAssert(index != NSNotFound,
@"|removeAudioTrack| called on unexpected RTCAudioTrack");
if (index != NSNotFound &&
_nativeMediaStream->RemoveTrack(audioTrack.nativeAudioTrack)) {
[_audioTracks removeObjectAtIndex:index];
}
}
- (void)removeVideoTrack:(RTCVideoTrack *)videoTrack {
NSUInteger index = [_videoTracks indexOfObjectIdenticalTo:videoTrack];
NSAssert(index != NSNotFound,
@"|removeVideoTrack| called on unexpected RTCVideoTrack");
if (index != NSNotFound &&
_nativeMediaStream->RemoveTrack(videoTrack.nativeVideoTrack)) {
[_videoTracks removeObjectAtIndex:index];
}
}
- (NSString *)description {
return [NSString stringWithFormat:@"RTCMediaStream:\n%@\nA=%lu\nV=%lu",
self.streamId,
(unsigned long)self.audioTracks.count,
(unsigned long)self.videoTracks.count];
}
#pragma mark - Private
- (rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream {
return _nativeMediaStream;
}
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
nativeMediaStream:
(rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream {
NSParameterAssert(nativeMediaStream);
if (self = [super init]) {
_factory = factory;
webrtc::AudioTrackVector audioTracks = nativeMediaStream->GetAudioTracks();
webrtc::VideoTrackVector videoTracks = nativeMediaStream->GetVideoTracks();
_audioTracks = [NSMutableArray arrayWithCapacity:audioTracks.size()];
_videoTracks = [NSMutableArray arrayWithCapacity:videoTracks.size()];
_nativeMediaStream = nativeMediaStream;
for (auto &track : audioTracks) {
RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeAudio;
RTCAudioTrack *audioTrack =
[[RTCAudioTrack alloc] initWithFactory:_factory nativeTrack:track type:type];
[_audioTracks addObject:audioTrack];
}
for (auto &track : videoTracks) {
RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeVideo;
RTCVideoTrack *videoTrack =
[[RTCVideoTrack alloc] initWithFactory:_factory nativeTrack:track type:type];
[_videoTracks addObject:videoTrack];
}
}
return self;
}
@end
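A usage sketch, assuming a hypothetical |factory| (RTCPeerConnectionFactory) and an |audioTrack| created from it; the factory's mediaStreamWithStreamId: ends up in the initializer above:

RTCMediaStream *stream = [factory mediaStreamWithStreamId:@"stream0"];
[stream addAudioTrack:audioTrack];
NSLog(@"%@", stream);  // RTCMediaStream: stream0, A=1, V=0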

View File

@ -1,60 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCMediaStreamTrack.h"
#include "api/mediastreaminterface.h"
typedef NS_ENUM(NSInteger, RTCMediaStreamTrackType) {
RTCMediaStreamTrackTypeAudio,
RTCMediaStreamTrackTypeVideo,
};
NS_ASSUME_NONNULL_BEGIN
@class RTCPeerConnectionFactory;
@interface RTCMediaStreamTrack ()
@property(nonatomic, readonly) RTCPeerConnectionFactory *factory;
/**
* The native MediaStreamTrackInterface passed in or created during
* construction.
*/
@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack;
/**
* Initialize an RTCMediaStreamTrack from a native MediaStreamTrackInterface.
*/
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
type:(RTCMediaStreamTrackType)type NS_DESIGNATED_INITIALIZER;
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack;
- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track;
+ (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
(RTCMediaStreamTrackState)state;
+ (RTCMediaStreamTrackState)trackStateForNativeState:
(webrtc::MediaStreamTrackInterface::TrackState)nativeState;
+ (NSString *)stringForState:(RTCMediaStreamTrackState)state;
+ (RTCMediaStreamTrack *)mediaTrackForNativeTrack:
(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
factory:(RTCPeerConnectionFactory *)factory;
@end
NS_ASSUME_NONNULL_END

View File

@ -1,160 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCAudioTrack+Private.h"
#import "RTCMediaStreamTrack+Private.h"
#import "RTCVideoTrack+Private.h"
#import "NSString+StdString.h"
NSString * const kRTCMediaStreamTrackKindAudio =
@(webrtc::MediaStreamTrackInterface::kAudioKind);
NSString * const kRTCMediaStreamTrackKindVideo =
@(webrtc::MediaStreamTrackInterface::kVideoKind);
@implementation RTCMediaStreamTrack {
RTCPeerConnectionFactory *_factory;
rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> _nativeTrack;
RTCMediaStreamTrackType _type;
}
- (NSString *)kind {
return [NSString stringForStdString:_nativeTrack->kind()];
}
- (NSString *)trackId {
return [NSString stringForStdString:_nativeTrack->id()];
}
- (BOOL)isEnabled {
return _nativeTrack->enabled();
}
- (void)setIsEnabled:(BOOL)isEnabled {
_nativeTrack->set_enabled(isEnabled);
}
- (RTCMediaStreamTrackState)readyState {
return [[self class] trackStateForNativeState:_nativeTrack->state()];
}
- (NSString *)description {
NSString *readyState = [[self class] stringForState:self.readyState];
return [NSString stringWithFormat:@"RTCMediaStreamTrack:\n%@\n%@\n%@\n%@",
self.kind,
self.trackId,
self.isEnabled ? @"enabled" : @"disabled",
readyState];
}
- (BOOL)isEqual:(id)object {
if (self == object) {
return YES;
}
if (![object isMemberOfClass:[self class]]) {
return NO;
}
return [self isEqualToTrack:(RTCMediaStreamTrack *)object];
}
- (NSUInteger)hash {
return (NSUInteger)_nativeTrack.get();
}
#pragma mark - Private
- (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
return _nativeTrack;
}
@synthesize factory = _factory;
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
type:(RTCMediaStreamTrackType)type {
NSParameterAssert(nativeTrack);
NSParameterAssert(factory);
if (self = [super init]) {
_factory = factory;
_nativeTrack = nativeTrack;
_type = type;
}
return self;
}
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
nativeTrack:(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
NSParameterAssert(nativeTrack);
if (nativeTrack->kind() ==
std::string(webrtc::MediaStreamTrackInterface::kAudioKind)) {
return [self initWithFactory:factory nativeTrack:nativeTrack type:RTCMediaStreamTrackTypeAudio];
}
if (nativeTrack->kind() ==
std::string(webrtc::MediaStreamTrackInterface::kVideoKind)) {
return [self initWithFactory:factory nativeTrack:nativeTrack type:RTCMediaStreamTrackTypeVideo];
}
return nil;
}
- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track {
if (!track) {
return NO;
}
return _nativeTrack == track.nativeTrack;
}
+ (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
(RTCMediaStreamTrackState)state {
switch (state) {
case RTCMediaStreamTrackStateLive:
return webrtc::MediaStreamTrackInterface::kLive;
case RTCMediaStreamTrackStateEnded:
return webrtc::MediaStreamTrackInterface::kEnded;
}
}
+ (RTCMediaStreamTrackState)trackStateForNativeState:
(webrtc::MediaStreamTrackInterface::TrackState)nativeState {
switch (nativeState) {
case webrtc::MediaStreamTrackInterface::kLive:
return RTCMediaStreamTrackStateLive;
case webrtc::MediaStreamTrackInterface::kEnded:
return RTCMediaStreamTrackStateEnded;
}
}
+ (NSString *)stringForState:(RTCMediaStreamTrackState)state {
switch (state) {
case RTCMediaStreamTrackStateLive:
return @"Live";
case RTCMediaStreamTrackStateEnded:
return @"Ended";
}
}
+ (RTCMediaStreamTrack *)mediaTrackForNativeTrack:
(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
factory:(RTCPeerConnectionFactory *)factory {
NSParameterAssert(nativeTrack);
NSParameterAssert(factory);
if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kAudioKind) {
return [[RTCAudioTrack alloc] initWithFactory:factory
nativeTrack:nativeTrack
type:RTCMediaStreamTrackTypeAudio];
} else if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) {
return [[RTCVideoTrack alloc] initWithFactory:factory
nativeTrack:nativeTrack
type:RTCMediaStreamTrackTypeVideo];
} else {
return [[RTCMediaStreamTrack alloc] initWithFactory:factory nativeTrack:nativeTrack];
}
}
@end

View File

@ -1,32 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCMetrics.h"
#import "RTCMetricsSampleInfo+Private.h"
void RTCEnableMetrics(void) {
webrtc::metrics::Enable();
}
NSArray<RTCMetricsSampleInfo *> *RTCGetAndResetMetrics(void) {
std::map<std::string, std::unique_ptr<webrtc::metrics::SampleInfo>>
histograms;
webrtc::metrics::GetAndReset(&histograms);
NSMutableArray *metrics =
[NSMutableArray arrayWithCapacity:histograms.size()];
for (auto const &histogram : histograms) {
RTCMetricsSampleInfo *metric = [[RTCMetricsSampleInfo alloc]
initWithNativeSampleInfo:*histogram.second];
[metrics addObject:metric];
}
return metrics;
}
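A usage sketch for the two functions above; metrics must be enabled before the session so the histograms accumulate:

RTCEnableMetrics();
// ... run a call ...
for (RTCMetricsSampleInfo *info in RTCGetAndResetMetrics()) {
  NSLog(@"%@ (min=%d, max=%d): %@", info.name, info.min, info.max, info.samples);
}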

View File

@ -1,27 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCMetricsSampleInfo.h"
// Adding 'nogncheck' to disable the gn include headers check.
// We don't want to depend on 'system_wrappers:metrics_default' because
// clients should be able to provide their own implementation.
#include "system_wrappers/include/metrics_default.h" // nogncheck
NS_ASSUME_NONNULL_BEGIN
@interface RTCMetricsSampleInfo ()
/** Initialize an RTCMetricsSampleInfo object from native SampleInfo. */
- (instancetype)initWithNativeSampleInfo:(const webrtc::metrics::SampleInfo &)info;
@end
NS_ASSUME_NONNULL_END

View File

@ -1,43 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCMetricsSampleInfo+Private.h"
#import "NSString+StdString.h"
@implementation RTCMetricsSampleInfo
@synthesize name = _name;
@synthesize min = _min;
@synthesize max = _max;
@synthesize bucketCount = _bucketCount;
@synthesize samples = _samples;
#pragma mark - Private
- (instancetype)initWithNativeSampleInfo:
(const webrtc::metrics::SampleInfo &)info {
if (self = [super init]) {
_name = [NSString stringForStdString:info.name];
_min = info.min;
_max = info.max;
_bucketCount = info.bucket_count;
NSMutableDictionary *samples =
[NSMutableDictionary dictionaryWithCapacity:info.samples.size()];
for (auto const &sample : info.samples) {
[samples setObject:@(sample.second) forKey:@(sample.first)];
}
_samples = samples;
}
return self;
}
@end

View File

@ -1,33 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCPeerConnection+Private.h"
#import "NSString+StdString.h"
#import "RTCDataChannel+Private.h"
#import "RTCDataChannelConfiguration+Private.h"
@implementation RTCPeerConnection (DataChannel)
- (nullable RTCDataChannel *)dataChannelForLabel:(NSString *)label
configuration:(RTCDataChannelConfiguration *)configuration {
std::string labelString = [NSString stdStringForString:label];
const webrtc::DataChannelInit nativeInit =
configuration.nativeDataChannelInit;
rtc::scoped_refptr<webrtc::DataChannelInterface> dataChannel =
self.nativePeerConnection->CreateDataChannel(labelString,
&nativeInit);
if (!dataChannel) {
return nil;
}
return [[RTCDataChannel alloc] initWithFactory:self.factory nativeDataChannel:dataChannel];
}
@end
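
A minimal sketch of opening a channel through this category, assuming an existing RTCPeerConnection named peerConnection:

RTCDataChannelConfiguration *config = [[RTCDataChannelConfiguration alloc] init];
config.isOrdered = YES;
RTCDataChannel *channel =
    [peerConnection dataChannelForLabel:@"chat" configuration:config];
if (!channel) {
  RTCLogError(@"Failed to create data channel.");
}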


@ -8,27 +8,4 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCPeerConnection.h"
#include <memory>
namespace rtc {
class BitrateAllocationStrategy;
} // namespace rtc
NS_ASSUME_NONNULL_BEGIN
/**
* This class extension exposes methods that work directly with injectable C++ components.
*/
@interface RTCPeerConnection ()
/** Sets the current bitrate allocation strategy. If none is set, the default WebRTC
* allocator will be used. The strategy may be changed during an active session.
*/
- (void)setBitrateAllocationStrategy:
(std::unique_ptr<rtc::BitrateAllocationStrategy>)bitrateAllocationStrategy;
@end
NS_ASSUME_NONNULL_END
#import "api/peerconnection/RTCPeerConnection+Native.h"


@ -1,106 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCPeerConnection.h"
#include "api/peerconnectioninterface.h"
NS_ASSUME_NONNULL_BEGIN
namespace webrtc {
/**
* These objects are created by RTCPeerConnectionFactory to wrap an
* id<RTCPeerConnectionDelegate> and call methods on that interface.
*/
class PeerConnectionDelegateAdapter : public PeerConnectionObserver {
public:
PeerConnectionDelegateAdapter(RTCPeerConnection *peerConnection);
~PeerConnectionDelegateAdapter() override;
void OnSignalingChange(PeerConnectionInterface::SignalingState new_state) override;
void OnAddStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
void OnRemoveStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
void OnTrack(rtc::scoped_refptr<RtpTransceiverInterface> transceiver) override;
void OnDataChannel(rtc::scoped_refptr<DataChannelInterface> data_channel) override;
void OnRenegotiationNeeded() override;
void OnIceConnectionChange(PeerConnectionInterface::IceConnectionState new_state) override;
void OnIceGatheringChange(PeerConnectionInterface::IceGatheringState new_state) override;
void OnIceCandidate(const IceCandidateInterface *candidate) override;
void OnIceCandidatesRemoved(const std::vector<cricket::Candidate> &candidates) override;
void OnAddTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver,
const std::vector<rtc::scoped_refptr<MediaStreamInterface>> &streams) override;
void OnRemoveTrack(rtc::scoped_refptr<RtpReceiverInterface> receiver) override;
private:
__weak RTCPeerConnection *peer_connection_;
};
} // namespace webrtc
@interface RTCPeerConnection ()
/** The factory used to create this RTCPeerConnection */
@property(nonatomic, readonly) RTCPeerConnectionFactory *factory;
/** The native PeerConnectionInterface created during construction. */
@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::PeerConnectionInterface>
nativePeerConnection;
/** Initialize an RTCPeerConnection with a configuration, constraints, and
* delegate.
*/
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
configuration:(RTCConfiguration *)configuration
constraints:(RTCMediaConstraints *)constraints
delegate:(nullable id<RTCPeerConnectionDelegate>)delegate
NS_DESIGNATED_INITIALIZER;
+ (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState:
(RTCSignalingState)state;
+ (RTCSignalingState)signalingStateForNativeState:
(webrtc::PeerConnectionInterface::SignalingState)nativeState;
+ (NSString *)stringForSignalingState:(RTCSignalingState)state;
+ (webrtc::PeerConnectionInterface::IceConnectionState)nativeIceConnectionStateForState:
(RTCIceConnectionState)state;
+ (RTCIceConnectionState)iceConnectionStateForNativeState:
(webrtc::PeerConnectionInterface::IceConnectionState)nativeState;
+ (NSString *)stringForIceConnectionState:(RTCIceConnectionState)state;
+ (webrtc::PeerConnectionInterface::IceGatheringState)nativeIceGatheringStateForState:
(RTCIceGatheringState)state;
+ (RTCIceGatheringState)iceGatheringStateForNativeState:
(webrtc::PeerConnectionInterface::IceGatheringState)nativeState;
+ (NSString *)stringForIceGatheringState:(RTCIceGatheringState)state;
+ (webrtc::PeerConnectionInterface::StatsOutputLevel)nativeStatsOutputLevelForLevel:
(RTCStatsOutputLevel)level;
@end
NS_ASSUME_NONNULL_END


@ -1,62 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCPeerConnection+Private.h"
#import "NSString+StdString.h"
#import "RTCMediaStreamTrack+Private.h"
#import "RTCLegacyStatsReport+Private.h"
#include "rtc_base/checks.h"
namespace webrtc {
class StatsObserverAdapter : public StatsObserver {
public:
StatsObserverAdapter(void (^completionHandler)
(NSArray<RTCLegacyStatsReport *> *stats)) {
completion_handler_ = completionHandler;
}
~StatsObserverAdapter() override { completion_handler_ = nil; }
void OnComplete(const StatsReports& reports) override {
RTC_DCHECK(completion_handler_);
NSMutableArray *stats = [NSMutableArray arrayWithCapacity:reports.size()];
for (const auto* report : reports) {
RTCLegacyStatsReport *statsReport =
[[RTCLegacyStatsReport alloc] initWithNativeReport:*report];
[stats addObject:statsReport];
}
completion_handler_(stats);
completion_handler_ = nil;
}
private:
void (^completion_handler_)(NSArray<RTCLegacyStatsReport *> *stats);
};
} // namespace webrtc
@implementation RTCPeerConnection (Stats)
- (void)statsForTrack:(RTCMediaStreamTrack *)mediaStreamTrack
statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel
completionHandler:
(void (^)(NSArray<RTCLegacyStatsReport *> *stats))completionHandler {
rtc::scoped_refptr<webrtc::StatsObserverAdapter> observer(
new rtc::RefCountedObject<webrtc::StatsObserverAdapter>
(completionHandler));
webrtc::PeerConnectionInterface::StatsOutputLevel nativeOutputLevel =
[[self class] nativeStatsOutputLevelForLevel:statsOutputLevel];
self.nativePeerConnection->GetStats(
observer, mediaStreamTrack.nativeTrack, nativeOutputLevel);
}
@end
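
A usage sketch for this legacy stats API; passing nil for the track is assumed to request session-wide stats, matching the underlying GetStats() contract:

[peerConnection statsForTrack:nil
             statsOutputLevel:RTCStatsOutputLevelStandard
            completionHandler:^(NSArray<RTCLegacyStatsReport *> *reports) {
              for (RTCLegacyStatsReport *report in reports) {
                NSLog(@"%@ (%@): %@", report.reportId, report.type, report.values);
              }
            }];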


@ -1,748 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCPeerConnection+Private.h"
#import "NSString+StdString.h"
#import "RTCConfiguration+Private.h"
#import "RTCDataChannel+Private.h"
#import "RTCIceCandidate+Private.h"
#import "RTCLegacyStatsReport+Private.h"
#import "RTCMediaConstraints+Private.h"
#import "RTCMediaStream+Private.h"
#import "RTCMediaStreamTrack+Private.h"
#import "RTCPeerConnection+Native.h"
#import "RTCPeerConnectionFactory+Private.h"
#import "RTCRtpReceiver+Private.h"
#import "RTCRtpSender+Private.h"
#import "RTCRtpTransceiver+Private.h"
#import "RTCSessionDescription+Private.h"
#import "WebRTC/RTCLogging.h"
#include <memory>
#include "api/jsepicecandidate.h"
#include "rtc_base/checks.h"
NSString * const kRTCPeerConnectionErrorDomain =
@"org.webrtc.RTCPeerConnection";
int const kRTCPeerConnnectionSessionDescriptionError = -1;
namespace webrtc {
class CreateSessionDescriptionObserverAdapter
: public CreateSessionDescriptionObserver {
public:
CreateSessionDescriptionObserverAdapter(
void (^completionHandler)(RTCSessionDescription *sessionDescription,
NSError *error)) {
completion_handler_ = completionHandler;
}
~CreateSessionDescriptionObserverAdapter() override { completion_handler_ = nil; }
void OnSuccess(SessionDescriptionInterface *desc) override {
RTC_DCHECK(completion_handler_);
std::unique_ptr<webrtc::SessionDescriptionInterface> description =
std::unique_ptr<webrtc::SessionDescriptionInterface>(desc);
RTCSessionDescription* session =
[[RTCSessionDescription alloc] initWithNativeDescription:
description.get()];
completion_handler_(session, nil);
completion_handler_ = nil;
}
void OnFailure(RTCError error) override {
RTC_DCHECK(completion_handler_);
// TODO(hta): Add handling of error.type()
NSString *str = [NSString stringForStdString:error.message()];
NSError* err =
[NSError errorWithDomain:kRTCPeerConnectionErrorDomain
code:kRTCPeerConnnectionSessionDescriptionError
userInfo:@{ NSLocalizedDescriptionKey : str }];
completion_handler_(nil, err);
completion_handler_ = nil;
}
private:
void (^completion_handler_)
(RTCSessionDescription *sessionDescription, NSError *error);
};
class SetSessionDescriptionObserverAdapter :
public SetSessionDescriptionObserver {
public:
SetSessionDescriptionObserverAdapter(void (^completionHandler)
(NSError *error)) {
completion_handler_ = completionHandler;
}
~SetSessionDescriptionObserverAdapter() override { completion_handler_ = nil; }
void OnSuccess() override {
RTC_DCHECK(completion_handler_);
completion_handler_(nil);
completion_handler_ = nil;
}
void OnFailure(RTCError error) override {
RTC_DCHECK(completion_handler_);
// TODO(hta): Add handling of error.type()
NSString *str = [NSString stringForStdString:error.message()];
NSError* err =
[NSError errorWithDomain:kRTCPeerConnectionErrorDomain
code:kRTCPeerConnnectionSessionDescriptionError
userInfo:@{ NSLocalizedDescriptionKey : str }];
completion_handler_(err);
completion_handler_ = nil;
}
private:
void (^completion_handler_)(NSError *error);
};
PeerConnectionDelegateAdapter::PeerConnectionDelegateAdapter(
RTCPeerConnection *peerConnection) {
peer_connection_ = peerConnection;
}
PeerConnectionDelegateAdapter::~PeerConnectionDelegateAdapter() {
peer_connection_ = nil;
}
void PeerConnectionDelegateAdapter::OnSignalingChange(
PeerConnectionInterface::SignalingState new_state) {
RTCSignalingState state =
[[RTCPeerConnection class] signalingStateForNativeState:new_state];
RTCPeerConnection *peer_connection = peer_connection_;
[peer_connection.delegate peerConnection:peer_connection
didChangeSignalingState:state];
}
void PeerConnectionDelegateAdapter::OnAddStream(
rtc::scoped_refptr<MediaStreamInterface> stream) {
RTCPeerConnection *peer_connection = peer_connection_;
RTCMediaStream *mediaStream =
[[RTCMediaStream alloc] initWithFactory:peer_connection.factory nativeMediaStream:stream];
[peer_connection.delegate peerConnection:peer_connection
didAddStream:mediaStream];
}
void PeerConnectionDelegateAdapter::OnRemoveStream(
rtc::scoped_refptr<MediaStreamInterface> stream) {
RTCPeerConnection *peer_connection = peer_connection_;
RTCMediaStream *mediaStream =
[[RTCMediaStream alloc] initWithFactory:peer_connection.factory nativeMediaStream:stream];
[peer_connection.delegate peerConnection:peer_connection
didRemoveStream:mediaStream];
}
void PeerConnectionDelegateAdapter::OnTrack(
rtc::scoped_refptr<RtpTransceiverInterface> nativeTransceiver) {
RTCPeerConnection *peer_connection = peer_connection_;
RTCRtpTransceiver *transceiver =
[[RTCRtpTransceiver alloc] initWithFactory:peer_connection.factory
nativeRtpTransceiver:nativeTransceiver];
if ([peer_connection.delegate
respondsToSelector:@selector(peerConnection:didStartReceivingOnTransceiver:)]) {
[peer_connection.delegate peerConnection:peer_connection
didStartReceivingOnTransceiver:transceiver];
}
}
void PeerConnectionDelegateAdapter::OnDataChannel(
rtc::scoped_refptr<DataChannelInterface> data_channel) {
RTCPeerConnection *peer_connection = peer_connection_;
RTCDataChannel *dataChannel = [[RTCDataChannel alloc] initWithFactory:peer_connection.factory
nativeDataChannel:data_channel];
[peer_connection.delegate peerConnection:peer_connection
didOpenDataChannel:dataChannel];
}
void PeerConnectionDelegateAdapter::OnRenegotiationNeeded() {
RTCPeerConnection *peer_connection = peer_connection_;
[peer_connection.delegate peerConnectionShouldNegotiate:peer_connection];
}
void PeerConnectionDelegateAdapter::OnIceConnectionChange(
PeerConnectionInterface::IceConnectionState new_state) {
RTCIceConnectionState state =
[[RTCPeerConnection class] iceConnectionStateForNativeState:new_state];
RTCPeerConnection *peer_connection = peer_connection_;
[peer_connection.delegate peerConnection:peer_connection
didChangeIceConnectionState:state];
}
void PeerConnectionDelegateAdapter::OnIceGatheringChange(
PeerConnectionInterface::IceGatheringState new_state) {
RTCIceGatheringState state =
[[RTCPeerConnection class] iceGatheringStateForNativeState:new_state];
RTCPeerConnection *peer_connection = peer_connection_;
[peer_connection.delegate peerConnection:peer_connection
didChangeIceGatheringState:state];
}
void PeerConnectionDelegateAdapter::OnIceCandidate(
const IceCandidateInterface *candidate) {
RTCIceCandidate *iceCandidate =
[[RTCIceCandidate alloc] initWithNativeCandidate:candidate];
RTCPeerConnection *peer_connection = peer_connection_;
[peer_connection.delegate peerConnection:peer_connection
didGenerateIceCandidate:iceCandidate];
}
void PeerConnectionDelegateAdapter::OnIceCandidatesRemoved(
const std::vector<cricket::Candidate>& candidates) {
NSMutableArray* ice_candidates =
[NSMutableArray arrayWithCapacity:candidates.size()];
for (const auto& candidate : candidates) {
std::unique_ptr<JsepIceCandidate> candidate_wrapper(
new JsepIceCandidate(candidate.transport_name(), -1, candidate));
RTCIceCandidate* ice_candidate = [[RTCIceCandidate alloc]
initWithNativeCandidate:candidate_wrapper.get()];
[ice_candidates addObject:ice_candidate];
}
RTCPeerConnection* peer_connection = peer_connection_;
[peer_connection.delegate peerConnection:peer_connection
didRemoveIceCandidates:ice_candidates];
}
void PeerConnectionDelegateAdapter::OnAddTrack(
rtc::scoped_refptr<RtpReceiverInterface> receiver,
const std::vector<rtc::scoped_refptr<MediaStreamInterface>>& streams) {
RTCPeerConnection *peer_connection = peer_connection_;
if ([peer_connection.delegate
respondsToSelector:@selector(peerConnection:didAddReceiver:streams:)]) {
NSMutableArray *mediaStreams = [NSMutableArray arrayWithCapacity:streams.size()];
for (const auto& nativeStream : streams) {
RTCMediaStream *mediaStream = [[RTCMediaStream alloc] initWithFactory:peer_connection.factory
nativeMediaStream:nativeStream];
[mediaStreams addObject:mediaStream];
}
RTCRtpReceiver *rtpReceiver =
[[RTCRtpReceiver alloc] initWithFactory:peer_connection.factory nativeRtpReceiver:receiver];
[peer_connection.delegate peerConnection:peer_connection
didAddReceiver:rtpReceiver
streams:mediaStreams];
}
}
void PeerConnectionDelegateAdapter::OnRemoveTrack(
rtc::scoped_refptr<RtpReceiverInterface> receiver) {
RTCPeerConnection *peer_connection = peer_connection_;
if ([peer_connection.delegate respondsToSelector:@selector(peerConnection:didRemoveReceiver:)]) {
RTCRtpReceiver *rtpReceiver =
[[RTCRtpReceiver alloc] initWithFactory:peer_connection.factory nativeRtpReceiver:receiver];
[peer_connection.delegate peerConnection:peer_connection didRemoveReceiver:rtpReceiver];
}
}
} // namespace webrtc
@implementation RTCPeerConnection {
RTCPeerConnectionFactory *_factory;
NSMutableArray<RTCMediaStream *> *_localStreams;
std::unique_ptr<webrtc::PeerConnectionDelegateAdapter> _observer;
rtc::scoped_refptr<webrtc::PeerConnectionInterface> _peerConnection;
std::unique_ptr<webrtc::MediaConstraints> _nativeConstraints;
BOOL _hasStartedRtcEventLog;
}
@synthesize delegate = _delegate;
@synthesize factory = _factory;
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
configuration:(RTCConfiguration *)configuration
constraints:(RTCMediaConstraints *)constraints
delegate:(id<RTCPeerConnectionDelegate>)delegate {
NSParameterAssert(factory);
std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration> config(
[configuration createNativeConfiguration]);
if (!config) {
return nil;
}
if (self = [super init]) {
_observer.reset(new webrtc::PeerConnectionDelegateAdapter(self));
_nativeConstraints = constraints.nativeConstraints;
CopyConstraintsIntoRtcConfiguration(_nativeConstraints.get(),
config.get());
_peerConnection =
factory.nativeFactory->CreatePeerConnection(*config,
nullptr,
nullptr,
_observer.get());
if (!_peerConnection) {
return nil;
}
_factory = factory;
_localStreams = [[NSMutableArray alloc] init];
_delegate = delegate;
}
return self;
}
- (NSArray<RTCMediaStream *> *)localStreams {
return [_localStreams copy];
}
- (RTCSessionDescription *)localDescription {
const webrtc::SessionDescriptionInterface *description =
_peerConnection->local_description();
return description ?
[[RTCSessionDescription alloc] initWithNativeDescription:description]
: nil;
}
- (RTCSessionDescription *)remoteDescription {
const webrtc::SessionDescriptionInterface *description =
_peerConnection->remote_description();
return description ?
[[RTCSessionDescription alloc] initWithNativeDescription:description]
: nil;
}
- (RTCSignalingState)signalingState {
return [[self class]
signalingStateForNativeState:_peerConnection->signaling_state()];
}
- (RTCIceConnectionState)iceConnectionState {
return [[self class] iceConnectionStateForNativeState:
_peerConnection->ice_connection_state()];
}
- (RTCIceGatheringState)iceGatheringState {
return [[self class] iceGatheringStateForNativeState:
_peerConnection->ice_gathering_state()];
}
- (BOOL)setConfiguration:(RTCConfiguration *)configuration {
std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration> config(
[configuration createNativeConfiguration]);
if (!config) {
return NO;
}
CopyConstraintsIntoRtcConfiguration(_nativeConstraints.get(),
config.get());
return _peerConnection->SetConfiguration(*config);
}
- (RTCConfiguration *)configuration {
webrtc::PeerConnectionInterface::RTCConfiguration config =
_peerConnection->GetConfiguration();
return [[RTCConfiguration alloc] initWithNativeConfiguration:config];
}
- (void)close {
_peerConnection->Close();
}
- (void)addIceCandidate:(RTCIceCandidate *)candidate {
std::unique_ptr<const webrtc::IceCandidateInterface> iceCandidate(
candidate.nativeCandidate);
_peerConnection->AddIceCandidate(iceCandidate.get());
}
- (void)removeIceCandidates:(NSArray<RTCIceCandidate *> *)iceCandidates {
std::vector<cricket::Candidate> candidates;
for (RTCIceCandidate *iceCandidate in iceCandidates) {
std::unique_ptr<const webrtc::IceCandidateInterface> candidate(
iceCandidate.nativeCandidate);
if (candidate) {
candidates.push_back(candidate->candidate());
// Need to fill the transport name from the sdp_mid.
candidates.back().set_transport_name(candidate->sdp_mid());
}
}
if (!candidates.empty()) {
_peerConnection->RemoveIceCandidates(candidates);
}
}
- (void)addStream:(RTCMediaStream *)stream {
if (!_peerConnection->AddStream(stream.nativeMediaStream)) {
RTCLogError(@"Failed to add stream: %@", stream);
return;
}
[_localStreams addObject:stream];
}
- (void)removeStream:(RTCMediaStream *)stream {
_peerConnection->RemoveStream(stream.nativeMediaStream);
[_localStreams removeObject:stream];
}
- (RTCRtpSender *)addTrack:(RTCMediaStreamTrack *)track streamIds:(NSArray<NSString *> *)streamIds {
std::vector<std::string> nativeStreamIds;
for (NSString *streamId in streamIds) {
nativeStreamIds.push_back([streamId UTF8String]);
}
webrtc::RTCErrorOr<rtc::scoped_refptr<webrtc::RtpSenderInterface>> nativeSenderOrError =
_peerConnection->AddTrack(track.nativeTrack, nativeStreamIds);
if (!nativeSenderOrError.ok()) {
RTCLogError(@"Failed to add track %@: %s", track, nativeSenderOrError.error().message());
return nil;
}
return [[RTCRtpSender alloc] initWithFactory:self.factory
nativeRtpSender:nativeSenderOrError.MoveValue()];
}
- (BOOL)removeTrack:(RTCRtpSender *)sender {
bool result = _peerConnection->RemoveTrack(sender.nativeRtpSender);
if (!result) {
RTCLogError(@"Failed to remote track %@", sender);
}
return result;
}
- (RTCRtpTransceiver *)addTransceiverWithTrack:(RTCMediaStreamTrack *)track {
return [self addTransceiverWithTrack:track init:[[RTCRtpTransceiverInit alloc] init]];
}
- (RTCRtpTransceiver *)addTransceiverWithTrack:(RTCMediaStreamTrack *)track
init:(RTCRtpTransceiverInit *)init {
webrtc::RTCErrorOr<rtc::scoped_refptr<webrtc::RtpTransceiverInterface>> nativeTransceiverOrError =
_peerConnection->AddTransceiver(track.nativeTrack, init.nativeInit);
if (!nativeTransceiverOrError.ok()) {
RTCLogError(
@"Failed to add transceiver %@: %s", track, nativeTransceiverOrError.error().message());
return nil;
}
return [[RTCRtpTransceiver alloc] initWithFactory:self.factory
nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()];
}
- (RTCRtpTransceiver *)addTransceiverOfType:(RTCRtpMediaType)mediaType {
return [self addTransceiverOfType:mediaType init:[[RTCRtpTransceiverInit alloc] init]];
}
- (RTCRtpTransceiver *)addTransceiverOfType:(RTCRtpMediaType)mediaType
init:(RTCRtpTransceiverInit *)init {
webrtc::RTCErrorOr<rtc::scoped_refptr<webrtc::RtpTransceiverInterface>> nativeTransceiverOrError =
_peerConnection->AddTransceiver([RTCRtpReceiver nativeMediaTypeForMediaType:mediaType],
init.nativeInit);
if (!nativeTransceiverOrError.ok()) {
RTCLogError(@"Failed to add transceiver %@: %s",
[RTCRtpReceiver stringForMediaType:mediaType],
nativeTransceiverOrError.error().message());
return nil;
}
return [[RTCRtpTransceiver alloc] initWithFactory:self.factory
nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()];
}
- (void)offerForConstraints:(RTCMediaConstraints *)constraints
completionHandler:
(void (^)(RTCSessionDescription *sessionDescription,
NSError *error))completionHandler {
rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter>
observer(new rtc::RefCountedObject
<webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler));
webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
CopyConstraintsIntoOfferAnswerOptions(constraints.nativeConstraints.get(), &options);
_peerConnection->CreateOffer(observer, options);
}
- (void)answerForConstraints:(RTCMediaConstraints *)constraints
completionHandler:
(void (^)(RTCSessionDescription *sessionDescription,
NSError *error))completionHandler {
rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter>
observer(new rtc::RefCountedObject
<webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler));
webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
CopyConstraintsIntoOfferAnswerOptions(constraints.nativeConstraints.get(), &options);
_peerConnection->CreateAnswer(observer, options);
}
- (void)setLocalDescription:(RTCSessionDescription *)sdp
completionHandler:(void (^)(NSError *error))completionHandler {
rtc::scoped_refptr<webrtc::SetSessionDescriptionObserverAdapter> observer(
new rtc::RefCountedObject<webrtc::SetSessionDescriptionObserverAdapter>(
completionHandler));
_peerConnection->SetLocalDescription(observer, sdp.nativeDescription);
}
- (void)setRemoteDescription:(RTCSessionDescription *)sdp
completionHandler:(void (^)(NSError *error))completionHandler {
rtc::scoped_refptr<webrtc::SetSessionDescriptionObserverAdapter> observer(
new rtc::RefCountedObject<webrtc::SetSessionDescriptionObserverAdapter>(
completionHandler));
_peerConnection->SetRemoteDescription(observer, sdp.nativeDescription);
}
- (BOOL)setBweMinBitrateBps:(nullable NSNumber *)minBitrateBps
currentBitrateBps:(nullable NSNumber *)currentBitrateBps
maxBitrateBps:(nullable NSNumber *)maxBitrateBps {
webrtc::PeerConnectionInterface::BitrateParameters params;
if (minBitrateBps != nil) {
params.min_bitrate_bps = absl::optional<int>(minBitrateBps.intValue);
}
if (currentBitrateBps != nil) {
params.current_bitrate_bps = absl::optional<int>(currentBitrateBps.intValue);
}
if (maxBitrateBps != nil) {
params.max_bitrate_bps = absl::optional<int>(maxBitrateBps.intValue);
}
return _peerConnection->SetBitrate(params).ok();
}
- (void)setBitrateAllocationStrategy:
(std::unique_ptr<rtc::BitrateAllocationStrategy>)bitrateAllocationStrategy {
_peerConnection->SetBitrateAllocationStrategy(std::move(bitrateAllocationStrategy));
}
- (BOOL)startRtcEventLogWithFilePath:(NSString *)filePath
maxSizeInBytes:(int64_t)maxSizeInBytes {
RTC_DCHECK(filePath.length);
RTC_DCHECK_GT(maxSizeInBytes, 0);
RTC_DCHECK(!_hasStartedRtcEventLog);
if (_hasStartedRtcEventLog) {
RTCLogError(@"Event logging already started.");
return NO;
}
int fd = open(filePath.UTF8String, O_WRONLY | O_CREAT | O_TRUNC,
S_IRUSR | S_IWUSR);
if (fd < 0) {
RTCLogError(@"Error opening file: %@. Error: %d", filePath, errno);
return NO;
}
_hasStartedRtcEventLog =
_peerConnection->StartRtcEventLog(fd, maxSizeInBytes);
return _hasStartedRtcEventLog;
}
- (void)stopRtcEventLog {
_peerConnection->StopRtcEventLog();
_hasStartedRtcEventLog = NO;
}
- (RTCRtpSender *)senderWithKind:(NSString *)kind
streamId:(NSString *)streamId {
std::string nativeKind = [NSString stdStringForString:kind];
std::string nativeStreamId = [NSString stdStringForString:streamId];
rtc::scoped_refptr<webrtc::RtpSenderInterface> nativeSender(
_peerConnection->CreateSender(nativeKind, nativeStreamId));
return nativeSender ?
[[RTCRtpSender alloc] initWithFactory:self.factory nativeRtpSender:nativeSender] :
nil;
}
- (NSArray<RTCRtpSender *> *)senders {
std::vector<rtc::scoped_refptr<webrtc::RtpSenderInterface>> nativeSenders(
_peerConnection->GetSenders());
NSMutableArray *senders = [[NSMutableArray alloc] init];
for (const auto &nativeSender : nativeSenders) {
RTCRtpSender *sender =
[[RTCRtpSender alloc] initWithFactory:self.factory nativeRtpSender:nativeSender];
[senders addObject:sender];
}
return senders;
}
- (NSArray<RTCRtpReceiver *> *)receivers {
std::vector<rtc::scoped_refptr<webrtc::RtpReceiverInterface>> nativeReceivers(
_peerConnection->GetReceivers());
NSMutableArray *receivers = [[NSMutableArray alloc] init];
for (const auto &nativeReceiver : nativeReceivers) {
RTCRtpReceiver *receiver =
[[RTCRtpReceiver alloc] initWithFactory:self.factory nativeRtpReceiver:nativeReceiver];
[receivers addObject:receiver];
}
return receivers;
}
- (NSArray<RTCRtpTransceiver *> *)transceivers {
std::vector<rtc::scoped_refptr<webrtc::RtpTransceiverInterface>> nativeTransceivers(
_peerConnection->GetTransceivers());
NSMutableArray *transceivers = [[NSMutableArray alloc] init];
for (auto nativeTransceiver : nativeTransceivers) {
RTCRtpTransceiver *transceiver = [[RTCRtpTransceiver alloc] initWithFactory:self.factory
nativeRtpTransceiver:nativeTransceiver];
[transceivers addObject:transceiver];
}
return transceivers;
}
#pragma mark - Private
+ (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState:
(RTCSignalingState)state {
switch (state) {
case RTCSignalingStateStable:
return webrtc::PeerConnectionInterface::kStable;
case RTCSignalingStateHaveLocalOffer:
return webrtc::PeerConnectionInterface::kHaveLocalOffer;
case RTCSignalingStateHaveLocalPrAnswer:
return webrtc::PeerConnectionInterface::kHaveLocalPrAnswer;
case RTCSignalingStateHaveRemoteOffer:
return webrtc::PeerConnectionInterface::kHaveRemoteOffer;
case RTCSignalingStateHaveRemotePrAnswer:
return webrtc::PeerConnectionInterface::kHaveRemotePrAnswer;
case RTCSignalingStateClosed:
return webrtc::PeerConnectionInterface::kClosed;
}
}
+ (RTCSignalingState)signalingStateForNativeState:
(webrtc::PeerConnectionInterface::SignalingState)nativeState {
switch (nativeState) {
case webrtc::PeerConnectionInterface::kStable:
return RTCSignalingStateStable;
case webrtc::PeerConnectionInterface::kHaveLocalOffer:
return RTCSignalingStateHaveLocalOffer;
case webrtc::PeerConnectionInterface::kHaveLocalPrAnswer:
return RTCSignalingStateHaveLocalPrAnswer;
case webrtc::PeerConnectionInterface::kHaveRemoteOffer:
return RTCSignalingStateHaveRemoteOffer;
case webrtc::PeerConnectionInterface::kHaveRemotePrAnswer:
return RTCSignalingStateHaveRemotePrAnswer;
case webrtc::PeerConnectionInterface::kClosed:
return RTCSignalingStateClosed;
}
}
+ (NSString *)stringForSignalingState:(RTCSignalingState)state {
switch (state) {
case RTCSignalingStateStable:
return @"STABLE";
case RTCSignalingStateHaveLocalOffer:
return @"HAVE_LOCAL_OFFER";
case RTCSignalingStateHaveLocalPrAnswer:
return @"HAVE_LOCAL_PRANSWER";
case RTCSignalingStateHaveRemoteOffer:
return @"HAVE_REMOTE_OFFER";
case RTCSignalingStateHaveRemotePrAnswer:
return @"HAVE_REMOTE_PRANSWER";
case RTCSignalingStateClosed:
return @"CLOSED";
}
}
+ (webrtc::PeerConnectionInterface::IceConnectionState)
nativeIceConnectionStateForState:(RTCIceConnectionState)state {
switch (state) {
case RTCIceConnectionStateNew:
return webrtc::PeerConnectionInterface::kIceConnectionNew;
case RTCIceConnectionStateChecking:
return webrtc::PeerConnectionInterface::kIceConnectionChecking;
case RTCIceConnectionStateConnected:
return webrtc::PeerConnectionInterface::kIceConnectionConnected;
case RTCIceConnectionStateCompleted:
return webrtc::PeerConnectionInterface::kIceConnectionCompleted;
case RTCIceConnectionStateFailed:
return webrtc::PeerConnectionInterface::kIceConnectionFailed;
case RTCIceConnectionStateDisconnected:
return webrtc::PeerConnectionInterface::kIceConnectionDisconnected;
case RTCIceConnectionStateClosed:
return webrtc::PeerConnectionInterface::kIceConnectionClosed;
case RTCIceConnectionStateCount:
return webrtc::PeerConnectionInterface::kIceConnectionMax;
}
}
+ (RTCIceConnectionState)iceConnectionStateForNativeState:
(webrtc::PeerConnectionInterface::IceConnectionState)nativeState {
switch (nativeState) {
case webrtc::PeerConnectionInterface::kIceConnectionNew:
return RTCIceConnectionStateNew;
case webrtc::PeerConnectionInterface::kIceConnectionChecking:
return RTCIceConnectionStateChecking;
case webrtc::PeerConnectionInterface::kIceConnectionConnected:
return RTCIceConnectionStateConnected;
case webrtc::PeerConnectionInterface::kIceConnectionCompleted:
return RTCIceConnectionStateCompleted;
case webrtc::PeerConnectionInterface::kIceConnectionFailed:
return RTCIceConnectionStateFailed;
case webrtc::PeerConnectionInterface::kIceConnectionDisconnected:
return RTCIceConnectionStateDisconnected;
case webrtc::PeerConnectionInterface::kIceConnectionClosed:
return RTCIceConnectionStateClosed;
case webrtc::PeerConnectionInterface::kIceConnectionMax:
return RTCIceConnectionStateCount;
}
}
+ (NSString *)stringForIceConnectionState:(RTCIceConnectionState)state {
switch (state) {
case RTCIceConnectionStateNew:
return @"NEW";
case RTCIceConnectionStateChecking:
return @"CHECKING";
case RTCIceConnectionStateConnected:
return @"CONNECTED";
case RTCIceConnectionStateCompleted:
return @"COMPLETED";
case RTCIceConnectionStateFailed:
return @"FAILED";
case RTCIceConnectionStateDisconnected:
return @"DISCONNECTED";
case RTCIceConnectionStateClosed:
return @"CLOSED";
case RTCIceConnectionStateCount:
return @"COUNT";
}
}
+ (webrtc::PeerConnectionInterface::IceGatheringState)
nativeIceGatheringStateForState:(RTCIceGatheringState)state {
switch (state) {
case RTCIceGatheringStateNew:
return webrtc::PeerConnectionInterface::kIceGatheringNew;
case RTCIceGatheringStateGathering:
return webrtc::PeerConnectionInterface::kIceGatheringGathering;
case RTCIceGatheringStateComplete:
return webrtc::PeerConnectionInterface::kIceGatheringComplete;
}
}
+ (RTCIceGatheringState)iceGatheringStateForNativeState:
(webrtc::PeerConnectionInterface::IceGatheringState)nativeState {
switch (nativeState) {
case webrtc::PeerConnectionInterface::kIceGatheringNew:
return RTCIceGatheringStateNew;
case webrtc::PeerConnectionInterface::kIceGatheringGathering:
return RTCIceGatheringStateGathering;
case webrtc::PeerConnectionInterface::kIceGatheringComplete:
return RTCIceGatheringStateComplete;
}
}
+ (NSString *)stringForIceGatheringState:(RTCIceGatheringState)state {
switch (state) {
case RTCIceGatheringStateNew:
return @"NEW";
case RTCIceGatheringStateGathering:
return @"GATHERING";
case RTCIceGatheringStateComplete:
return @"COMPLETE";
}
}
+ (webrtc::PeerConnectionInterface::StatsOutputLevel)
nativeStatsOutputLevelForLevel:(RTCStatsOutputLevel)level {
switch (level) {
case RTCStatsOutputLevelStandard:
return webrtc::PeerConnectionInterface::kStatsOutputLevelStandard;
case RTCStatsOutputLevelDebug:
return webrtc::PeerConnectionInterface::kStatsOutputLevelDebug;
}
}
- (rtc::scoped_refptr<webrtc::PeerConnectionInterface>)nativePeerConnection {
return _peerConnection;
}
@end
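
For orientation, a condensed sketch of the offer flow these methods implement together (error handling abbreviated; peerConnection is an already constructed instance):

RTCMediaConstraints *constraints =
    [[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil
                                          optionalConstraints:nil];
[peerConnection offerForConstraints:constraints
                  completionHandler:^(RTCSessionDescription *sdp, NSError *error) {
                    if (error) {
                      RTCLogError(@"Failed to create offer: %@", error.localizedDescription);
                      return;
                    }
                    [peerConnection setLocalDescription:sdp
                                      completionHandler:^(NSError *sdpError) {
                                        // ICE candidates now start arriving via
                                        // peerConnection:didGenerateIceCandidate:.
                                      }];
                  }];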


@ -8,47 +8,4 @@
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCPeerConnectionFactory.h"
#include "rtc_base/scoped_ref_ptr.h"
namespace webrtc {
class AudioDeviceModule;
class AudioEncoderFactory;
class AudioDecoderFactory;
class VideoEncoderFactory;
class VideoDecoderFactory;
class AudioProcessing;
} // namespace webrtc
NS_ASSUME_NONNULL_BEGIN
/**
* This class extension exposes methods that work directly with injectable C++ components.
*/
@interface RTCPeerConnectionFactory ()
- (instancetype)initNative NS_DESIGNATED_INITIALIZER;
/* Initializer used when WebRTC is compiled with no media support */
- (instancetype)initWithNoMedia;
/* Initialize object with injectable native audio/video encoder/decoder factories */
- (instancetype)initWithNativeAudioEncoderFactory:
(rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
nativeAudioDecoderFactory:
(rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory
nativeVideoEncoderFactory:
(std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory
nativeVideoDecoderFactory:
(std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory
audioDeviceModule:
(nullable webrtc::AudioDeviceModule *)audioDeviceModule
audioProcessingModule:
(rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule;
@end
NS_ASSUME_NONNULL_END
#import "api/peerconnection/RTCPeerConnectionFactory+Native.h"


@ -1,31 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCPeerConnectionFactory.h"
#include "api/peerconnectioninterface.h"
#include "rtc_base/scoped_ref_ptr.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCPeerConnectionFactory ()
/**
* The native PeerConnectionFactoryInterface created and held by this
* RTCPeerConnectionFactory object. It is passed to the underlying
* C++ APIs.
*/
@property(nonatomic, readonly)
rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
nativeFactory;
@end
NS_ASSUME_NONNULL_END


@ -1,256 +0,0 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCPeerConnectionFactory+Native.h"
#import "RTCPeerConnectionFactory+Private.h"
#import "RTCPeerConnectionFactoryOptions+Private.h"
#import "NSString+StdString.h"
#import "RTCAudioSource+Private.h"
#import "RTCAudioTrack+Private.h"
#import "RTCMediaConstraints+Private.h"
#import "RTCMediaStream+Private.h"
#import "RTCPeerConnection+Private.h"
#import "RTCVideoSource+Private.h"
#import "RTCVideoTrack+Private.h"
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCVideoCodecFactory.h"
#ifndef HAVE_NO_MEDIA
#import "WebRTC/RTCVideoCodecH264.h"
// The no-media version of PeerConnectionFactory doesn't depend on these files, but the gn check
// is not smart enough to take the #ifdef into account.
#include "api/audio_codecs/builtin_audio_decoder_factory.h" // nogncheck
#include "api/audio_codecs/builtin_audio_encoder_factory.h" // nogncheck
#include "media/engine/convert_legacy_video_factory.h" // nogncheck
#include "modules/audio_device/include/audio_device.h" // nogncheck
#include "modules/audio_processing/include/audio_processing.h" // nogncheck
#include "sdk/objc/Framework/Native/api/video_decoder_factory.h"
#include "sdk/objc/Framework/Native/api/video_encoder_factory.h"
#include "sdk/objc/Framework/Native/src/objc_video_decoder_factory.h"
#include "sdk/objc/Framework/Native/src/objc_video_encoder_factory.h"
#endif
#if defined(WEBRTC_IOS)
#import "sdk/objc/Framework/Native/api/audio_device_module.h"
#endif
// The nogncheck annotation disables the gn include-header check.
// The no-media version of PeerConnectionFactory doesn't depend on
// media-related C++ targets.
// TODO(zhihuang): Remove nogncheck once MediaEngineInterface is moved to C++
// API layer.
#include "absl/memory/memory.h"
#include "media/engine/webrtcmediaengine.h" // nogncheck
@implementation RTCPeerConnectionFactory {
std::unique_ptr<rtc::Thread> _networkThread;
std::unique_ptr<rtc::Thread> _workerThread;
std::unique_ptr<rtc::Thread> _signalingThread;
BOOL _hasStartedAecDump;
}
@synthesize nativeFactory = _nativeFactory;
- (rtc::scoped_refptr<webrtc::AudioDeviceModule>)audioDeviceModule {
#if defined(WEBRTC_IOS)
return webrtc::CreateAudioDeviceModule();
#else
return nullptr;
#endif
}
- (instancetype)init {
#ifdef HAVE_NO_MEDIA
return [self initWithNoMedia];
#else
return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory()
nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory()
nativeVideoEncoderFactory:webrtc::ObjCToNativeVideoEncoderFactory(
[[RTCVideoEncoderFactoryH264 alloc] init])
nativeVideoDecoderFactory:webrtc::ObjCToNativeVideoDecoderFactory(
[[RTCVideoDecoderFactoryH264 alloc] init])
audioDeviceModule:[self audioDeviceModule]
audioProcessingModule:nullptr];
#endif
}
- (instancetype)initWithEncoderFactory:(nullable id<RTCVideoEncoderFactory>)encoderFactory
decoderFactory:(nullable id<RTCVideoDecoderFactory>)decoderFactory {
#ifdef HAVE_NO_MEDIA
return [self initWithNoMedia];
#else
std::unique_ptr<webrtc::VideoEncoderFactory> native_encoder_factory;
std::unique_ptr<webrtc::VideoDecoderFactory> native_decoder_factory;
if (encoderFactory) {
native_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory(encoderFactory);
}
if (decoderFactory) {
native_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory(decoderFactory);
}
return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory()
nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory()
nativeVideoEncoderFactory:std::move(native_encoder_factory)
nativeVideoDecoderFactory:std::move(native_decoder_factory)
audioDeviceModule:[self audioDeviceModule]
audioProcessingModule:nullptr];
#endif
}
- (instancetype)initNative {
if (self = [super init]) {
_networkThread = rtc::Thread::CreateWithSocketServer();
_networkThread->SetName("network_thread", _networkThread.get());
BOOL result = _networkThread->Start();
NSAssert(result, @"Failed to start network thread.");
_workerThread = rtc::Thread::Create();
_workerThread->SetName("worker_thread", _workerThread.get());
result = _workerThread->Start();
NSAssert(result, @"Failed to start worker thread.");
_signalingThread = rtc::Thread::Create();
_signalingThread->SetName("signaling_thread", _signalingThread.get());
result = _signalingThread->Start();
NSAssert(result, @"Failed to start signaling thread.");
}
return self;
}
- (instancetype)initWithNoMedia {
if (self = [self initNative]) {
_nativeFactory = webrtc::CreateModularPeerConnectionFactory(
_networkThread.get(),
_workerThread.get(),
_signalingThread.get(),
std::unique_ptr<cricket::MediaEngineInterface>(),
std::unique_ptr<webrtc::CallFactoryInterface>(),
std::unique_ptr<webrtc::RtcEventLogFactoryInterface>());
NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!");
}
return self;
}
- (instancetype)initWithNativeAudioEncoderFactory:
(rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
nativeAudioDecoderFactory:
(rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory
nativeVideoEncoderFactory:
(std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory
nativeVideoDecoderFactory:
(std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory
audioDeviceModule:
(nullable webrtc::AudioDeviceModule *)audioDeviceModule
audioProcessingModule:
(rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule {
#ifdef HAVE_NO_MEDIA
return [self initWithNoMedia];
#else
if (self = [self initNative]) {
_nativeFactory = webrtc::CreatePeerConnectionFactory(_networkThread.get(),
_workerThread.get(),
_signalingThread.get(),
audioDeviceModule,
audioEncoderFactory,
audioDecoderFactory,
std::move(videoEncoderFactory),
std::move(videoDecoderFactory),
nullptr, // audio mixer
audioProcessingModule);
NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!");
}
return self;
#endif
}
- (RTCAudioSource *)audioSourceWithConstraints:(nullable RTCMediaConstraints *)constraints {
std::unique_ptr<webrtc::MediaConstraints> nativeConstraints;
if (constraints) {
nativeConstraints = constraints.nativeConstraints;
}
cricket::AudioOptions options;
CopyConstraintsIntoAudioOptions(nativeConstraints.get(), &options);
rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
_nativeFactory->CreateAudioSource(options);
return [[RTCAudioSource alloc] initWithFactory:self nativeAudioSource:source];
}
- (RTCAudioTrack *)audioTrackWithTrackId:(NSString *)trackId {
RTCAudioSource *audioSource = [self audioSourceWithConstraints:nil];
return [self audioTrackWithSource:audioSource trackId:trackId];
}
- (RTCAudioTrack *)audioTrackWithSource:(RTCAudioSource *)source
trackId:(NSString *)trackId {
return [[RTCAudioTrack alloc] initWithFactory:self
source:source
trackId:trackId];
}
- (RTCVideoSource *)videoSource {
return [[RTCVideoSource alloc] initWithFactory:self
signalingThread:_signalingThread.get()
workerThread:_workerThread.get()];
}
- (RTCVideoTrack *)videoTrackWithSource:(RTCVideoSource *)source
trackId:(NSString *)trackId {
return [[RTCVideoTrack alloc] initWithFactory:self
source:source
trackId:trackId];
}
- (RTCMediaStream *)mediaStreamWithStreamId:(NSString *)streamId {
return [[RTCMediaStream alloc] initWithFactory:self
streamId:streamId];
}
- (RTCPeerConnection *)peerConnectionWithConfiguration:
(RTCConfiguration *)configuration
constraints:
(RTCMediaConstraints *)constraints
delegate:
(nullable id<RTCPeerConnectionDelegate>)delegate {
return [[RTCPeerConnection alloc] initWithFactory:self
configuration:configuration
constraints:constraints
delegate:delegate];
}
- (void)setOptions:(nonnull RTCPeerConnectionFactoryOptions *)options {
RTC_DCHECK(options != nil);
_nativeFactory->SetOptions(options.nativeOptions);
}
- (BOOL)startAecDumpWithFilePath:(NSString *)filePath
maxSizeInBytes:(int64_t)maxSizeInBytes {
RTC_DCHECK(filePath.length);
RTC_DCHECK_GT(maxSizeInBytes, 0);
if (_hasStartedAecDump) {
RTCLogError(@"Aec dump already started.");
return NO;
}
int fd = open(filePath.UTF8String, O_WRONLY | O_CREAT | O_TRUNC, S_IRUSR | S_IWUSR);
if (fd < 0) {
RTCLogError(@"Error opening file: %@. Error: %d", filePath, errno);
return NO;
}
_hasStartedAecDump = _nativeFactory->StartAecDump(fd, maxSizeInBytes);
return _hasStartedAecDump;
}
- (void)stopAecDump {
_nativeFactory->StopAecDump();
_hasStartedAecDump = NO;
}
@end
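
A short sketch of typical factory usage, relying only on the initializers and track helpers defined above:

RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init];
RTCAudioTrack *audioTrack = [factory audioTrackWithTrackId:@"audio0"];
RTCVideoSource *videoSource = [factory videoSource];
RTCVideoTrack *videoTrack = [factory videoTrackWithSource:videoSource trackId:@"video0"];
RTCMediaStream *stream = [factory mediaStreamWithStreamId:@"stream0"];
[stream addAudioTrack:audioTrack];
[stream addVideoTrack:videoTrack];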


@ -1,21 +0,0 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCPeerConnectionFactoryBuilder.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCPeerConnectionFactoryBuilder (DefaultComponents)
+ (RTCPeerConnectionFactoryBuilder *)defaultBuilder;
@end
NS_ASSUME_NONNULL_END


@ -1,48 +0,0 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCPeerConnectionFactory+Native.h"
#import "RTCPeerConnectionFactoryBuilder+DefaultComponents.h"
#import "WebRTC/RTCVideoCodecH264.h"
#include "api/audio_codecs/builtin_audio_decoder_factory.h"
#include "api/audio_codecs/builtin_audio_encoder_factory.h"
#include "sdk/objc/Framework/Native/api/video_decoder_factory.h"
#include "sdk/objc/Framework/Native/api/video_encoder_factory.h"
#if defined(WEBRTC_IOS)
#import "sdk/objc/Framework/Native/api/audio_device_module.h"
#endif
@implementation RTCPeerConnectionFactoryBuilder (DefaultComponents)
+ (RTCPeerConnectionFactoryBuilder *)defaultBuilder {
RTCPeerConnectionFactoryBuilder *builder = [[RTCPeerConnectionFactoryBuilder alloc] init];
auto audioEncoderFactory = webrtc::CreateBuiltinAudioEncoderFactory();
[builder setAudioEncoderFactory:audioEncoderFactory];
auto audioDecoderFactory = webrtc::CreateBuiltinAudioDecoderFactory();
[builder setAudioDecoderFactory:audioDecoderFactory];
auto videoEncoderFactory =
webrtc::ObjCToNativeVideoEncoderFactory([[RTCVideoEncoderFactoryH264 alloc] init]);
[builder setVideoEncoderFactory:std::move(videoEncoderFactory)];
auto videoDecoderFactory =
webrtc::ObjCToNativeVideoDecoderFactory([[RTCVideoDecoderFactoryH264 alloc] init]);
[builder setVideoDecoderFactory:std::move(videoDecoderFactory)];
#if defined(WEBRTC_IOS)
[builder setAudioDeviceModule:webrtc::CreateAudioDeviceModule()];
#endif
return builder;
}
@end
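
Usage is then a two-liner, equivalent to -[RTCPeerConnectionFactory init] but with an injection seam for tests and custom builds:

RTCPeerConnectionFactoryBuilder *builder = [RTCPeerConnectionFactoryBuilder defaultBuilder];
RTCPeerConnectionFactory *factory = [builder createPeerConnectionFactory];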


@ -1,48 +0,0 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCPeerConnectionFactory.h"
#include "rtc_base/scoped_ref_ptr.h"
namespace webrtc {
class AudioDeviceModule;
class AudioEncoderFactory;
class AudioDecoderFactory;
class VideoEncoderFactory;
class VideoDecoderFactory;
class AudioProcessing;
} // namespace webrtc
NS_ASSUME_NONNULL_BEGIN
@interface RTCPeerConnectionFactoryBuilder : NSObject
+ (RTCPeerConnectionFactoryBuilder *)builder;
- (RTCPeerConnectionFactory *)createPeerConnectionFactory;
- (void)setVideoEncoderFactory:(std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory;
- (void)setVideoDecoderFactory:(std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory;
- (void)setAudioEncoderFactory:(rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory;
- (void)setAudioDecoderFactory:(rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory;
- (void)setAudioDeviceModule:(rtc::scoped_refptr<webrtc::AudioDeviceModule>)audioDeviceModule;
- (void)setAudioProcessingModule:(rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule;
@end
NS_ASSUME_NONNULL_END


@ -1,71 +0,0 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCPeerConnectionFactoryBuilder.h"
#import "RTCPeerConnectionFactory+Native.h"
#include "api/audio_codecs/audio_decoder_factory.h"
#include "api/audio_codecs/audio_encoder_factory.h"
#include "api/video_codecs/video_decoder_factory.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "modules/audio_device/include/audio_device.h"
#include "modules/audio_processing/include/audio_processing.h"
@implementation RTCPeerConnectionFactoryBuilder {
std::unique_ptr<webrtc::VideoEncoderFactory> _videoEncoderFactory;
std::unique_ptr<webrtc::VideoDecoderFactory> _videoDecoderFactory;
rtc::scoped_refptr<webrtc::AudioEncoderFactory> _audioEncoderFactory;
rtc::scoped_refptr<webrtc::AudioDecoderFactory> _audioDecoderFactory;
rtc::scoped_refptr<webrtc::AudioDeviceModule> _audioDeviceModule;
rtc::scoped_refptr<webrtc::AudioProcessing> _audioProcessingModule;
}
+ (RTCPeerConnectionFactoryBuilder *)builder {
return [[RTCPeerConnectionFactoryBuilder alloc] init];
}
- (RTCPeerConnectionFactory *)createPeerConnectionFactory {
RTCPeerConnectionFactory *factory = [RTCPeerConnectionFactory alloc];
return [factory initWithNativeAudioEncoderFactory:_audioEncoderFactory
nativeAudioDecoderFactory:_audioDecoderFactory
nativeVideoEncoderFactory:std::move(_videoEncoderFactory)
nativeVideoDecoderFactory:std::move(_videoDecoderFactory)
audioDeviceModule:_audioDeviceModule
audioProcessingModule:_audioProcessingModule];
}
- (void)setVideoEncoderFactory:(std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory {
_videoEncoderFactory = std::move(videoEncoderFactory);
}
- (void)setVideoDecoderFactory:(std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory {
_videoDecoderFactory = std::move(videoDecoderFactory);
}
- (void)setAudioEncoderFactory:
(rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory {
_audioEncoderFactory = audioEncoderFactory;
}
- (void)setAudioDecoderFactory:
(rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory {
_audioDecoderFactory = audioDecoderFactory;
}
- (void)setAudioDeviceModule:(rtc::scoped_refptr<webrtc::AudioDeviceModule>)audioDeviceModule {
_audioDeviceModule = audioDeviceModule;
}
- (void)setAudioProcessingModule:
(rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule {
_audioProcessingModule = audioProcessingModule;
}
@end
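
A sketch of overriding a single component before building; MyVideoEncoderFactory is a hypothetical webrtc::VideoEncoderFactory subclass, and absl::make_unique comes from absl/memory/memory.h:

RTCPeerConnectionFactoryBuilder *builder = [RTCPeerConnectionFactoryBuilder defaultBuilder];
// MyVideoEncoderFactory is hypothetical; any webrtc::VideoEncoderFactory implementation works.
[builder setVideoEncoderFactory:absl::make_unique<MyVideoEncoderFactory>()];
RTCPeerConnectionFactory *factory = [builder createPeerConnectionFactory];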


@ -1,26 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCPeerConnectionFactoryOptions.h"
#include "api/peerconnectioninterface.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCPeerConnectionFactoryOptions ()
/** Returns the equivalent native PeerConnectionFactoryInterface::Options
* structure. */
@property(nonatomic, readonly)
webrtc::PeerConnectionFactoryInterface::Options nativeOptions;
@end
NS_ASSUME_NONNULL_END


@ -1,61 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCPeerConnectionFactoryOptions+Private.h"
#include "rtc_base/network_constants.h"
namespace {
void setNetworkBit(webrtc::PeerConnectionFactoryInterface::Options* options,
rtc::AdapterType type,
bool ignore) {
if (ignore) {
options->network_ignore_mask |= type;
} else {
options->network_ignore_mask &= ~type;
}
}
} // namespace
@implementation RTCPeerConnectionFactoryOptions
@synthesize disableEncryption = _disableEncryption;
@synthesize disableNetworkMonitor = _disableNetworkMonitor;
@synthesize ignoreLoopbackNetworkAdapter = _ignoreLoopbackNetworkAdapter;
@synthesize ignoreVPNNetworkAdapter = _ignoreVPNNetworkAdapter;
@synthesize ignoreCellularNetworkAdapter = _ignoreCellularNetworkAdapter;
@synthesize ignoreWiFiNetworkAdapter = _ignoreWiFiNetworkAdapter;
@synthesize ignoreEthernetNetworkAdapter = _ignoreEthernetNetworkAdapter;
@synthesize enableAes128Sha1_32CryptoCipher = _enableAes128Sha1_32CryptoCipher;
@synthesize enableGcmCryptoSuites = _enableGcmCryptoSuites;
- (instancetype)init {
return [super init];
}
- (webrtc::PeerConnectionFactoryInterface::Options)nativeOptions {
webrtc::PeerConnectionFactoryInterface::Options options;
options.disable_encryption = self.disableEncryption;
options.disable_network_monitor = self.disableNetworkMonitor;
setNetworkBit(&options, rtc::ADAPTER_TYPE_LOOPBACK, self.ignoreLoopbackNetworkAdapter);
setNetworkBit(&options, rtc::ADAPTER_TYPE_VPN, self.ignoreVPNNetworkAdapter);
setNetworkBit(&options, rtc::ADAPTER_TYPE_CELLULAR, self.ignoreCellularNetworkAdapter);
setNetworkBit(&options, rtc::ADAPTER_TYPE_WIFI, self.ignoreWiFiNetworkAdapter);
setNetworkBit(&options, rtc::ADAPTER_TYPE_ETHERNET, self.ignoreEthernetNetworkAdapter);
options.crypto_options.enable_aes128_sha1_32_crypto_cipher = self.enableAes128Sha1_32CryptoCipher;
options.crypto_options.enable_gcm_crypto_suites = self.enableGcmCryptoSuites;
return options;
}
@end
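
For example, a sketch that ignores cellular interfaces during ICE gathering and enables the GCM crypto suites, applied through -[RTCPeerConnectionFactory setOptions:] (factory is an existing instance):

RTCPeerConnectionFactoryOptions *options = [[RTCPeerConnectionFactoryOptions alloc] init];
options.ignoreCellularNetworkAdapter = YES;
options.enableGcmCryptoSuites = YES;
[factory setOptions:options];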


@ -1,27 +0,0 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCRtcpParameters.h"
#include "api/rtpparameters.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCRtcpParameters ()
/** Returns the equivalent native RtcpParameters structure. */
@property(nonatomic, readonly) webrtc::RtcpParameters nativeParameters;
/** Initialize the object with a native RtcpParameters structure. */
- (instancetype)initWithNativeParameters:(const webrtc::RtcpParameters &)nativeParameters;
@end
NS_ASSUME_NONNULL_END


@ -1,39 +0,0 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCRtcpParameters+Private.h"
#import "NSString+StdString.h"
@implementation RTCRtcpParameters
@synthesize cname = _cname;
@synthesize isReducedSize = _isReducedSize;
- (instancetype)init {
return [super init];
}
- (instancetype)initWithNativeParameters:(const webrtc::RtcpParameters &)nativeParameters {
if (self = [self init]) {
_cname = [NSString stringForStdString:nativeParameters.cname];
_isReducedSize = nativeParameters.reduced_size;
}
return self;
}
- (webrtc::RtcpParameters)nativeParameters {
webrtc::RtcpParameters parameters;
parameters.cname = [NSString stdStringForString:_cname];
parameters.reduced_size = _isReducedSize;
return parameters;
}
@end


@ -1,27 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCRtpCodecParameters.h"
#include "api/rtpparameters.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCRtpCodecParameters ()
/** Returns the equivalent native RtpCodecParameters structure. */
@property(nonatomic, readonly) webrtc::RtpCodecParameters nativeParameters;
/** Initialize the object with a native RtpCodecParameters structure. */
- (instancetype)initWithNativeParameters:(const webrtc::RtpCodecParameters &)nativeParameters;
@end
NS_ASSUME_NONNULL_END


@ -1,109 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCRtpCodecParameters+Private.h"
#import "NSString+StdString.h"
#import "WebRTC/RTCMediaStreamTrack.h" // For "kind" strings.
#include "media/base/mediaconstants.h"
#include "rtc_base/checks.h"
const NSString * const kRTCRtxCodecName = @(cricket::kRtxCodecName);
const NSString * const kRTCRedCodecName = @(cricket::kRedCodecName);
const NSString * const kRTCUlpfecCodecName = @(cricket::kUlpfecCodecName);
const NSString * const kRTCFlexfecCodecName = @(cricket::kFlexfecCodecName);
const NSString * const kRTCOpusCodecName = @(cricket::kOpusCodecName);
const NSString * const kRTCIsacCodecName = @(cricket::kIsacCodecName);
const NSString * const kRTCL16CodecName = @(cricket::kL16CodecName);
const NSString * const kRTCG722CodecName = @(cricket::kG722CodecName);
const NSString * const kRTCIlbcCodecName = @(cricket::kIlbcCodecName);
const NSString * const kRTCPcmuCodecName = @(cricket::kPcmuCodecName);
const NSString * const kRTCPcmaCodecName = @(cricket::kPcmaCodecName);
const NSString * const kRTCDtmfCodecName = @(cricket::kDtmfCodecName);
const NSString * const kRTCComfortNoiseCodecName =
@(cricket::kComfortNoiseCodecName);
const NSString * const kRTCVp8CodecName = @(cricket::kVp8CodecName);
const NSString * const kRTCVp9CodecName = @(cricket::kVp9CodecName);
const NSString * const kRTCH264CodecName = @(cricket::kH264CodecName);
@implementation RTCRtpCodecParameters
@synthesize payloadType = _payloadType;
@synthesize name = _name;
@synthesize kind = _kind;
@synthesize clockRate = _clockRate;
@synthesize numChannels = _numChannels;
@synthesize parameters = _parameters;
- (instancetype)init {
return [super init];
}
- (instancetype)initWithNativeParameters:
(const webrtc::RtpCodecParameters &)nativeParameters {
if (self = [self init]) {
_payloadType = nativeParameters.payload_type;
_name = [NSString stringForStdString:nativeParameters.name];
switch (nativeParameters.kind) {
case cricket::MEDIA_TYPE_AUDIO:
_kind = kRTCMediaStreamTrackKindAudio;
break;
case cricket::MEDIA_TYPE_VIDEO:
_kind = kRTCMediaStreamTrackKindVideo;
break;
case cricket::MEDIA_TYPE_DATA:
RTC_NOTREACHED();
break;
}
if (nativeParameters.clock_rate) {
_clockRate = [NSNumber numberWithInt:*nativeParameters.clock_rate];
}
if (nativeParameters.num_channels) {
_numChannels = [NSNumber numberWithInt:*nativeParameters.num_channels];
}
NSMutableDictionary *parameters = [NSMutableDictionary dictionary];
for (const auto &parameter : nativeParameters.parameters) {
[parameters setObject:[NSString stringForStdString:parameter.second]
forKey:[NSString stringForStdString:parameter.first]];
}
_parameters = parameters;
}
return self;
}
- (webrtc::RtpCodecParameters)nativeParameters {
webrtc::RtpCodecParameters parameters;
parameters.payload_type = _payloadType;
parameters.name = [NSString stdStringForString:_name];
// NSString pointer comparison is safe here since "kind" is readonly and only
// populated above.
if (_kind == kRTCMediaStreamTrackKindAudio) {
parameters.kind = cricket::MEDIA_TYPE_AUDIO;
} else if (_kind == kRTCMediaStreamTrackKindVideo) {
parameters.kind = cricket::MEDIA_TYPE_VIDEO;
} else {
RTC_NOTREACHED();
}
if (_clockRate != nil) {
parameters.clock_rate = absl::optional<int>(_clockRate.intValue);
}
if (_numChannels != nil) {
parameters.num_channels = absl::optional<int>(_numChannels.intValue);
}
for (NSString *paramKey in _parameters.allKeys) {
std::string key = [NSString stdStringForString:paramKey];
std::string value = [NSString stdStringForString:_parameters[paramKey]];
parameters.parameters[key] = value;
}
return parameters;
}
@end
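
A sketch of the kind mapping above (hypothetical, not part of this CL; SketchCodecWrap is an illustrative name):

// Hypothetical usage sketch: MEDIA_TYPE_AUDIO surfaces as the shared
// track-kind string rather than an enum.
#import "RTCRtpCodecParameters+Private.h"
#import "WebRTC/RTCMediaStreamTrack.h"

#include "media/base/mediaconstants.h"

static void SketchCodecWrap() {
  webrtc::RtpCodecParameters native;
  native.payload_type = 111;
  native.name = cricket::kOpusCodecName;
  native.kind = cricket::MEDIA_TYPE_AUDIO;
  native.clock_rate = 48000;
  native.num_channels = 2;
  RTCRtpCodecParameters *codec =
      [[RTCRtpCodecParameters alloc] initWithNativeParameters:native];
  NSCAssert([codec.kind isEqualToString:kRTCMediaStreamTrackKindAudio],
            @"audio kind maps to the audio track-kind string");
  NSCAssert(codec.clockRate.intValue == 48000, @"optional clock rate is boxed");
}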

View File

@@ -1,27 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCRtpEncodingParameters.h"
#include "api/rtpparameters.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCRtpEncodingParameters ()
/** Returns the equivalent native RtpEncodingParameters structure. */
@property(nonatomic, readonly) webrtc::RtpEncodingParameters nativeParameters;
/** Initialize the object with a native RtpEncodingParameters structure. */
- (instancetype)initWithNativeParameters:(const webrtc::RtpEncodingParameters &)nativeParameters;
@end
NS_ASSUME_NONNULL_END

View File

@@ -1,58 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCRtpEncodingParameters+Private.h"
@implementation RTCRtpEncodingParameters
@synthesize isActive = _isActive;
@synthesize maxBitrateBps = _maxBitrateBps;
@synthesize minBitrateBps = _minBitrateBps;
@synthesize ssrc = _ssrc;
- (instancetype)init {
return [super init];
}
- (instancetype)initWithNativeParameters:
(const webrtc::RtpEncodingParameters &)nativeParameters {
if (self = [self init]) {
_isActive = nativeParameters.active;
if (nativeParameters.max_bitrate_bps) {
_maxBitrateBps =
[NSNumber numberWithInt:*nativeParameters.max_bitrate_bps];
}
if (nativeParameters.min_bitrate_bps) {
_minBitrateBps =
[NSNumber numberWithInt:*nativeParameters.min_bitrate_bps];
}
if (nativeParameters.ssrc) {
_ssrc = [NSNumber numberWithUnsignedLong:*nativeParameters.ssrc];
}
}
return self;
}
- (webrtc::RtpEncodingParameters)nativeParameters {
webrtc::RtpEncodingParameters parameters;
parameters.active = _isActive;
if (_maxBitrateBps != nil) {
parameters.max_bitrate_bps = absl::optional<int>(_maxBitrateBps.intValue);
}
if (_minBitrateBps != nil) {
parameters.min_bitrate_bps = absl::optional<int>(_minBitrateBps.intValue);
}
if (_ssrc != nil) {
parameters.ssrc = absl::optional<uint32_t>(_ssrc.unsignedLongValue);
}
return parameters;
}
@end
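
A sketch of the optional-field handling above (hypothetical, not part of this CL; SketchCapBitrate is an illustrative name):

// Hypothetical usage sketch: unset NSNumber fields map to absl::nullopt.
#import "RTCRtpEncodingParameters+Private.h"

static webrtc::RtpEncodingParameters SketchCapBitrate() {
  RTCRtpEncodingParameters *encoding = [[RTCRtpEncodingParameters alloc] init];
  encoding.isActive = YES;
  encoding.maxBitrateBps = @(500000);  // minBitrateBps stays nil -> nullopt.
  return encoding.nativeParameters;
}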

View File

@@ -1,62 +0,0 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCVideoCodec.h"
#include "modules/include/module_common_types.h"
@implementation RTCRtpFragmentationHeader
@synthesize fragmentationOffset = _fragmentationOffset;
@synthesize fragmentationLength = _fragmentationLength;
@synthesize fragmentationTimeDiff = _fragmentationTimeDiff;
@synthesize fragmentationPlType = _fragmentationPlType;
- (instancetype)initWithNativeFragmentationHeader:
(const webrtc::RTPFragmentationHeader *)fragmentationHeader {
if (self = [super init]) {
if (fragmentationHeader) {
int count = fragmentationHeader->fragmentationVectorSize;
NSMutableArray *offsets = [NSMutableArray array];
NSMutableArray *lengths = [NSMutableArray array];
NSMutableArray *timeDiffs = [NSMutableArray array];
NSMutableArray *plTypes = [NSMutableArray array];
for (int i = 0; i < count; ++i) {
[offsets addObject:@(fragmentationHeader->fragmentationOffset[i])];
[lengths addObject:@(fragmentationHeader->fragmentationLength[i])];
[timeDiffs addObject:@(fragmentationHeader->fragmentationTimeDiff[i])];
[plTypes addObject:@(fragmentationHeader->fragmentationPlType[i])];
}
_fragmentationOffset = [offsets copy];
_fragmentationLength = [lengths copy];
_fragmentationTimeDiff = [timeDiffs copy];
_fragmentationPlType = [plTypes copy];
}
}
return self;
}
- (std::unique_ptr<webrtc::RTPFragmentationHeader>)createNativeFragmentationHeader {
auto fragmentationHeader =
std::unique_ptr<webrtc::RTPFragmentationHeader>(new webrtc::RTPFragmentationHeader);
fragmentationHeader->VerifyAndAllocateFragmentationHeader(_fragmentationOffset.count);
for (NSUInteger i = 0; i < _fragmentationOffset.count; ++i) {
fragmentationHeader->fragmentationOffset[i] = (size_t)_fragmentationOffset[i].unsignedIntValue;
fragmentationHeader->fragmentationLength[i] = (size_t)_fragmentationLength[i].unsignedIntValue;
fragmentationHeader->fragmentationTimeDiff[i] =
(uint16_t)_fragmentationTimeDiff[i].unsignedIntValue;
fragmentationHeader->fragmentationPlType[i] = (uint8_t)_fragmentationPlType[i].unsignedIntValue;
}
return fragmentationHeader;
}
@end
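
A round-trip sketch for the conversion above (hypothetical, not part of this CL; assumes the corresponding +Private header declaring the two conversion methods is importable, and SketchFragmentationRoundTrip is an illustrative name):

// Hypothetical usage sketch for RTCRtpFragmentationHeader.
#import "RTCVideoCodec+Private.h"

#include <memory>

static void SketchFragmentationRoundTrip() {
  webrtc::RTPFragmentationHeader native;
  native.VerifyAndAllocateFragmentationHeader(1);
  native.fragmentationOffset[0] = 0;
  native.fragmentationLength[0] = 100;
  native.fragmentationTimeDiff[0] = 0;
  native.fragmentationPlType[0] = 96;
  RTCRtpFragmentationHeader *header =
      [[RTCRtpFragmentationHeader alloc] initWithNativeFragmentationHeader:&native];
  std::unique_ptr<webrtc::RTPFragmentationHeader> back =
      [header createNativeFragmentationHeader];
  NSCAssert(back->fragmentationLength[0] == 100, @"length should survive the round trip");
}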

View File

@@ -1,27 +0,0 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCRtpHeaderExtension.h"
#include "api/rtpparameters.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCRtpHeaderExtension ()
/** Returns the equivalent native RtpExtension structure. */
@property(nonatomic, readonly) webrtc::RtpExtension nativeParameters;
/** Initialize the object with a native RtpExtension structure. */
- (instancetype)initWithNativeParameters:(const webrtc::RtpExtension &)nativeParameters;
@end
NS_ASSUME_NONNULL_END

View File

@@ -1,42 +0,0 @@
/*
* Copyright 2018 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCRtpHeaderExtension+Private.h"
#import "NSString+StdString.h"
@implementation RTCRtpHeaderExtension
@synthesize uri = _uri;
@synthesize id = _id;
@synthesize encrypted = _encrypted;
- (instancetype)init {
return [super init];
}
- (instancetype)initWithNativeParameters:(const webrtc::RtpExtension &)nativeParameters {
if (self = [self init]) {
_uri = [NSString stringForStdString:nativeParameters.uri];
_id = nativeParameters.id;
_encrypted = nativeParameters.encrypt;
}
return self;
}
- (webrtc::RtpExtension)nativeParameters {
webrtc::RtpExtension extension;
extension.uri = [NSString stdStringForString:_uri];
extension.id = _id;
extension.encrypt = _encrypted;
return extension;
}
@end
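
A sketch of the field-name mapping above (hypothetical, not part of this CL; SketchHeaderExtensionWrap is an illustrative name):

// Hypothetical usage sketch: native "encrypt" surfaces as "encrypted".
#import "RTCRtpHeaderExtension+Private.h"

static void SketchHeaderExtensionWrap() {
  webrtc::RtpExtension native(webrtc::RtpExtension::kAudioLevelUri, 1);
  RTCRtpHeaderExtension *ext =
      [[RTCRtpHeaderExtension alloc] initWithNativeParameters:native];
  NSCAssert(ext.id == 1, @"id is copied through");
  NSCAssert(ext.encrypted == NO, @"extensions are unencrypted by default");
}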

View File

@@ -1,27 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCRtpParameters.h"
#include "api/rtpparameters.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCRtpParameters ()
/** Returns the equivalent native RtpParameters structure. */
@property(nonatomic, readonly) webrtc::RtpParameters nativeParameters;
/** Initialize the object with a native RtpParameters structure. */
- (instancetype)initWithNativeParameters:(const webrtc::RtpParameters &)nativeParameters;
@end
NS_ASSUME_NONNULL_END

View File

@@ -1,77 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCRtpParameters+Private.h"
#import "NSString+StdString.h"
#import "RTCRtcpParameters+Private.h"
#import "RTCRtpCodecParameters+Private.h"
#import "RTCRtpEncodingParameters+Private.h"
#import "RTCRtpHeaderExtension+Private.h"
@implementation RTCRtpParameters
@synthesize transactionId = _transactionId;
@synthesize rtcp = _rtcp;
@synthesize headerExtensions = _headerExtensions;
@synthesize encodings = _encodings;
@synthesize codecs = _codecs;
- (instancetype)init {
return [super init];
}
- (instancetype)initWithNativeParameters:
(const webrtc::RtpParameters &)nativeParameters {
if (self = [self init]) {
_transactionId = [NSString stringForStdString:nativeParameters.transaction_id];
_rtcp = [[RTCRtcpParameters alloc] initWithNativeParameters:nativeParameters.rtcp];
NSMutableArray *headerExtensions = [[NSMutableArray alloc] init];
for (const auto &headerExtension : nativeParameters.header_extensions) {
[headerExtensions
addObject:[[RTCRtpHeaderExtension alloc] initWithNativeParameters:headerExtension]];
}
_headerExtensions = headerExtensions;
NSMutableArray *encodings = [[NSMutableArray alloc] init];
for (const auto &encoding : nativeParameters.encodings) {
[encodings addObject:[[RTCRtpEncodingParameters alloc]
initWithNativeParameters:encoding]];
}
_encodings = encodings;
NSMutableArray *codecs = [[NSMutableArray alloc] init];
for (const auto &codec : nativeParameters.codecs) {
[codecs addObject:[[RTCRtpCodecParameters alloc]
initWithNativeParameters:codec]];
}
_codecs = codecs;
}
return self;
}
- (webrtc::RtpParameters)nativeParameters {
webrtc::RtpParameters parameters;
parameters.transaction_id = [NSString stdStringForString:_transactionId];
parameters.rtcp = [_rtcp nativeParameters];
for (RTCRtpHeaderExtension *headerExtension in _headerExtensions) {
parameters.header_extensions.push_back(headerExtension.nativeParameters);
}
for (RTCRtpEncodingParameters *encoding in _encodings) {
parameters.encodings.push_back(encoding.nativeParameters);
}
for (RTCRtpCodecParameters *codec in _codecs) {
parameters.codecs.push_back(codec.nativeParameters);
}
return parameters;
}
@end
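
A sketch of the nested conversion above (hypothetical, not part of this CL; SketchNestedConversion is an illustrative name):

// Hypothetical usage sketch: each nested collection is wrapped element by element.
#import "RTCRtpParameters+Private.h"

static void SketchNestedConversion(const webrtc::RtpParameters &native) {
  RTCRtpParameters *params =
      [[RTCRtpParameters alloc] initWithNativeParameters:native];
  NSCAssert(params.headerExtensions.count == native.header_extensions.size(),
            @"header extensions preserved");
  NSCAssert(params.encodings.count == native.encodings.size(), @"encodings preserved");
  NSCAssert(params.codecs.count == native.codecs.size(), @"codecs preserved");
}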

View File

@@ -1,50 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCRtpReceiver.h"
#include "api/rtpreceiverinterface.h"
NS_ASSUME_NONNULL_BEGIN
@class RTCPeerConnectionFactory;
namespace webrtc {
class RtpReceiverDelegateAdapter : public RtpReceiverObserverInterface {
public:
RtpReceiverDelegateAdapter(RTCRtpReceiver* receiver);
void OnFirstPacketReceived(cricket::MediaType media_type) override;
private:
__weak RTCRtpReceiver* receiver_;
};
} // namespace webrtc
@interface RTCRtpReceiver ()
@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::RtpReceiverInterface> nativeRtpReceiver;
/** Initialize an RTCRtpReceiver with a native RtpReceiverInterface. */
- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
nativeRtpReceiver:(rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver
NS_DESIGNATED_INITIALIZER;
+ (RTCRtpMediaType)mediaTypeForNativeMediaType:(cricket::MediaType)nativeMediaType;
+ (cricket::MediaType)nativeMediaTypeForMediaType:(RTCRtpMediaType)mediaType;
+ (NSString*)stringForMediaType:(RTCRtpMediaType)mediaType;
@end
NS_ASSUME_NONNULL_END
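
A sketch of adopting the public delegate that the adapter declared above forwards to (hypothetical, not part of this CL; MyReceiverObserver is an illustrative name):

// Hypothetical delegate adoption: the adapter invokes this on the first packet.
#import "WebRTC/RTCRtpReceiver.h"

@interface MyReceiverObserver : NSObject <RTCRtpReceiverDelegate>
@end

@implementation MyReceiverObserver
- (void)rtpReceiver:(RTCRtpReceiver *)rtpReceiver
    didReceiveFirstPacketForMediaType:(RTCRtpMediaType)mediaType {
  NSLog(@"First packet received, media type %ld", (long)mediaType);
}
@end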

View File

@@ -1,154 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCRtpReceiver+Private.h"
#import "NSString+StdString.h"
#import "RTCMediaStreamTrack+Private.h"
#import "RTCRtpParameters+Private.h"
#import "WebRTC/RTCLogging.h"
#include "api/mediastreaminterface.h"
namespace webrtc {
RtpReceiverDelegateAdapter::RtpReceiverDelegateAdapter(
RTCRtpReceiver *receiver) {
RTC_CHECK(receiver);
receiver_ = receiver;
}
void RtpReceiverDelegateAdapter::OnFirstPacketReceived(
cricket::MediaType media_type) {
RTCRtpMediaType packet_media_type =
[RTCRtpReceiver mediaTypeForNativeMediaType:media_type];
RTCRtpReceiver *receiver = receiver_;
[receiver.delegate rtpReceiver:receiver didReceiveFirstPacketForMediaType:packet_media_type];
}
} // namespace webrtc
@implementation RTCRtpReceiver {
RTCPeerConnectionFactory *_factory;
rtc::scoped_refptr<webrtc::RtpReceiverInterface> _nativeRtpReceiver;
std::unique_ptr<webrtc::RtpReceiverDelegateAdapter> _observer;
}
@synthesize delegate = _delegate;
- (NSString *)receiverId {
return [NSString stringForStdString:_nativeRtpReceiver->id()];
}
- (RTCRtpParameters *)parameters {
return [[RTCRtpParameters alloc]
initWithNativeParameters:_nativeRtpReceiver->GetParameters()];
}
- (void)setParameters:(RTCRtpParameters *)parameters {
if (!_nativeRtpReceiver->SetParameters(parameters.nativeParameters)) {
RTCLogError(@"RTCRtpReceiver(%p): Failed to set parameters: %@", self,
parameters);
}
}
- (nullable RTCMediaStreamTrack *)track {
rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
_nativeRtpReceiver->track());
if (nativeTrack) {
return [RTCMediaStreamTrack mediaTrackForNativeTrack:nativeTrack factory:_factory];
}
return nil;
}
- (NSString *)description {
return [NSString stringWithFormat:@"RTCRtpReceiver {\n receiverId: %@\n}",
self.receiverId];
}
- (void)dealloc {
if (_nativeRtpReceiver) {
_nativeRtpReceiver->SetObserver(nullptr);
}
}
- (BOOL)isEqual:(id)object {
if (self == object) {
return YES;
}
if (object == nil) {
return NO;
}
if (![object isMemberOfClass:[self class]]) {
return NO;
}
RTCRtpReceiver *receiver = (RTCRtpReceiver *)object;
return _nativeRtpReceiver == receiver.nativeRtpReceiver;
}
- (NSUInteger)hash {
return (NSUInteger)_nativeRtpReceiver.get();
}
#pragma mark - Private
- (rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver {
return _nativeRtpReceiver;
}
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
nativeRtpReceiver:
(rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver {
if (self = [super init]) {
_factory = factory;
_nativeRtpReceiver = nativeRtpReceiver;
RTCLogInfo(
@"RTCRtpReceiver(%p): created receiver: %@", self, self.description);
_observer.reset(new webrtc::RtpReceiverDelegateAdapter(self));
_nativeRtpReceiver->SetObserver(_observer.get());
}
return self;
}
+ (RTCRtpMediaType)mediaTypeForNativeMediaType:
(cricket::MediaType)nativeMediaType {
switch (nativeMediaType) {
case cricket::MEDIA_TYPE_AUDIO:
return RTCRtpMediaTypeAudio;
case cricket::MEDIA_TYPE_VIDEO:
return RTCRtpMediaTypeVideo;
case cricket::MEDIA_TYPE_DATA:
return RTCRtpMediaTypeData;
}
}
+ (cricket::MediaType)nativeMediaTypeForMediaType:(RTCRtpMediaType)mediaType {
switch (mediaType) {
case RTCRtpMediaTypeAudio:
return cricket::MEDIA_TYPE_AUDIO;
case RTCRtpMediaTypeVideo:
return cricket::MEDIA_TYPE_VIDEO;
case RTCRtpMediaTypeData:
return cricket::MEDIA_TYPE_DATA;
}
}
+ (NSString *)stringForMediaType:(RTCRtpMediaType)mediaType {
switch (mediaType) {
case RTCRtpMediaTypeAudio:
return @"AUDIO";
case RTCRtpMediaTypeVideo:
return @"VIDEO";
case RTCRtpMediaTypeData:
return @"DATA";
}
}
@end
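
A sketch of the media-type helpers above (hypothetical, not part of this CL; SketchMediaTypeMapping is an illustrative name):

// Hypothetical usage sketch: the two class helpers are inverses over the enum.
#import "RTCRtpReceiver+Private.h"

static void SketchMediaTypeMapping() {
  RTCRtpMediaType objcType =
      [RTCRtpReceiver mediaTypeForNativeMediaType:cricket::MEDIA_TYPE_AUDIO];
  NSCAssert([RTCRtpReceiver nativeMediaTypeForMediaType:objcType] ==
                cricket::MEDIA_TYPE_AUDIO,
            @"mapping round-trips");
  NSCAssert([[RTCRtpReceiver stringForMediaType:objcType] isEqualToString:@"AUDIO"],
            @"audio maps to the AUDIO debug string");
}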

View File

@@ -1,30 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCRtpSender.h"
#include "api/rtpsenderinterface.h"
NS_ASSUME_NONNULL_BEGIN
@class RTCPeerConnectionFactory;
@interface RTCRtpSender ()
@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::RtpSenderInterface> nativeRtpSender;
/** Initialize an RTCRtpSender with a native RtpSenderInterface. */
- (instancetype)initWithFactory:(RTCPeerConnectionFactory*)factory
nativeRtpSender:(rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender
NS_DESIGNATED_INITIALIZER;
@end
NS_ASSUME_NONNULL_END

View File

@@ -1,105 +0,0 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCRtpSender+Private.h"
#import "NSString+StdString.h"
#import "RTCDtmfSender+Private.h"
#import "RTCMediaStreamTrack+Private.h"
#import "RTCRtpParameters+Private.h"
#import "WebRTC/RTCLogging.h"
#include "api/mediastreaminterface.h"
@implementation RTCRtpSender {
RTCPeerConnectionFactory *_factory;
rtc::scoped_refptr<webrtc::RtpSenderInterface> _nativeRtpSender;
}
@synthesize dtmfSender = _dtmfSender;
- (NSString *)senderId {
return [NSString stringForStdString:_nativeRtpSender->id()];
}
- (RTCRtpParameters *)parameters {
return [[RTCRtpParameters alloc]
initWithNativeParameters:_nativeRtpSender->GetParameters()];
}
- (void)setParameters:(RTCRtpParameters *)parameters {
if (!_nativeRtpSender->SetParameters(parameters.nativeParameters).ok()) {
RTCLogError(@"RTCRtpSender(%p): Failed to set parameters: %@", self,
parameters);
}
}
- (RTCMediaStreamTrack *)track {
rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
_nativeRtpSender->track());
if (nativeTrack) {
return [RTCMediaStreamTrack mediaTrackForNativeTrack:nativeTrack factory:_factory];
}
return nil;
}
- (void)setTrack:(RTCMediaStreamTrack *)track {
if (!_nativeRtpSender->SetTrack(track.nativeTrack)) {
RTCLogError(@"RTCRtpSender(%p): Failed to set track %@", self, track);
}
}
- (NSString *)description {
return [NSString stringWithFormat:@"RTCRtpSender {\n senderId: %@\n}",
self.senderId];
}
- (BOOL)isEqual:(id)object {
if (self == object) {
return YES;
}
if (object == nil) {
return NO;
}
if (![object isMemberOfClass:[self class]]) {
return NO;
}
RTCRtpSender *sender = (RTCRtpSender *)object;
return _nativeRtpSender == sender.nativeRtpSender;
}
- (NSUInteger)hash {
return (NSUInteger)_nativeRtpSender.get();
}
#pragma mark - Private
- (rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender {
return _nativeRtpSender;
}
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
nativeRtpSender:(rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender {
NSParameterAssert(factory);
NSParameterAssert(nativeRtpSender);
if (self = [super init]) {
_factory = factory;
_nativeRtpSender = nativeRtpSender;
rtc::scoped_refptr<webrtc::DtmfSenderInterface> nativeDtmfSender(
_nativeRtpSender->GetDtmfSender());
if (nativeDtmfSender) {
_dtmfSender = [[RTCDtmfSender alloc] initWithNativeDtmfSender:nativeDtmfSender];
}
RTCLogInfo(@"RTCRtpSender(%p): created sender: %@", self, self.description);
}
return self;
}
@end
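
A sketch of the read-modify-write parameters flow this sender supports (hypothetical, not part of this CL; SketchCapSenderBitrate is an illustrative name, and the sender is assumed to come from an RTCPeerConnection created elsewhere):

// Hypothetical usage sketch: parameters must be fetched, edited, and set back.
#import "WebRTC/RTCRtpSender.h"

static void SketchCapSenderBitrate(RTCRtpSender *sender) {
  RTCRtpParameters *params = sender.parameters;
  // firstObject is nil until the sender has an encoding; messaging nil is a no-op.
  params.encodings.firstObject.maxBitrateBps = @(300000);
  sender.parameters = params;  // Native SetParameters may reject stale parameters.
}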

Some files were not shown because too many files have changed in this diff.