Split iOS SDK into separate targets

This CL splits the iOS SDK into separate static libraries for video,
audio, ui, common, and peerconnection-related code. This will make it
easier in the future to compile WebRTC without unneeded components.

BUG=webrtc:4867

Review-Url: https://codereview.webrtc.org/2862543002
Cr-Commit-Position: refs/heads/master@{#18166}
kthelgason authored on 2017-05-16 07:06:59 -07:00
committed by Commit bot
parent 860249ec62
commit 52c83fe710
116 changed files with 328 additions and 272 deletions

RTCAVFoundationVideoSource+Private.h

@@ -0,0 +1,27 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCAVFoundationVideoSource.h"
#include "avfoundationvideocapturer.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCAVFoundationVideoSource ()
@property(nonatomic, readonly) webrtc::AVFoundationVideoCapturer *capturer;
/** Initialize an RTCAVFoundationVideoSource with constraints. */
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
constraints:(nullable RTCMediaConstraints *)constraints;
@end
NS_ASSUME_NONNULL_END

RTCAVFoundationVideoSource.mm

@@ -0,0 +1,62 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCAVFoundationVideoSource+Private.h"
#import "RTCMediaConstraints+Private.h"
#import "RTCPeerConnectionFactory+Private.h"
#import "RTCVideoSource+Private.h"
@implementation RTCAVFoundationVideoSource {
webrtc::AVFoundationVideoCapturer *_capturer;
}
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
constraints:(RTCMediaConstraints *)constraints {
NSParameterAssert(factory);
// We pass ownership of the capturer to the source, but since we own
// the source, it should be ok to keep a raw pointer to the
// capturer.
_capturer = new webrtc::AVFoundationVideoCapturer();
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source =
factory.nativeFactory->CreateVideoSource(
std::unique_ptr<cricket::VideoCapturer>(_capturer),
constraints.nativeConstraints.get());
return [super initWithNativeVideoSource:source];
}
- (void)adaptOutputFormatToWidth:(int)width
height:(int)height
fps:(int)fps {
self.capturer->AdaptOutputFormat(width, height, fps);
}
- (BOOL)canUseBackCamera {
return self.capturer->CanUseBackCamera();
}
- (BOOL)useBackCamera {
return self.capturer->GetUseBackCamera();
}
- (void)setUseBackCamera:(BOOL)useBackCamera {
self.capturer->SetUseBackCamera(useBackCamera);
}
- (AVCaptureSession *)captureSession {
return self.capturer->GetCaptureSession();
}
- (webrtc::AVFoundationVideoCapturer *)capturer {
return _capturer;
}
@end

RTCAudioSource+Private.h

@@ -0,0 +1,32 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCAudioSource.h"
#import "RTCMediaSource+Private.h"
@interface RTCAudioSource ()
/**
* The AudioSourceInterface object passed to this RTCAudioSource during
* construction.
*/
@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::AudioSourceInterface> nativeAudioSource;
/** Initialize an RTCAudioSource from a native AudioSourceInterface. */
- (instancetype)initWithNativeAudioSource:
(rtc::scoped_refptr<webrtc::AudioSourceInterface>)nativeAudioSource
NS_DESIGNATED_INITIALIZER;
- (instancetype)initWithNativeMediaSource:
(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
type:(RTCMediaSourceType)type NS_UNAVAILABLE;
@end

RTCAudioSource.mm

@@ -0,0 +1,48 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCAudioSource+Private.h"
#include "webrtc/base/checks.h"
@implementation RTCAudioSource {
}
@synthesize volume = _volume;
@synthesize nativeAudioSource = _nativeAudioSource;
- (instancetype)initWithNativeAudioSource:
(rtc::scoped_refptr<webrtc::AudioSourceInterface>)nativeAudioSource {
RTC_DCHECK(nativeAudioSource);
if (self = [super initWithNativeMediaSource:nativeAudioSource
type:RTCMediaSourceTypeAudio]) {
_nativeAudioSource = nativeAudioSource;
}
return self;
}
- (instancetype)initWithNativeMediaSource:
(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
type:(RTCMediaSourceType)type {
RTC_NOTREACHED();
return nil;
}
- (NSString *)description {
NSString *stateString = [[self class] stringForState:self.state];
return [NSString stringWithFormat:@"RTCAudioSource( %p ): %@", self, stateString];
}
- (void)setVolume:(double)volume {
_volume = volume;
_nativeAudioSource->SetVolume(volume);
}
@end

RTCAudioTrack+Private.h

@@ -0,0 +1,31 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCAudioTrack.h"
#include "webrtc/api/mediastreaminterface.h"
NS_ASSUME_NONNULL_BEGIN
@class RTCPeerConnectionFactory;
@interface RTCAudioTrack ()
/** AudioTrackInterface created or passed in at construction. */
@property(nonatomic, readonly)
rtc::scoped_refptr<webrtc::AudioTrackInterface> nativeAudioTrack;
/** Initialize an RTCAudioTrack with an id. */
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
source:(RTCAudioSource *)source
trackId:(NSString *)trackId;
@end
NS_ASSUME_NONNULL_END

RTCAudioTrack.mm

@@ -0,0 +1,66 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCAudioTrack+Private.h"
#import "NSString+StdString.h"
#import "RTCAudioSource+Private.h"
#import "RTCMediaStreamTrack+Private.h"
#import "RTCPeerConnectionFactory+Private.h"
#include "webrtc/base/checks.h"
@implementation RTCAudioTrack
@synthesize source = _source;
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
source:(RTCAudioSource *)source
trackId:(NSString *)trackId {
RTC_DCHECK(factory);
RTC_DCHECK(source);
RTC_DCHECK(trackId.length);
std::string nativeId = [NSString stdStringForString:trackId];
rtc::scoped_refptr<webrtc::AudioTrackInterface> track =
factory.nativeFactory->CreateAudioTrack(nativeId, source.nativeAudioSource);
if ([self initWithNativeTrack:track type:RTCMediaStreamTrackTypeAudio]) {
_source = source;
}
return self;
}
- (instancetype)initWithNativeTrack:
(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
type:(RTCMediaStreamTrackType)type {
NSParameterAssert(nativeTrack);
NSParameterAssert(type == RTCMediaStreamTrackTypeAudio);
return [super initWithNativeTrack:nativeTrack type:type];
}
- (RTCAudioSource *)source {
if (!_source) {
rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
self.nativeAudioTrack->GetSource();
if (source) {
_source = [[RTCAudioSource alloc] initWithNativeAudioSource:source.get()];
}
}
return _source;
}
#pragma mark - Private
- (rtc::scoped_refptr<webrtc::AudioTrackInterface>)nativeAudioTrack {
return static_cast<webrtc::AudioTrackInterface *>(self.nativeTrack.get());
}
@end

RTCCameraVideoCapturer.m

@@ -0,0 +1,424 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import "WebRTC/RTCCameraVideoCapturer.h"
#import "WebRTC/RTCLogging.h"
#if TARGET_OS_IPHONE
#import "WebRTC/UIDevice+RTCDevice.h"
#endif
#import "RTCDispatcher+Private.h"
const int64_t kNanosecondsPerSecond = 1000000000;
static inline BOOL IsMediaSubTypeSupported(FourCharCode mediaSubType) {
return (mediaSubType == kCVPixelFormatType_420YpCbCr8PlanarFullRange ||
mediaSubType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange);
}
@interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate>
@property(nonatomic, readonly) dispatch_queue_t frameQueue;
@end
@implementation RTCCameraVideoCapturer {
AVCaptureVideoDataOutput *_videoDataOutput;
AVCaptureSession *_captureSession;
AVCaptureDevice *_currentDevice;
RTCVideoRotation _rotation;
BOOL _hasRetriedOnFatalError;
BOOL _isRunning;
// Will the session be running once all asynchronous operations have been completed?
BOOL _willBeRunning;
}
@synthesize frameQueue = _frameQueue;
@synthesize captureSession = _captureSession;
- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
if (self = [super initWithDelegate:delegate]) {
// Create the capture session and all relevant inputs and outputs. We need
// to do this in init because the application may want the capture session
// before we start the capturer for e.g. AVCaptureVideoPreviewLayer. All objects
// created here are retained until dealloc and never recreated.
if (![self setupCaptureSession]) {
return nil;
}
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
[center addObserver:self
selector:@selector(deviceOrientationDidChange:)
name:UIDeviceOrientationDidChangeNotification
object:nil];
[center addObserver:self
selector:@selector(handleCaptureSessionInterruption:)
name:AVCaptureSessionWasInterruptedNotification
object:_captureSession];
[center addObserver:self
selector:@selector(handleCaptureSessionInterruptionEnded:)
name:AVCaptureSessionInterruptionEndedNotification
object:_captureSession];
[center addObserver:self
selector:@selector(handleApplicationDidBecomeActive:)
name:UIApplicationDidBecomeActiveNotification
object:[UIApplication sharedApplication]];
#endif
[center addObserver:self
selector:@selector(handleCaptureSessionRuntimeError:)
name:AVCaptureSessionRuntimeErrorNotification
object:_captureSession];
[center addObserver:self
selector:@selector(handleCaptureSessionDidStartRunning:)
name:AVCaptureSessionDidStartRunningNotification
object:_captureSession];
[center addObserver:self
selector:@selector(handleCaptureSessionDidStopRunning:)
name:AVCaptureSessionDidStopRunningNotification
object:_captureSession];
}
return self;
}
- (void)dealloc {
NSAssert(
!_willBeRunning,
@"Session was still running in RTCCameraVideoCapturer dealloc. Forgot to call stopCapture?");
[[NSNotificationCenter defaultCenter] removeObserver:self];
}
+ (NSArray<AVCaptureDevice *> *)captureDevices {
return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
}
+ (NSArray<AVCaptureDeviceFormat *> *)supportedFormatsForDevice:(AVCaptureDevice *)device {
NSMutableArray<AVCaptureDeviceFormat *> *eligibleDeviceFormats = [NSMutableArray array];
for (AVCaptureDeviceFormat *format in device.formats) {
// Filter out subTypes that we currently don't support in the stack
FourCharCode mediaSubType = CMFormatDescriptionGetMediaSubType(format.formatDescription);
if (IsMediaSubTypeSupported(mediaSubType)) {
[eligibleDeviceFormats addObject:format];
}
}
return eligibleDeviceFormats;
}
- (void)startCaptureWithDevice:(AVCaptureDevice *)device
format:(AVCaptureDeviceFormat *)format
fps:(NSInteger)fps {
_willBeRunning = true;
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
RTCLogInfo("startCaptureWithDevice %@ @ %zd fps", format, fps);
#if TARGET_OS_IPHONE
[[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
_currentDevice = device;
NSError *error = nil;
if ([_currentDevice lockForConfiguration:&error]) {
[self updateDeviceCaptureFormat:format fps:fps];
} else {
RTCLogError(@"Failed to lock device %@. Error: %@", _currentDevice,
error.userInfo);
return;
}
[self reconfigureCaptureSessionInput];
[self updateOrientation];
[_captureSession startRunning];
[_currentDevice unlockForConfiguration];
_isRunning = true;
}];
}
- (void)stopCapture {
_willBeRunning = false;
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
RTCLogInfo("Stop");
_currentDevice = nil;
for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
[_captureSession removeInput:oldInput];
}
[_captureSession stopRunning];
#if TARGET_OS_IPHONE
[[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
_isRunning = false;
}];
}
#pragma mark iOS notifications
#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
[self updateOrientation];
}];
}
#endif
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
NSParameterAssert(captureOutput == _videoDataOutput);
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) ||
!CMSampleBufferDataIsReady(sampleBuffer)) {
return;
}
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (pixelBuffer == nil) {
return;
}
int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) *
kNanosecondsPerSecond;
RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer
rotation:_rotation
timeStampNs:timeStampNs];
[self.delegate capturer:self didCaptureVideoFrame:videoFrame];
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
RTCLogError(@"Dropped sample buffer.");
}
#pragma mark - AVCaptureSession notifications
- (void)handleCaptureSessionInterruption:(NSNotification *)notification {
NSString *reasonString = nil;
#if defined(__IPHONE_9_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \
__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_9_0
if ([UIDevice isIOS9OrLater]) {
NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey];
if (reason) {
switch (reason.intValue) {
case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableInBackground:
reasonString = @"VideoDeviceNotAvailableInBackground";
break;
case AVCaptureSessionInterruptionReasonAudioDeviceInUseByAnotherClient:
reasonString = @"AudioDeviceInUseByAnotherClient";
break;
case AVCaptureSessionInterruptionReasonVideoDeviceInUseByAnotherClient:
reasonString = @"VideoDeviceInUseByAnotherClient";
break;
case AVCaptureSessionInterruptionReasonVideoDeviceNotAvailableWithMultipleForegroundApps:
reasonString = @"VideoDeviceNotAvailableWithMultipleForegroundApps";
break;
}
}
}
#endif
RTCLog(@"Capture session interrupted: %@", reasonString);
}
- (void)handleCaptureSessionInterruptionEnded:(NSNotification *)notification {
RTCLog(@"Capture session interruption ended.");
}
- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
NSError *error = [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
RTCLogError(@"Capture session runtime error: %@", error);
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
#if TARGET_OS_IPHONE
if (error.code == AVErrorMediaServicesWereReset) {
[self handleNonFatalError];
} else {
[self handleFatalError];
}
#else
[self handleFatalError];
#endif
}];
}
- (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification {
RTCLog(@"Capture session started.");
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
// If we successfully restarted after an unknown error,
// allow future retries on fatal errors.
_hasRetriedOnFatalError = NO;
}];
}
- (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification {
RTCLog(@"Capture session stopped.");
}
- (void)handleFatalError {
[RTCDispatcher
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
if (!_hasRetriedOnFatalError) {
RTCLogWarning(@"Attempting to recover from fatal capture error.");
[self handleNonFatalError];
_hasRetriedOnFatalError = YES;
} else {
RTCLogError(@"Previous fatal error recovery failed.");
}
}];
}
- (void)handleNonFatalError {
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
RTCLog(@"Restarting capture session after error.");
if (_isRunning) {
[_captureSession startRunning];
}
}];
}
#if TARGET_OS_IPHONE
#pragma mark - UIApplication notifications
- (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
if (_isRunning && !_captureSession.isRunning) {
RTCLog(@"Restarting capture session on active.");
[_captureSession startRunning];
}
}];
}
#endif // TARGET_OS_IPHONE
#pragma mark - Private
- (dispatch_queue_t)frameQueue {
if (!_frameQueue) {
_frameQueue =
dispatch_queue_create("org.webrtc.avfoundationvideocapturer.video", DISPATCH_QUEUE_SERIAL);
dispatch_set_target_queue(_frameQueue,
dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0));
}
return _frameQueue;
}
- (BOOL)setupCaptureSession {
NSAssert(_captureSession == nil, @"Setup capture session called twice.");
_captureSession = [[AVCaptureSession alloc] init];
#if defined(WEBRTC_IOS)
_captureSession.sessionPreset = AVCaptureSessionPresetInputPriority;
_captureSession.usesApplicationAudioSession = NO;
#endif
[self setupVideoDataOutput];
// Add the output.
if (![_captureSession canAddOutput:_videoDataOutput]) {
RTCLogError(@"Video data output unsupported.");
return NO;
}
[_captureSession addOutput:_videoDataOutput];
return YES;
}
- (void)setupVideoDataOutput {
NSAssert(_videoDataOutput == nil, @"Setup video data output called twice.");
// Make the capturer output NV12. Ideally we want I420 but that's not
// currently supported on iPhone / iPad.
AVCaptureVideoDataOutput *videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
videoDataOutput.videoSettings = @{
(NSString *)
// TODO(denicija): Remove this color conversion and use the original capture format directly.
kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
};
videoDataOutput.alwaysDiscardsLateVideoFrames = NO;
[videoDataOutput setSampleBufferDelegate:self queue:self.frameQueue];
_videoDataOutput = videoDataOutput;
}
#pragma mark - Private, called inside capture queue
- (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps {
NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
@"updateDeviceCaptureFormat must be called on the capture queue.");
@try {
_currentDevice.activeFormat = format;
_currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps);
} @catch (NSException *exception) {
RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo);
return;
}
}
- (void)reconfigureCaptureSessionInput {
NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
@"reconfigureCaptureSessionInput must be called on the capture queue.");
NSError *error = nil;
AVCaptureDeviceInput *input =
[AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error];
if (!input) {
RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription);
return;
}
[_captureSession beginConfiguration];
for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
[_captureSession removeInput:oldInput];
}
if ([_captureSession canAddInput:input]) {
[_captureSession addInput:input];
} else {
RTCLogError(@"Cannot add camera as an input to the session.");
}
[_captureSession commitConfiguration];
}
- (void)updateOrientation {
NSAssert([RTCDispatcher isOnQueueForType:RTCDispatcherTypeCaptureSession],
@"updateOrientation must be called on the capture queue.");
#if TARGET_OS_IPHONE
BOOL usingFrontCamera = _currentDevice.position == AVCaptureDevicePositionFront;
switch ([UIDevice currentDevice].orientation) {
case UIDeviceOrientationPortrait:
_rotation = RTCVideoRotation_90;
break;
case UIDeviceOrientationPortraitUpsideDown:
_rotation = RTCVideoRotation_270;
break;
case UIDeviceOrientationLandscapeLeft:
_rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0;
break;
case UIDeviceOrientationLandscapeRight:
_rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180;
break;
case UIDeviceOrientationFaceUp:
case UIDeviceOrientationFaceDown:
case UIDeviceOrientationUnknown:
// Ignore.
break;
}
#endif
}
@end

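As a rough usage sketch of the new capturer API above (not part of this CL; the delegate object and the choice of device, format, and fps are application decisions):

static RTCCameraVideoCapturer *StartCapture(id<RTCVideoCapturerDelegate> delegate) {
  // The delegate receives frames via -capturer:didCaptureVideoFrame:.
  RTCCameraVideoCapturer *capturer =
      [[RTCCameraVideoCapturer alloc] initWithDelegate:delegate];
  // Pick any available camera and one of its supported formats.
  AVCaptureDevice *device = [RTCCameraVideoCapturer captureDevices].firstObject;
  AVCaptureDeviceFormat *format =
      [RTCCameraVideoCapturer supportedFormatsForDevice:device].firstObject;
  [capturer startCaptureWithDevice:device format:format fps:30];
  return capturer;  // The caller must call -stopCapture before releasing it.
}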
RTCConfiguration+Private.h

@@ -0,0 +1,73 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCConfiguration.h"
#include "webrtc/api/peerconnectioninterface.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCConfiguration ()
+ (webrtc::PeerConnectionInterface::IceTransportsType)
nativeTransportsTypeForTransportPolicy:(RTCIceTransportPolicy)policy;
+ (RTCIceTransportPolicy)transportPolicyForTransportsType:
(webrtc::PeerConnectionInterface::IceTransportsType)nativeType;
+ (NSString *)stringForTransportPolicy:(RTCIceTransportPolicy)policy;
+ (webrtc::PeerConnectionInterface::BundlePolicy)nativeBundlePolicyForPolicy:
(RTCBundlePolicy)policy;
+ (RTCBundlePolicy)bundlePolicyForNativePolicy:
(webrtc::PeerConnectionInterface::BundlePolicy)nativePolicy;
+ (NSString *)stringForBundlePolicy:(RTCBundlePolicy)policy;
+ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeRtcpMuxPolicyForPolicy:
(RTCRtcpMuxPolicy)policy;
+ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativePolicy:
(webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativePolicy;
+ (NSString *)stringForRtcpMuxPolicy:(RTCRtcpMuxPolicy)policy;
+ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)
nativeTcpCandidatePolicyForPolicy:(RTCTcpCandidatePolicy)policy;
+ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativePolicy:
(webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativePolicy;
+ (NSString *)stringForTcpCandidatePolicy:(RTCTcpCandidatePolicy)policy;
+ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)
nativeCandidateNetworkPolicyForPolicy:(RTCCandidateNetworkPolicy)policy;
+ (RTCCandidateNetworkPolicy)candidateNetworkPolicyForNativePolicy:
(webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativePolicy;
+ (NSString *)stringForCandidateNetworkPolicy:(RTCCandidateNetworkPolicy)policy;
+ (rtc::KeyType)nativeEncryptionKeyTypeForKeyType:(RTCEncryptionKeyType)keyType;
/**
* RTCConfiguration struct representation of this RTCConfiguration. This is
* needed to pass to the underlying C++ APIs.
*/
- (webrtc::PeerConnectionInterface::RTCConfiguration *)
createNativeConfiguration;
- (instancetype)initWithNativeConfiguration:
(const webrtc::PeerConnectionInterface::RTCConfiguration &)config NS_DESIGNATED_INITIALIZER;
@end
NS_ASSUME_NONNULL_END

RTCConfiguration.mm

@@ -0,0 +1,371 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCConfiguration+Private.h"
#include <memory>
#import "RTCIceServer+Private.h"
#import "WebRTC/RTCLogging.h"
#include "webrtc/base/rtccertificategenerator.h"
#include "webrtc/base/sslidentity.h"
@implementation RTCConfiguration
@synthesize iceServers = _iceServers;
@synthesize iceTransportPolicy = _iceTransportPolicy;
@synthesize bundlePolicy = _bundlePolicy;
@synthesize rtcpMuxPolicy = _rtcpMuxPolicy;
@synthesize tcpCandidatePolicy = _tcpCandidatePolicy;
@synthesize candidateNetworkPolicy = _candidateNetworkPolicy;
@synthesize continualGatheringPolicy = _continualGatheringPolicy;
@synthesize audioJitterBufferMaxPackets = _audioJitterBufferMaxPackets;
@synthesize audioJitterBufferFastAccelerate = _audioJitterBufferFastAccelerate;
@synthesize iceConnectionReceivingTimeout = _iceConnectionReceivingTimeout;
@synthesize iceBackupCandidatePairPingInterval =
_iceBackupCandidatePairPingInterval;
@synthesize keyType = _keyType;
@synthesize iceCandidatePoolSize = _iceCandidatePoolSize;
@synthesize shouldPruneTurnPorts = _shouldPruneTurnPorts;
@synthesize shouldPresumeWritableWhenFullyRelayed =
_shouldPresumeWritableWhenFullyRelayed;
@synthesize iceCheckMinInterval = _iceCheckMinInterval;
- (instancetype)init {
// Copy defaults.
webrtc::PeerConnectionInterface::RTCConfiguration config(
webrtc::PeerConnectionInterface::RTCConfigurationType::kAggressive);
return [self initWithNativeConfiguration:config];
}
- (instancetype)initWithNativeConfiguration:
(const webrtc::PeerConnectionInterface::RTCConfiguration &)config {
if (self = [super init]) {
NSMutableArray *iceServers = [NSMutableArray array];
for (const webrtc::PeerConnectionInterface::IceServer& server : config.servers) {
RTCIceServer *iceServer = [[RTCIceServer alloc] initWithNativeServer:server];
[iceServers addObject:iceServer];
}
_iceServers = iceServers;
_iceTransportPolicy =
[[self class] transportPolicyForTransportsType:config.type];
_bundlePolicy =
[[self class] bundlePolicyForNativePolicy:config.bundle_policy];
_rtcpMuxPolicy =
[[self class] rtcpMuxPolicyForNativePolicy:config.rtcp_mux_policy];
_tcpCandidatePolicy = [[self class] tcpCandidatePolicyForNativePolicy:
config.tcp_candidate_policy];
_candidateNetworkPolicy = [[self class]
candidateNetworkPolicyForNativePolicy:config.candidate_network_policy];
webrtc::PeerConnectionInterface::ContinualGatheringPolicy nativePolicy =
config.continual_gathering_policy;
_continualGatheringPolicy =
[[self class] continualGatheringPolicyForNativePolicy:nativePolicy];
_audioJitterBufferMaxPackets = config.audio_jitter_buffer_max_packets;
_audioJitterBufferFastAccelerate = config.audio_jitter_buffer_fast_accelerate;
_iceConnectionReceivingTimeout = config.ice_connection_receiving_timeout;
_iceBackupCandidatePairPingInterval =
config.ice_backup_candidate_pair_ping_interval;
_keyType = RTCEncryptionKeyTypeECDSA;
_iceCandidatePoolSize = config.ice_candidate_pool_size;
_shouldPruneTurnPorts = config.prune_turn_ports;
_shouldPresumeWritableWhenFullyRelayed =
config.presume_writable_when_fully_relayed;
if (config.ice_check_min_interval) {
_iceCheckMinInterval =
[NSNumber numberWithInt:*config.ice_check_min_interval];
}
}
return self;
}
- (NSString *)description {
return [NSString stringWithFormat:
@"RTCConfiguration: {\n%@\n%@\n%@\n%@\n%@\n%@\n%@\n%d\n%d\n%d\n%d\n%d\n%d\n%d\n%@\n}\n",
_iceServers,
[[self class] stringForTransportPolicy:_iceTransportPolicy],
[[self class] stringForBundlePolicy:_bundlePolicy],
[[self class] stringForRtcpMuxPolicy:_rtcpMuxPolicy],
[[self class] stringForTcpCandidatePolicy:_tcpCandidatePolicy],
[[self class] stringForCandidateNetworkPolicy:_candidateNetworkPolicy],
[[self class]
stringForContinualGatheringPolicy:_continualGatheringPolicy],
_audioJitterBufferMaxPackets,
_audioJitterBufferFastAccelerate,
_iceConnectionReceivingTimeout,
_iceBackupCandidatePairPingInterval,
_iceCandidatePoolSize,
_shouldPruneTurnPorts,
_shouldPresumeWritableWhenFullyRelayed,
_iceCheckMinInterval];
}
#pragma mark - Private
- (webrtc::PeerConnectionInterface::RTCConfiguration *)
createNativeConfiguration {
std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration>
nativeConfig(new webrtc::PeerConnectionInterface::RTCConfiguration(
webrtc::PeerConnectionInterface::RTCConfigurationType::kAggressive));
for (RTCIceServer *iceServer in _iceServers) {
nativeConfig->servers.push_back(iceServer.nativeServer);
}
nativeConfig->type =
[[self class] nativeTransportsTypeForTransportPolicy:_iceTransportPolicy];
nativeConfig->bundle_policy =
[[self class] nativeBundlePolicyForPolicy:_bundlePolicy];
nativeConfig->rtcp_mux_policy =
[[self class] nativeRtcpMuxPolicyForPolicy:_rtcpMuxPolicy];
nativeConfig->tcp_candidate_policy =
[[self class] nativeTcpCandidatePolicyForPolicy:_tcpCandidatePolicy];
nativeConfig->candidate_network_policy = [[self class]
nativeCandidateNetworkPolicyForPolicy:_candidateNetworkPolicy];
nativeConfig->continual_gathering_policy = [[self class]
nativeContinualGatheringPolicyForPolicy:_continualGatheringPolicy];
nativeConfig->audio_jitter_buffer_max_packets = _audioJitterBufferMaxPackets;
nativeConfig->audio_jitter_buffer_fast_accelerate =
_audioJitterBufferFastAccelerate ? true : false;
nativeConfig->ice_connection_receiving_timeout =
_iceConnectionReceivingTimeout;
nativeConfig->ice_backup_candidate_pair_ping_interval =
_iceBackupCandidatePairPingInterval;
rtc::KeyType keyType =
[[self class] nativeEncryptionKeyTypeForKeyType:_keyType];
// Generate non-default certificate.
if (keyType != rtc::KT_DEFAULT) {
rtc::scoped_refptr<rtc::RTCCertificate> certificate =
rtc::RTCCertificateGenerator::GenerateCertificate(
rtc::KeyParams(keyType), rtc::Optional<uint64_t>());
if (!certificate) {
RTCLogError(@"Failed to generate certificate.");
return nullptr;
}
nativeConfig->certificates.push_back(certificate);
}
nativeConfig->ice_candidate_pool_size = _iceCandidatePoolSize;
nativeConfig->prune_turn_ports = _shouldPruneTurnPorts ? true : false;
nativeConfig->presume_writable_when_fully_relayed =
_shouldPresumeWritableWhenFullyRelayed ? true : false;
if (_iceCheckMinInterval != nil) {
nativeConfig->ice_check_min_interval =
rtc::Optional<int>(_iceCheckMinInterval.intValue);
}
return nativeConfig.release();
}
+ (webrtc::PeerConnectionInterface::IceTransportsType)
nativeTransportsTypeForTransportPolicy:(RTCIceTransportPolicy)policy {
switch (policy) {
case RTCIceTransportPolicyNone:
return webrtc::PeerConnectionInterface::kNone;
case RTCIceTransportPolicyRelay:
return webrtc::PeerConnectionInterface::kRelay;
case RTCIceTransportPolicyNoHost:
return webrtc::PeerConnectionInterface::kNoHost;
case RTCIceTransportPolicyAll:
return webrtc::PeerConnectionInterface::kAll;
}
}
+ (RTCIceTransportPolicy)transportPolicyForTransportsType:
(webrtc::PeerConnectionInterface::IceTransportsType)nativeType {
switch (nativeType) {
case webrtc::PeerConnectionInterface::kNone:
return RTCIceTransportPolicyNone;
case webrtc::PeerConnectionInterface::kRelay:
return RTCIceTransportPolicyRelay;
case webrtc::PeerConnectionInterface::kNoHost:
return RTCIceTransportPolicyNoHost;
case webrtc::PeerConnectionInterface::kAll:
return RTCIceTransportPolicyAll;
}
}
+ (NSString *)stringForTransportPolicy:(RTCIceTransportPolicy)policy {
switch (policy) {
case RTCIceTransportPolicyNone:
return @"NONE";
case RTCIceTransportPolicyRelay:
return @"RELAY";
case RTCIceTransportPolicyNoHost:
return @"NO_HOST";
case RTCIceTransportPolicyAll:
return @"ALL";
}
}
+ (webrtc::PeerConnectionInterface::BundlePolicy)nativeBundlePolicyForPolicy:
(RTCBundlePolicy)policy {
switch (policy) {
case RTCBundlePolicyBalanced:
return webrtc::PeerConnectionInterface::kBundlePolicyBalanced;
case RTCBundlePolicyMaxCompat:
return webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat;
case RTCBundlePolicyMaxBundle:
return webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle;
}
}
+ (RTCBundlePolicy)bundlePolicyForNativePolicy:
(webrtc::PeerConnectionInterface::BundlePolicy)nativePolicy {
switch (nativePolicy) {
case webrtc::PeerConnectionInterface::kBundlePolicyBalanced:
return RTCBundlePolicyBalanced;
case webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat:
return RTCBundlePolicyMaxCompat;
case webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle:
return RTCBundlePolicyMaxBundle;
}
}
+ (NSString *)stringForBundlePolicy:(RTCBundlePolicy)policy {
switch (policy) {
case RTCBundlePolicyBalanced:
return @"BALANCED";
case RTCBundlePolicyMaxCompat:
return @"MAX_COMPAT";
case RTCBundlePolicyMaxBundle:
return @"MAX_BUNDLE";
}
}
+ (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeRtcpMuxPolicyForPolicy:
(RTCRtcpMuxPolicy)policy {
switch (policy) {
case RTCRtcpMuxPolicyNegotiate:
return webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate;
case RTCRtcpMuxPolicyRequire:
return webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire;
}
}
+ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativePolicy:
(webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativePolicy {
switch (nativePolicy) {
case webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate:
return RTCRtcpMuxPolicyNegotiate;
case webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire:
return RTCRtcpMuxPolicyRequire;
}
}
+ (NSString *)stringForRtcpMuxPolicy:(RTCRtcpMuxPolicy)policy {
switch (policy) {
case RTCRtcpMuxPolicyNegotiate:
return @"NEGOTIATE";
case RTCRtcpMuxPolicyRequire:
return @"REQUIRE";
}
}
+ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)
nativeTcpCandidatePolicyForPolicy:(RTCTcpCandidatePolicy)policy {
switch (policy) {
case RTCTcpCandidatePolicyEnabled:
return webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled;
case RTCTcpCandidatePolicyDisabled:
return webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled;
}
}
+ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)
nativeCandidateNetworkPolicyForPolicy:(RTCCandidateNetworkPolicy)policy {
switch (policy) {
case RTCCandidateNetworkPolicyAll:
return webrtc::PeerConnectionInterface::kCandidateNetworkPolicyAll;
case RTCCandidateNetworkPolicyLowCost:
return webrtc::PeerConnectionInterface::kCandidateNetworkPolicyLowCost;
}
}
+ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativePolicy:
(webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativePolicy {
switch (nativePolicy) {
case webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled:
return RTCTcpCandidatePolicyEnabled;
case webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled:
return RTCTcpCandidatePolicyDisabled;
}
}
+ (NSString *)stringForTcpCandidatePolicy:(RTCTcpCandidatePolicy)policy {
switch (policy) {
case RTCTcpCandidatePolicyEnabled:
return @"TCP_ENABLED";
case RTCTcpCandidatePolicyDisabled:
return @"TCP_DISABLED";
}
}
+ (RTCCandidateNetworkPolicy)candidateNetworkPolicyForNativePolicy:
(webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativePolicy {
switch (nativePolicy) {
case webrtc::PeerConnectionInterface::kCandidateNetworkPolicyAll:
return RTCCandidateNetworkPolicyAll;
case webrtc::PeerConnectionInterface::kCandidateNetworkPolicyLowCost:
return RTCCandidateNetworkPolicyLowCost;
}
}
+ (NSString *)stringForCandidateNetworkPolicy:
(RTCCandidateNetworkPolicy)policy {
switch (policy) {
case RTCCandidateNetworkPolicyAll:
return @"CANDIDATE_ALL_NETWORKS";
case RTCCandidateNetworkPolicyLowCost:
return @"CANDIDATE_LOW_COST_NETWORKS";
}
}
+ (webrtc::PeerConnectionInterface::ContinualGatheringPolicy)
nativeContinualGatheringPolicyForPolicy:
(RTCContinualGatheringPolicy)policy {
switch (policy) {
case RTCContinualGatheringPolicyGatherOnce:
return webrtc::PeerConnectionInterface::GATHER_ONCE;
case RTCContinualGatheringPolicyGatherContinually:
return webrtc::PeerConnectionInterface::GATHER_CONTINUALLY;
}
}
+ (RTCContinualGatheringPolicy)continualGatheringPolicyForNativePolicy:
(webrtc::PeerConnectionInterface::ContinualGatheringPolicy)nativePolicy {
switch (nativePolicy) {
case webrtc::PeerConnectionInterface::GATHER_ONCE:
return RTCContinualGatheringPolicyGatherOnce;
case webrtc::PeerConnectionInterface::GATHER_CONTINUALLY:
return RTCContinualGatheringPolicyGatherContinually;
}
}
+ (NSString *)stringForContinualGatheringPolicy:
(RTCContinualGatheringPolicy)policy {
switch (policy) {
case RTCContinualGatheringPolicyGatherOnce:
return @"GATHER_ONCE";
case RTCContinualGatheringPolicyGatherContinually:
return @"GATHER_CONTINUALLY";
}
}
+ (rtc::KeyType)nativeEncryptionKeyTypeForKeyType:
(RTCEncryptionKeyType)keyType {
switch (keyType) {
case RTCEncryptionKeyTypeRSA:
return rtc::KT_RSA;
case RTCEncryptionKeyTypeECDSA:
return rtc::KT_ECDSA;
}
}
@end

RTCDataChannel+Private.h

@@ -0,0 +1,49 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCDataChannel.h"
#include "webrtc/api/datachannelinterface.h"
#include "webrtc/base/scoped_ref_ptr.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCDataBuffer ()
/**
* The native DataBuffer representation of this RTCDataBuffer object. This is
* needed to pass to the underlying C++ APIs.
*/
@property(nonatomic, readonly) const webrtc::DataBuffer *nativeDataBuffer;
/** Initialize an RTCDataBuffer from a native DataBuffer. */
- (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer&)nativeBuffer;
@end
@interface RTCDataChannel ()
/** Initialize an RTCDataChannel from a native DataChannelInterface. */
- (instancetype)initWithNativeDataChannel:
(rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel
NS_DESIGNATED_INITIALIZER;
+ (webrtc::DataChannelInterface::DataState)
nativeDataChannelStateForState:(RTCDataChannelState)state;
+ (RTCDataChannelState)dataChannelStateForNativeState:
(webrtc::DataChannelInterface::DataState)nativeState;
+ (NSString *)stringForState:(RTCDataChannelState)state;
@end
NS_ASSUME_NONNULL_END

RTCDataChannel.mm

@@ -0,0 +1,220 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCDataChannel+Private.h"
#import "NSString+StdString.h"
#include <memory>
namespace webrtc {
class DataChannelDelegateAdapter : public DataChannelObserver {
public:
DataChannelDelegateAdapter(RTCDataChannel *channel) { channel_ = channel; }
void OnStateChange() override {
[channel_.delegate dataChannelDidChangeState:channel_];
}
void OnMessage(const DataBuffer& buffer) override {
RTCDataBuffer *data_buffer =
[[RTCDataBuffer alloc] initWithNativeBuffer:buffer];
[channel_.delegate dataChannel:channel_
didReceiveMessageWithBuffer:data_buffer];
}
void OnBufferedAmountChange(uint64_t previousAmount) override {
id<RTCDataChannelDelegate> delegate = channel_.delegate;
SEL sel = @selector(dataChannel:didChangeBufferedAmount:);
if ([delegate respondsToSelector:sel]) {
[delegate dataChannel:channel_ didChangeBufferedAmount:previousAmount];
}
}
private:
__weak RTCDataChannel *channel_;
};
}
@implementation RTCDataBuffer {
std::unique_ptr<webrtc::DataBuffer> _dataBuffer;
}
- (instancetype)initWithData:(NSData *)data isBinary:(BOOL)isBinary {
NSParameterAssert(data);
if (self = [super init]) {
rtc::CopyOnWriteBuffer buffer(
reinterpret_cast<const uint8_t*>(data.bytes), data.length);
_dataBuffer.reset(new webrtc::DataBuffer(buffer, isBinary));
}
return self;
}
- (NSData *)data {
return [NSData dataWithBytes:_dataBuffer->data.data()
length:_dataBuffer->data.size()];
}
- (BOOL)isBinary {
return _dataBuffer->binary;
}
#pragma mark - Private
- (instancetype)initWithNativeBuffer:(const webrtc::DataBuffer&)nativeBuffer {
if (self = [super init]) {
_dataBuffer.reset(new webrtc::DataBuffer(nativeBuffer));
}
return self;
}
- (const webrtc::DataBuffer *)nativeDataBuffer {
return _dataBuffer.get();
}
@end
@implementation RTCDataChannel {
rtc::scoped_refptr<webrtc::DataChannelInterface> _nativeDataChannel;
std::unique_ptr<webrtc::DataChannelDelegateAdapter> _observer;
BOOL _isObserverRegistered;
}
@synthesize delegate = _delegate;
- (void)dealloc {
// Handles unregistering the observer properly. We need to do this because
// there may still be other references to the underlying data channel.
_nativeDataChannel->UnregisterObserver();
}
- (NSString *)label {
return [NSString stringForStdString:_nativeDataChannel->label()];
}
- (BOOL)isReliable {
return _nativeDataChannel->reliable();
}
- (BOOL)isOrdered {
return _nativeDataChannel->ordered();
}
- (NSUInteger)maxRetransmitTime {
return self.maxPacketLifeTime;
}
- (uint16_t)maxPacketLifeTime {
return _nativeDataChannel->maxRetransmitTime();
}
- (uint16_t)maxRetransmits {
return _nativeDataChannel->maxRetransmits();
}
- (NSString *)protocol {
return [NSString stringForStdString:_nativeDataChannel->protocol()];
}
- (BOOL)isNegotiated {
return _nativeDataChannel->negotiated();
}
- (NSInteger)streamId {
return self.channelId;
}
- (int)channelId {
return _nativeDataChannel->id();
}
- (RTCDataChannelState)readyState {
return [[self class] dataChannelStateForNativeState:
_nativeDataChannel->state()];
}
- (uint64_t)bufferedAmount {
return _nativeDataChannel->buffered_amount();
}
- (void)close {
_nativeDataChannel->Close();
}
- (BOOL)sendData:(RTCDataBuffer *)data {
return _nativeDataChannel->Send(*data.nativeDataBuffer);
}
- (NSString *)description {
return [NSString stringWithFormat:@"RTCDataChannel:\n%ld\n%@\n%@",
(long)self.channelId,
self.label,
[[self class]
stringForState:self.readyState]];
}
#pragma mark - Private
- (instancetype)initWithNativeDataChannel:
(rtc::scoped_refptr<webrtc::DataChannelInterface>)nativeDataChannel {
NSParameterAssert(nativeDataChannel);
if (self = [super init]) {
_nativeDataChannel = nativeDataChannel;
_observer.reset(new webrtc::DataChannelDelegateAdapter(self));
_nativeDataChannel->RegisterObserver(_observer.get());
}
return self;
}
+ (webrtc::DataChannelInterface::DataState)
nativeDataChannelStateForState:(RTCDataChannelState)state {
switch (state) {
case RTCDataChannelStateConnecting:
return webrtc::DataChannelInterface::DataState::kConnecting;
case RTCDataChannelStateOpen:
return webrtc::DataChannelInterface::DataState::kOpen;
case RTCDataChannelStateClosing:
return webrtc::DataChannelInterface::DataState::kClosing;
case RTCDataChannelStateClosed:
return webrtc::DataChannelInterface::DataState::kClosed;
}
}
+ (RTCDataChannelState)dataChannelStateForNativeState:
(webrtc::DataChannelInterface::DataState)nativeState {
switch (nativeState) {
case webrtc::DataChannelInterface::DataState::kConnecting:
return RTCDataChannelStateConnecting;
case webrtc::DataChannelInterface::DataState::kOpen:
return RTCDataChannelStateOpen;
case webrtc::DataChannelInterface::DataState::kClosing:
return RTCDataChannelStateClosing;
case webrtc::DataChannelInterface::DataState::kClosed:
return RTCDataChannelStateClosed;
}
}
+ (NSString *)stringForState:(RTCDataChannelState)state {
switch (state) {
case RTCDataChannelStateConnecting:
return @"Connecting";
case RTCDataChannelStateOpen:
return @"Open";
case RTCDataChannelStateClosing:
return @"Closing";
case RTCDataChannelStateClosed:
return @"Closed";
}
}
@end

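A small usage sketch for the data channel wrapper above (not part of this CL; `channel` is assumed to come from an RTCPeerConnection set up elsewhere):

static BOOL SendTextMessage(RTCDataChannel *channel, NSString *text) {
  if (channel.readyState != RTCDataChannelStateOpen) {
    return NO;
  }
  NSData *payload = [text dataUsingEncoding:NSUTF8StringEncoding];
  RTCDataBuffer *buffer =
      [[RTCDataBuffer alloc] initWithData:payload isBinary:NO];
  // -sendData: returns NO if the message could not be queued on the channel.
  return [channel sendData:buffer];
}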
RTCDataChannelConfiguration+Private.h

@@ -0,0 +1,23 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCDataChannelConfiguration.h"
#include "webrtc/api/datachannelinterface.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCDataChannelConfiguration ()
@property(nonatomic, readonly) webrtc::DataChannelInit nativeDataChannelInit;
@end
NS_ASSUME_NONNULL_END

RTCDataChannelConfiguration.mm

@@ -0,0 +1,83 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCDataChannelConfiguration+Private.h"
#import "NSString+StdString.h"
@implementation RTCDataChannelConfiguration
@synthesize nativeDataChannelInit = _nativeDataChannelInit;
- (BOOL)isOrdered {
return _nativeDataChannelInit.ordered;
}
- (void)setIsOrdered:(BOOL)isOrdered {
_nativeDataChannelInit.ordered = isOrdered;
}
- (NSInteger)maxRetransmitTimeMs {
return self.maxPacketLifeTime;
}
- (void)setMaxRetransmitTimeMs:(NSInteger)maxRetransmitTimeMs {
self.maxPacketLifeTime = maxRetransmitTimeMs;
}
- (int)maxPacketLifeTime {
return _nativeDataChannelInit.maxRetransmitTime;
}
- (void)setMaxPacketLifeTime:(int)maxPacketLifeTime {
_nativeDataChannelInit.maxRetransmitTime = maxPacketLifeTime;
}
- (int)maxRetransmits {
return _nativeDataChannelInit.maxRetransmits;
}
- (void)setMaxRetransmits:(int)maxRetransmits {
_nativeDataChannelInit.maxRetransmits = maxRetransmits;
}
- (NSString *)protocol {
return [NSString stringForStdString:_nativeDataChannelInit.protocol];
}
- (void)setProtocol:(NSString *)protocol {
_nativeDataChannelInit.protocol = [NSString stdStringForString:protocol];
}
- (BOOL)isNegotiated {
return _nativeDataChannelInit.negotiated;
}
- (void)setIsNegotiated:(BOOL)isNegotiated {
_nativeDataChannelInit.negotiated = isNegotiated;
}
- (int)streamId {
return self.channelId;
}
- (void)setStreamId:(int)streamId {
self.channelId = streamId;
}
- (int)channelId {
return _nativeDataChannelInit.id;
}
- (void)setChannelId:(int)channelId {
_nativeDataChannelInit.id = channelId;
}
@end

RTCIceCandidate+Private.h

@@ -0,0 +1,37 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCIceCandidate.h"
#include <memory>
#include "webrtc/api/jsep.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCIceCandidate ()
/**
* The native IceCandidateInterface representation of this RTCIceCandidate
* object. This is needed to pass to the underlying C++ APIs.
*/
@property(nonatomic, readonly)
std::unique_ptr<webrtc::IceCandidateInterface> nativeCandidate;
/**
* Initialize an RTCIceCandidate from a native IceCandidateInterface. No
* ownership is taken of the native candidate.
*/
- (instancetype)initWithNativeCandidate:
(const webrtc::IceCandidateInterface *)candidate;
@end
NS_ASSUME_NONNULL_END

RTCIceCandidate.mm

@@ -0,0 +1,76 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCIceCandidate+Private.h"
#include <memory>
#import "NSString+StdString.h"
#import "WebRTC/RTCLogging.h"
@implementation RTCIceCandidate
@synthesize sdpMid = _sdpMid;
@synthesize sdpMLineIndex = _sdpMLineIndex;
@synthesize sdp = _sdp;
@synthesize serverUrl = _serverUrl;
- (instancetype)initWithSdp:(NSString *)sdp
sdpMLineIndex:(int)sdpMLineIndex
sdpMid:(NSString *)sdpMid {
NSParameterAssert(sdp.length);
if (self = [super init]) {
_sdpMid = [sdpMid copy];
_sdpMLineIndex = sdpMLineIndex;
_sdp = [sdp copy];
}
return self;
}
- (NSString *)description {
return [NSString stringWithFormat:@"RTCIceCandidate:\n%@\n%d\n%@\n%@",
_sdpMid,
_sdpMLineIndex,
_sdp,
_serverUrl];
}
#pragma mark - Private
- (instancetype)initWithNativeCandidate:
(const webrtc::IceCandidateInterface *)candidate {
NSParameterAssert(candidate);
std::string sdp;
candidate->ToString(&sdp);
RTCIceCandidate *rtcCandidate =
[self initWithSdp:[NSString stringForStdString:sdp]
sdpMLineIndex:candidate->sdp_mline_index()
sdpMid:[NSString stringForStdString:candidate->sdp_mid()]];
rtcCandidate->_serverUrl = [NSString stringForStdString:candidate->server_url()];
return rtcCandidate;
}
- (std::unique_ptr<webrtc::IceCandidateInterface>)nativeCandidate {
webrtc::SdpParseError error;
webrtc::IceCandidateInterface *candidate = webrtc::CreateIceCandidate(
_sdpMid.stdString, _sdpMLineIndex, _sdp.stdString, &error);
if (!candidate) {
RTCLog(@"Failed to create ICE candidate: %s\nline: %s",
error.description.c_str(),
error.line.c_str());
}
return std::unique_ptr<webrtc::IceCandidateInterface>(candidate);
}
@end

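For reference, a remote candidate received over the application's signaling channel would typically be wrapped like this (sketch only; the sdp, mLineIndex, and mid arguments are assumed inputs):

static RTCIceCandidate *CandidateFromSignaling(NSString *sdp,
                                               int mLineIndex,
                                               NSString *mid) {
  // Wrap the signaled values; the result is later handed to the local peer connection.
  return [[RTCIceCandidate alloc] initWithSdp:sdp
                                sdpMLineIndex:mLineIndex
                                       sdpMid:mid];
}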
RTCIceServer+Private.h

@@ -0,0 +1,32 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCIceServer.h"
#include "webrtc/api/peerconnectioninterface.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCIceServer ()
/**
* IceServer struct representation of this RTCIceServer object's data.
* This is needed to pass to the underlying C++ APIs.
*/
@property(nonatomic, readonly)
webrtc::PeerConnectionInterface::IceServer nativeServer;
/** Initialize an RTCIceServer from a native IceServer. */
- (instancetype)initWithNativeServer:
(webrtc::PeerConnectionInterface::IceServer)nativeServer;
@end
NS_ASSUME_NONNULL_END

RTCIceServer.mm

@@ -0,0 +1,121 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCIceServer+Private.h"
#import "NSString+StdString.h"
@implementation RTCIceServer
@synthesize urlStrings = _urlStrings;
@synthesize username = _username;
@synthesize credential = _credential;
@synthesize tlsCertPolicy = _tlsCertPolicy;
- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings {
return [self initWithURLStrings:urlStrings
username:nil
credential:nil];
}
- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
username:(NSString *)username
credential:(NSString *)credential {
return [self initWithURLStrings:urlStrings
username:username
credential:credential
tlsCertPolicy:RTCTlsCertPolicySecure];
}
- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
username:(NSString *)username
credential:(NSString *)credential
tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy {
NSParameterAssert(urlStrings.count);
if (self = [super init]) {
_urlStrings = [[NSArray alloc] initWithArray:urlStrings copyItems:YES];
_username = [username copy];
_credential = [credential copy];
_tlsCertPolicy = tlsCertPolicy;
}
return self;
}
- (NSString *)description {
return
[NSString stringWithFormat:@"RTCIceServer:\n%@\n%@\n%@\n%@", _urlStrings,
_username, _credential,
[self stringForTlsCertPolicy:_tlsCertPolicy]];
}
#pragma mark - Private
- (NSString *)stringForTlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy {
switch (tlsCertPolicy) {
case RTCTlsCertPolicySecure:
return @"RTCTlsCertPolicySecure";
case RTCTlsCertPolicyInsecureNoCheck:
return @"RTCTlsCertPolicyInsecureNoCheck";
}
}
- (webrtc::PeerConnectionInterface::IceServer)nativeServer {
__block webrtc::PeerConnectionInterface::IceServer iceServer;
iceServer.username = [NSString stdStringForString:_username];
iceServer.password = [NSString stdStringForString:_credential];
[_urlStrings enumerateObjectsUsingBlock:^(NSString *url,
NSUInteger idx,
BOOL *stop) {
iceServer.urls.push_back(url.stdString);
}];
switch (_tlsCertPolicy) {
case RTCTlsCertPolicySecure:
iceServer.tls_cert_policy =
webrtc::PeerConnectionInterface::kTlsCertPolicySecure;
break;
case RTCTlsCertPolicyInsecureNoCheck:
iceServer.tls_cert_policy =
webrtc::PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck;
break;
}
return iceServer;
}
- (instancetype)initWithNativeServer:
(webrtc::PeerConnectionInterface::IceServer)nativeServer {
NSMutableArray *urls =
[NSMutableArray arrayWithCapacity:nativeServer.urls.size()];
for (auto const &url : nativeServer.urls) {
[urls addObject:[NSString stringForStdString:url]];
}
NSString *username = [NSString stringForStdString:nativeServer.username];
NSString *credential = [NSString stringForStdString:nativeServer.password];
RTCTlsCertPolicy tlsCertPolicy;
switch (nativeServer.tls_cert_policy) {
case webrtc::PeerConnectionInterface::kTlsCertPolicySecure:
tlsCertPolicy = RTCTlsCertPolicySecure;
break;
case webrtc::PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck:
tlsCertPolicy = RTCTlsCertPolicyInsecureNoCheck;
break;
}
self = [self initWithURLStrings:urls
username:username
credential:credential
tlsCertPolicy:tlsCertPolicy];
return self;
}
@end

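And a sketch of how the ICE server and configuration wrappers fit together (not part of this CL; URLs and credentials are placeholders, and writable iceServers/iceTransportPolicy properties are assumed from the public RTCConfiguration header):

static RTCConfiguration *ExampleConfiguration(void) {
  RTCIceServer *stun =
      [[RTCIceServer alloc] initWithURLStrings:@[ @"stun:stun.example.org" ]];
  RTCIceServer *turn =
      [[RTCIceServer alloc] initWithURLStrings:@[ @"turn:turn.example.org" ]
                                      username:@"user"
                                    credential:@"secret"];
  RTCConfiguration *config = [[RTCConfiguration alloc] init];
  config.iceServers = @[ stun, turn ];
  config.iceTransportPolicy = RTCIceTransportPolicyAll;  // Allow all candidate types.
  return config;
}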
RTCLegacyStatsReport+Private.h

@@ -0,0 +1,24 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCLegacyStatsReport.h"
#include "webrtc/api/statstypes.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCLegacyStatsReport ()
/** Initialize an RTCLegacyStatsReport object from a native StatsReport. */
- (instancetype)initWithNativeReport:(const webrtc::StatsReport &)nativeReport;
@end
NS_ASSUME_NONNULL_END

RTCLegacyStatsReport.mm

@@ -0,0 +1,60 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCLegacyStatsReport+Private.h"
#import "NSString+StdString.h"
#import "WebRTC/RTCLogging.h"
#include "webrtc/base/checks.h"
@implementation RTCLegacyStatsReport
@synthesize timestamp = _timestamp;
@synthesize type = _type;
@synthesize reportId = _reportId;
@synthesize values = _values;
- (NSString *)description {
return [NSString stringWithFormat:@"RTCLegacyStatsReport:\n%@\n%@\n%f\n%@",
_reportId,
_type,
_timestamp,
_values];
}
#pragma mark - Private
- (instancetype)initWithNativeReport:(const webrtc::StatsReport &)nativeReport {
if (self = [super init]) {
_timestamp = nativeReport.timestamp();
_type = [NSString stringForStdString:nativeReport.TypeToString()];
_reportId = [NSString stringForStdString:
nativeReport.id()->ToString()];
NSUInteger capacity = nativeReport.values().size();
NSMutableDictionary *values =
[NSMutableDictionary dictionaryWithCapacity:capacity];
for (auto const &valuePair : nativeReport.values()) {
NSString *key = [NSString stringForStdString:
valuePair.second->display_name()];
NSString *value = [NSString stringForStdString:
valuePair.second->ToString()];
// Not expecting duplicate keys.
RTC_DCHECK(![values objectForKey:key]);
[values setObject:value forKey:key];
}
_values = values;
}
return self;
}
@end

RTCMediaConstraints+Private.h

@@ -0,0 +1,54 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCMediaConstraints.h"
#include <memory>
#include "webrtc/api/mediaconstraintsinterface.h"
namespace webrtc {
class MediaConstraints : public MediaConstraintsInterface {
public:
virtual ~MediaConstraints();
MediaConstraints();
MediaConstraints(
const MediaConstraintsInterface::Constraints& mandatory,
const MediaConstraintsInterface::Constraints& optional);
virtual const Constraints& GetMandatory() const;
virtual const Constraints& GetOptional() const;
private:
MediaConstraintsInterface::Constraints mandatory_;
MediaConstraintsInterface::Constraints optional_;
};
} // namespace webrtc
NS_ASSUME_NONNULL_BEGIN
@interface RTCMediaConstraints ()
/**
* A MediaConstraints representation of this RTCMediaConstraints object. This is
* needed to pass to the underlying C++ APIs.
*/
- (std::unique_ptr<webrtc::MediaConstraints>)nativeConstraints;
/** Returns a native Constraints object representing these constraints. */
+ (webrtc::MediaConstraintsInterface::Constraints)
nativeConstraintsForConstraints:
(NSDictionary<NSString *, NSString *> *)constraints;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,129 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCMediaConstraints+Private.h"
#import "NSString+StdString.h"
#include <memory>
NSString * const kRTCMediaConstraintsMinAspectRatio =
@(webrtc::MediaConstraintsInterface::kMinAspectRatio);
NSString * const kRTCMediaConstraintsMaxAspectRatio =
@(webrtc::MediaConstraintsInterface::kMaxAspectRatio);
NSString * const kRTCMediaConstraintsMinWidth =
@(webrtc::MediaConstraintsInterface::kMinWidth);
NSString * const kRTCMediaConstraintsMaxWidth =
@(webrtc::MediaConstraintsInterface::kMaxWidth);
NSString * const kRTCMediaConstraintsMinHeight =
@(webrtc::MediaConstraintsInterface::kMinHeight);
NSString * const kRTCMediaConstraintsMaxHeight =
@(webrtc::MediaConstraintsInterface::kMaxHeight);
NSString * const kRTCMediaConstraintsMinFrameRate =
@(webrtc::MediaConstraintsInterface::kMinFrameRate);
NSString * const kRTCMediaConstraintsMaxFrameRate =
@(webrtc::MediaConstraintsInterface::kMaxFrameRate);
NSString * const kRTCMediaConstraintsLevelControl =
@(webrtc::MediaConstraintsInterface::kLevelControl);
NSString * const kRTCMediaConstraintsAudioNetworkAdaptorConfig =
@(webrtc::MediaConstraintsInterface::kAudioNetworkAdaptorConfig);
NSString * const kRTCMediaConstraintsValueTrue =
@(webrtc::MediaConstraintsInterface::kValueTrue);
NSString * const kRTCMediaConstraintsValueFalse =
@(webrtc::MediaConstraintsInterface::kValueFalse);
namespace webrtc {
MediaConstraints::~MediaConstraints() {}
MediaConstraints::MediaConstraints() {}
MediaConstraints::MediaConstraints(
const MediaConstraintsInterface::Constraints& mandatory,
const MediaConstraintsInterface::Constraints& optional)
: mandatory_(mandatory), optional_(optional) {}
const MediaConstraintsInterface::Constraints&
MediaConstraints::GetMandatory() const {
return mandatory_;
}
const MediaConstraintsInterface::Constraints&
MediaConstraints::GetOptional() const {
return optional_;
}
} // namespace webrtc
@implementation RTCMediaConstraints {
NSDictionary<NSString *, NSString *> *_mandatory;
NSDictionary<NSString *, NSString *> *_optional;
}
- (instancetype)initWithMandatoryConstraints:
(NSDictionary<NSString *, NSString *> *)mandatory
optionalConstraints:
(NSDictionary<NSString *, NSString *> *)optional {
if (self = [super init]) {
_mandatory = [[NSDictionary alloc] initWithDictionary:mandatory
copyItems:YES];
_optional = [[NSDictionary alloc] initWithDictionary:optional
copyItems:YES];
}
return self;
}
- (NSString *)description {
return [NSString stringWithFormat:@"RTCMediaConstraints:\n%@\n%@",
_mandatory,
_optional];
}
#pragma mark - Private
- (std::unique_ptr<webrtc::MediaConstraints>)nativeConstraints {
webrtc::MediaConstraintsInterface::Constraints mandatory =
[[self class] nativeConstraintsForConstraints:_mandatory];
webrtc::MediaConstraintsInterface::Constraints optional =
[[self class] nativeConstraintsForConstraints:_optional];
webrtc::MediaConstraints *nativeConstraints =
new webrtc::MediaConstraints(mandatory, optional);
return std::unique_ptr<webrtc::MediaConstraints>(nativeConstraints);
}
+ (webrtc::MediaConstraintsInterface::Constraints)
nativeConstraintsForConstraints:
(NSDictionary<NSString *, NSString *> *)constraints {
webrtc::MediaConstraintsInterface::Constraints nativeConstraints;
for (NSString *key in constraints) {
NSAssert([key isKindOfClass:[NSString class]],
@"%@ is not an NSString.", key);
NSString *value = [constraints objectForKey:key];
NSAssert([value isKindOfClass:[NSString class]],
@"%@ is not an NSString.", value);
if ([kRTCMediaConstraintsAudioNetworkAdaptorConfig isEqualToString:key]) {
// This value is expected to be base64 encoded; decode it to raw bytes
// before building the native constraint.
NSData *charData = [[NSData alloc] initWithBase64EncodedString:value options:0];
std::string configValue =
std::string(reinterpret_cast<const char *>(charData.bytes), charData.length);
nativeConstraints.push_back(webrtc::MediaConstraintsInterface::Constraint(
key.stdString, configValue));
} else {
nativeConstraints.push_back(webrtc::MediaConstraintsInterface::Constraint(
key.stdString, value.stdString));
}
}
return nativeConstraints;
}
@end
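A minimal sketch of building constraints with the exported keys above, assuming
<WebRTC/WebRTC.h> is imported; the constraint values are illustrative only:

  RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc]
      initWithMandatoryConstraints:@{
        kRTCMediaConstraintsMinWidth : @"640",
        kRTCMediaConstraintsMinHeight : @"480"
      }
               optionalConstraints:@{kRTCMediaConstraintsMinFrameRate : @"15"}];
  // -nativeConstraints then copies these pairs into the C++ constraint lists.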

View File

@ -0,0 +1,42 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCMediaSource.h"
#include "webrtc/api/mediastreaminterface.h"
NS_ASSUME_NONNULL_BEGIN
typedef NS_ENUM(NSInteger, RTCMediaSourceType) {
RTCMediaSourceTypeAudio,
RTCMediaSourceTypeVideo,
};
@interface RTCMediaSource ()
@property(nonatomic, readonly)
rtc::scoped_refptr<webrtc::MediaSourceInterface> nativeMediaSource;
- (instancetype)initWithNativeMediaSource:
(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
type:(RTCMediaSourceType)type
NS_DESIGNATED_INITIALIZER;
+ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:
(RTCSourceState)state;
+ (RTCSourceState)sourceStateForNativeState:
(webrtc::MediaSourceInterface::SourceState)nativeState;
+ (NSString *)stringForState:(RTCSourceState)state;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,79 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCMediaSource+Private.h"
#include "webrtc/base/checks.h"
@implementation RTCMediaSource {
RTCMediaSourceType _type;
}
@synthesize nativeMediaSource = _nativeMediaSource;
- (instancetype)initWithNativeMediaSource:
(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
type:(RTCMediaSourceType)type {
RTC_DCHECK(nativeMediaSource);
if (self = [super init]) {
_nativeMediaSource = nativeMediaSource;
_type = type;
}
return self;
}
- (RTCSourceState)state {
return [[self class] sourceStateForNativeState:_nativeMediaSource->state()];
}
#pragma mark - Private
+ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:
(RTCSourceState)state {
switch (state) {
case RTCSourceStateInitializing:
return webrtc::MediaSourceInterface::kInitializing;
case RTCSourceStateLive:
return webrtc::MediaSourceInterface::kLive;
case RTCSourceStateEnded:
return webrtc::MediaSourceInterface::kEnded;
case RTCSourceStateMuted:
return webrtc::MediaSourceInterface::kMuted;
}
}
+ (RTCSourceState)sourceStateForNativeState:
(webrtc::MediaSourceInterface::SourceState)nativeState {
switch (nativeState) {
case webrtc::MediaSourceInterface::kInitializing:
return RTCSourceStateInitializing;
case webrtc::MediaSourceInterface::kLive:
return RTCSourceStateLive;
case webrtc::MediaSourceInterface::kEnded:
return RTCSourceStateEnded;
case webrtc::MediaSourceInterface::kMuted:
return RTCSourceStateMuted;
}
}
+ (NSString *)stringForState:(RTCSourceState)state {
switch (state) {
case RTCSourceStateInitializing:
return @"Initializing";
case RTCSourceStateLive:
return @"Live";
case RTCSourceStateEnded:
return @"Ended";
case RTCSourceStateMuted:
return @"Muted";
}
}
@end

View File

@ -0,0 +1,36 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCMediaStream.h"
#include "webrtc/api/mediastreaminterface.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCMediaStream ()
/**
* MediaStreamInterface representation of this RTCMediaStream object. This is
* needed to pass to the underlying C++ APIs.
*/
@property(nonatomic, readonly)
rtc::scoped_refptr<webrtc::MediaStreamInterface> nativeMediaStream;
/** Initialize an RTCMediaStream with the given stream ID. */
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
streamId:(NSString *)streamId;
/** Initialize an RTCMediaStream from a native MediaStreamInterface. */
- (instancetype)initWithNativeMediaStream:
(rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,122 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCMediaStream+Private.h"
#include <vector>
#import "NSString+StdString.h"
#import "RTCAudioTrack+Private.h"
#import "RTCMediaStreamTrack+Private.h"
#import "RTCPeerConnectionFactory+Private.h"
#import "RTCVideoTrack+Private.h"
@implementation RTCMediaStream {
NSMutableArray *_audioTracks;
NSMutableArray *_videoTracks;
rtc::scoped_refptr<webrtc::MediaStreamInterface> _nativeMediaStream;
}
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
streamId:(NSString *)streamId {
NSParameterAssert(factory);
NSParameterAssert(streamId.length);
std::string nativeId = [NSString stdStringForString:streamId];
rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
factory.nativeFactory->CreateLocalMediaStream(nativeId);
return [self initWithNativeMediaStream:stream];
}
- (NSArray<RTCAudioTrack *> *)audioTracks {
return [_audioTracks copy];
}
- (NSArray<RTCVideoTrack *> *)videoTracks {
return [_videoTracks copy];
}
- (NSString *)streamId {
return [NSString stringForStdString:_nativeMediaStream->label()];
}
- (void)addAudioTrack:(RTCAudioTrack *)audioTrack {
if (_nativeMediaStream->AddTrack(audioTrack.nativeAudioTrack)) {
[_audioTracks addObject:audioTrack];
}
}
- (void)addVideoTrack:(RTCVideoTrack *)videoTrack {
if (_nativeMediaStream->AddTrack(videoTrack.nativeVideoTrack)) {
[_videoTracks addObject:videoTrack];
}
}
- (void)removeAudioTrack:(RTCAudioTrack *)audioTrack {
NSUInteger index = [_audioTracks indexOfObjectIdenticalTo:audioTrack];
NSAssert(index != NSNotFound,
@"|removeAudioTrack| called on unexpected RTCAudioTrack");
if (index != NSNotFound &&
_nativeMediaStream->RemoveTrack(audioTrack.nativeAudioTrack)) {
[_audioTracks removeObjectAtIndex:index];
}
}
- (void)removeVideoTrack:(RTCVideoTrack *)videoTrack {
NSUInteger index = [_videoTracks indexOfObjectIdenticalTo:videoTrack];
NSAssert(index != NSNotFound,
@"|removeVideoTrack| called on unexpected RTCVideoTrack");
if (index != NSNotFound &&
_nativeMediaStream->RemoveTrack(videoTrack.nativeVideoTrack)) {
[_videoTracks removeObjectAtIndex:index];
}
}
- (NSString *)description {
return [NSString stringWithFormat:@"RTCMediaStream:\n%@\nA=%lu\nV=%lu",
self.streamId,
(unsigned long)self.audioTracks.count,
(unsigned long)self.videoTracks.count];
}
#pragma mark - Private
- (rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream {
return _nativeMediaStream;
}
- (instancetype)initWithNativeMediaStream:
(rtc::scoped_refptr<webrtc::MediaStreamInterface>)nativeMediaStream {
NSParameterAssert(nativeMediaStream);
if (self = [super init]) {
webrtc::AudioTrackVector audioTracks = nativeMediaStream->GetAudioTracks();
webrtc::VideoTrackVector videoTracks = nativeMediaStream->GetVideoTracks();
_audioTracks = [NSMutableArray arrayWithCapacity:audioTracks.size()];
_videoTracks = [NSMutableArray arrayWithCapacity:videoTracks.size()];
_nativeMediaStream = nativeMediaStream;
for (auto &track : audioTracks) {
RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeAudio;
RTCAudioTrack *audioTrack =
[[RTCAudioTrack alloc] initWithNativeTrack:track type:type];
[_audioTracks addObject:audioTrack];
}
for (auto &track : videoTracks) {
RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeVideo;
RTCVideoTrack *videoTrack =
[[RTCVideoTrack alloc] initWithNativeTrack:track type:type];
[_videoTracks addObject:videoTrack];
}
}
return self;
}
@end
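A minimal sketch of the add/remove flow above; |factory| is assumed to be an
RTCPeerConnectionFactory created elsewhere, and the IDs are placeholders:

  RTCMediaStream *stream = [factory mediaStreamWithStreamId:@"stream0"];
  RTCAudioTrack *audio = [factory audioTrackWithTrackId:@"audio0"];
  [stream addAudioTrack:audio];
  // The local mirror array is only updated when the native AddTrack succeeds,
  // so stream.audioTracks stays consistent with the native stream.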

View File

@ -0,0 +1,54 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCMediaStreamTrack.h"
#include "webrtc/api/mediastreaminterface.h"
typedef NS_ENUM(NSInteger, RTCMediaStreamTrackType) {
RTCMediaStreamTrackTypeAudio,
RTCMediaStreamTrackTypeVideo,
};
NS_ASSUME_NONNULL_BEGIN
@interface RTCMediaStreamTrack ()
/**
* The native MediaStreamTrackInterface passed in or created during
* construction.
*/
@property(nonatomic, readonly)
rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack;
/**
* Initialize an RTCMediaStreamTrack from a native MediaStreamTrackInterface.
*/
- (instancetype)initWithNativeTrack:
(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
type:(RTCMediaStreamTrackType)type
NS_DESIGNATED_INITIALIZER;
- (instancetype)initWithNativeTrack:
(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack;
- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track;
+ (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
(RTCMediaStreamTrackState)state;
+ (RTCMediaStreamTrackState)trackStateForNativeState:
(webrtc::MediaStreamTrackInterface::TrackState)nativeState;
+ (NSString *)stringForState:(RTCMediaStreamTrackState)state;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,137 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCMediaStreamTrack+Private.h"
#import "NSString+StdString.h"
NSString * const kRTCMediaStreamTrackKindAudio =
@(webrtc::MediaStreamTrackInterface::kAudioKind);
NSString * const kRTCMediaStreamTrackKindVideo =
@(webrtc::MediaStreamTrackInterface::kVideoKind);
@implementation RTCMediaStreamTrack {
rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> _nativeTrack;
RTCMediaStreamTrackType _type;
}
- (NSString *)kind {
return [NSString stringForStdString:_nativeTrack->kind()];
}
- (NSString *)trackId {
return [NSString stringForStdString:_nativeTrack->id()];
}
- (BOOL)isEnabled {
return _nativeTrack->enabled();
}
- (void)setIsEnabled:(BOOL)isEnabled {
_nativeTrack->set_enabled(isEnabled);
}
- (RTCMediaStreamTrackState)readyState {
return [[self class] trackStateForNativeState:_nativeTrack->state()];
}
- (NSString *)description {
NSString *readyState = [[self class] stringForState:self.readyState];
return [NSString stringWithFormat:@"RTCMediaStreamTrack:\n%@\n%@\n%@\n%@",
self.kind,
self.trackId,
self.isEnabled ? @"enabled" : @"disabled",
readyState];
}
- (BOOL)isEqual:(id)object {
if (self == object) {
return YES;
}
if (![object isMemberOfClass:[self class]]) {
return NO;
}
return [self isEqualToTrack:(RTCMediaStreamTrack *)object];
}
- (NSUInteger)hash {
return (NSUInteger)_nativeTrack.get();
}
#pragma mark - Private
- (rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
return _nativeTrack;
}
- (instancetype)initWithNativeTrack:
(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack
type:(RTCMediaStreamTrackType)type {
NSParameterAssert(nativeTrack);
if (self = [super init]) {
_nativeTrack = nativeTrack;
_type = type;
}
return self;
}
- (instancetype)initWithNativeTrack:
(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeTrack {
NSParameterAssert(nativeTrack);
if (nativeTrack->kind() ==
std::string(webrtc::MediaStreamTrackInterface::kAudioKind)) {
return [self initWithNativeTrack:nativeTrack
type:RTCMediaStreamTrackTypeAudio];
}
if (nativeTrack->kind() ==
std::string(webrtc::MediaStreamTrackInterface::kVideoKind)) {
return [self initWithNativeTrack:nativeTrack
type:RTCMediaStreamTrackTypeVideo];
}
return nil;
}
- (BOOL)isEqualToTrack:(RTCMediaStreamTrack *)track {
if (!track) {
return NO;
}
return _nativeTrack == track.nativeTrack;
}
+ (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState:
(RTCMediaStreamTrackState)state {
switch (state) {
case RTCMediaStreamTrackStateLive:
return webrtc::MediaStreamTrackInterface::kLive;
case RTCMediaStreamTrackStateEnded:
return webrtc::MediaStreamTrackInterface::kEnded;
}
}
+ (RTCMediaStreamTrackState)trackStateForNativeState:
(webrtc::MediaStreamTrackInterface::TrackState)nativeState {
switch (nativeState) {
case webrtc::MediaStreamTrackInterface::kLive:
return RTCMediaStreamTrackStateLive;
case webrtc::MediaStreamTrackInterface::kEnded:
return RTCMediaStreamTrackStateEnded;
}
}
+ (NSString *)stringForState:(RTCMediaStreamTrackState)state {
switch (state) {
case RTCMediaStreamTrackStateLive:
return @"Live";
case RTCMediaStreamTrackStateEnded:
return @"Ended";
}
}
@end
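A short sketch of driving the wrapper above; |track| is assumed to come from an
RTCPeerConnectionFactory or from a remote stream:

  track.isEnabled = NO;  // Forwards to set_enabled() on the native track.
  if (track.readyState == RTCMediaStreamTrackStateLive) {
    NSLog(@"%@", track);  // Uses the -description implemented above.
  }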

View File

@ -0,0 +1,32 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCMetrics.h"
#import "RTCMetricsSampleInfo+Private.h"
void RTCEnableMetrics() {
webrtc::metrics::Enable();
}
NSArray<RTCMetricsSampleInfo *> *RTCGetAndResetMetrics() {
std::map<std::string, std::unique_ptr<webrtc::metrics::SampleInfo>>
histograms;
webrtc::metrics::GetAndReset(&histograms);
NSMutableArray *metrics =
[NSMutableArray arrayWithCapacity:histograms.size()];
for (auto const &histogram : histograms) {
RTCMetricsSampleInfo *metric = [[RTCMetricsSampleInfo alloc]
initWithNativeSampleInfo:*histogram.second];
[metrics addObject:metric];
}
return metrics;
}
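A minimal sketch of collecting histograms with these functions; metrics must be
enabled before the session whose data is of interest:

  RTCEnableMetrics();
  // ... run a call ...
  for (RTCMetricsSampleInfo *info in RTCGetAndResetMetrics()) {
    NSLog(@"%@: min=%d max=%d buckets=%d samples=%@",
          info.name, info.min, info.max, info.bucketCount, info.samples);
  }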

View File

@ -0,0 +1,28 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCMetricsSampleInfo.h"
// Adding 'nogncheck' to disable the gn include headers check.
// We don't want to depend on 'system_wrappers:metrics_default' because
// clients should be able to provide their own implementation.
#include "webrtc/system_wrappers/include/metrics_default.h" // nogncheck
NS_ASSUME_NONNULL_BEGIN
@interface RTCMetricsSampleInfo ()
/** Initialize an RTCMetricsSampleInfo object from a native SampleInfo. */
- (instancetype)initWithNativeSampleInfo:
(const webrtc::metrics::SampleInfo &)info;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,43 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCMetricsSampleInfo+Private.h"
#import "NSString+StdString.h"
@implementation RTCMetricsSampleInfo
@synthesize name = _name;
@synthesize min = _min;
@synthesize max = _max;
@synthesize bucketCount = _bucketCount;
@synthesize samples = _samples;
#pragma mark - Private
- (instancetype)initWithNativeSampleInfo:
(const webrtc::metrics::SampleInfo &)info {
if (self = [super init]) {
_name = [NSString stringForStdString:info.name];
_min = info.min;
_max = info.max;
_bucketCount = info.bucket_count;
NSMutableDictionary *samples =
[NSMutableDictionary dictionaryWithCapacity:info.samples.size()];
for (auto const &sample : info.samples) {
[samples setObject:@(sample.second) forKey:@(sample.first)];
}
_samples = samples;
}
return self;
}
@end

View File

@ -0,0 +1,34 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCPeerConnection+Private.h"
#import "NSString+StdString.h"
#import "RTCDataChannel+Private.h"
#import "RTCDataChannelConfiguration+Private.h"
@implementation RTCPeerConnection (DataChannel)
- (RTCDataChannel *)dataChannelForLabel:(NSString *)label
configuration:
(RTCDataChannelConfiguration *)configuration {
std::string labelString = [NSString stdStringForString:label];
const webrtc::DataChannelInit nativeInit =
configuration.nativeDataChannelInit;
rtc::scoped_refptr<webrtc::DataChannelInterface> dataChannel =
self.nativePeerConnection->CreateDataChannel(labelString,
&nativeInit);
if (!dataChannel) {
return nil;
}
return [[RTCDataChannel alloc] initWithNativeDataChannel:dataChannel];
}
@end
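A minimal sketch of creating a channel through this category; |peerConnection|
is assumed to be an already configured RTCPeerConnection:

  RTCDataChannelConfiguration *config =
      [[RTCDataChannelConfiguration alloc] init];
  config.isOrdered = YES;
  RTCDataChannel *channel =
      [peerConnection dataChannelForLabel:@"chat" configuration:config];
  if (!channel) {
    RTCLogError(@"Failed to create data channel.");
  }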

View File

@ -0,0 +1,107 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCPeerConnection.h"
#include "webrtc/api/peerconnectioninterface.h"
NS_ASSUME_NONNULL_BEGIN
namespace webrtc {
/**
* These objects are created by RTCPeerConnectionFactory to wrap an
* id<RTCPeerConnectionDelegate> and call methods on that interface.
*/
class PeerConnectionDelegateAdapter : public PeerConnectionObserver {
public:
PeerConnectionDelegateAdapter(RTCPeerConnection *peerConnection);
virtual ~PeerConnectionDelegateAdapter();
void OnSignalingChange(
PeerConnectionInterface::SignalingState new_state) override;
void OnAddStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
void OnRemoveStream(rtc::scoped_refptr<MediaStreamInterface> stream) override;
void OnDataChannel(
rtc::scoped_refptr<DataChannelInterface> data_channel) override;
void OnRenegotiationNeeded() override;
void OnIceConnectionChange(
PeerConnectionInterface::IceConnectionState new_state) override;
void OnIceGatheringChange(
PeerConnectionInterface::IceGatheringState new_state) override;
void OnIceCandidate(const IceCandidateInterface *candidate) override;
void OnIceCandidatesRemoved(
const std::vector<cricket::Candidate>& candidates) override;
private:
__weak RTCPeerConnection *peer_connection_;
};
} // namespace webrtc
@interface RTCPeerConnection ()
/** The native PeerConnectionInterface created during construction. */
@property(nonatomic, readonly)
rtc::scoped_refptr<webrtc::PeerConnectionInterface> nativePeerConnection;
/** Initialize an RTCPeerConnection with a configuration, constraints, and
* delegate.
*/
- (instancetype)initWithFactory:
(RTCPeerConnectionFactory *)factory
configuration:
(RTCConfiguration *)configuration
constraints:
(RTCMediaConstraints *)constraints
delegate:
(nullable id<RTCPeerConnectionDelegate>)delegate
NS_DESIGNATED_INITIALIZER;
+ (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState:
(RTCSignalingState)state;
+ (RTCSignalingState)signalingStateForNativeState:
(webrtc::PeerConnectionInterface::SignalingState)nativeState;
+ (NSString *)stringForSignalingState:(RTCSignalingState)state;
+ (webrtc::PeerConnectionInterface::IceConnectionState)
nativeIceConnectionStateForState:(RTCIceConnectionState)state;
+ (RTCIceConnectionState)iceConnectionStateForNativeState:
(webrtc::PeerConnectionInterface::IceConnectionState)nativeState;
+ (NSString *)stringForIceConnectionState:(RTCIceConnectionState)state;
+ (webrtc::PeerConnectionInterface::IceGatheringState)
nativeIceGatheringStateForState:(RTCIceGatheringState)state;
+ (RTCIceGatheringState)iceGatheringStateForNativeState:
(webrtc::PeerConnectionInterface::IceGatheringState)nativeState;
+ (NSString *)stringForIceGatheringState:(RTCIceGatheringState)state;
+ (webrtc::PeerConnectionInterface::StatsOutputLevel)
nativeStatsOutputLevelForLevel:(RTCStatsOutputLevel)level;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,64 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCPeerConnection+Private.h"
#import "NSString+StdString.h"
#import "RTCMediaStreamTrack+Private.h"
#import "RTCLegacyStatsReport+Private.h"
#include "webrtc/base/checks.h"
namespace webrtc {
class StatsObserverAdapter : public StatsObserver {
public:
StatsObserverAdapter(void (^completionHandler)
(NSArray<RTCLegacyStatsReport *> *stats)) {
completion_handler_ = completionHandler;
}
~StatsObserverAdapter() {
completion_handler_ = nil;
}
void OnComplete(const StatsReports& reports) override {
RTC_DCHECK(completion_handler_);
NSMutableArray *stats = [NSMutableArray arrayWithCapacity:reports.size()];
for (const auto* report : reports) {
RTCLegacyStatsReport *statsReport =
[[RTCLegacyStatsReport alloc] initWithNativeReport:*report];
[stats addObject:statsReport];
}
completion_handler_(stats);
completion_handler_ = nil;
}
private:
void (^completion_handler_)(NSArray<RTCLegacyStatsReport *> *stats);
};
} // namespace webrtc
@implementation RTCPeerConnection (Stats)
- (void)statsForTrack:(RTCMediaStreamTrack *)mediaStreamTrack
statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel
completionHandler:
(void (^)(NSArray<RTCLegacyStatsReport *> *stats))completionHandler {
rtc::scoped_refptr<webrtc::StatsObserverAdapter> observer(
new rtc::RefCountedObject<webrtc::StatsObserverAdapter>
(completionHandler));
webrtc::PeerConnectionInterface::StatsOutputLevel nativeOutputLevel =
[[self class] nativeStatsOutputLevelForLevel:statsOutputLevel];
self.nativePeerConnection->GetStats(
observer, mediaStreamTrack.nativeTrack, nativeOutputLevel);
}
@end
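A minimal sketch of requesting legacy stats through this category;
|peerConnection| is assumed to exist, and passing nil for the track requests
stats for all tracks:

  [peerConnection statsForTrack:nil
               statsOutputLevel:RTCStatsOutputLevelStandard
              completionHandler:^(NSArray<RTCLegacyStatsReport *> *reports) {
                for (RTCLegacyStatsReport *report in reports) {
                  NSLog(@"%@ (%@): %@", report.reportId, report.type,
                        report.values);
                }
              }];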

View File

@ -0,0 +1,601 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCPeerConnection+Private.h"
#import "NSString+StdString.h"
#import "RTCConfiguration+Private.h"
#import "RTCDataChannel+Private.h"
#import "RTCIceCandidate+Private.h"
#import "RTCLegacyStatsReport+Private.h"
#import "RTCMediaConstraints+Private.h"
#import "RTCMediaStream+Private.h"
#import "RTCPeerConnectionFactory+Private.h"
#import "RTCRtpReceiver+Private.h"
#import "RTCRtpSender+Private.h"
#import "RTCSessionDescription+Private.h"
#import "WebRTC/RTCLogging.h"
#include <memory>
#include "webrtc/api/jsepicecandidate.h"
#include "webrtc/base/checks.h"
NSString * const kRTCPeerConnectionErrorDomain =
@"org.webrtc.RTCPeerConnection";
int const kRTCPeerConnnectionSessionDescriptionError = -1;
namespace webrtc {
class CreateSessionDescriptionObserverAdapter
: public CreateSessionDescriptionObserver {
public:
CreateSessionDescriptionObserverAdapter(
void (^completionHandler)(RTCSessionDescription *sessionDescription,
NSError *error)) {
completion_handler_ = completionHandler;
}
~CreateSessionDescriptionObserverAdapter() {
completion_handler_ = nil;
}
void OnSuccess(SessionDescriptionInterface *desc) override {
RTC_DCHECK(completion_handler_);
std::unique_ptr<webrtc::SessionDescriptionInterface> description =
std::unique_ptr<webrtc::SessionDescriptionInterface>(desc);
RTCSessionDescription* session =
[[RTCSessionDescription alloc] initWithNativeDescription:
description.get()];
completion_handler_(session, nil);
completion_handler_ = nil;
}
void OnFailure(const std::string& error) override {
RTC_DCHECK(completion_handler_);
NSString* str = [NSString stringForStdString:error];
NSError* err =
[NSError errorWithDomain:kRTCPeerConnectionErrorDomain
code:kRTCPeerConnnectionSessionDescriptionError
userInfo:@{ NSLocalizedDescriptionKey : str }];
completion_handler_(nil, err);
completion_handler_ = nil;
}
private:
void (^completion_handler_)
(RTCSessionDescription *sessionDescription, NSError *error);
};
class SetSessionDescriptionObserverAdapter :
public SetSessionDescriptionObserver {
public:
SetSessionDescriptionObserverAdapter(void (^completionHandler)
(NSError *error)) {
completion_handler_ = completionHandler;
}
~SetSessionDescriptionObserverAdapter() {
completion_handler_ = nil;
}
void OnSuccess() override {
RTC_DCHECK(completion_handler_);
completion_handler_(nil);
completion_handler_ = nil;
}
void OnFailure(const std::string& error) override {
RTC_DCHECK(completion_handler_);
NSString* str = [NSString stringForStdString:error];
NSError* err =
[NSError errorWithDomain:kRTCPeerConnectionErrorDomain
code:kRTCPeerConnnectionSessionDescriptionError
userInfo:@{ NSLocalizedDescriptionKey : str }];
completion_handler_(err);
completion_handler_ = nil;
}
private:
void (^completion_handler_)(NSError *error);
};
PeerConnectionDelegateAdapter::PeerConnectionDelegateAdapter(
RTCPeerConnection *peerConnection) {
peer_connection_ = peerConnection;
}
PeerConnectionDelegateAdapter::~PeerConnectionDelegateAdapter() {
peer_connection_ = nil;
}
void PeerConnectionDelegateAdapter::OnSignalingChange(
PeerConnectionInterface::SignalingState new_state) {
RTCSignalingState state =
[[RTCPeerConnection class] signalingStateForNativeState:new_state];
RTCPeerConnection *peer_connection = peer_connection_;
[peer_connection.delegate peerConnection:peer_connection
didChangeSignalingState:state];
}
void PeerConnectionDelegateAdapter::OnAddStream(
rtc::scoped_refptr<MediaStreamInterface> stream) {
RTCMediaStream *mediaStream =
[[RTCMediaStream alloc] initWithNativeMediaStream:stream];
RTCPeerConnection *peer_connection = peer_connection_;
[peer_connection.delegate peerConnection:peer_connection
didAddStream:mediaStream];
}
void PeerConnectionDelegateAdapter::OnRemoveStream(
rtc::scoped_refptr<MediaStreamInterface> stream) {
RTCMediaStream *mediaStream =
[[RTCMediaStream alloc] initWithNativeMediaStream:stream];
RTCPeerConnection *peer_connection = peer_connection_;
[peer_connection.delegate peerConnection:peer_connection
didRemoveStream:mediaStream];
}
void PeerConnectionDelegateAdapter::OnDataChannel(
rtc::scoped_refptr<DataChannelInterface> data_channel) {
RTCDataChannel *dataChannel =
[[RTCDataChannel alloc] initWithNativeDataChannel:data_channel];
RTCPeerConnection *peer_connection = peer_connection_;
[peer_connection.delegate peerConnection:peer_connection
didOpenDataChannel:dataChannel];
}
void PeerConnectionDelegateAdapter::OnRenegotiationNeeded() {
RTCPeerConnection *peer_connection = peer_connection_;
[peer_connection.delegate peerConnectionShouldNegotiate:peer_connection];
}
void PeerConnectionDelegateAdapter::OnIceConnectionChange(
PeerConnectionInterface::IceConnectionState new_state) {
RTCIceConnectionState state =
[[RTCPeerConnection class] iceConnectionStateForNativeState:new_state];
RTCPeerConnection *peer_connection = peer_connection_;
[peer_connection.delegate peerConnection:peer_connection
didChangeIceConnectionState:state];
}
void PeerConnectionDelegateAdapter::OnIceGatheringChange(
PeerConnectionInterface::IceGatheringState new_state) {
RTCIceGatheringState state =
[[RTCPeerConnection class] iceGatheringStateForNativeState:new_state];
RTCPeerConnection *peer_connection = peer_connection_;
[peer_connection.delegate peerConnection:peer_connection
didChangeIceGatheringState:state];
}
void PeerConnectionDelegateAdapter::OnIceCandidate(
const IceCandidateInterface *candidate) {
RTCIceCandidate *iceCandidate =
[[RTCIceCandidate alloc] initWithNativeCandidate:candidate];
RTCPeerConnection *peer_connection = peer_connection_;
[peer_connection.delegate peerConnection:peer_connection
didGenerateIceCandidate:iceCandidate];
}
void PeerConnectionDelegateAdapter::OnIceCandidatesRemoved(
const std::vector<cricket::Candidate>& candidates) {
NSMutableArray* ice_candidates =
[NSMutableArray arrayWithCapacity:candidates.size()];
for (const auto& candidate : candidates) {
std::unique_ptr<JsepIceCandidate> candidate_wrapper(
new JsepIceCandidate(candidate.transport_name(), -1, candidate));
RTCIceCandidate* ice_candidate = [[RTCIceCandidate alloc]
initWithNativeCandidate:candidate_wrapper.get()];
[ice_candidates addObject:ice_candidate];
}
RTCPeerConnection* peer_connection = peer_connection_;
[peer_connection.delegate peerConnection:peer_connection
didRemoveIceCandidates:ice_candidates];
}
} // namespace webrtc
@implementation RTCPeerConnection {
NSMutableArray<RTCMediaStream *> *_localStreams;
std::unique_ptr<webrtc::PeerConnectionDelegateAdapter> _observer;
rtc::scoped_refptr<webrtc::PeerConnectionInterface> _peerConnection;
std::unique_ptr<webrtc::MediaConstraints> _nativeConstraints;
BOOL _hasStartedRtcEventLog;
}
@synthesize delegate = _delegate;
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
configuration:(RTCConfiguration *)configuration
constraints:(RTCMediaConstraints *)constraints
delegate:(id<RTCPeerConnectionDelegate>)delegate {
NSParameterAssert(factory);
std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration> config(
[configuration createNativeConfiguration]);
if (!config) {
return nil;
}
if (self = [super init]) {
_observer.reset(new webrtc::PeerConnectionDelegateAdapter(self));
_nativeConstraints = constraints.nativeConstraints;
CopyConstraintsIntoRtcConfiguration(_nativeConstraints.get(),
config.get());
_peerConnection =
factory.nativeFactory->CreatePeerConnection(*config,
nullptr,
nullptr,
_observer.get());
if (!_peerConnection) {
return nil;
}
_localStreams = [[NSMutableArray alloc] init];
_delegate = delegate;
}
return self;
}
- (NSArray<RTCMediaStream *> *)localStreams {
return [_localStreams copy];
}
- (RTCSessionDescription *)localDescription {
const webrtc::SessionDescriptionInterface *description =
_peerConnection->local_description();
return description ?
[[RTCSessionDescription alloc] initWithNativeDescription:description]
: nil;
}
- (RTCSessionDescription *)remoteDescription {
const webrtc::SessionDescriptionInterface *description =
_peerConnection->remote_description();
return description ?
[[RTCSessionDescription alloc] initWithNativeDescription:description]
: nil;
}
- (RTCSignalingState)signalingState {
return [[self class]
signalingStateForNativeState:_peerConnection->signaling_state()];
}
- (RTCIceConnectionState)iceConnectionState {
return [[self class] iceConnectionStateForNativeState:
_peerConnection->ice_connection_state()];
}
- (RTCIceGatheringState)iceGatheringState {
return [[self class] iceGatheringStateForNativeState:
_peerConnection->ice_gathering_state()];
}
- (BOOL)setConfiguration:(RTCConfiguration *)configuration {
std::unique_ptr<webrtc::PeerConnectionInterface::RTCConfiguration> config(
[configuration createNativeConfiguration]);
if (!config) {
return NO;
}
CopyConstraintsIntoRtcConfiguration(_nativeConstraints.get(),
config.get());
return _peerConnection->SetConfiguration(*config);
}
- (RTCConfiguration *)configuration {
webrtc::PeerConnectionInterface::RTCConfiguration config =
_peerConnection->GetConfiguration();
return [[RTCConfiguration alloc] initWithNativeConfiguration:config];
}
- (void)close {
_peerConnection->Close();
}
- (void)addIceCandidate:(RTCIceCandidate *)candidate {
std::unique_ptr<const webrtc::IceCandidateInterface> iceCandidate(
candidate.nativeCandidate);
_peerConnection->AddIceCandidate(iceCandidate.get());
}
- (void)removeIceCandidates:(NSArray<RTCIceCandidate *> *)iceCandidates {
std::vector<cricket::Candidate> candidates;
for (RTCIceCandidate *iceCandidate in iceCandidates) {
std::unique_ptr<const webrtc::IceCandidateInterface> candidate(
iceCandidate.nativeCandidate);
if (candidate) {
candidates.push_back(candidate->candidate());
// Need to fill the transport name from the sdp_mid.
candidates.back().set_transport_name(candidate->sdp_mid());
}
}
if (!candidates.empty()) {
_peerConnection->RemoveIceCandidates(candidates);
}
}
- (void)addStream:(RTCMediaStream *)stream {
if (!_peerConnection->AddStream(stream.nativeMediaStream)) {
RTCLogError(@"Failed to add stream: %@", stream);
return;
}
[_localStreams addObject:stream];
}
- (void)removeStream:(RTCMediaStream *)stream {
_peerConnection->RemoveStream(stream.nativeMediaStream);
[_localStreams removeObject:stream];
}
- (void)offerForConstraints:(RTCMediaConstraints *)constraints
completionHandler:
(void (^)(RTCSessionDescription *sessionDescription,
NSError *error))completionHandler {
rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter>
observer(new rtc::RefCountedObject
<webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler));
_peerConnection->CreateOffer(observer, constraints.nativeConstraints.get());
}
- (void)answerForConstraints:(RTCMediaConstraints *)constraints
completionHandler:
(void (^)(RTCSessionDescription *sessionDescription,
NSError *error))completionHandler {
rtc::scoped_refptr<webrtc::CreateSessionDescriptionObserverAdapter>
observer(new rtc::RefCountedObject
<webrtc::CreateSessionDescriptionObserverAdapter>(completionHandler));
_peerConnection->CreateAnswer(observer, constraints.nativeConstraints.get());
}
- (void)setLocalDescription:(RTCSessionDescription *)sdp
completionHandler:(void (^)(NSError *error))completionHandler {
rtc::scoped_refptr<webrtc::SetSessionDescriptionObserverAdapter> observer(
new rtc::RefCountedObject<webrtc::SetSessionDescriptionObserverAdapter>(
completionHandler));
_peerConnection->SetLocalDescription(observer, sdp.nativeDescription);
}
- (void)setRemoteDescription:(RTCSessionDescription *)sdp
completionHandler:(void (^)(NSError *error))completionHandler {
rtc::scoped_refptr<webrtc::SetSessionDescriptionObserverAdapter> observer(
new rtc::RefCountedObject<webrtc::SetSessionDescriptionObserverAdapter>(
completionHandler));
_peerConnection->SetRemoteDescription(observer, sdp.nativeDescription);
}
- (BOOL)startRtcEventLogWithFilePath:(NSString *)filePath
maxSizeInBytes:(int64_t)maxSizeInBytes {
RTC_DCHECK(filePath.length);
RTC_DCHECK_GT(maxSizeInBytes, 0);
RTC_DCHECK(!_hasStartedRtcEventLog);
if (_hasStartedRtcEventLog) {
RTCLogError(@"Event logging already started.");
return NO;
}
int fd = open(filePath.UTF8String, O_WRONLY | O_CREAT | O_TRUNC,
S_IRUSR | S_IWUSR);
if (fd < 0) {
RTCLogError(@"Error opening file: %@. Error: %d", filePath, errno);
return NO;
}
_hasStartedRtcEventLog =
_peerConnection->StartRtcEventLog(fd, maxSizeInBytes);
return _hasStartedRtcEventLog;
}
- (void)stopRtcEventLog {
_peerConnection->StopRtcEventLog();
_hasStartedRtcEventLog = NO;
}
- (RTCRtpSender *)senderWithKind:(NSString *)kind
streamId:(NSString *)streamId {
std::string nativeKind = [NSString stdStringForString:kind];
std::string nativeStreamId = [NSString stdStringForString:streamId];
rtc::scoped_refptr<webrtc::RtpSenderInterface> nativeSender(
_peerConnection->CreateSender(nativeKind, nativeStreamId));
return nativeSender ?
[[RTCRtpSender alloc] initWithNativeRtpSender:nativeSender]
: nil;
}
- (NSArray<RTCRtpSender *> *)senders {
std::vector<rtc::scoped_refptr<webrtc::RtpSenderInterface>> nativeSenders(
_peerConnection->GetSenders());
NSMutableArray *senders = [[NSMutableArray alloc] init];
for (const auto &nativeSender : nativeSenders) {
RTCRtpSender *sender =
[[RTCRtpSender alloc] initWithNativeRtpSender:nativeSender];
[senders addObject:sender];
}
return senders;
}
- (NSArray<RTCRtpReceiver *> *)receivers {
std::vector<rtc::scoped_refptr<webrtc::RtpReceiverInterface>> nativeReceivers(
_peerConnection->GetReceivers());
NSMutableArray *receivers = [[NSMutableArray alloc] init];
for (const auto &nativeReceiver : nativeReceivers) {
RTCRtpReceiver *receiver =
[[RTCRtpReceiver alloc] initWithNativeRtpReceiver:nativeReceiver];
[receivers addObject:receiver];
}
return receivers;
}
#pragma mark - Private
+ (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState:
(RTCSignalingState)state {
switch (state) {
case RTCSignalingStateStable:
return webrtc::PeerConnectionInterface::kStable;
case RTCSignalingStateHaveLocalOffer:
return webrtc::PeerConnectionInterface::kHaveLocalOffer;
case RTCSignalingStateHaveLocalPrAnswer:
return webrtc::PeerConnectionInterface::kHaveLocalPrAnswer;
case RTCSignalingStateHaveRemoteOffer:
return webrtc::PeerConnectionInterface::kHaveRemoteOffer;
case RTCSignalingStateHaveRemotePrAnswer:
return webrtc::PeerConnectionInterface::kHaveRemotePrAnswer;
case RTCSignalingStateClosed:
return webrtc::PeerConnectionInterface::kClosed;
}
}
+ (RTCSignalingState)signalingStateForNativeState:
(webrtc::PeerConnectionInterface::SignalingState)nativeState {
switch (nativeState) {
case webrtc::PeerConnectionInterface::kStable:
return RTCSignalingStateStable;
case webrtc::PeerConnectionInterface::kHaveLocalOffer:
return RTCSignalingStateHaveLocalOffer;
case webrtc::PeerConnectionInterface::kHaveLocalPrAnswer:
return RTCSignalingStateHaveLocalPrAnswer;
case webrtc::PeerConnectionInterface::kHaveRemoteOffer:
return RTCSignalingStateHaveRemoteOffer;
case webrtc::PeerConnectionInterface::kHaveRemotePrAnswer:
return RTCSignalingStateHaveRemotePrAnswer;
case webrtc::PeerConnectionInterface::kClosed:
return RTCSignalingStateClosed;
}
}
+ (NSString *)stringForSignalingState:(RTCSignalingState)state {
switch (state) {
case RTCSignalingStateStable:
return @"STABLE";
case RTCSignalingStateHaveLocalOffer:
return @"HAVE_LOCAL_OFFER";
case RTCSignalingStateHaveLocalPrAnswer:
return @"HAVE_LOCAL_PRANSWER";
case RTCSignalingStateHaveRemoteOffer:
return @"HAVE_REMOTE_OFFER";
case RTCSignalingStateHaveRemotePrAnswer:
return @"HAVE_REMOTE_PRANSWER";
case RTCSignalingStateClosed:
return @"CLOSED";
}
}
+ (webrtc::PeerConnectionInterface::IceConnectionState)
nativeIceConnectionStateForState:(RTCIceConnectionState)state {
switch (state) {
case RTCIceConnectionStateNew:
return webrtc::PeerConnectionInterface::kIceConnectionNew;
case RTCIceConnectionStateChecking:
return webrtc::PeerConnectionInterface::kIceConnectionChecking;
case RTCIceConnectionStateConnected:
return webrtc::PeerConnectionInterface::kIceConnectionConnected;
case RTCIceConnectionStateCompleted:
return webrtc::PeerConnectionInterface::kIceConnectionCompleted;
case RTCIceConnectionStateFailed:
return webrtc::PeerConnectionInterface::kIceConnectionFailed;
case RTCIceConnectionStateDisconnected:
return webrtc::PeerConnectionInterface::kIceConnectionDisconnected;
case RTCIceConnectionStateClosed:
return webrtc::PeerConnectionInterface::kIceConnectionClosed;
case RTCIceConnectionStateCount:
return webrtc::PeerConnectionInterface::kIceConnectionMax;
}
}
+ (RTCIceConnectionState)iceConnectionStateForNativeState:
(webrtc::PeerConnectionInterface::IceConnectionState)nativeState {
switch (nativeState) {
case webrtc::PeerConnectionInterface::kIceConnectionNew:
return RTCIceConnectionStateNew;
case webrtc::PeerConnectionInterface::kIceConnectionChecking:
return RTCIceConnectionStateChecking;
case webrtc::PeerConnectionInterface::kIceConnectionConnected:
return RTCIceConnectionStateConnected;
case webrtc::PeerConnectionInterface::kIceConnectionCompleted:
return RTCIceConnectionStateCompleted;
case webrtc::PeerConnectionInterface::kIceConnectionFailed:
return RTCIceConnectionStateFailed;
case webrtc::PeerConnectionInterface::kIceConnectionDisconnected:
return RTCIceConnectionStateDisconnected;
case webrtc::PeerConnectionInterface::kIceConnectionClosed:
return RTCIceConnectionStateClosed;
case webrtc::PeerConnectionInterface::kIceConnectionMax:
return RTCIceConnectionStateCount;
}
}
+ (NSString *)stringForIceConnectionState:(RTCIceConnectionState)state {
switch (state) {
case RTCIceConnectionStateNew:
return @"NEW";
case RTCIceConnectionStateChecking:
return @"CHECKING";
case RTCIceConnectionStateConnected:
return @"CONNECTED";
case RTCIceConnectionStateCompleted:
return @"COMPLETED";
case RTCIceConnectionStateFailed:
return @"FAILED";
case RTCIceConnectionStateDisconnected:
return @"DISCONNECTED";
case RTCIceConnectionStateClosed:
return @"CLOSED";
case RTCIceConnectionStateCount:
return @"COUNT";
}
}
+ (webrtc::PeerConnectionInterface::IceGatheringState)
nativeIceGatheringStateForState:(RTCIceGatheringState)state {
switch (state) {
case RTCIceGatheringStateNew:
return webrtc::PeerConnectionInterface::kIceGatheringNew;
case RTCIceGatheringStateGathering:
return webrtc::PeerConnectionInterface::kIceGatheringGathering;
case RTCIceGatheringStateComplete:
return webrtc::PeerConnectionInterface::kIceGatheringComplete;
}
}
+ (RTCIceGatheringState)iceGatheringStateForNativeState:
(webrtc::PeerConnectionInterface::IceGatheringState)nativeState {
switch (nativeState) {
case webrtc::PeerConnectionInterface::kIceGatheringNew:
return RTCIceGatheringStateNew;
case webrtc::PeerConnectionInterface::kIceGatheringGathering:
return RTCIceGatheringStateGathering;
case webrtc::PeerConnectionInterface::kIceGatheringComplete:
return RTCIceGatheringStateComplete;
}
}
+ (NSString *)stringForIceGatheringState:(RTCIceGatheringState)state {
switch (state) {
case RTCIceGatheringStateNew:
return @"NEW";
case RTCIceGatheringStateGathering:
return @"GATHERING";
case RTCIceGatheringStateComplete:
return @"COMPLETE";
}
}
+ (webrtc::PeerConnectionInterface::StatsOutputLevel)
nativeStatsOutputLevelForLevel:(RTCStatsOutputLevel)level {
switch (level) {
case RTCStatsOutputLevelStandard:
return webrtc::PeerConnectionInterface::kStatsOutputLevelStandard;
case RTCStatsOutputLevelDebug:
return webrtc::PeerConnectionInterface::kStatsOutputLevelDebug;
}
}
- (rtc::scoped_refptr<webrtc::PeerConnectionInterface>)nativePeerConnection {
return _peerConnection;
}
@end
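A minimal sketch of the offer/local-description flow implemented above;
|peerConnection| is assumed to exist, and the OfferToReceive* constraint names
are the standard ones from MediaConstraintsInterface:

  RTCMediaConstraints *offerConstraints = [[RTCMediaConstraints alloc]
      initWithMandatoryConstraints:@{
        @"OfferToReceiveAudio" : @"true",
        @"OfferToReceiveVideo" : @"true"
      }
               optionalConstraints:@{}];
  [peerConnection offerForConstraints:offerConstraints
                    completionHandler:^(RTCSessionDescription *sdp,
                                        NSError *error) {
                      if (error) {
                        RTCLogError(@"Failed to create offer: %@", error);
                        return;
                      }
                      [peerConnection setLocalDescription:sdp
                                        completionHandler:^(NSError *sdpError) {
                                          // Send |sdp| to the remote peer over
                                          // the signaling channel.
                                        }];
                    }];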

View File

@ -0,0 +1,30 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCPeerConnectionFactory.h"
#include "webrtc/api/peerconnectioninterface.h"
#include "webrtc/base/scoped_ref_ptr.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCPeerConnectionFactory ()
/**
* PeerConnectionFactoryInterface created and held by this
* RTCPeerConnectionFactory object. This is needed to pass to the underlying
* C++ APIs.
*/
@property(nonatomic, readonly)
rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface> nativeFactory;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,144 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCPeerConnectionFactory+Private.h"
#import "NSString+StdString.h"
#import "RTCAudioSource+Private.h"
#import "RTCAudioTrack+Private.h"
#import "RTCMediaConstraints+Private.h"
#import "RTCMediaStream+Private.h"
#import "RTCPeerConnection+Private.h"
#import "RTCVideoSource+Private.h"
#import "RTCVideoTrack+Private.h"
#import "RTCAVFoundationVideoSource+Private.h"
#import "WebRTC/RTCLogging.h"
#include "Video/objcvideotracksource.h"
#include "VideoToolbox/videocodecfactory.h"
@implementation RTCPeerConnectionFactory {
std::unique_ptr<rtc::Thread> _networkThread;
std::unique_ptr<rtc::Thread> _workerThread;
std::unique_ptr<rtc::Thread> _signalingThread;
BOOL _hasStartedAecDump;
}
@synthesize nativeFactory = _nativeFactory;
- (instancetype)init {
if ((self = [super init])) {
_networkThread = rtc::Thread::CreateWithSocketServer();
BOOL result = _networkThread->Start();
NSAssert(result, @"Failed to start network thread.");
_workerThread = rtc::Thread::Create();
result = _workerThread->Start();
NSAssert(result, @"Failed to start worker thread.");
_signalingThread = rtc::Thread::Create();
result = _signalingThread->Start();
NSAssert(result, @"Failed to start signaling thread.");
const auto encoder_factory = new webrtc::VideoToolboxVideoEncoderFactory();
const auto decoder_factory = new webrtc::VideoToolboxVideoDecoderFactory();
// Ownership of the encoder/decoder factories is passed on to the
// peer connection factory, which handles deleting them.
_nativeFactory = webrtc::CreatePeerConnectionFactory(
_networkThread.get(), _workerThread.get(), _signalingThread.get(),
nullptr, encoder_factory, decoder_factory);
NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!");
}
return self;
}
- (RTCAudioSource *)audioSourceWithConstraints:(nullable RTCMediaConstraints *)constraints {
std::unique_ptr<webrtc::MediaConstraints> nativeConstraints;
if (constraints) {
nativeConstraints = constraints.nativeConstraints;
}
rtc::scoped_refptr<webrtc::AudioSourceInterface> source =
_nativeFactory->CreateAudioSource(nativeConstraints.get());
return [[RTCAudioSource alloc] initWithNativeAudioSource:source];
}
- (RTCAudioTrack *)audioTrackWithTrackId:(NSString *)trackId {
RTCAudioSource *audioSource = [self audioSourceWithConstraints:nil];
return [self audioTrackWithSource:audioSource trackId:trackId];
}
- (RTCAudioTrack *)audioTrackWithSource:(RTCAudioSource *)source
trackId:(NSString *)trackId {
return [[RTCAudioTrack alloc] initWithFactory:self
source:source
trackId:trackId];
}
- (RTCAVFoundationVideoSource *)avFoundationVideoSourceWithConstraints:
(nullable RTCMediaConstraints *)constraints {
return [[RTCAVFoundationVideoSource alloc] initWithFactory:self
constraints:constraints];
}
- (RTCVideoSource *)videoSource {
rtc::scoped_refptr<webrtc::ObjcVideoTrackSource> objcVideoTrackSource(
new rtc::RefCountedObject<webrtc::ObjcVideoTrackSource>());
return [[RTCVideoSource alloc] initWithNativeVideoSource:objcVideoTrackSource];
}
- (RTCVideoTrack *)videoTrackWithSource:(RTCVideoSource *)source
trackId:(NSString *)trackId {
return [[RTCVideoTrack alloc] initWithFactory:self
source:source
trackId:trackId];
}
- (RTCMediaStream *)mediaStreamWithStreamId:(NSString *)streamId {
return [[RTCMediaStream alloc] initWithFactory:self
streamId:streamId];
}
- (RTCPeerConnection *)peerConnectionWithConfiguration:
(RTCConfiguration *)configuration
constraints:
(RTCMediaConstraints *)constraints
delegate:
(nullable id<RTCPeerConnectionDelegate>)delegate {
return [[RTCPeerConnection alloc] initWithFactory:self
configuration:configuration
constraints:constraints
delegate:delegate];
}
- (BOOL)startAecDumpWithFilePath:(NSString *)filePath
maxSizeInBytes:(int64_t)maxSizeInBytes {
RTC_DCHECK(filePath.length);
RTC_DCHECK_GT(maxSizeInBytes, 0);
if (_hasStartedAecDump) {
RTCLogError(@"Aec dump already started.");
return NO;
}
int fd = open(filePath.UTF8String, O_WRONLY | O_CREAT | O_TRUNC, S_IRUSR | S_IWUSR);
if (fd < 0) {
RTCLogError(@"Error opening file: %@. Error: %d", filePath, errno);
return NO;
}
_hasStartedAecDump = _nativeFactory->StartAecDump(fd, maxSizeInBytes);
return _hasStartedAecDump;
}
- (void)stopAecDump {
_nativeFactory->StopAecDump();
_hasStartedAecDump = NO;
}
@end
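A minimal sketch of standing up the factory and a peer connection with the
methods above, assuming <WebRTC/WebRTC.h> is imported; the STUN URL and track
ID are placeholders, and |self| is assumed to conform to
RTCPeerConnectionDelegate:

  RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init];
  RTCConfiguration *config = [[RTCConfiguration alloc] init];
  config.iceServers =
      @[ [[RTCIceServer alloc] initWithURLStrings:@[ @"stun:stun.example.org" ]] ];
  RTCMediaConstraints *constraints = [[RTCMediaConstraints alloc]
      initWithMandatoryConstraints:@{} optionalConstraints:@{}];
  RTCPeerConnection *pc =
      [factory peerConnectionWithConfiguration:config
                                   constraints:constraints
                                      delegate:self];
  RTCAVFoundationVideoSource *source =
      [factory avFoundationVideoSourceWithConstraints:nil];
  RTCVideoTrack *localVideo =
      [factory videoTrackWithSource:source trackId:@"video0"];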

View File

@ -0,0 +1,28 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCRtpCodecParameters.h"
#include "webrtc/api/rtpparameters.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCRtpCodecParameters ()
/** Returns the equivalent native RtpCodecParameters structure. */
@property(nonatomic, readonly) webrtc::RtpCodecParameters nativeParameters;
/** Initialize the object with a native RtpCodecParameters structure. */
- (instancetype)initWithNativeParameters:
(const webrtc::RtpCodecParameters &)nativeParameters;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,97 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCRtpCodecParameters+Private.h"
#import "NSString+StdString.h"
#import "WebRTC/RTCMediaStreamTrack.h" // For "kind" strings.
#include "webrtc/base/checks.h"
#include "webrtc/media/base/mediaconstants.h"
const NSString * const kRTCRtxCodecName = @(cricket::kRtxCodecName);
const NSString * const kRTCRedCodecName = @(cricket::kRedCodecName);
const NSString * const kRTCUlpfecCodecName = @(cricket::kUlpfecCodecName);
const NSString * const kRTCFlexfecCodecName = @(cricket::kFlexfecCodecName);
const NSString * const kRTCOpusCodecName = @(cricket::kOpusCodecName);
const NSString * const kRTCIsacCodecName = @(cricket::kIsacCodecName);
const NSString * const kRTCL16CodecName = @(cricket::kL16CodecName);
const NSString * const kRTCG722CodecName = @(cricket::kG722CodecName);
const NSString * const kRTCIlbcCodecName = @(cricket::kIlbcCodecName);
const NSString * const kRTCPcmuCodecName = @(cricket::kPcmuCodecName);
const NSString * const kRTCPcmaCodecName = @(cricket::kPcmaCodecName);
const NSString * const kRTCDtmfCodecName = @(cricket::kDtmfCodecName);
const NSString * const kRTCComfortNoiseCodecName =
@(cricket::kComfortNoiseCodecName);
const NSString * const kVp8CodecName = @(cricket::kVp8CodecName);
const NSString * const kVp9CodecName = @(cricket::kVp9CodecName);
const NSString * const kH264CodecName = @(cricket::kH264CodecName);
@implementation RTCRtpCodecParameters
@synthesize payloadType = _payloadType;
@synthesize name = _name;
@synthesize kind = _kind;
@synthesize clockRate = _clockRate;
@synthesize numChannels = _numChannels;
- (instancetype)init {
return [super init];
}
- (instancetype)initWithNativeParameters:
(const webrtc::RtpCodecParameters &)nativeParameters {
if (self = [self init]) {
_payloadType = nativeParameters.payload_type;
_name = [NSString stringForStdString:nativeParameters.name];
switch (nativeParameters.kind) {
case cricket::MEDIA_TYPE_AUDIO:
_kind = kRTCMediaStreamTrackKindAudio;
break;
case cricket::MEDIA_TYPE_VIDEO:
_kind = kRTCMediaStreamTrackKindVideo;
break;
case cricket::MEDIA_TYPE_DATA:
RTC_NOTREACHED();
break;
}
if (nativeParameters.clock_rate) {
_clockRate = [NSNumber numberWithInt:*nativeParameters.clock_rate];
}
if (nativeParameters.num_channels) {
_numChannels = [NSNumber numberWithInt:*nativeParameters.num_channels];
}
}
return self;
}
- (webrtc::RtpCodecParameters)nativeParameters {
webrtc::RtpCodecParameters parameters;
parameters.payload_type = _payloadType;
parameters.name = [NSString stdStringForString:_name];
// NSString pointer comparison is safe here since "kind" is readonly and only
// populated above.
if (_kind == kRTCMediaStreamTrackKindAudio) {
parameters.kind = cricket::MEDIA_TYPE_AUDIO;
} else if (_kind == kRTCMediaStreamTrackKindVideo) {
parameters.kind = cricket::MEDIA_TYPE_VIDEO;
} else {
RTC_NOTREACHED();
}
if (_clockRate != nil) {
parameters.clock_rate = rtc::Optional<int>(_clockRate.intValue);
}
if (_numChannels != nil) {
parameters.num_channels = rtc::Optional<int>(_numChannels.intValue);
}
return parameters;
}
@end
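As a sanity check on the mapping above, a small Objective-C++ round-trip sketch; the concrete Opus values are illustrative assumptions, not taken from this change.

#import "RTCRtpCodecParameters+Private.h"
#include "webrtc/api/rtpparameters.h"
#include "webrtc/media/base/mediaconstants.h"

// Build a native codec description, wrap it, and convert it back.
webrtc::RtpCodecParameters NativeOpusRoundTripExample() {
  webrtc::RtpCodecParameters nativeOpus;
  nativeOpus.payload_type = 111;  // a common, but not guaranteed, payload type
  nativeOpus.name = cricket::kOpusCodecName;
  nativeOpus.kind = cricket::MEDIA_TYPE_AUDIO;
  nativeOpus.clock_rate = rtc::Optional<int>(48000);
  nativeOpus.num_channels = rtc::Optional<int>(2);

  RTCRtpCodecParameters *codec =
      [[RTCRtpCodecParameters alloc] initWithNativeParameters:nativeOpus];
  // codec.name is now kRTCOpusCodecName and codec.kind is
  // kRTCMediaStreamTrackKindAudio.
  return codec.nativeParameters;  // round-trips to an equivalent struct
}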

View File

@ -0,0 +1,28 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCRtpEncodingParameters.h"
#include "webrtc/api/rtpparameters.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCRtpEncodingParameters ()
/** Returns the equivalent native RtpEncodingParameters structure. */
@property(nonatomic, readonly) webrtc::RtpEncodingParameters nativeParameters;
/** Initialize the object with a native RtpEncodingParameters structure. */
- (instancetype)initWithNativeParameters:
(const webrtc::RtpEncodingParameters &)nativeParameters;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,50 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCRtpEncodingParameters+Private.h"
@implementation RTCRtpEncodingParameters
@synthesize isActive = _isActive;
@synthesize maxBitrateBps = _maxBitrateBps;
@synthesize ssrc = _ssrc;
- (instancetype)init {
return [super init];
}
- (instancetype)initWithNativeParameters:
(const webrtc::RtpEncodingParameters &)nativeParameters {
if (self = [self init]) {
_isActive = nativeParameters.active;
if (nativeParameters.max_bitrate_bps) {
_maxBitrateBps =
[NSNumber numberWithInt:*nativeParameters.max_bitrate_bps];
}
if (nativeParameters.ssrc) {
_ssrc = [NSNumber numberWithUnsignedLong:*nativeParameters.ssrc];
}
}
return self;
}
- (webrtc::RtpEncodingParameters)nativeParameters {
webrtc::RtpEncodingParameters parameters;
parameters.active = _isActive;
if (_maxBitrateBps != nil) {
parameters.max_bitrate_bps = rtc::Optional<int>(_maxBitrateBps.intValue);
}
if (_ssrc != nil) {
parameters.ssrc = rtc::Optional<uint32_t>(_ssrc.unsignedLongValue);
}
return parameters;
}
@end
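A similar sketch for the encoding wrapper; it assumes the public header (not shown in this diff) declares isActive and maxBitrateBps as writable properties.

#import "RTCRtpEncodingParameters+Private.h"

// Cap an encoding at roughly 500 kbps and convert it to the native struct.
webrtc::RtpEncodingParameters CappedEncodingExample() {
  RTCRtpEncodingParameters *encoding = [[RTCRtpEncodingParameters alloc] init];
  encoding.isActive = YES;             // assumed-writable public property
  encoding.maxBitrateBps = @(500000);  // assumed-writable public property
  webrtc::RtpEncodingParameters native = encoding.nativeParameters;
  // native.active == true, *native.max_bitrate_bps == 500000, ssrc left unset.
  return native;
}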

View File

@ -0,0 +1,28 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCRtpParameters.h"
#include "webrtc/api/rtpparameters.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCRtpParameters ()
/** Returns the equivalent native RtpParameters structure. */
@property(nonatomic, readonly) webrtc::RtpParameters nativeParameters;
/** Initialize the object with a native RtpParameters structure. */
- (instancetype)initWithNativeParameters:
(const webrtc::RtpParameters &)nativeParameters;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,56 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCRtpParameters+Private.h"
#import "RTCRtpCodecParameters+Private.h"
#import "RTCRtpEncodingParameters+Private.h"
@implementation RTCRtpParameters
@synthesize encodings = _encodings;
@synthesize codecs = _codecs;
- (instancetype)init {
return [super init];
}
- (instancetype)initWithNativeParameters:
(const webrtc::RtpParameters &)nativeParameters {
if (self = [self init]) {
NSMutableArray *encodings = [[NSMutableArray alloc] init];
for (const auto &encoding : nativeParameters.encodings) {
[encodings addObject:[[RTCRtpEncodingParameters alloc]
initWithNativeParameters:encoding]];
}
_encodings = encodings;
NSMutableArray *codecs = [[NSMutableArray alloc] init];
for (const auto &codec : nativeParameters.codecs) {
[codecs addObject:[[RTCRtpCodecParameters alloc]
initWithNativeParameters:codec]];
}
_codecs = codecs;
}
return self;
}
- (webrtc::RtpParameters)nativeParameters {
webrtc::RtpParameters parameters;
for (RTCRtpEncodingParameters *encoding in _encodings) {
parameters.encodings.push_back(encoding.nativeParameters);
}
for (RTCRtpCodecParameters *codec in _codecs) {
parameters.codecs.push_back(codec.nativeParameters);
}
return parameters;
}
@end
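A short sketch of how the converted arrays are typically consumed; `parameters` is an assumption, for example the parameters property of the RTCRtpSender or RTCRtpReceiver defined later in this change.

#import <Foundation/Foundation.h>
#import "WebRTC/RTCRtpParameters.h"

// Log what an RTCRtpParameters object carries after conversion from native.
static void LogRtpParameters(RTCRtpParameters *parameters) {
  for (RTCRtpEncodingParameters *encoding in parameters.encodings) {
    NSLog(@"encoding: active=%d maxBitrateBps=%@",
          encoding.isActive, encoding.maxBitrateBps);
  }
  for (RTCRtpCodecParameters *codec in parameters.codecs) {
    NSLog(@"codec: %@ (%@) payload type %d",
          codec.name, codec.kind, codec.payloadType);
  }
}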

View File

@ -0,0 +1,45 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCRtpReceiver.h"
#include "webrtc/api/rtpreceiverinterface.h"
NS_ASSUME_NONNULL_BEGIN
namespace webrtc {
class RtpReceiverDelegateAdapter : public RtpReceiverObserverInterface {
public:
RtpReceiverDelegateAdapter(RTCRtpReceiver* receiver);
void OnFirstPacketReceived(cricket::MediaType media_type) override;
private:
__weak RTCRtpReceiver* receiver_;
};
} // namespace webrtc
@interface RTCRtpReceiver ()
@property(nonatomic, readonly)
rtc::scoped_refptr<webrtc::RtpReceiverInterface> nativeRtpReceiver;
/** Initialize an RTCRtpReceiver with a native RtpReceiverInterface. */
- (instancetype)initWithNativeRtpReceiver:
(rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver
NS_DESIGNATED_INITIALIZER;
+ (RTCRtpMediaType)mediaTypeForNativeMediaType:(cricket::MediaType)nativeMediaType;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,123 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCRtpReceiver+Private.h"
#import "NSString+StdString.h"
#import "RTCMediaStreamTrack+Private.h"
#import "RTCRtpParameters+Private.h"
#import "WebRTC/RTCLogging.h"
#include "webrtc/api/mediastreaminterface.h"
namespace webrtc {
RtpReceiverDelegateAdapter::RtpReceiverDelegateAdapter(
RTCRtpReceiver *receiver) {
RTC_CHECK(receiver);
receiver_ = receiver;
}
void RtpReceiverDelegateAdapter::OnFirstPacketReceived(
cricket::MediaType media_type) {
RTCRtpMediaType packet_media_type =
[RTCRtpReceiver mediaTypeForNativeMediaType:media_type];
RTCRtpReceiver *receiver = receiver_;
[receiver.delegate rtpReceiver:receiver didReceiveFirstPacketForMediaType:packet_media_type];
}
} // namespace webrtc
@implementation RTCRtpReceiver {
rtc::scoped_refptr<webrtc::RtpReceiverInterface> _nativeRtpReceiver;
std::unique_ptr<webrtc::RtpReceiverDelegateAdapter> _observer;
}
@synthesize delegate = _delegate;
- (NSString *)receiverId {
return [NSString stringForStdString:_nativeRtpReceiver->id()];
}
- (RTCRtpParameters *)parameters {
return [[RTCRtpParameters alloc]
initWithNativeParameters:_nativeRtpReceiver->GetParameters()];
}
- (void)setParameters:(RTCRtpParameters *)parameters {
if (!_nativeRtpReceiver->SetParameters(parameters.nativeParameters)) {
RTCLogError(@"RTCRtpReceiver(%p): Failed to set parameters: %@", self,
parameters);
}
}
- (RTCMediaStreamTrack *)track {
rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
_nativeRtpReceiver->track());
if (nativeTrack) {
return [[RTCMediaStreamTrack alloc] initWithNativeTrack:nativeTrack];
}
return nil;
}
- (NSString *)description {
return [NSString stringWithFormat:@"RTCRtpReceiver {\n receiverId: %@\n}",
self.receiverId];
}
- (BOOL)isEqual:(id)object {
if (self == object) {
return YES;
}
if (object == nil) {
return NO;
}
if (![object isMemberOfClass:[self class]]) {
return NO;
}
RTCRtpReceiver *receiver = (RTCRtpReceiver *)object;
return _nativeRtpReceiver == receiver.nativeRtpReceiver;
}
- (NSUInteger)hash {
return (NSUInteger)_nativeRtpReceiver.get();
}
#pragma mark - Private
- (rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver {
return _nativeRtpReceiver;
}
- (instancetype)initWithNativeRtpReceiver:
(rtc::scoped_refptr<webrtc::RtpReceiverInterface>)nativeRtpReceiver {
if (self = [super init]) {
_nativeRtpReceiver = nativeRtpReceiver;
RTCLogInfo(
@"RTCRtpReceiver(%p): created receiver: %@", self, self.description);
_observer.reset(new webrtc::RtpReceiverDelegateAdapter(self));
_nativeRtpReceiver->SetObserver(_observer.get());
}
return self;
}
+ (RTCRtpMediaType)mediaTypeForNativeMediaType:
(cricket::MediaType)nativeMediaType {
switch (nativeMediaType) {
case cricket::MEDIA_TYPE_AUDIO:
return RTCRtpMediaTypeAudio;
case cricket::MEDIA_TYPE_VIDEO:
return RTCRtpMediaTypeVideo;
case cricket::MEDIA_TYPE_DATA:
return RTCRtpMediaTypeData;
}
}
@end
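A minimal delegate sketch for the first-packet callback wired up above. Only the delegate property and the callback selector are visible in this diff, so the protocol name RTCRtpReceiverDelegate is an assumption.

#import <Foundation/Foundation.h>
#import "WebRTC/RTCRtpReceiver.h"

// Hypothetical delegate that just logs the first received packet per media type.
@interface FirstPacketLogger : NSObject <RTCRtpReceiverDelegate>
@end

@implementation FirstPacketLogger
- (void)rtpReceiver:(RTCRtpReceiver *)rtpReceiver
    didReceiveFirstPacketForMediaType:(RTCRtpMediaType)mediaType {
  NSLog(@"First packet received for media type %ld", (long)mediaType);
}
@end

// Usage (keep a strong reference; the receiver's delegate is expected to be weak):
//   receiver.delegate = firstPacketLogger;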

View File

@ -0,0 +1,29 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCRtpSender.h"
#include "webrtc/api/rtpsenderinterface.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCRtpSender ()
@property(nonatomic, readonly)
rtc::scoped_refptr<webrtc::RtpSenderInterface> nativeRtpSender;
/** Initialize an RTCRtpSender with a native RtpSenderInterface. */
- (instancetype)initWithNativeRtpSender:
(rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender
NS_DESIGNATED_INITIALIZER;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,94 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCRtpSender+Private.h"
#import "NSString+StdString.h"
#import "RTCMediaStreamTrack+Private.h"
#import "RTCRtpParameters+Private.h"
#import "WebRTC/RTCLogging.h"
#include "webrtc/api/mediastreaminterface.h"
@implementation RTCRtpSender {
rtc::scoped_refptr<webrtc::RtpSenderInterface> _nativeRtpSender;
}
- (NSString *)senderId {
return [NSString stringForStdString:_nativeRtpSender->id()];
}
- (RTCRtpParameters *)parameters {
return [[RTCRtpParameters alloc]
initWithNativeParameters:_nativeRtpSender->GetParameters()];
}
- (void)setParameters:(RTCRtpParameters *)parameters {
if (!_nativeRtpSender->SetParameters(parameters.nativeParameters)) {
RTCLogError(@"RTCRtpSender(%p): Failed to set parameters: %@", self,
parameters);
}
}
- (RTCMediaStreamTrack *)track {
rtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack(
_nativeRtpSender->track());
if (nativeTrack) {
return [[RTCMediaStreamTrack alloc] initWithNativeTrack:nativeTrack];
}
return nil;
}
- (void)setTrack:(RTCMediaStreamTrack *)track {
if (!_nativeRtpSender->SetTrack(track.nativeTrack)) {
RTCLogError(@"RTCRtpSender(%p): Failed to set track %@", self, track);
}
}
- (NSString *)description {
return [NSString stringWithFormat:@"RTCRtpSender {\n senderId: %@\n}",
self.senderId];
}
- (BOOL)isEqual:(id)object {
if (self == object) {
return YES;
}
if (object == nil) {
return NO;
}
if (![object isMemberOfClass:[self class]]) {
return NO;
}
RTCRtpSender *sender = (RTCRtpSender *)object;
return _nativeRtpSender == sender.nativeRtpSender;
}
- (NSUInteger)hash {
return (NSUInteger)_nativeRtpSender.get();
}
#pragma mark - Private
- (rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender {
return _nativeRtpSender;
}
- (instancetype)initWithNativeRtpSender:
(rtc::scoped_refptr<webrtc::RtpSenderInterface>)nativeRtpSender {
NSParameterAssert(nativeRtpSender);
if (self = [super init]) {
_nativeRtpSender = nativeRtpSender;
RTCLogInfo(@"RTCRtpSender(%p): created sender: %@", self, self.description);
}
return self;
}
@end
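A sketch of the usual read-modify-write cycle for sender parameters; `sender` is an assumption (for example obtained from a peer connection), the parameters property is assumed to be exposed in the public header, and writable encoding properties are assumed as noted for RTCRtpEncodingParameters above.

#import "WebRTC/RTCRtpSender.h"
#import "WebRTC/RTCRtpParameters.h"
#import "WebRTC/RTCRtpEncodingParameters.h"

// Cap all encodings of a sender at roughly 750 kbps.
static void CapSenderBitrate(RTCRtpSender *sender) {
  RTCRtpParameters *parameters = sender.parameters;
  for (RTCRtpEncodingParameters *encoding in parameters.encodings) {
    encoding.maxBitrateBps = @(750000);  // assumed-writable property
  }
  // The setter above logs an error if the native SetParameters call fails.
  [sender setParameters:parameters];
}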

View File

@ -0,0 +1,26 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCSSLAdapter.h"
#include "webrtc/base/checks.h"
#include "webrtc/base/ssladapter.h"
BOOL RTCInitializeSSL() {
BOOL initialized = rtc::InitializeSSL();
RTC_DCHECK(initialized);
return initialized;
}
BOOL RTCCleanupSSL() {
BOOL cleanedUp = rtc::CleanupSSL();
RTC_DCHECK(cleanedUp);
return cleanedUp;
}
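These wrappers are typically called once around all other SDK usage; a minimal sketch:

#import "WebRTC/RTCSSLAdapter.h"

// Initialize SSL once at startup, tear it down once at shutdown.
static void RunWithSSL(void (^body)(void)) {
  if (!RTCInitializeSSL()) {
    return;  // initialization failure; the DCHECK above fires in debug builds
  }
  body();  // create factories, peer connections, etc.
  RTCCleanupSSL();
}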

View File

@ -0,0 +1,41 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCSessionDescription.h"
#include "webrtc/api/jsep.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCSessionDescription ()
/**
* The native SessionDescriptionInterface representation of this
* RTCSessionDescription object. This is needed to pass to the underlying C++
* APIs.
*/
@property(nonatomic, readonly)
webrtc::SessionDescriptionInterface *nativeDescription;
/**
* Initialize an RTCSessionDescription from a native
* SessionDescriptionInterface. No ownership is taken of the native session
* description.
*/
- (instancetype)initWithNativeDescription:
(const webrtc::SessionDescriptionInterface *)nativeDescription;
+ (std::string)stdStringForType:(RTCSdpType)type;
+ (RTCSdpType)typeForStdString:(const std::string &)string;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,102 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCSessionDescription+Private.h"
#import "NSString+StdString.h"
#import "WebRTC/RTCLogging.h"
#include "webrtc/base/checks.h"
@implementation RTCSessionDescription
@synthesize type = _type;
@synthesize sdp = _sdp;
+ (NSString *)stringForType:(RTCSdpType)type {
std::string string = [[self class] stdStringForType:type];
return [NSString stringForStdString:string];
}
+ (RTCSdpType)typeForString:(NSString *)string {
std::string typeString = string.stdString;
return [[self class] typeForStdString:typeString];
}
- (instancetype)initWithType:(RTCSdpType)type sdp:(NSString *)sdp {
NSParameterAssert(sdp.length);
if (self = [super init]) {
_type = type;
_sdp = [sdp copy];
}
return self;
}
- (NSString *)description {
return [NSString stringWithFormat:@"RTCSessionDescription:\n%@\n%@",
[[self class] stringForType:_type],
_sdp];
}
#pragma mark - Private
- (webrtc::SessionDescriptionInterface *)nativeDescription {
webrtc::SdpParseError error;
webrtc::SessionDescriptionInterface *description =
webrtc::CreateSessionDescription([[self class] stdStringForType:_type],
_sdp.stdString,
&error);
if (!description) {
RTCLogError(@"Failed to create session description: %s\nline: %s",
error.description.c_str(),
error.line.c_str());
}
return description;
}
- (instancetype)initWithNativeDescription:
(const webrtc::SessionDescriptionInterface *)nativeDescription {
NSParameterAssert(nativeDescription);
std::string sdp;
nativeDescription->ToString(&sdp);
RTCSdpType type = [[self class] typeForStdString:nativeDescription->type()];
return [self initWithType:type
sdp:[NSString stringForStdString:sdp]];
}
+ (std::string)stdStringForType:(RTCSdpType)type {
switch (type) {
case RTCSdpTypeOffer:
return webrtc::SessionDescriptionInterface::kOffer;
case RTCSdpTypePrAnswer:
return webrtc::SessionDescriptionInterface::kPrAnswer;
case RTCSdpTypeAnswer:
return webrtc::SessionDescriptionInterface::kAnswer;
}
}
+ (RTCSdpType)typeForStdString:(const std::string &)string {
if (string == webrtc::SessionDescriptionInterface::kOffer) {
return RTCSdpTypeOffer;
} else if (string == webrtc::SessionDescriptionInterface::kPrAnswer) {
return RTCSdpTypePrAnswer;
} else if (string == webrtc::SessionDescriptionInterface::kAnswer) {
return RTCSdpTypeAnswer;
} else {
RTC_NOTREACHED();
return RTCSdpTypeOffer;
}
}
@end
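A small usage sketch for the type helpers above; the SDP string is a placeholder assumption (real SDP comes from signaling or from the peer connection's offer/answer callbacks).

#import <Foundation/Foundation.h>
#import "WebRTC/RTCSessionDescription.h"

static void SessionDescriptionExample(NSString *remoteSdp) {
  // `remoteSdp` is a placeholder for SDP received over the signaling channel.
  RTCSessionDescription *offer =
      [[RTCSessionDescription alloc] initWithType:RTCSdpTypeOffer sdp:remoteSdp];
  NSLog(@"%@", [RTCSessionDescription stringForType:offer.type]);  // "offer"
  NSLog(@"%@", offer);  // uses the -description format defined above
}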

View File

@ -0,0 +1,29 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCTracing.h"
#include "webrtc/base/event_tracer.h"
void RTCSetupInternalTracer() {
rtc::tracing::SetupInternalTracer();
}
BOOL RTCStartInternalCapture(NSString *filePath) {
return rtc::tracing::StartInternalCapture(filePath.UTF8String);
}
void RTCStopInternalCapture() {
rtc::tracing::StopInternalCapture();
}
void RTCShutdownInternalTracer() {
rtc::tracing::ShutdownInternalTracer();
}
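A usage sketch for the tracing shims above; the capture file path is an assumption.

#import <Foundation/Foundation.h>
#import "WebRTC/RTCTracing.h"

static void CaptureTrace(void) {
  RTCSetupInternalTracer();
  NSString *tracePath =
      [NSTemporaryDirectory() stringByAppendingPathComponent:@"webrtc_trace.json"];
  if (RTCStartInternalCapture(tracePath)) {
    // ... exercise the code paths to be traced ...
    RTCStopInternalCapture();
  }
  RTCShutdownInternalTracer();
}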

View File

@ -0,0 +1,25 @@
/*
* Copyright 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCVideoCapturer.h"
@implementation RTCVideoCapturer
@synthesize delegate = _delegate;
- (instancetype)initWithDelegate:(id<RTCVideoCapturerDelegate>)delegate {
NSAssert(delegate != nil, @"delegate cannot be nil");
if (self = [super init]) {
_delegate = delegate;
}
return self;
}
@end
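A sketch of how a concrete capturer would build on the base class above; FileVideoCapturer is hypothetical, and the delegate selector matches the one RTCVideoSource implements later in this change.

#import "WebRTC/RTCVideoCapturer.h"
#import "WebRTC/RTCVideoFrame.h"

// Hypothetical capturer subclass: a real implementation would pull frames from
// a camera, a file, or the screen and forward each one to its delegate.
@interface FileVideoCapturer : RTCVideoCapturer
- (void)deliverFrame:(RTCVideoFrame *)frame;
@end

@implementation FileVideoCapturer
- (void)deliverFrame:(RTCVideoFrame *)frame {
  // The delegate is typically the RTCVideoSource feeding a video track.
  [self.delegate capturer:self didCaptureVideoFrame:frame];
}
@end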

View File

@ -0,0 +1,29 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCVideoFrame.h"
#include "webrtc/api/video/video_frame_buffer.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCVideoFrame ()
@property(nonatomic, readonly) rtc::scoped_refptr<webrtc::VideoFrameBuffer> videoBuffer;
- (instancetype)initWithVideoBuffer:
(rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer
rotation:(RTCVideoRotation)rotation
timeStampNs:(int64_t)timeStampNs
NS_DESIGNATED_INITIALIZER;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,120 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCVideoFrame+Private.h"
#include "webrtc/sdk/objc/Framework/Classes/Video/corevideo_frame_buffer.h"
@implementation RTCVideoFrame {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> _videoBuffer;
RTCVideoRotation _rotation;
int64_t _timeStampNs;
}
- (int)width {
return _videoBuffer->width();
}
- (int)height {
return _videoBuffer->height();
}
- (RTCVideoRotation)rotation {
return _rotation;
}
- (const uint8_t *)dataY {
return _videoBuffer->DataY();
}
- (const uint8_t *)dataU {
return _videoBuffer->DataU();
}
- (const uint8_t *)dataV {
return _videoBuffer->DataV();
}
- (int)strideY {
return _videoBuffer->StrideY();
}
- (int)strideU {
return _videoBuffer->StrideU();
}
- (int)strideV {
return _videoBuffer->StrideV();
}
- (int64_t)timeStampNs {
return _timeStampNs;
}
- (CVPixelBufferRef)nativeHandle {
return static_cast<CVPixelBufferRef>(_videoBuffer->native_handle());
}
- (RTCVideoFrame *)newI420VideoFrame {
return [[RTCVideoFrame alloc]
initWithVideoBuffer:_videoBuffer->NativeToI420Buffer()
rotation:_rotation
timeStampNs:_timeStampNs];
}
- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
rotation:(RTCVideoRotation)rotation
timeStampNs:(int64_t)timeStampNs {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> videoBuffer(
new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(pixelBuffer));
return [self initWithVideoBuffer:videoBuffer
rotation:rotation
timeStampNs:timeStampNs];
}
- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer
scaledWidth:(int)scaledWidth
scaledHeight:(int)scaledHeight
cropWidth:(int)cropWidth
cropHeight:(int)cropHeight
cropX:(int)cropX
cropY:(int)cropY
rotation:(RTCVideoRotation)rotation
timeStampNs:(int64_t)timeStampNs {
rtc::scoped_refptr<webrtc::VideoFrameBuffer> videoBuffer(
new rtc::RefCountedObject<webrtc::CoreVideoFrameBuffer>(
pixelBuffer,
scaledWidth, scaledHeight,
cropWidth, cropHeight,
cropX, cropY));
return [self initWithVideoBuffer:videoBuffer
rotation:rotation
timeStampNs:timeStampNs];
}
#pragma mark - Private
- (instancetype)initWithVideoBuffer:
(rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer
rotation:(RTCVideoRotation)rotation
timeStampNs:(int64_t)timeStampNs {
if (self = [super init]) {
_videoBuffer = videoBuffer;
_rotation = rotation;
_timeStampNs = timeStampNs;
}
return self;
}
- (rtc::scoped_refptr<webrtc::VideoFrameBuffer>)videoBuffer {
return _videoBuffer;
}
@end
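A construction sketch for the pixel-buffer initializer above. Creating the CVPixelBuffer directly and the RTCVideoRotation_0 constant are assumptions for illustration; frames normally arrive from an AVCaptureVideoDataOutput callback.

#import <CoreVideo/CoreVideo.h>
#import "WebRTC/RTCVideoFrame.h"

static RTCVideoFrame *MakeTestFrame(void) {
  CVPixelBufferRef pixelBuffer = NULL;
  CVPixelBufferCreate(kCFAllocatorDefault, 640, 480,
                      kCVPixelFormatType_420YpCbCr8BiPlanarFullRange, NULL,
                      &pixelBuffer);
  RTCVideoFrame *frame =
      [[RTCVideoFrame alloc] initWithPixelBuffer:pixelBuffer
                                        rotation:RTCVideoRotation_0
                                     timeStampNs:0];
  // The frame's buffer is expected to retain the pixel buffer, so release our
  // reference here; use -newI420VideoFrame when CPU access to planes is needed.
  CVPixelBufferRelease(pixelBuffer);
  return frame;
}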

View File

@ -0,0 +1,42 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCVideoRendererAdapter.h"
#import "WebRTC/RTCVideoRenderer.h"
#include "webrtc/api/mediastreaminterface.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCVideoRendererAdapter ()
/**
 * The Objective-C video renderer passed to this adapter during construction.
 * Calls made to the rtc::VideoSinkInterface below will be adapted and passed
 * to this video renderer.
 */
@property(nonatomic, readonly) id<RTCVideoRenderer> videoRenderer;
/**
* The native VideoSinkInterface surface exposed by this adapter. Calls made
* to this interface will be adapted and passed to the RTCVideoRenderer supplied
* during construction. This pointer is unsafe and owned by this class.
*/
@property(nonatomic, readonly)
rtc::VideoSinkInterface<webrtc::VideoFrame> *nativeVideoRenderer;
/** Initialize an RTCVideoRendererAdapter with an RTCVideoRenderer. */
- (instancetype)initWithNativeRenderer:(id<RTCVideoRenderer>)videoRenderer
NS_DESIGNATED_INITIALIZER;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,27 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/*
 * Creates an rtc::VideoSinkInterface surface for an RTCVideoRenderer. The
 * rtc::VideoSinkInterface is used by WebRTC rendering code; this adapter
 * adapts calls made to that interface to the RTCVideoRenderer supplied
 * during construction.
 */
@interface RTCVideoRendererAdapter : NSObject
- (instancetype)init NS_UNAVAILABLE;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,70 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCVideoRendererAdapter+Private.h"
#import "RTCVideoFrame+Private.h"
#include <memory>
namespace webrtc {
class VideoRendererAdapter
: public rtc::VideoSinkInterface<webrtc::VideoFrame> {
public:
VideoRendererAdapter(RTCVideoRendererAdapter* adapter) {
adapter_ = adapter;
size_ = CGSizeZero;
}
void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override {
RTCVideoFrame* videoFrame = [[RTCVideoFrame alloc]
initWithVideoBuffer:nativeVideoFrame.video_frame_buffer()
rotation:static_cast<RTCVideoRotation>(
nativeVideoFrame.rotation())
timeStampNs:nativeVideoFrame.timestamp_us() *
rtc::kNumNanosecsPerMicrosec];
CGSize current_size = (videoFrame.rotation % 180 == 0)
? CGSizeMake(videoFrame.width, videoFrame.height)
: CGSizeMake(videoFrame.height, videoFrame.width);
if (!CGSizeEqualToSize(size_, current_size)) {
size_ = current_size;
[adapter_.videoRenderer setSize:size_];
}
[adapter_.videoRenderer renderFrame:videoFrame];
}
private:
__weak RTCVideoRendererAdapter *adapter_;
CGSize size_;
};
}
@implementation RTCVideoRendererAdapter {
std::unique_ptr<webrtc::VideoRendererAdapter> _adapter;
}
@synthesize videoRenderer = _videoRenderer;
- (instancetype)initWithNativeRenderer:(id<RTCVideoRenderer>)videoRenderer {
NSParameterAssert(videoRenderer);
if (self = [super init]) {
_videoRenderer = videoRenderer;
_adapter.reset(new webrtc::VideoRendererAdapter(self));
}
return self;
}
- (rtc::VideoSinkInterface<webrtc::VideoFrame> *)nativeVideoRenderer {
return _adapter.get();
}
@end

View File

@ -0,0 +1,40 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCVideoSource.h"
#import "RTCMediaSource+Private.h"
#include "webrtc/api/mediastreaminterface.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCVideoSource ()
/**
* The VideoTrackSourceInterface object passed to this RTCVideoSource during
* construction.
*/
@property(nonatomic, readonly)
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>
nativeVideoSource;
/** Initialize an RTCVideoSource from a native VideoTrackSourceInterface. */
- (instancetype)initWithNativeVideoSource:
(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource
NS_DESIGNATED_INITIALIZER;
- (instancetype)initWithNativeMediaSource:
(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
type:(RTCMediaSourceType)type NS_UNAVAILABLE;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,60 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCVideoSource+Private.h"
#include "webrtc/base/checks.h"
#include "webrtc/sdk/objc/Framework/Classes/Video/objcvideotracksource.h"
// TODO(magjed): Refactor this class and target ObjcVideoTrackSource only once
// RTCAVFoundationVideoSource is gone. See https://crbug.com/webrtc/7177 for
// more info.
@implementation RTCVideoSource {
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> _nativeVideoSource;
}
- (instancetype)initWithNativeVideoSource:
(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource {
RTC_DCHECK(nativeVideoSource);
if (self = [super initWithNativeMediaSource:nativeVideoSource
type:RTCMediaSourceTypeVideo]) {
_nativeVideoSource = nativeVideoSource;
}
return self;
}
- (instancetype)initWithNativeMediaSource:
(rtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
type:(RTCMediaSourceType)type {
RTC_NOTREACHED();
return nil;
}
- (NSString *)description {
NSString *stateString = [[self class] stringForState:self.state];
return [NSString stringWithFormat:@"RTCVideoSource( %p ): %@", self, stateString];
}
- (void)capturer:(RTCVideoCapturer *)capturer didCaptureVideoFrame:(RTCVideoFrame *)frame {
static_cast<webrtc::ObjcVideoTrackSource *>(_nativeVideoSource.get())->OnCapturedFrame(frame);
}
- (void)adaptOutputFormatToWidth:(int)width height:(int)height fps:(int)fps {
static_cast<webrtc::ObjcVideoTrackSource *>(_nativeVideoSource.get())
->OnOutputFormatRequest(width, height, fps);
}
#pragma mark - Private
- (rtc::scoped_refptr<webrtc::VideoTrackSourceInterface>)nativeVideoSource {
return _nativeVideoSource;
}
@end
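A brief usage sketch; `videoSource` is an assumption (normally created via the peer connection factory and wired up as a capturer's delegate), and the adaptOutputFormatToWidth:height:fps: method shown above is assumed to be declared in the public header.

#import "WebRTC/RTCVideoSource.h"

// Ask the source to adapt incoming frames to at most 640x360 at 15 fps before
// they reach the encoder.
static void RequestLowResolution(RTCVideoSource *videoSource) {
  [videoSource adaptOutputFormatToWidth:640 height:360 fps:15];
}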

View File

@ -0,0 +1,30 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCVideoTrack.h"
#include "webrtc/api/mediastreaminterface.h"
NS_ASSUME_NONNULL_BEGIN
@interface RTCVideoTrack ()
/** VideoTrackInterface created or passed in at construction. */
@property(nonatomic, readonly)
rtc::scoped_refptr<webrtc::VideoTrackInterface> nativeVideoTrack;
/** Initialize an RTCVideoTrack with its source and an id. */
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
source:(RTCVideoSource *)source
trackId:(NSString *)trackId;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,110 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCVideoTrack+Private.h"
#import "NSString+StdString.h"
#import "RTCMediaStreamTrack+Private.h"
#import "RTCPeerConnectionFactory+Private.h"
#import "RTCVideoRendererAdapter+Private.h"
#import "RTCVideoSource+Private.h"
@implementation RTCVideoTrack {
NSMutableArray *_adapters;
}
@synthesize source = _source;
- (instancetype)initWithFactory:(RTCPeerConnectionFactory *)factory
source:(RTCVideoSource *)source
trackId:(NSString *)trackId {
NSParameterAssert(factory);
NSParameterAssert(source);
NSParameterAssert(trackId.length);
std::string nativeId = [NSString stdStringForString:trackId];
rtc::scoped_refptr<webrtc::VideoTrackInterface> track =
factory.nativeFactory->CreateVideoTrack(nativeId,
source.nativeVideoSource);
if ([self initWithNativeTrack:track type:RTCMediaStreamTrackTypeVideo]) {
_source = source;
}
return self;
}
- (instancetype)initWithNativeTrack:
(rtc::scoped_refptr<webrtc::MediaStreamTrackInterface>)nativeMediaTrack
type:(RTCMediaStreamTrackType)type {
NSParameterAssert(nativeMediaTrack);
NSParameterAssert(type == RTCMediaStreamTrackTypeVideo);
if (self = [super initWithNativeTrack:nativeMediaTrack type:type]) {
_adapters = [NSMutableArray array];
}
return self;
}
- (void)dealloc {
for (RTCVideoRendererAdapter *adapter in _adapters) {
self.nativeVideoTrack->RemoveSink(adapter.nativeVideoRenderer);
}
}
- (RTCVideoSource *)source {
if (!_source) {
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source =
self.nativeVideoTrack->GetSource();
if (source) {
_source = [[RTCVideoSource alloc] initWithNativeVideoSource:source.get()];
}
}
return _source;
}
- (void)addRenderer:(id<RTCVideoRenderer>)renderer {
// Make sure we don't have this renderer yet.
for (RTCVideoRendererAdapter *adapter in _adapters) {
if (adapter.videoRenderer == renderer) {
NSAssert(NO, @"|renderer| is already attached to this track");
return;
}
}
// Create a wrapper that provides a native pointer for us.
RTCVideoRendererAdapter* adapter =
[[RTCVideoRendererAdapter alloc] initWithNativeRenderer:renderer];
[_adapters addObject:adapter];
self.nativeVideoTrack->AddOrUpdateSink(adapter.nativeVideoRenderer,
rtc::VideoSinkWants());
}
- (void)removeRenderer:(id<RTCVideoRenderer>)renderer {
__block NSUInteger indexToRemove = NSNotFound;
[_adapters enumerateObjectsUsingBlock:^(RTCVideoRendererAdapter *adapter,
NSUInteger idx,
BOOL *stop) {
if (adapter.videoRenderer == renderer) {
indexToRemove = idx;
*stop = YES;
}
}];
if (indexToRemove == NSNotFound) {
return;
}
RTCVideoRendererAdapter *adapterToRemove =
[_adapters objectAtIndex:indexToRemove];
self.nativeVideoTrack->RemoveSink(adapterToRemove.nativeVideoRenderer);
[_adapters removeObjectAtIndex:indexToRemove];
}
#pragma mark - Private
- (rtc::scoped_refptr<webrtc::VideoTrackInterface>)nativeVideoTrack {
return static_cast<webrtc::VideoTrackInterface *>(self.nativeTrack.get());
}
@end
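Finally, a sketch of attaching a renderer to a track; LoggingRenderer is hypothetical, and the RTCVideoRenderer protocol methods (setSize: and renderFrame:) are taken from the adapter calls earlier in this change.

#import <Foundation/Foundation.h>
#import <CoreGraphics/CoreGraphics.h>
#import "WebRTC/RTCVideoRenderer.h"
#import "WebRTC/RTCVideoTrack.h"

// Hypothetical renderer; real apps would typically use a view-backed renderer
// such as RTCEAGLVideoView instead.
@interface LoggingRenderer : NSObject <RTCVideoRenderer>
@end

@implementation LoggingRenderer
- (void)setSize:(CGSize)size {
  NSLog(@"Renderer size changed to %.0fx%.0f", size.width, size.height);
}
- (void)renderFrame:(RTCVideoFrame *)frame {
  NSLog(@"Received frame %dx%d", frame.width, frame.height);
}
@end

// Usage (keep a strong reference to the renderer for as long as it is attached):
//   [videoTrack addRenderer:renderer];
//   ...
//   [videoTrack removeRenderer:renderer];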