Moving src/webrtc into src/.

In order to eliminate the WebRTC Subtree mirror in Chromium, 
WebRTC is moving the content of the src/webrtc directory up
to the src/ directory.

NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
TBR=tommi@webrtc.org

Bug: chromium:611808
Change-Id: Iac59c5b51b950f174119565bac87955a7994bc38
Reviewed-on: https://webrtc-review.googlesource.com/1560
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Henrik Kjellander <kjellander@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#19845}
This commit is contained in:
Mirko Bonadei
2017-09-15 06:15:48 +02:00
committed by Commit Bot
parent 6674846b4a
commit bb547203bf
4576 changed files with 1092 additions and 1196 deletions

View File

@@ -0,0 +1,54 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDAppClient.h"
#import "WebRTC/RTCPeerConnection.h"
#import "ARDRoomServerClient.h"
#import "ARDSignalingChannel.h"
#import "ARDTURNClient.h"
@class RTCPeerConnectionFactory;
// Private class extension for ARDAppClient. All properties should only be
// mutated from the main queue.
@interface ARDAppClient () <ARDSignalingChannelDelegate,
                            RTCPeerConnectionDelegate>

// Room server (AppRTC) client used to join/leave rooms and relay messages.
@property(nonatomic, strong) id<ARDRoomServerClient> roomServerClient;
// WebSocket signaling channel to the collider server.
@property(nonatomic, strong) id<ARDSignalingChannel> channel;
// Second channel used to answer ourselves in loopback calls.
@property(nonatomic, strong) id<ARDSignalingChannel> loopbackChannel;
// Client used to fetch the TURN server configuration.
@property(nonatomic, strong) id<ARDTURNClient> turnClient;
@property(nonatomic, strong) RTCPeerConnection *peerConnection;
@property(nonatomic, strong) RTCPeerConnectionFactory *factory;
// Signaling messages queued until the peer connection can process them.
@property(nonatomic, strong) NSMutableArray *messageQueue;
@property(nonatomic, assign) BOOL isTurnComplete;
@property(nonatomic, assign) BOOL hasReceivedSdp;
@property(nonatomic, readonly) BOOL hasJoinedRoomServerRoom;
// NSString has a mutable subclass, so use copy (not strong) to keep the
// stored identifiers immutable.
@property(nonatomic, copy) NSString *roomId;
@property(nonatomic, copy) NSString *clientId;
@property(nonatomic, assign) BOOL isInitiator;
@property(nonatomic, strong) NSMutableArray *iceServers;
@property(nonatomic, strong) NSURL *webSocketURL;
@property(nonatomic, strong) NSURL *webSocketRestURL;
@property(nonatomic, readonly) BOOL isLoopback;
@property(nonatomic, strong)
    RTCMediaConstraints *defaultPeerConnectionConstraints;

// Initializer with injectable room server, signaling channel and TURN
// clients.
- (instancetype)initWithRoomServerClient:(id<ARDRoomServerClient>)rsClient
                        signalingChannel:(id<ARDSignalingChannel>)channel
                              turnClient:(id<ARDTURNClient>)turnClient
                                delegate:(id<ARDAppClientDelegate>)delegate;

@end

View File

@@ -0,0 +1,80 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import "WebRTC/RTCCameraVideoCapturer.h"
#import "WebRTC/RTCPeerConnection.h"
#import "WebRTC/RTCVideoTrack.h"
// Lifecycle states for ARDAppClient's connection to the AppRTC room server
// and the signaling (collider) server.
typedef NS_ENUM(NSInteger, ARDAppClientState) {
// Disconnected from servers.
kARDAppClientStateDisconnected,
// Connecting to servers.
kARDAppClientStateConnecting,
// Connected to servers.
kARDAppClientStateConnected,
};
@class ARDAppClient;
@class ARDSettingsModel;
@class RTCMediaConstraints;
// The delegate is informed of pertinent events and will be called on the
// main queue.
@protocol ARDAppClientDelegate <NSObject>
// Called whenever the client's high-level connection state changes.
- (void)appClient:(ARDAppClient *)client
didChangeState:(ARDAppClientState)state;
// Called whenever the underlying ICE connection state changes.
- (void)appClient:(ARDAppClient *)client
didChangeConnectionState:(RTCIceConnectionState)state;
// Called when a camera capturer for local video has been created.
- (void)appClient:(ARDAppClient *)client
didCreateLocalCapturer:(RTCCameraVideoCapturer *)localCapturer;
// Called when the local video track is available.
- (void)appClient:(ARDAppClient *)client
didReceiveLocalVideoTrack:(RTCVideoTrack *)localVideoTrack;
// Called when the first remote video track is received.
- (void)appClient:(ARDAppClient *)client
didReceiveRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack;
// Called when an error occurs.
- (void)appClient:(ARDAppClient *)client
didError:(NSError *)error;
// Called with stats reports while |shouldGetStats| is enabled.
- (void)appClient:(ARDAppClient *)client
didGetStats:(NSArray *)stats;
@end
// Handles connections to the AppRTC server for a given room. Methods on this
// class should only be called from the main queue.
@interface ARDAppClient : NSObject
// If |shouldGetStats| is true, stats will be reported in 1s intervals through
// the delegate.
@property(nonatomic, assign) BOOL shouldGetStats;
// Current connection state; observable through the delegate.
@property(nonatomic, readonly) ARDAppClientState state;
// Weak to avoid a retain cycle with the owning view controller.
@property(nonatomic, weak) id<ARDAppClientDelegate> delegate;
// Convenience constructor since all expected use cases will need a delegate
// in order to receive remote tracks.
- (instancetype)initWithDelegate:(id<ARDAppClientDelegate>)delegate;
// Establishes a connection with the AppRTC servers for the given room id.
// |settings| is an object containing settings such as video codec for the call.
// If |isLoopback| is true, the call will connect to itself.
- (void)connectToRoomWithId:(NSString *)roomId
settings:(ARDSettingsModel *)settings
isLoopback:(BOOL)isLoopback;
// Disconnects from the AppRTC servers and any connected clients.
- (void)disconnect;
@end

View File

@@ -0,0 +1,849 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDAppClient+Internal.h"
#import "WebRTC/RTCAVFoundationVideoSource.h"
#import "WebRTC/RTCAudioTrack.h"
#import "WebRTC/RTCCameraVideoCapturer.h"
#import "WebRTC/RTCConfiguration.h"
#import "WebRTC/RTCFileLogger.h"
#import "WebRTC/RTCIceServer.h"
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCMediaConstraints.h"
#import "WebRTC/RTCMediaStream.h"
#import "WebRTC/RTCPeerConnectionFactory.h"
#import "WebRTC/RTCRtpSender.h"
#import "WebRTC/RTCTracing.h"
#import "WebRTC/RTCVideoTrack.h"
#import "ARDAppEngineClient.h"
#import "ARDJoinResponse.h"
#import "ARDMessageResponse.h"
#import "ARDSDPUtils.h"
#import "ARDSettingsModel.h"
#import "ARDSignalingMessage.h"
#import "ARDTURNClient+Internal.h"
#import "ARDUtilities.h"
#import "ARDWebSocketChannel.h"
#import "RTCIceCandidate+JSON.h"
#import "RTCSessionDescription+JSON.h"
// Endpoint used to fetch TURN server configuration.
static NSString * const kARDIceServerRequestUrl = @"https://appr.tc/params";
// Error domain and codes reported to the delegate via appClient:didError:.
static NSString * const kARDAppClientErrorDomain = @"ARDAppClient";
static NSInteger const kARDAppClientErrorUnknown = -1;
static NSInteger const kARDAppClientErrorRoomFull = -2;
static NSInteger const kARDAppClientErrorCreateSDP = -3;
static NSInteger const kARDAppClientErrorSetSDP = -4;
static NSInteger const kARDAppClientErrorInvalidClient = -5;
static NSInteger const kARDAppClientErrorInvalidRoom = -6;
// Identifiers for the local media stream and its tracks.
static NSString * const kARDMediaStreamId = @"ARDAMS";
static NSString * const kARDAudioTrackId = @"ARDAMSa0";
static NSString * const kARDVideoTrackId = @"ARDAMSv0";
static NSString * const kARDVideoTrackKind = @"video";
// TODO(tkchin): Add these as UI options.
static BOOL const kARDAppClientEnableTracing = NO;
static BOOL const kARDAppClientEnableRtcEventLog = YES;
static int64_t const kARDAppClientAecDumpMaxSizeInBytes = 5e6; // 5 MB.
static int64_t const kARDAppClientRtcEventLogMaxSizeInBytes = 5e6; // 5 MB.
// Converts the kbps value from settings into bps for RTCRtpEncodingParameters.
static int const kKbpsMultiplier = 1000;
// We need a proxy to NSTimer because it causes a strong retain cycle. When
// using the proxy, |invalidate| must be called before it properly deallocs.
@interface ARDTimerProxy : NSObject
// Schedules a timer that calls |timerHandler| every |interval| seconds.
- (instancetype)initWithInterval:(NSTimeInterval)interval
repeats:(BOOL)repeats
timerHandler:(void (^)(void))timerHandler;
// Stops the timer; must be called before the proxy can deallocate.
- (void)invalidate;
@end
@implementation ARDTimerProxy {
NSTimer *_timer;
void (^_timerHandler)(void);
}
// Schedules an NSTimer targeting this proxy rather than the client, so the
// client is not retained by the run loop (see comment above the interface).
- (instancetype)initWithInterval:(NSTimeInterval)interval
repeats:(BOOL)repeats
timerHandler:(void (^)(void))timerHandler {
NSParameterAssert(timerHandler);
if (self = [super init]) {
_timerHandler = timerHandler;
_timer = [NSTimer scheduledTimerWithTimeInterval:interval
target:self
selector:@selector(timerDidFire:)
userInfo:nil
repeats:repeats];
}
return self;
}
// The run loop retains the proxy as the timer's target until invalidation.
- (void)invalidate {
[_timer invalidate];
}
- (void)timerDidFire:(NSTimer *)timer {
_timerHandler();
}
@end
@implementation ARDAppClient {
// Persists WebRTC logs to disk for diagnostics.
RTCFileLogger *_fileLogger;
// Drives the 1s stats callbacks; see ARDTimerProxy above.
ARDTimerProxy *_statsTimer;
// Call settings (codec, bitrate, etc.) captured at connect time.
ARDSettingsModel *_settings;
RTCVideoTrack *_localVideoTrack;
}
@synthesize shouldGetStats = _shouldGetStats;
@synthesize state = _state;
@synthesize delegate = _delegate;
@synthesize roomServerClient = _roomServerClient;
@synthesize channel = _channel;
@synthesize loopbackChannel = _loopbackChannel;
@synthesize turnClient = _turnClient;
@synthesize peerConnection = _peerConnection;
@synthesize factory = _factory;
@synthesize messageQueue = _messageQueue;
@synthesize isTurnComplete = _isTurnComplete;
@synthesize hasReceivedSdp = _hasReceivedSdp;
@synthesize roomId = _roomId;
@synthesize clientId = _clientId;
@synthesize isInitiator = _isInitiator;
@synthesize iceServers = _iceServers;
@synthesize webSocketURL = _websocketURL;
@synthesize webSocketRestURL = _websocketRestURL;
@synthesize defaultPeerConnectionConstraints =
_defaultPeerConnectionConstraints;
@synthesize isLoopback = _isLoopback;
// Convenience init; a nil delegate is allowed (messages to nil are no-ops).
- (instancetype)init {
return [self initWithDelegate:nil];
}
// Creates a client wired to the production AppRTC room and TURN servers.
- (instancetype)initWithDelegate:(id<ARDAppClientDelegate>)delegate {
if (self = [super init]) {
_roomServerClient = [[ARDAppEngineClient alloc] init];
_delegate = delegate;
NSURL *turnRequestURL = [NSURL URLWithString:kARDIceServerRequestUrl];
_turnClient = [[ARDTURNClient alloc] initWithURL:turnRequestURL];
[self configure];
}
return self;
}
// TODO(tkchin): Provide signaling channel factory interface so we can recreate
// channel if we need to on network failure. Also, make this the default public
// constructor.
// Initializer with injectable room server, signaling channel and TURN
// clients; all three are required.
- (instancetype)initWithRoomServerClient:(id<ARDRoomServerClient>)rsClient
signalingChannel:(id<ARDSignalingChannel>)channel
turnClient:(id<ARDTURNClient>)turnClient
delegate:(id<ARDAppClientDelegate>)delegate {
NSParameterAssert(rsClient);
NSParameterAssert(channel);
NSParameterAssert(turnClient);
if (self = [super init]) {
_roomServerClient = rsClient;
_channel = channel;
_turnClient = turnClient;
_delegate = delegate;
[self configure];
}
return self;
}
// Shared setup for all initializers: creates the peer connection factory,
// message queue and ICE server list, and starts file logging.
- (void)configure {
_factory = [[RTCPeerConnectionFactory alloc] init];
_messageQueue = [NSMutableArray array];
_iceServers = [NSMutableArray array];
_fileLogger = [[RTCFileLogger alloc] init];
[_fileLogger start];
}
// Setting shouldGetStats through the accessor invalidates the stats timer
// proxy so it can release its handler block.
- (void)dealloc {
self.shouldGetStats = NO;
[self disconnect];
}
// Enables or disables periodic (1s) stats collection. Stats are gathered on
// the peer connection and delivered to the delegate on the main queue.
- (void)setShouldGetStats:(BOOL)shouldGetStats {
if (_shouldGetStats == shouldGetStats) {
return;
}
if (shouldGetStats) {
// Capture self weakly so the timer proxy does not keep the client alive.
__weak ARDAppClient *weakSelf = self;
_statsTimer = [[ARDTimerProxy alloc] initWithInterval:1
repeats:YES
timerHandler:^{
ARDAppClient *strongSelf = weakSelf;
[strongSelf.peerConnection statsForTrack:nil
statsOutputLevel:RTCStatsOutputLevelDebug
completionHandler:^(NSArray *stats) {
dispatch_async(dispatch_get_main_queue(), ^{
ARDAppClient *strongSelf = weakSelf;
[strongSelf.delegate appClient:strongSelf didGetStats:stats];
});
}];
}];
} else {
[_statsTimer invalidate];
_statsTimer = nil;
}
_shouldGetStats = shouldGetStats;
}
// Setter that notifies the delegate only on actual state transitions.
- (void)setState:(ARDAppClientState)state {
if (_state == state) {
return;
}
_state = state;
[_delegate appClient:self didChangeState:_state];
}
// Public entry point: kicks off the TURN fetch and the room join in
// parallel; signaling starts only once both have completed (see
// startSignalingIfReady).
- (void)connectToRoomWithId:(NSString *)roomId
settings:(ARDSettingsModel *)settings
isLoopback:(BOOL)isLoopback {
NSParameterAssert(roomId.length);
NSParameterAssert(_state == kARDAppClientStateDisconnected);
_settings = settings;
_isLoopback = isLoopback;
self.state = kARDAppClientStateConnecting;
#if defined(WEBRTC_IOS)
if (kARDAppClientEnableTracing) {
NSString *filePath = [self documentsFilePathForFileName:@"webrtc-trace.txt"];
RTCStartInternalCapture(filePath);
}
#endif
// Request TURN.
__weak ARDAppClient *weakSelf = self;
[_turnClient requestServersWithCompletionHandler:^(NSArray *turnServers,
NSError *error) {
// A TURN failure is non-fatal; the call may still connect via STUN/host
// candidates, so we only log and continue.
if (error) {
RTCLogError("Error retrieving TURN servers: %@",
error.localizedDescription);
}
ARDAppClient *strongSelf = weakSelf;
[strongSelf.iceServers addObjectsFromArray:turnServers];
strongSelf.isTurnComplete = YES;
[strongSelf startSignalingIfReady];
}];
// Join room on room server.
[_roomServerClient joinRoomWithRoomId:roomId
isLoopback:isLoopback
completionHandler:^(ARDJoinResponse *response, NSError *error) {
ARDAppClient *strongSelf = weakSelf;
if (error) {
[strongSelf.delegate appClient:strongSelf didError:error];
return;
}
NSError *joinError =
[[strongSelf class] errorForJoinResultType:response.result];
if (joinError) {
RTCLogError(@"Failed to join room:%@ on room server.", roomId);
[strongSelf disconnect];
[strongSelf.delegate appClient:strongSelf didError:joinError];
return;
}
RTCLog(@"Joined room:%@ on room server.", roomId);
strongSelf.roomId = response.roomId;
strongSelf.clientId = response.clientId;
strongSelf.isInitiator = response.isInitiator;
// Offer/answer must be processed first, so they jump the queue.
for (ARDSignalingMessage *message in response.messages) {
if (message.type == kARDSignalingMessageTypeOffer ||
message.type == kARDSignalingMessageTypeAnswer) {
strongSelf.hasReceivedSdp = YES;
[strongSelf.messageQueue insertObject:message atIndex:0];
} else {
[strongSelf.messageQueue addObject:message];
}
}
strongSelf.webSocketURL = response.webSocketURL;
strongSelf.webSocketRestURL = response.webSocketRestURL;
[strongSelf registerWithColliderIfReady];
[strongSelf startSignalingIfReady];
}];
}
// Tears down the call: leaves the room, sends a bye over the signaling
// channel, closes the peer connection and resets all per-call state.
- (void)disconnect {
if (_state == kARDAppClientStateDisconnected) {
return;
}
if (self.hasJoinedRoomServerRoom) {
[_roomServerClient leaveRoomWithRoomId:_roomId
clientId:_clientId
completionHandler:nil];
}
if (_channel) {
if (_channel.state == kARDSignalingChannelStateRegistered) {
// Tell the other client we're hanging up.
ARDByeMessage *byeMessage = [[ARDByeMessage alloc] init];
[_channel sendMessage:byeMessage];
}
// Disconnect from collider.
_channel = nil;
}
_clientId = nil;
_roomId = nil;
_isInitiator = NO;
_hasReceivedSdp = NO;
_messageQueue = [NSMutableArray array];
_localVideoTrack = nil;
#if defined(WEBRTC_IOS)
// Stop diagnostic recordings before closing the connection.
[_factory stopAecDump];
[_peerConnection stopRtcEventLog];
#endif
[_peerConnection close];
_peerConnection = nil;
self.state = kARDAppClientStateDisconnected;
#if defined(WEBRTC_IOS)
if (kARDAppClientEnableTracing) {
RTCStopInternalCapture();
}
#endif
}
#pragma mark - ARDSignalingChannelDelegate
// Queues (or, for bye, immediately processes) a message arriving over the
// WebSocket signaling channel.
- (void)channel:(id<ARDSignalingChannel>)channel
didReceiveMessage:(ARDSignalingMessage *)message {
switch (message.type) {
case kARDSignalingMessageTypeOffer:
case kARDSignalingMessageTypeAnswer:
// Offers and answers must be processed before any other message, so we
// place them at the front of the queue.
_hasReceivedSdp = YES;
[_messageQueue insertObject:message atIndex:0];
break;
case kARDSignalingMessageTypeCandidate:
case kARDSignalingMessageTypeCandidateRemoval:
[_messageQueue addObject:message];
break;
case kARDSignalingMessageTypeBye:
// Disconnects can be processed immediately.
[self processSignalingMessage:message];
return;
}
[self drainMessageQueueIfReady];
}
// Reacts to signaling channel state changes; a closed or failed channel
// ends the call.
- (void)channel:(id<ARDSignalingChannel>)channel
didChangeState:(ARDSignalingChannelState)state {
switch (state) {
case kARDSignalingChannelStateOpen:
break;
case kARDSignalingChannelStateRegistered:
break;
case kARDSignalingChannelStateClosed:
case kARDSignalingChannelStateError:
// TODO(tkchin): reconnection scenarios. Right now we just disconnect
// completely if the websocket connection fails.
[self disconnect];
break;
}
}
#pragma mark - RTCPeerConnectionDelegate
// Callbacks for this delegate occur on non-main thread and need to be
// dispatched back to main queue as needed.
// Logging only; no app state depends on the SDP signaling state.
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didChangeSignalingState:(RTCSignalingState)stateChanged {
RTCLog(@"Signaling state changed: %ld", (long)stateChanged);
}
// Forwards the first remote video track to the delegate on the main queue.
// NOTE(review): the block captures self strongly via the |_delegate| ivar
// access; it runs once, so this only briefly extends the client's lifetime.
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didAddStream:(RTCMediaStream *)stream {
dispatch_async(dispatch_get_main_queue(), ^{
RTCLog(@"Received %lu video tracks and %lu audio tracks",
(unsigned long)stream.videoTracks.count,
(unsigned long)stream.audioTracks.count);
if (stream.videoTracks.count) {
RTCVideoTrack *videoTrack = stream.videoTracks[0];
[_delegate appClient:self didReceiveRemoteVideoTrack:videoTrack];
}
});
}
// Logging only; remote track removal is not surfaced to the delegate.
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didRemoveStream:(RTCMediaStream *)stream {
RTCLog(@"Stream was removed.");
}
// Renegotiation is intentionally unsupported; log so it is visible if hit.
- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection {
RTCLog(@"WARNING: Renegotiation needed but unimplemented.");
}
// Forwards ICE connection state changes to the delegate on the main queue.
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didChangeIceConnectionState:(RTCIceConnectionState)newState {
RTCLog(@"ICE state changed: %ld", (long)newState);
dispatch_async(dispatch_get_main_queue(), ^{
[_delegate appClient:self didChangeConnectionState:newState];
});
}
// Logging only; candidates are handled in didGenerateIceCandidate.
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didChangeIceGatheringState:(RTCIceGatheringState)newState {
RTCLog(@"ICE gathering state changed: %ld", (long)newState);
}
// Relays each locally gathered ICE candidate to the remote client.
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didGenerateIceCandidate:(RTCIceCandidate *)candidate {
dispatch_async(dispatch_get_main_queue(), ^{
ARDICECandidateMessage *message =
[[ARDICECandidateMessage alloc] initWithCandidate:candidate];
[self sendSignalingMessage:message];
});
}
// Relays a batch of withdrawn ICE candidates to the remote client.
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didRemoveIceCandidates:(NSArray<RTCIceCandidate *> *)candidates {
dispatch_async(dispatch_get_main_queue(), ^{
ARDICECandidateRemovalMessage *message =
[[ARDICECandidateRemovalMessage alloc]
initWithRemovedCandidates:candidates];
[self sendSignalingMessage:message];
});
}
// Data channels are unused by this client; required delegate method.
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didOpenDataChannel:(RTCDataChannel *)dataChannel {
}
#pragma mark - RTCSessionDescriptionDelegate
// Callbacks for this delegate occur on non-main thread and need to be
// dispatched back to main queue as needed.
// Called when the local offer/answer has been created. Sets it as the local
// description (preferring the configured video codec), sends it to the
// remote client and applies the configured max video bitrate.
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didCreateSessionDescription:(RTCSessionDescription *)sdp
error:(NSError *)error {
dispatch_async(dispatch_get_main_queue(), ^{
if (error) {
RTCLogError(@"Failed to create session description. Error: %@", error);
[self disconnect];
NSDictionary *userInfo = @{
NSLocalizedDescriptionKey: @"Failed to create session description.",
};
NSError *sdpError =
[[NSError alloc] initWithDomain:kARDAppClientErrorDomain
code:kARDAppClientErrorCreateSDP
userInfo:userInfo];
[_delegate appClient:self didError:sdpError];
return;
}
// Prefer codec from settings if available.
RTCSessionDescription *sdpPreferringCodec =
[ARDSDPUtils descriptionForDescription:sdp
preferredVideoCodec:[_settings currentVideoCodecSettingFromStore]];
__weak ARDAppClient *weakSelf = self;
[_peerConnection setLocalDescription:sdpPreferringCodec
completionHandler:^(NSError *error) {
ARDAppClient *strongSelf = weakSelf;
[strongSelf peerConnection:strongSelf.peerConnection
didSetSessionDescriptionWithError:error];
}];
ARDSessionDescriptionMessage *message =
[[ARDSessionDescriptionMessage alloc]
initWithDescription:sdpPreferringCodec];
[self sendSignalingMessage:message];
[self setMaxBitrateForPeerConnectionVideoSender];
});
}
// Called when setting a local or remote description completes. If we are
// the callee and just applied the remote offer, creates an answer.
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didSetSessionDescriptionWithError:(NSError *)error {
dispatch_async(dispatch_get_main_queue(), ^{
if (error) {
RTCLogError(@"Failed to set session description. Error: %@", error);
[self disconnect];
NSDictionary *userInfo = @{
NSLocalizedDescriptionKey: @"Failed to set session description.",
};
NSError *sdpError =
[[NSError alloc] initWithDomain:kARDAppClientErrorDomain
code:kARDAppClientErrorSetSDP
userInfo:userInfo];
[_delegate appClient:self didError:sdpError];
return;
}
// If we're answering and we've just set the remote offer we need to create
// an answer and set the local description.
if (!_isInitiator && !_peerConnection.localDescription) {
RTCMediaConstraints *constraints = [self defaultAnswerConstraints];
__weak ARDAppClient *weakSelf = self;
[_peerConnection answerForConstraints:constraints
completionHandler:^(RTCSessionDescription *sdp,
NSError *error) {
ARDAppClient *strongSelf = weakSelf;
[strongSelf peerConnection:strongSelf.peerConnection
didCreateSessionDescription:sdp
error:error];
}];
}
});
}
#pragma mark - Private
#if defined(WEBRTC_IOS)
// Returns the absolute path for |fileName| inside the app's Documents
// directory. |fileName| must be non-empty.
- (NSString *)documentsFilePathForFileName:(NSString *)fileName {
  NSParameterAssert(fileName.length);
  NSArray *searchPaths = NSSearchPathForDirectoriesInDomains(
      NSDocumentDirectory, NSUserDomainMask, YES);
  return [searchPaths.firstObject stringByAppendingPathComponent:fileName];
}
#endif
// Whether a room-server join has completed, i.e. we have a client id.
// Compare explicitly instead of returning the NSUInteger length as BOOL:
// BOOL is a signed char on 32-bit iOS, so a length that is a multiple of
// 256 would silently truncate to NO.
- (BOOL)hasJoinedRoomServerRoom {
  return _clientId.length > 0;
}
// Begins the peer connection connection process if we have both joined a room
// on the room server and tried to obtain a TURN server. Otherwise does nothing.
// A peer connection object will be created with a stream that contains local
// audio and video capture. If this client is the caller, an offer is created as
// well, otherwise the client will wait for an offer to arrive.
// Creates the peer connection once both the TURN request and the room join
// have completed (see the comment above); no-op until then.
- (void)startSignalingIfReady {
if (!_isTurnComplete || !self.hasJoinedRoomServerRoom) {
return;
}
self.state = kARDAppClientStateConnected;
// Create peer connection.
RTCMediaConstraints *constraints = [self defaultPeerConnectionConstraints];
RTCConfiguration *config = [[RTCConfiguration alloc] init];
config.iceServers = _iceServers;
_peerConnection = [_factory peerConnectionWithConfiguration:config
constraints:constraints
delegate:self];
// Create AV senders.
[self createAudioSender];
[self createVideoSender];
if (_isInitiator) {
// Send offer.
__weak ARDAppClient *weakSelf = self;
[_peerConnection offerForConstraints:[self defaultOfferConstraints]
completionHandler:^(RTCSessionDescription *sdp,
NSError *error) {
ARDAppClient *strongSelf = weakSelf;
[strongSelf peerConnection:strongSelf.peerConnection
didCreateSessionDescription:sdp
error:error];
}];
} else {
// Check if we've received an offer.
[self drainMessageQueueIfReady];
}
#if defined(WEBRTC_IOS)
// Start event log.
if (kARDAppClientEnableRtcEventLog) {
NSString *filePath = [self documentsFilePathForFileName:@"webrtc-rtceventlog"];
if (![_peerConnection startRtcEventLogWithFilePath:filePath
maxSizeInBytes:kARDAppClientRtcEventLogMaxSizeInBytes]) {
RTCLogError(@"Failed to start event logging.");
}
}
// Start aecdump diagnostic recording.
if ([_settings currentCreateAecDumpSettingFromStore]) {
NSString *filePath = [self documentsFilePathForFileName:@"webrtc-audio.aecdump"];
if (![_factory startAecDumpWithFilePath:filePath
maxSizeInBytes:kARDAppClientAecDumpMaxSizeInBytes]) {
RTCLogError(@"Failed to start aec dump.");
}
}
#endif
}
// Processes the messages that we've received from the room server and the
// signaling channel. The offer or answer message must be processed before other
// signaling messages, however they can arrive out of order. Hence, this method
// only processes pending messages if there is a peer connection object and
// if we have received either an offer or answer.
// Processes all queued signaling messages, then empties the queue. Guarded
// so candidates are never applied before the SDP exchange has begun.
- (void)drainMessageQueueIfReady {
if (!_peerConnection || !_hasReceivedSdp) {
return;
}
for (ARDSignalingMessage *message in _messageQueue) {
[self processSignalingMessage:message];
}
[_messageQueue removeAllObjects];
}
// Processes the given signaling message based on its type.
// Applies a single signaling message to the peer connection. Only bye may
// arrive before the peer connection exists (see the assert).
- (void)processSignalingMessage:(ARDSignalingMessage *)message {
NSParameterAssert(_peerConnection ||
message.type == kARDSignalingMessageTypeBye);
switch (message.type) {
case kARDSignalingMessageTypeOffer:
case kARDSignalingMessageTypeAnswer: {
ARDSessionDescriptionMessage *sdpMessage =
(ARDSessionDescriptionMessage *)message;
RTCSessionDescription *description = sdpMessage.sessionDescription;
// Prefer codec from settings if available.
RTCSessionDescription *sdpPreferringCodec =
[ARDSDPUtils descriptionForDescription:description
preferredVideoCodec:[_settings currentVideoCodecSettingFromStore]];
__weak ARDAppClient *weakSelf = self;
[_peerConnection setRemoteDescription:sdpPreferringCodec
completionHandler:^(NSError *error) {
ARDAppClient *strongSelf = weakSelf;
[strongSelf peerConnection:strongSelf.peerConnection
didSetSessionDescriptionWithError:error];
}];
break;
}
case kARDSignalingMessageTypeCandidate: {
ARDICECandidateMessage *candidateMessage =
(ARDICECandidateMessage *)message;
[_peerConnection addIceCandidate:candidateMessage.candidate];
break;
}
case kARDSignalingMessageTypeCandidateRemoval: {
ARDICECandidateRemovalMessage *candidateMessage =
(ARDICECandidateRemovalMessage *)message;
[_peerConnection removeIceCandidates:candidateMessage.candidates];
break;
}
case kARDSignalingMessageTypeBye:
// Other client disconnected.
// TODO(tkchin): support waiting in room for next client. For now just
// disconnect.
[self disconnect];
break;
}
}
// Sends a signaling message to the other client. The caller will send messages
// through the room server, whereas the callee will send messages over the
// signaling channel.
// Routes an outgoing message: the caller posts via the room server, the
// callee sends over the WebSocket channel (see comment above).
- (void)sendSignalingMessage:(ARDSignalingMessage *)message {
if (_isInitiator) {
__weak ARDAppClient *weakSelf = self;
[_roomServerClient sendMessage:message
forRoomId:_roomId
clientId:_clientId
completionHandler:^(ARDMessageResponse *response,
NSError *error) {
ARDAppClient *strongSelf = weakSelf;
if (error) {
[strongSelf.delegate appClient:strongSelf didError:error];
return;
}
NSError *messageError =
[[strongSelf class] errorForMessageResultType:response.result];
if (messageError) {
[strongSelf.delegate appClient:strongSelf didError:messageError];
return;
}
}];
} else {
[_channel sendMessage:message];
}
}
// Creates a video sender on the peer connection and attaches the local
// capture track (if any), notifying the delegate so the UI can render it.
- (RTCRtpSender *)createVideoSender {
RTCRtpSender *sender =
[_peerConnection senderWithKind:kRTCMediaStreamTrackKindVideo
streamId:kARDMediaStreamId];
_localVideoTrack = [self createLocalVideoTrack];
if (_localVideoTrack) {
sender.track = _localVideoTrack;
[_delegate appClient:self didReceiveLocalVideoTrack:_localVideoTrack];
}
return sender;
}
// Applies the user-configured max bitrate to every video sender on the
// peer connection; senders without a track are skipped.
- (void)setMaxBitrateForPeerConnectionVideoSender {
  for (RTCRtpSender *videoSender in _peerConnection.senders) {
    if (videoSender.track != nil &&
        [videoSender.track.kind isEqualToString:kARDVideoTrackKind]) {
      [self setMaxBitrate:[_settings currentMaxBitrateSettingFromStore]
           forVideoSender:videoSender];
    }
  }
}
// Applies |maxBitrate| (in kbps) to every encoding of |sender|. A nil or
// non-positive value leaves the sender's parameters untouched (messaging
// nil yields intValue 0, which hits the early return).
- (void)setMaxBitrate:(NSNumber *)maxBitrate forVideoSender:(RTCRtpSender *)sender {
if (maxBitrate.intValue <= 0) {
return;
}
RTCRtpParameters *parametersToModify = sender.parameters;
for (RTCRtpEncodingParameters *encoding in parametersToModify.encodings) {
encoding.maxBitrateBps = @(maxBitrate.intValue * kKbpsMultiplier);
}
[sender setParameters:parametersToModify];
}
// Creates an audio sender on the peer connection with a track built from
// the default audio constraints.
- (RTCRtpSender *)createAudioSender {
RTCMediaConstraints *constraints = [self defaultMediaAudioConstraints];
RTCAudioSource *source = [_factory audioSourceWithConstraints:constraints];
RTCAudioTrack *track = [_factory audioTrackWithSource:source
trackId:kARDAudioTrackId];
RTCRtpSender *sender =
[_peerConnection senderWithKind:kRTCMediaStreamTrackKindAudio
streamId:kARDMediaStreamId];
sender.track = track;
return sender;
}
// Builds the local camera video track, or returns nil on the simulator or
// in audio-only mode. The delegate receives the capturer so the UI layer
// can start/stop capture.
- (RTCVideoTrack *)createLocalVideoTrack {
RTCVideoTrack* localVideoTrack = nil;
// The iOS simulator doesn't provide any sort of camera capture
// support or emulation (http://goo.gl/rHAnC1) so don't bother
// trying to open a local stream.
#if !TARGET_IPHONE_SIMULATOR
if (![_settings currentAudioOnlySettingFromStore]) {
RTCVideoSource *source = [_factory videoSource];
RTCCameraVideoCapturer *capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:source];
[_delegate appClient:self didCreateLocalCapturer:capturer];
localVideoTrack =
[_factory videoTrackWithSource:source
trackId:kARDVideoTrackId];
}
#endif
return localVideoTrack;
}
#pragma mark - Collider methods
// Opens and registers the WebSocket (collider) channel once we have a
// client id; in loopback mode also opens the echo channel.
- (void)registerWithColliderIfReady {
if (!self.hasJoinedRoomServerRoom) {
return;
}
// Open WebSocket connection.
if (!_channel) {
_channel =
[[ARDWebSocketChannel alloc] initWithURL:_websocketURL
restURL:_websocketRestURL
delegate:self];
if (_isLoopback) {
_loopbackChannel =
[[ARDLoopbackWebSocketChannel alloc] initWithURL:_websocketURL
restURL:_websocketRestURL];
}
}
[_channel registerForRoomId:_roomId clientId:_clientId];
if (_isLoopback) {
[_loopbackChannel registerForRoomId:_roomId clientId:@"LOOPBACK_CLIENT_ID"];
}
}
#pragma mark - Defaults
// Audio constraints used when creating the local audio source; the level
// controller flag comes from the user's settings.
- (RTCMediaConstraints *)defaultMediaAudioConstraints {
NSString *valueLevelControl = [_settings currentUseLevelControllerSettingFromStore] ?
kRTCMediaConstraintsValueTrue :
kRTCMediaConstraintsValueFalse;
NSDictionary *mandatoryConstraints = @{ kRTCMediaConstraintsLevelControl : valueLevelControl };
RTCMediaConstraints *constraints =
[[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatoryConstraints
optionalConstraints:nil];
return constraints;
}
// Answers use the same receive-audio/video constraints as offers.
- (RTCMediaConstraints *)defaultAnswerConstraints {
return [self defaultOfferConstraints];
}
// Constraints used when creating an offer: request both audio and video
// from the remote side.
- (RTCMediaConstraints *)defaultOfferConstraints {
  return [[RTCMediaConstraints alloc]
      initWithMandatoryConstraints:@{
        @"OfferToReceiveAudio" : @"true",
        @"OfferToReceiveVideo" : @"true"
      }
               optionalConstraints:nil];
}
// Returns the injected constraints if one was set; otherwise builds the
// default set, enabling DTLS-SRTP key agreement except in loopback mode.
- (RTCMediaConstraints *)defaultPeerConnectionConstraints {
if (_defaultPeerConnectionConstraints) {
return _defaultPeerConnectionConstraints;
}
NSString *value = _isLoopback ? @"false" : @"true";
NSDictionary *optionalConstraints = @{ @"DtlsSrtpKeyAgreement" : value };
RTCMediaConstraints* constraints =
[[RTCMediaConstraints alloc]
initWithMandatoryConstraints:nil
optionalConstraints:optionalConstraints];
return constraints;
}
#pragma mark - Errors
// Maps a room server join result onto an NSError in the ARDAppClient
// domain; returns nil on success.
+ (NSError *)errorForJoinResultType:(ARDJoinResultType)resultType {
  switch (resultType) {
    case kARDJoinResultTypeSuccess:
      return nil;
    case kARDJoinResultTypeUnknown:
      return [[NSError alloc]
          initWithDomain:kARDAppClientErrorDomain
                    code:kARDAppClientErrorUnknown
                userInfo:@{NSLocalizedDescriptionKey : @"Unknown error."}];
    case kARDJoinResultTypeFull:
      return [[NSError alloc]
          initWithDomain:kARDAppClientErrorDomain
                    code:kARDAppClientErrorRoomFull
                userInfo:@{NSLocalizedDescriptionKey : @"Room is full."}];
  }
  return nil;
}
// Maps a room server message-send result onto an NSError in the
// ARDAppClient domain; returns nil on success.
+ (NSError *)errorForMessageResultType:(ARDMessageResultType)resultType {
  NSInteger code = 0;
  NSString *description = nil;
  switch (resultType) {
    case kARDMessageResultTypeSuccess:
      return nil;
    case kARDMessageResultTypeUnknown:
      code = kARDAppClientErrorUnknown;
      description = @"Unknown error.";
      break;
    case kARDMessageResultTypeInvalidClient:
      code = kARDAppClientErrorInvalidClient;
      description = @"Invalid client.";
      break;
    case kARDMessageResultTypeInvalidRoom:
      code = kARDAppClientErrorInvalidRoom;
      description = @"Invalid room.";
      break;
  }
  return [[NSError alloc]
      initWithDomain:kARDAppClientErrorDomain
                code:code
            userInfo:@{NSLocalizedDescriptionKey : description}];
}
@end

View File

@@ -0,0 +1,14 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDRoomServerClient.h"
// Room server client backed by the AppRTC App Engine service.
@interface ARDAppEngineClient : NSObject <ARDRoomServerClient>
@end

View File

@ -0,0 +1,179 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDAppEngineClient.h"
#import "WebRTC/RTCLogging.h"
#import "ARDJoinResponse.h"
#import "ARDMessageResponse.h"
#import "ARDSignalingMessage.h"
#import "ARDUtilities.h"
// TODO(tkchin): move these to a configuration object.
// URL templates for the appr.tc room server's REST endpoints.
static NSString * const kARDRoomServerHostUrl =
    @"https://appr.tc";
static NSString * const kARDRoomServerJoinFormat =
    @"https://appr.tc/join/%@";
// Loopback join URL; the server echoes our own signaling back to us.
static NSString * const kARDRoomServerJoinFormatLoopback =
    @"https://appr.tc/join/%@?debug=loopback";
static NSString * const kARDRoomServerMessageFormat =
    @"https://appr.tc/message/%@/%@";
static NSString * const kARDRoomServerLeaveFormat =
    @"https://appr.tc/leave/%@/%@";

// Error domain/code used for unparseable server responses.
static NSString * const kARDAppEngineClientErrorDomain = @"ARDAppEngineClient";
static NSInteger const kARDAppEngineClientErrorBadResponse = -1;
@implementation ARDAppEngineClient
#pragma mark - ARDRoomServerClient
// Joins |roomId| on the room server via HTTP POST and reports the parsed
// ARDJoinResponse (or an error) through |completionHandler|.
- (void)joinRoomWithRoomId:(NSString *)roomId
                isLoopback:(BOOL)isLoopback
         completionHandler:(void (^)(ARDJoinResponse *response,
                                     NSError *error))completionHandler {
  NSParameterAssert(roomId.length);

  // Loopback rooms use a different join URL so the server echoes us back.
  NSString *urlString = nil;
  if (isLoopback) {
    urlString =
        [NSString stringWithFormat:kARDRoomServerJoinFormatLoopback, roomId];
  } else {
    urlString =
        [NSString stringWithFormat:kARDRoomServerJoinFormat, roomId];
  }

  NSURL *roomURL = [NSURL URLWithString:urlString];
  RTCLog(@"Joining room:%@ on room server.", roomId);
  NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:roomURL];
  request.HTTPMethod = @"POST";
  // BUG FIX: the original declared a __weak/strong self dance here but never
  // used |strongSelf| (the block referenced |self| directly regardless),
  // leaving an unused variable and a misleading pattern. Capturing self
  // strongly for the lifetime of one HTTP request is intentional.
  [NSURLConnection sendAsyncRequest:request
                  completionHandler:^(NSURLResponse *response, NSData *data, NSError *error) {
    if (error) {
      if (completionHandler) {
        completionHandler(nil, error);
      }
      return;
    }
    ARDJoinResponse *joinResponse = [ARDJoinResponse responseFromJSONData:data];
    if (!joinResponse) {
      if (completionHandler) {
        NSError *error = [[self class] badResponseError];
        completionHandler(nil, error);
      }
      return;
    }
    if (completionHandler) {
      completionHandler(joinResponse, nil);
    }
  }];
}
// Posts |message| as JSON for |clientId| in |roomId| and reports the parsed
// ARDMessageResponse (or an error) through |completionHandler|.
- (void)sendMessage:(ARDSignalingMessage *)message
          forRoomId:(NSString *)roomId
           clientId:(NSString *)clientId
  completionHandler:(void (^)(ARDMessageResponse *response,
                              NSError *error))completionHandler {
  NSParameterAssert(message);
  NSParameterAssert(roomId.length);
  NSParameterAssert(clientId.length);

  NSData *data = [message JSONData];
  NSString *urlString =
      [NSString stringWithFormat:
          kARDRoomServerMessageFormat, roomId, clientId];
  NSURL *url = [NSURL URLWithString:urlString];
  RTCLog(@"C->RS POST: %@", message);
  NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
  request.HTTPMethod = @"POST";
  request.HTTPBody = data;
  // BUG FIX: removed the unused __weak/strongSelf pair (the block used |self|
  // directly, so the dance was dead code producing an unused-variable
  // warning). A strong capture of self for one request is intentional.
  [NSURLConnection sendAsyncRequest:request
                  completionHandler:^(NSURLResponse *response,
                                      NSData *data,
                                      NSError *error) {
    if (error) {
      if (completionHandler) {
        completionHandler(nil, error);
      }
      return;
    }
    ARDMessageResponse *messageResponse =
        [ARDMessageResponse responseFromJSONData:data];
    if (!messageResponse) {
      if (completionHandler) {
        NSError *error = [[self class] badResponseError];
        completionHandler(nil, error);
      }
      return;
    }
    if (completionHandler) {
      completionHandler(messageResponse, nil);
    }
  }];
}
// Notifies the room server that |clientId| is leaving |roomId|.
// NOTE(review): this deliberately blocks the calling thread on a semaphore
// until the request's completion fires, so the BYE is guaranteed to be sent
// before any subsequent teardown. Do not call it on a queue the request's
// completion handler must run on, or it will deadlock — confirm call sites.
- (void)leaveRoomWithRoomId:(NSString *)roomId
                   clientId:(NSString *)clientId
          completionHandler:(void (^)(NSError *error))completionHandler {
  NSParameterAssert(roomId.length);
  NSParameterAssert(clientId.length);
  NSString *urlString =
      [NSString stringWithFormat:kARDRoomServerLeaveFormat, roomId, clientId];
  NSURL *url = [NSURL URLWithString:urlString];
  NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
  request.HTTPMethod = @"POST";
  RTCLog(@"C->RS: BYE");
  __block NSError *error = nil;
  // We want a synchronous request so that we know that we've left the room on
  // room server before we do any further work.
  dispatch_semaphore_t sem = dispatch_semaphore_create(0);
  [NSURLConnection sendAsyncRequest:request
                  completionHandler:^(NSURLResponse *response, NSData *data, NSError *e) {
    // Capture the request error (if any) and unblock the waiting thread.
    if (e) {
      error = e;
    }
    dispatch_semaphore_signal(sem);
  }];
  dispatch_semaphore_wait(sem, DISPATCH_TIME_FOREVER);
  if (error) {
    RTCLogError(@"Error leaving room %@ on room server: %@", roomId, error.localizedDescription);
    if (completionHandler) {
      completionHandler(error);
    }
    return;
  }
  RTCLog(@"Left room:%@ on room server.", roomId);
  if (completionHandler) {
    completionHandler(nil);
  }
}
#pragma mark - Private
// Error returned when the room server's response cannot be parsed.
+ (NSError *)badResponseError {
  NSDictionary *userInfo = @{
    NSLocalizedDescriptionKey : @"Error parsing response.",
  };
  return [[NSError alloc] initWithDomain:kARDAppEngineClientErrorDomain
                                    code:kARDAppEngineClientErrorBadResponse
                                userInfo:userInfo];
}
@end

View File

@ -0,0 +1,30 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
/** Class used to estimate bitrate based on byte count. It is expected that
* byte count is monotonically increasing. This class tracks the times that
* byte count is updated, and measures the bitrate based on the byte difference
* over the interval between updates.
*/
@interface ARDBitrateTracker : NSObject

/** The bitrate in bits per second, as of the most recent update. */
@property(nonatomic, readonly) double bitrate;
/** The bitrate as a formatted string in bps, Kbps or Mbps. */
@property(nonatomic, readonly) NSString *bitrateString;

/** Converts the bitrate to a readable format in bps, Kbps or Mbps. */
+ (NSString *)bitrateStringForBitrate:(double)bitrate;
/** Updates the tracked bitrate with the new cumulative byte count.
 *  The first call only seeds internal state; subsequent calls measure. */
- (void)updateBitrateWithCurrentByteCount:(NSInteger)byteCount;

@end

View File

@ -0,0 +1,45 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDBitrateTracker.h"
#import <QuartzCore/QuartzCore.h>
@implementation ARDBitrateTracker {
  CFTimeInterval _prevTime;   // Timestamp of the previous byte-count sample.
  NSInteger _prevByteCount;   // Byte count at the previous sample.
}

@synthesize bitrate = _bitrate;

// Formats |bitrate| (bits per second) as a human-readable string.
+ (NSString *)bitrateStringForBitrate:(double)bitrate {
  if (bitrate > 1e6) {
    return [NSString stringWithFormat:@"%.2fMbps", bitrate * 1e-6];
  }
  if (bitrate > 1e3) {
    return [NSString stringWithFormat:@"%.0fKbps", bitrate * 1e-3];
  }
  return [NSString stringWithFormat:@"%.0fbps", bitrate];
}

- (NSString *)bitrateString {
  return [[self class] bitrateStringForBitrate:_bitrate];
}

// Recomputes the bitrate from the byte delta over the elapsed interval.
- (void)updateBitrateWithCurrentByteCount:(NSInteger)byteCount {
  CFTimeInterval now = CACurrentMediaTime();
  // Skip the very first sample (no previous timestamp) and any byte-count
  // reset (non-increasing counts).
  if (_prevTime && (byteCount > _prevByteCount)) {
    _bitrate = (byteCount - _prevByteCount) * 8 / (now - _prevTime);
  }
  _prevByteCount = byteCount;
  _prevTime = now;
}

@end

View File

@ -0,0 +1,24 @@
/*
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <WebRTC/RTCCameraVideoCapturer.h>
@class ARDSettingsModel;
// Controls the camera. Handles starting the capture, switching cameras etc.
@interface ARDCaptureController : NSObject

// |capturer| performs the actual capture; |settings| supplies the target
// resolution used to select a device format.
- (instancetype)initWithCapturer:(RTCCameraVideoCapturer *)capturer
                        settings:(ARDSettingsModel *)settings;
// Starts capture on the currently selected camera (front by default).
- (void)startCapture;
- (void)stopCapture;
// Toggles between front and back camera and restarts capture.
- (void)switchCamera;

@end

View File

@ -0,0 +1,92 @@
/*
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDCaptureController.h"
#import "ARDSettingsModel.h"
@implementation ARDCaptureController {
  RTCCameraVideoCapturer *_capturer;
  ARDSettingsModel *_settings;
  BOOL _usingFrontCamera;
}

// Designated initializer. Retains the capturer and settings; starts on the
// front camera.
- (instancetype)initWithCapturer:(RTCCameraVideoCapturer *)capturer
                        settings:(ARDSettingsModel *)settings {
  // BUG FIX: the original wrote "if ([super init])", which tests the result
  // of -[super init] without assigning it to self. If the superclass ever
  // returned a substitute object, it would be silently dropped. Use the
  // standard "self = [super init]" idiom.
  self = [super init];
  if (self) {
    _capturer = capturer;
    _settings = settings;
    _usingFrontCamera = YES;
  }
  return self;
}

// Starts (or restarts) capture on the currently selected camera, using the
// device format closest to the resolution stored in |_settings|.
- (void)startCapture {
  AVCaptureDevicePosition position =
      _usingFrontCamera ? AVCaptureDevicePositionFront : AVCaptureDevicePositionBack;
  AVCaptureDevice *device = [self findDeviceForPosition:position];
  AVCaptureDeviceFormat *format = [self selectFormatForDevice:device];
  NSInteger fps = [self selectFpsForFormat:format];

  [_capturer startCaptureWithDevice:device format:format fps:fps];
}

- (void)stopCapture {
  [_capturer stopCapture];
}

// Flips between front and back camera and restarts capture.
- (void)switchCamera {
  _usingFrontCamera = !_usingFrontCamera;
  [self startCapture];
}

#pragma mark - Private

// Returns the first capture device at |position|, or any available device as
// a fallback.
- (AVCaptureDevice *)findDeviceForPosition:(AVCaptureDevicePosition)position {
  NSArray<AVCaptureDevice *> *captureDevices = [RTCCameraVideoCapturer captureDevices];
  for (AVCaptureDevice *device in captureDevices) {
    if (device.position == position) {
      return device;
    }
  }
  // ROBUSTNESS FIX: -firstObject is nil-safe; the original captureDevices[0]
  // throws NSRangeException when no capture device exists.
  return captureDevices.firstObject;
}

// Picks the supported format whose dimensions are closest (by Manhattan
// distance) to the resolution stored in settings.
- (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device {
  NSArray<AVCaptureDeviceFormat *> *formats =
      [RTCCameraVideoCapturer supportedFormatsForDevice:device];
  int targetWidth = [_settings currentVideoResolutionWidthFromStore];
  int targetHeight = [_settings currentVideoResolutionHeightFromStore];
  AVCaptureDeviceFormat *selectedFormat = nil;
  int currentDiff = INT_MAX;

  for (AVCaptureDeviceFormat *format in formats) {
    CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
    int diff = abs(targetWidth - dimension.width) + abs(targetHeight - dimension.height);
    if (diff < currentDiff) {
      selectedFormat = format;
      currentDiff = diff;
    }
  }

  NSAssert(selectedFormat != nil, @"No suitable capture format found.");
  return selectedFormat;
}

// Returns the highest frame rate the format supports, truncated to an
// integer.
- (NSInteger)selectFpsForFormat:(AVCaptureDeviceFormat *)format {
  Float64 maxFramerate = 0;
  for (AVFrameRateRange *fpsRange in format.videoSupportedFrameRateRanges) {
    maxFramerate = fmax(maxFramerate, fpsRange.maxFrameRate);
  }
  return maxFramerate;
}

@end

View File

@ -0,0 +1,23 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDJoinResponse.h"
// Private readwrite access for the parser in ARDJoinResponse.m.
@interface ARDJoinResponse ()

@property(nonatomic, assign) ARDJoinResultType result;
@property(nonatomic, assign) BOOL isInitiator;
// FIX: NSString/NSArray-typed properties use copy (not strong) so a mutable
// value assigned by the parser cannot change behind our back.
@property(nonatomic, copy) NSString *roomId;
@property(nonatomic, copy) NSString *clientId;
@property(nonatomic, copy) NSArray *messages;
@property(nonatomic, strong) NSURL *webSocketURL;
@property(nonatomic, strong) NSURL *webSocketRestURL;

@end

View File

@ -0,0 +1,32 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
// Result of a join request on the room server.
typedef NS_ENUM(NSInteger, ARDJoinResultType) {
  kARDJoinResultTypeUnknown,
  kARDJoinResultTypeSuccess,
  kARDJoinResultTypeFull
};

// Result of joining a room on the room server.
@interface ARDJoinResponse : NSObject

@property(nonatomic, readonly) ARDJoinResultType result;
// Mirrors the server's "is_initiator" field.
@property(nonatomic, readonly) BOOL isInitiator;
@property(nonatomic, readonly) NSString *roomId;
@property(nonatomic, readonly) NSString *clientId;
// ARDSignalingMessage objects the server had queued before we joined.
@property(nonatomic, readonly) NSArray *messages;
// WebSocket signaling endpoints ("wss_url" / "wss_post_url") from the server.
@property(nonatomic, readonly) NSURL *webSocketURL;
@property(nonatomic, readonly) NSURL *webSocketRestURL;

// Parses the server's JSON join response; returns nil if |data| is not valid
// JSON.
+ (ARDJoinResponse *)responseFromJSONData:(NSData *)data;

@end

View File

@ -0,0 +1,82 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDJoinResponse+Internal.h"
#import "ARDSignalingMessage.h"
#import "ARDUtilities.h"
#import "RTCIceServer+JSON.h"
// Keys in the room server's JSON join response.
// FIX: declared as "NSString * const" (constant pointer). The original
// "NSString const *" made the pointee const but left the pointer itself
// reassignable, which is not what is intended for a constant.
static NSString * const kARDJoinResultKey = @"result";
static NSString * const kARDJoinResultParamsKey = @"params";
static NSString * const kARDJoinInitiatorKey = @"is_initiator";
static NSString * const kARDJoinRoomIdKey = @"room_id";
static NSString * const kARDJoinClientIdKey = @"client_id";
static NSString * const kARDJoinMessagesKey = @"messages";
static NSString * const kARDJoinWebSocketURLKey = @"wss_url";
static NSString * const kARDJoinWebSocketRestURLKey = @"wss_post_url";
@implementation ARDJoinResponse

@synthesize result = _result;
@synthesize isInitiator = _isInitiator;
@synthesize roomId = _roomId;
@synthesize clientId = _clientId;
@synthesize messages = _messages;
@synthesize webSocketURL = _webSocketURL;
@synthesize webSocketRestURL = _webSocketRestURL;

// Parses the room server's JSON join response. Returns nil when |data| is
// not valid JSON.
+ (ARDJoinResponse *)responseFromJSONData:(NSData *)data {
  NSDictionary *responseJSON = [NSDictionary dictionaryWithJSONData:data];
  if (!responseJSON) {
    return nil;
  }
  ARDJoinResponse *response = [[ARDJoinResponse alloc] init];
  NSString *resultString = responseJSON[kARDJoinResultKey];
  response.result = [[self class] resultTypeFromString:resultString];
  NSDictionary *params = responseJSON[kARDJoinResultParamsKey];

  response.isInitiator = [params[kARDJoinInitiatorKey] boolValue];
  response.roomId = params[kARDJoinRoomIdKey];
  response.clientId = params[kARDJoinClientIdKey];

  // Parse messages.
  NSArray *messages = params[kARDJoinMessagesKey];
  NSMutableArray *signalingMessages =
      [NSMutableArray arrayWithCapacity:messages.count];
  for (NSString *message in messages) {
    ARDSignalingMessage *signalingMessage =
        [ARDSignalingMessage messageFromJSONString:message];
    // ROBUSTNESS FIX: skip entries that fail to parse. Inserting nil into an
    // NSMutableArray throws, so the original would crash on malformed server
    // data. (Assumes messageFromJSONString: returns nil on parse failure —
    // TODO(review): confirm against ARDSignalingMessage.)
    if (signalingMessage) {
      [signalingMessages addObject:signalingMessage];
    }
  }
  response.messages = signalingMessages;

  // Parse websocket urls.
  NSString *webSocketURLString = params[kARDJoinWebSocketURLKey];
  response.webSocketURL = [NSURL URLWithString:webSocketURLString];
  NSString *webSocketRestURLString = params[kARDJoinWebSocketRestURLKey];
  response.webSocketRestURL = [NSURL URLWithString:webSocketRestURLString];

  return response;
}

#pragma mark - Private

// Maps the server's result string onto ARDJoinResultType.
+ (ARDJoinResultType)resultTypeFromString:(NSString *)resultString {
  ARDJoinResultType result = kARDJoinResultTypeUnknown;
  if ([resultString isEqualToString:@"SUCCESS"]) {
    result = kARDJoinResultTypeSuccess;
  } else if ([resultString isEqualToString:@"FULL"]) {
    result = kARDJoinResultTypeFull;
  }
  return result;
}

@end

View File

@ -0,0 +1,17 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDMessageResponse.h"
// Private readwrite access for the parser in ARDMessageResponse.m.
@interface ARDMessageResponse ()
@property(nonatomic, assign) ARDMessageResultType result;
@end

View File

@ -0,0 +1,26 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
// Possible results of posting a signaling message to the room server.
typedef NS_ENUM(NSInteger, ARDMessageResultType) {
  kARDMessageResultTypeUnknown,
  kARDMessageResultTypeSuccess,
  kARDMessageResultTypeInvalidRoom,
  kARDMessageResultTypeInvalidClient
};

@interface ARDMessageResponse : NSObject

@property(nonatomic, readonly) ARDMessageResultType result;

// Parses the server's JSON message response; returns nil if |data| is not
// valid JSON.
+ (ARDMessageResponse *)responseFromJSONData:(NSData *)data;

@end

View File

@ -0,0 +1,46 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDMessageResponse+Internal.h"
#import "ARDUtilities.h"
// Key of the result field in the room server's message response JSON.
// FIX: "NSString * const" (constant pointer) instead of the original
// "NSString const *" (mutable pointer to const object).
static NSString * const kARDMessageResultKey = @"result";
@implementation ARDMessageResponse

@synthesize result = _result;

// Parses the room server's JSON message response. Returns nil when |data| is
// not valid JSON.
+ (ARDMessageResponse *)responseFromJSONData:(NSData *)data {
  NSDictionary *json = [NSDictionary dictionaryWithJSONData:data];
  if (!json) {
    return nil;
  }
  ARDMessageResponse *response = [[ARDMessageResponse alloc] init];
  NSString *resultString = json[kARDMessageResultKey];
  response.result = [[self class] resultTypeFromString:resultString];
  return response;
}

#pragma mark - Private

// Maps the server's result string onto ARDMessageResultType.
+ (ARDMessageResultType)resultTypeFromString:(NSString *)resultString {
  if ([resultString isEqualToString:@"SUCCESS"]) {
    return kARDMessageResultTypeSuccess;
  }
  if ([resultString isEqualToString:@"INVALID_CLIENT"]) {
    return kARDMessageResultTypeInvalidClient;
  }
  if ([resultString isEqualToString:@"INVALID_ROOM"]) {
    return kARDMessageResultTypeInvalidRoom;
  }
  return kARDMessageResultTypeUnknown;
}

@end

View File

@ -0,0 +1,34 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
@class ARDJoinResponse;
@class ARDMessageResponse;
@class ARDSignalingMessage;
// Abstraction over the room server API: join a room, post signaling
// messages, and leave.
@protocol ARDRoomServerClient <NSObject>

// Joins |roomId| and reports the parsed join response or an error.
- (void)joinRoomWithRoomId:(NSString *)roomId
                isLoopback:(BOOL)isLoopback
         completionHandler:(void (^)(ARDJoinResponse *response,
                                     NSError *error))completionHandler;

// Posts |message| for |clientId| in |roomId|.
- (void)sendMessage:(ARDSignalingMessage *)message
          forRoomId:(NSString *)roomId
           clientId:(NSString *)clientId
  completionHandler:(void (^)(ARDMessageResponse *response,
                              NSError *error))completionHandler;

// Notifies the server that |clientId| is leaving |roomId|.
- (void)leaveRoomWithRoomId:(NSString *)roomId
                   clientId:(NSString *)clientId
          completionHandler:(void (^)(NSError *error))completionHandler;

@end

View File

@ -0,0 +1,24 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
@class RTCSessionDescription;
@interface ARDSDPUtils : NSObject

// Updates the original SDP description to instead prefer the specified video
// codec. We do this by placing the specified codec at the beginning of the
// codec list if it exists in the sdp.
// Returns |description| unchanged when the SDP has no m=video line or no
// rtpmap entry matching |codec|.
+ (RTCSessionDescription *)
    descriptionForDescription:(RTCSessionDescription *)description
          preferredVideoCodec:(NSString *)codec;

@end

View File

@ -0,0 +1,98 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDSDPUtils.h"
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCSessionDescription.h"
@implementation ARDSDPUtils

// Returns a copy of |description| whose m=video line lists the payload types
// of |codec| first, so that codec is preferred during negotiation. Falls back
// to returning |description| unchanged when no video m-line or no matching
// rtpmap entry exists.
+ (RTCSessionDescription *)
    descriptionForDescription:(RTCSessionDescription *)description
          preferredVideoCodec:(NSString *)codec {
  NSString *sdpString = description.sdp;
  NSString *lineSeparator = @"\r\n";
  NSString *mLineSeparator = @" ";
  // Copied from PeerConnectionClient.java.
  // TODO(tkchin): Move this to a shared C++ file.
  NSMutableArray *lines =
      [NSMutableArray arrayWithArray:
          [sdpString componentsSeparatedByString:lineSeparator]];
  // Find the line starting with "m=video".
  NSInteger mLineIndex = -1;
  for (NSInteger i = 0; i < lines.count; ++i) {
    if ([lines[i] hasPrefix:@"m=video"]) {
      mLineIndex = i;
      break;
    }
  }
  if (mLineIndex == -1) {
    RTCLog(@"No m=video line, so can't prefer %@", codec);
    return description;
  }
  // An array with all payload types with name |codec|. The payload types are
  // integers in the range 96-127, but they are stored as strings here.
  NSMutableArray *codecPayloadTypes = [[NSMutableArray alloc] init];
  // a=rtpmap:<payload type> <encoding name>/<clock rate>
  // [/<encoding parameters>]
  NSString *pattern =
      [NSString stringWithFormat:@"^a=rtpmap:(\\d+) %@(/\\d+)+[\r]?$", codec];
  NSRegularExpression *regex =
      [NSRegularExpression regularExpressionWithPattern:pattern
                                                options:0
                                                  error:nil];
  for (NSString *line in lines) {
    NSTextCheckingResult *codecMatches =
        [regex firstMatchInString:line
                          options:0
                            range:NSMakeRange(0, line.length)];
    if (codecMatches) {
      // Capture group 1 is the numeric payload type.
      [codecPayloadTypes
          addObject:[line substringWithRange:[codecMatches rangeAtIndex:1]]];
    }
  }
  if ([codecPayloadTypes count] == 0) {
    RTCLog(@"No payload types with name %@", codec);
    return description;
  }
  NSArray *origMLineParts =
      [lines[mLineIndex] componentsSeparatedByString:mLineSeparator];
  // The format of ML should be: m=<media> <port> <proto> <fmt> ...
  const int kHeaderLength = 3;
  if (origMLineParts.count <= kHeaderLength) {
    RTCLogWarning(@"Wrong SDP media description format: %@", lines[mLineIndex]);
    return description;
  }
  // Split the line into header and payloadTypes.
  NSRange headerRange = NSMakeRange(0, kHeaderLength);
  NSRange payloadRange =
      NSMakeRange(kHeaderLength, origMLineParts.count - kHeaderLength);
  NSArray *header = [origMLineParts subarrayWithRange:headerRange];
  NSMutableArray *payloadTypes = [NSMutableArray
      arrayWithArray:[origMLineParts subarrayWithRange:payloadRange]];
  // Reconstruct the line with |codecPayloadTypes| moved to the beginning of the
  // payload types.
  NSMutableArray *newMLineParts = [NSMutableArray arrayWithCapacity:origMLineParts.count];
  [newMLineParts addObjectsFromArray:header];
  [newMLineParts addObjectsFromArray:codecPayloadTypes];
  // Remove the preferred payload types from their original positions so they
  // are not listed twice.
  [payloadTypes removeObjectsInArray:codecPayloadTypes];
  [newMLineParts addObjectsFromArray:payloadTypes];
  NSString *newMLine = [newMLineParts componentsJoinedByString:mLineSeparator];
  [lines replaceObjectAtIndex:mLineIndex
                   withObject:newMLine];
  NSString *mangledSdpString = [lines componentsJoinedByString:lineSeparator];
  return [[RTCSessionDescription alloc] initWithType:description.type
                                                 sdp:mangledSdpString];
}

@end

View File

@ -0,0 +1,20 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import "ARDSettingsModel.h"
@class ARDSettingsStore;
NS_ASSUME_NONNULL_BEGIN
@interface ARDSettingsModel ()
- (ARDSettingsStore *)settingsStore;
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,134 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/**
* Model class for user defined settings.
*
* Handles storing the settings and provides default values if setting is not
* set. Also provides list of available options for different settings. Stores
* for example video codec, video resolution and maximum bitrate.
*/
@interface ARDSettingsModel : NSObject

/**
 * Returns array of available capture resolutions.
 *
 * The capture resolutions are represented as strings in the following format
 * [width]x[height]
 */
- (NSArray<NSString *> *)availableVideoResolutions;

/**
 * Returns current video resolution string.
 * If no resolution is in store, default value of 640x480 is returned.
 * When defaulting to value, the default is saved in store for consistency reasons.
 */
- (NSString *)currentVideoResolutionSettingFromStore;
- (int)currentVideoResolutionWidthFromStore;
- (int)currentVideoResolutionHeightFromStore;

/**
 * Stores the provided video resolution string into the store.
 *
 * If the provided resolution is not part of the available video resolutions
 * the store operation will not be executed and NO will be returned.
 * @param resolution the string to be stored.
 * @return YES/NO depending on success.
 */
- (BOOL)storeVideoResolutionSetting:(NSString *)resolution;

/**
 * Returns array of available video codecs.
 */
- (NSArray<NSString *> *)availableVideoCodecs;

/**
 * Returns current video codec setting from store if present or default (H264) otherwise.
 */
- (NSString *)currentVideoCodecSettingFromStore;

/**
 * Stores the provided video codec setting into the store.
 *
 * If the provided video codec is not part of the available video codecs
 * the store operation will not be executed and NO will be returned.
 * @param videoCodec the video codec setting string to be stored.
 * @return YES/NO depending on success.
 */
- (BOOL)storeVideoCodecSetting:(NSString *)videoCodec;

/**
 * Returns current max bitrate setting from store if present.
 */
- (nullable NSNumber *)currentMaxBitrateSettingFromStore;

/**
 * Stores the provided bitrate value into the store.
 *
 * @param bitrate NSNumber representation of the max bitrate value.
 */
- (void)storeMaxBitrateSetting:(nullable NSNumber *)bitrate;

/**
 * Returns current audio only setting from store if present or default (NO) otherwise.
 */
- (BOOL)currentAudioOnlySettingFromStore;

/**
 * Stores the provided audio only setting into the store.
 *
 * @param audioOnly the boolean value to be stored.
 */
- (void)storeAudioOnlySetting:(BOOL)audioOnly;

/**
 * Returns current create AecDump setting from store if present or default (NO) otherwise.
 */
- (BOOL)currentCreateAecDumpSettingFromStore;

/**
 * Stores the provided create AecDump setting into the store.
 *
 * @param createAecDump the boolean value to be stored.
 */
- (void)storeCreateAecDumpSetting:(BOOL)createAecDump;

/**
 * Returns current setting whether to use level controller from store if present or default (NO)
 * otherwise.
 */
- (BOOL)currentUseLevelControllerSettingFromStore;

/**
 * Stores the provided use level controller setting into the store.
 *
 * @param useLevelController the boolean value to be stored.
 */
- (void)storeUseLevelControllerSetting:(BOOL)useLevelController;

/**
 * Returns current setting whether to use manual audio config from store if present or default (YES)
 * otherwise.
 */
- (BOOL)currentUseManualAudioConfigSettingFromStore;

/**
 * Stores the provided use manual audio config setting into the store.
 *
 * @param useManualAudioConfig the boolean value to be stored.
 */
- (void)storeUseManualAudioConfigSetting:(BOOL)useManualAudioConfig;

@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,182 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDSettingsModel+Private.h"
#import "ARDSettingsStore.h"
#import "WebRTC/RTCCameraVideoCapturer.h"
#import "WebRTC/RTCMediaConstraints.h"
NS_ASSUME_NONNULL_BEGIN
// Static list of codec names offered in the settings UI; the first entry is
// used as the default by defaultVideoCodecSetting.
static NSArray<NSString *> *videoCodecsStaticValues() {
  return @[ @"H264", @"VP8", @"VP9" ];
}
// Class extension holding the lazily-created backing settings store.
@interface ARDSettingsModel () {
  ARDSettingsStore *_settingsStore;
}
@end
@implementation ARDSettingsModel
- (NSArray<NSString *> *)availableVideoResolutions {
NSMutableSet<NSArray<NSNumber *> *> *resolutions =
[[NSMutableSet<NSArray<NSNumber *> *> alloc] init];
for (AVCaptureDevice *device in [RTCCameraVideoCapturer captureDevices]) {
for (AVCaptureDeviceFormat *format in
[RTCCameraVideoCapturer supportedFormatsForDevice:device]) {
CMVideoDimensions resolution =
CMVideoFormatDescriptionGetDimensions(format.formatDescription);
NSArray<NSNumber *> *resolutionObject = @[ @(resolution.width), @(resolution.height) ];
[resolutions addObject:resolutionObject];
}
}
NSArray<NSArray<NSNumber *> *> *sortedResolutions =
[[resolutions allObjects] sortedArrayUsingComparator:^NSComparisonResult(
NSArray<NSNumber *> *obj1, NSArray<NSNumber *> *obj2) {
return obj1.firstObject > obj2.firstObject;
}];
NSMutableArray<NSString *> *resolutionStrings = [[NSMutableArray<NSString *> alloc] init];
for (NSArray<NSNumber *> *resolution in sortedResolutions) {
NSString *resolutionString =
[NSString stringWithFormat:@"%@x%@", resolution.firstObject, resolution.lastObject];
[resolutionStrings addObject:resolutionString];
}
return [resolutionStrings copy];
}
- (NSString *)currentVideoResolutionSettingFromStore {
[self registerStoreDefaults];
return [[self settingsStore] videoResolution];
}
- (BOOL)storeVideoResolutionSetting:(NSString *)resolution {
if (![[self availableVideoResolutions] containsObject:resolution]) {
return NO;
}
[[self settingsStore] setVideoResolution:resolution];
return YES;
}
- (NSArray<NSString *> *)availableVideoCodecs {
return videoCodecsStaticValues();
}
- (NSString *)currentVideoCodecSettingFromStore {
[self registerStoreDefaults];
return [[self settingsStore] videoCodec];
}
- (BOOL)storeVideoCodecSetting:(NSString *)videoCodec {
if (![[self availableVideoCodecs] containsObject:videoCodec]) {
return NO;
}
[[self settingsStore] setVideoCodec:videoCodec];
return YES;
}
- (nullable NSNumber *)currentMaxBitrateSettingFromStore {
[self registerStoreDefaults];
return [[self settingsStore] maxBitrate];
}
- (void)storeMaxBitrateSetting:(nullable NSNumber *)bitrate {
[[self settingsStore] setMaxBitrate:bitrate];
}
- (BOOL)currentAudioOnlySettingFromStore {
return [[self settingsStore] audioOnly];
}
- (void)storeAudioOnlySetting:(BOOL)audioOnly {
[[self settingsStore] setAudioOnly:audioOnly];
}
- (BOOL)currentCreateAecDumpSettingFromStore {
return [[self settingsStore] createAecDump];
}
- (void)storeCreateAecDumpSetting:(BOOL)createAecDump {
[[self settingsStore] setCreateAecDump:createAecDump];
}
- (BOOL)currentUseLevelControllerSettingFromStore {
return [[self settingsStore] useLevelController];
}
- (void)storeUseLevelControllerSetting:(BOOL)useLevelController {
[[self settingsStore] setUseLevelController:useLevelController];
}
- (BOOL)currentUseManualAudioConfigSettingFromStore {
return [[self settingsStore] useManualAudioConfig];
}
- (void)storeUseManualAudioConfigSetting:(BOOL)useManualAudioConfig {
[[self settingsStore] setUseManualAudioConfig:useManualAudioConfig];
}
#pragma mark - Testable
// Lazily creates the backing store and registers default values the first
// time it is accessed. Exposed under "Testable" so tests can inject a store.
- (ARDSettingsStore *)settingsStore {
  if (!_settingsStore) {
    _settingsStore = [[ARDSettingsStore alloc] init];
    [self registerStoreDefaults];
  }
  return _settingsStore;
}
// Returns the width component of the stored "WIDTHxHEIGHT" resolution
// string, or 0 if the stored string is malformed.
- (int)currentVideoResolutionWidthFromStore {
  NSString *resolution = [self currentVideoResolutionSettingFromStore];
  return [self videoResolutionComponentAtIndex:0 inString:resolution];
}

// Returns the height component of the stored resolution string, or 0 if the
// stored string is malformed.
- (int)currentVideoResolutionHeightFromStore {
  NSString *resolution = [self currentVideoResolutionSettingFromStore];
  return [self videoResolutionComponentAtIndex:1 inString:resolution];
}
#pragma mark -
// The default resolution is the first entry of the available-resolutions
// list.
- (NSString *)defaultVideoResolutionSetting {
  return [self availableVideoResolutions][0];
}

// The default codec is the first entry of the static codec table.
- (NSString *)defaultVideoCodecSetting {
  return videoCodecsStaticValues()[0];
}
// Extracts the width (index 0) or height (index 1) component from a
// resolution string of the form "WIDTHxHEIGHT". Returns 0 for an
// out-of-range index or a malformed string.
- (int)videoResolutionComponentAtIndex:(int)index inString:(NSString *)resolution {
  BOOL indexIsValid = (index == 0 || index == 1);
  if (!indexIsValid) {
    return 0;
  }
  NSArray<NSString *> *parts = [resolution componentsSeparatedByString:@"x"];
  return parts.count == 2 ? parts[index].intValue : 0;
}
// Registers fallback values with the settings store so reads return sensible
// defaults before the user has stored anything explicitly. No default is
// registered for the max bitrate (nil).
- (void)registerStoreDefaults {
  [ARDSettingsStore setDefaultsForVideoResolution:[self defaultVideoResolutionSetting]
                                       videoCodec:[self defaultVideoCodecSetting]
                                          bitrate:nil
                                        audioOnly:NO
                                    createAecDump:NO
                               useLevelController:NO
                             useManualAudioConfig:YES];
}
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,54 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
NS_ASSUME_NONNULL_BEGIN
/**
* Light-weight persistent store for user settings.
*
* It will persist between application launches and application updates.
*/
@interface ARDSettingsStore : NSObject

/**
 * Sets fallback values in case a setting has not yet been written by the
 * user. Reads return these values until an explicit value is stored.
 * @param bitrate Optional default max bitrate; pass nil for no default.
 */
+ (void)setDefaultsForVideoResolution:(NSString *)videoResolution
                           videoCodec:(NSString *)videoCodec
                              bitrate:(nullable NSNumber *)bitrate
                            audioOnly:(BOOL)audioOnly
                        createAecDump:(BOOL)createAecDump
                   useLevelController:(BOOL)useLevelController
                 useManualAudioConfig:(BOOL)useManualAudioConfig;

// Video resolution string of the form "WIDTHxHEIGHT".
// NOTE(review): NSString properties would conventionally be declared `copy`;
// left as-is to preserve the declared interface.
@property(nonatomic) NSString *videoResolution;
// Preferred video codec name.
@property(nonatomic) NSString *videoCodec;

/**
 * Returns the current max bitrate stored in the store, or nil if unset.
 */
- (nullable NSNumber *)maxBitrate;

/**
 * Stores the provided value as the maximum bitrate setting.
 * @param value The number to be stored, or nil to clear the setting.
 */
- (void)setMaxBitrate:(nullable NSNumber *)value;

@property(nonatomic) BOOL audioOnly;
@property(nonatomic) BOOL createAecDump;
@property(nonatomic) BOOL useLevelController;
@property(nonatomic) BOOL useManualAudioConfig;

@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,122 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDSettingsStore.h"
// NSUserDefaults keys under which each setting is persisted.
static NSString *const kVideoResolutionKey = @"rtc_video_resolution_key";
static NSString *const kVideoCodecKey = @"rtc_video_codec_key";
static NSString *const kBitrateKey = @"rtc_max_bitrate_key";
static NSString *const kAudioOnlyKey = @"rtc_audio_only_key";
static NSString *const kCreateAecDumpKey = @"rtc_create_aec_dump_key";
static NSString *const kUseLevelControllerKey = @"rtc_use_level_controller_key";
static NSString *const kUseManualAudioConfigKey = @"rtc_use_manual_audio_config_key";
NS_ASSUME_NONNULL_BEGIN
// Class extension: backs the readonly |storage| property with an explicit
// ivar so the getter below can lazily resolve it.
@interface ARDSettingsStore () {
  NSUserDefaults *_storage;
}
@property(nonatomic, strong, readonly) NSUserDefaults *storage;
@end
@implementation ARDSettingsStore

// Registers fallback values with NSUserDefaults. Registered defaults are
// returned by reads until an explicit value is written; they are not
// persisted to disk. |bitrate| is only added when non-nil, because a
// defaults dictionary cannot contain nil values.
+ (void)setDefaultsForVideoResolution:(NSString *)videoResolution
                           videoCodec:(NSString *)videoCodec
                              bitrate:(nullable NSNumber *)bitrate
                            audioOnly:(BOOL)audioOnly
                        createAecDump:(BOOL)createAecDump
                   useLevelController:(BOOL)useLevelController
                 useManualAudioConfig:(BOOL)useManualAudioConfig {
  NSMutableDictionary<NSString *, id> *defaultsDictionary = [@{
    kVideoResolutionKey : videoResolution,
    kVideoCodecKey : videoCodec,
    kAudioOnlyKey : @(audioOnly),
    kCreateAecDumpKey : @(createAecDump),
    kUseLevelControllerKey : @(useLevelController),
    kUseManualAudioConfigKey : @(useManualAudioConfig)
  } mutableCopy];
  if (bitrate) {
    [defaultsDictionary setObject:bitrate forKey:kBitrateKey];
  }
  [[NSUserDefaults standardUserDefaults] registerDefaults:defaultsDictionary];
}

// Lazily resolves the backing defaults store.
- (NSUserDefaults *)storage {
  if (!_storage) {
    _storage = [NSUserDefaults standardUserDefaults];
  }
  return _storage;
}

- (NSString *)videoResolution {
  return [self.storage objectForKey:kVideoResolutionKey];
}

// Each setter below calls -synchronize to force an immediate write so the
// value survives even if the app is killed right after the change.
- (void)setVideoResolution:(NSString *)resolution {
  [self.storage setObject:resolution forKey:kVideoResolutionKey];
  [self.storage synchronize];
}

- (NSString *)videoCodec {
  return [self.storage objectForKey:kVideoCodecKey];
}

- (void)setVideoCodec:(NSString *)videoCodec {
  [self.storage setObject:videoCodec forKey:kVideoCodecKey];
  [self.storage synchronize];
}

// nil when neither a value nor a default has been registered for the key.
- (nullable NSNumber *)maxBitrate {
  return [self.storage objectForKey:kBitrateKey];
}

// Setting a nil value removes the key.
- (void)setMaxBitrate:(nullable NSNumber *)value {
  [self.storage setObject:value forKey:kBitrateKey];
  [self.storage synchronize];
}

- (BOOL)audioOnly {
  return [self.storage boolForKey:kAudioOnlyKey];
}

- (void)setAudioOnly:(BOOL)audioOnly {
  [self.storage setBool:audioOnly forKey:kAudioOnlyKey];
  [self.storage synchronize];
}

- (BOOL)createAecDump {
  return [self.storage boolForKey:kCreateAecDumpKey];
}

- (void)setCreateAecDump:(BOOL)createAecDump {
  [self.storage setBool:createAecDump forKey:kCreateAecDumpKey];
  [self.storage synchronize];
}

- (BOOL)useLevelController {
  return [self.storage boolForKey:kUseLevelControllerKey];
}

- (void)setUseLevelController:(BOOL)useLevelController {
  [self.storage setBool:useLevelController forKey:kUseLevelControllerKey];
  [self.storage synchronize];
}

- (BOOL)useManualAudioConfig {
  return [self.storage boolForKey:kUseManualAudioConfigKey];
}

- (void)setUseManualAudioConfig:(BOOL)useManualAudioConfig {
  [self.storage setBool:useManualAudioConfig forKey:kUseManualAudioConfigKey];
  [self.storage synchronize];
}

@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,52 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import "ARDSignalingMessage.h"
// Connection lifecycle of a signaling channel.
typedef NS_ENUM(NSInteger, ARDSignalingChannelState) {
  // State when disconnected.
  kARDSignalingChannelStateClosed,
  // State when connection is established but not ready for use.
  kARDSignalingChannelStateOpen,
  // State when connection is established and registered with the server.
  kARDSignalingChannelStateRegistered,
  // State when connection encounters a fatal error.
  kARDSignalingChannelStateError
};
@protocol ARDSignalingChannel;

// Delegate callbacks for channel state changes and inbound signaling
// messages. The sending channel is always the first argument.
@protocol ARDSignalingChannelDelegate <NSObject>

- (void)channel:(id<ARDSignalingChannel>)channel
    didChangeState:(ARDSignalingChannelState)state;

- (void)channel:(id<ARDSignalingChannel>)channel
    didReceiveMessage:(ARDSignalingMessage *)message;

@end

// Abstraction over a bidirectional signaling transport for an AppRTC room.
@protocol ARDSignalingChannel <NSObject>

@property(nonatomic, readonly) NSString *roomId;
@property(nonatomic, readonly) NSString *clientId;
@property(nonatomic, readonly) ARDSignalingChannelState state;
@property(nonatomic, weak) id<ARDSignalingChannelDelegate> delegate;

// Registers the channel for the given room and client id.
- (void)registerForRoomId:(NSString *)roomId
                 clientId:(NSString *)clientId;

// Sends signaling message over the channel.
- (void)sendMessage:(ARDSignalingMessage *)message;

@end

View File

@ -0,0 +1,59 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import "WebRTC/RTCIceCandidate.h"
#import "WebRTC/RTCSessionDescription.h"
// Kinds of signaling messages exchanged with the AppRTC servers.
// NS_ENUM (instead of a plain C enum) pins the underlying type, enables
// switch-exhaustiveness warnings, and matches the style already used for
// ARDSignalingChannelState in ARDSignalingChannel.h.
typedef NS_ENUM(NSInteger, ARDSignalingMessageType) {
  kARDSignalingMessageTypeCandidate,
  kARDSignalingMessageTypeCandidateRemoval,
  kARDSignalingMessageTypeOffer,
  kARDSignalingMessageTypeAnswer,
  kARDSignalingMessageTypeBye,
};
// Abstract base for all signaling messages. Subclasses override -JSONData
// to serialize themselves; the base implementation returns nil.
@interface ARDSignalingMessage : NSObject

@property(nonatomic, readonly) ARDSignalingMessageType type;

// Parses a JSON payload into the concrete subclass matching its "type"
// field; returns nil on malformed JSON or an unknown type.
+ (ARDSignalingMessage *)messageFromJSONString:(NSString *)jsonString;
- (NSData *)JSONData;

@end

// Carries a single ICE candidate.
@interface ARDICECandidateMessage : ARDSignalingMessage

@property(nonatomic, readonly) RTCIceCandidate *candidate;

- (instancetype)initWithCandidate:(RTCIceCandidate *)candidate;

@end

// Carries a batch of candidates removed by the remote side.
@interface ARDICECandidateRemovalMessage : ARDSignalingMessage

@property(nonatomic, readonly) NSArray<RTCIceCandidate *> *candidates;

- (instancetype)initWithRemovedCandidates:
    (NSArray<RTCIceCandidate *> *)candidates;

@end

// Carries an SDP offer or answer; the message type is derived from the
// description's own type.
@interface ARDSessionDescriptionMessage : ARDSignalingMessage

@property(nonatomic, readonly) RTCSessionDescription *sessionDescription;

- (instancetype)initWithDescription:(RTCSessionDescription *)description;

@end

// Signals that the remote side is leaving the room.
@interface ARDByeMessage : ARDSignalingMessage
@end

View File

@ -0,0 +1,161 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDSignalingMessage.h"
#import "WebRTC/RTCLogging.h"
#import "ARDUtilities.h"
#import "RTCIceCandidate+JSON.h"
#import "RTCSessionDescription+JSON.h"
// JSON keys/values shared with the AppRTC signaling protocol.
static NSString * const kARDSignalingMessageTypeKey = @"type";
static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
@implementation ARDSignalingMessage

@synthesize type = _type;

// Designated initializer; only invoked by subclasses.
- (instancetype)initWithType:(ARDSignalingMessageType)type {
  if (self = [super init]) {
    _type = type;
  }
  return self;
}

// Human-readable form is simply the JSON serialization.
- (NSString *)description {
  return [[NSString alloc] initWithData:[self JSONData]
                               encoding:NSUTF8StringEncoding];
}

// Factory: dispatches on the JSON "type" field to the matching subclass.
// Returns nil (after logging) for malformed JSON or an unknown type.
+ (ARDSignalingMessage *)messageFromJSONString:(NSString *)jsonString {
  NSDictionary *values = [NSDictionary dictionaryWithJSONString:jsonString];
  if (!values) {
    RTCLogError(@"Error parsing signaling message JSON.");
    return nil;
  }
  NSString *typeString = values[kARDSignalingMessageTypeKey];
  ARDSignalingMessage *message = nil;
  if ([typeString isEqualToString:@"candidate"]) {
    RTCIceCandidate *candidate =
        [RTCIceCandidate candidateFromJSONDictionary:values];
    message = [[ARDICECandidateMessage alloc] initWithCandidate:candidate];
  } else if ([typeString isEqualToString:kARDTypeValueRemoveCandidates]) {
    RTCLogInfo(@"Received remove-candidates message");
    NSArray<RTCIceCandidate *> *candidates =
        [RTCIceCandidate candidatesFromJSONDictionary:values];
    message = [[ARDICECandidateRemovalMessage alloc]
                  initWithRemovedCandidates:candidates];
  } else if ([typeString isEqualToString:@"offer"] ||
             [typeString isEqualToString:@"answer"]) {
    // Offer and answer share one message class; the subtype is carried by
    // the session description itself.
    RTCSessionDescription *description =
        [RTCSessionDescription descriptionFromJSONDictionary:values];
    message =
        [[ARDSessionDescriptionMessage alloc] initWithDescription:description];
  } else if ([typeString isEqualToString:@"bye"]) {
    message = [[ARDByeMessage alloc] init];
  } else {
    RTCLogError(@"Unexpected type: %@", typeString);
  }
  return message;
}

// Abstract: subclasses provide the real serialization.
- (NSData *)JSONData {
  return nil;
}

@end
@implementation ARDICECandidateMessage

@synthesize candidate = _candidate;

- (instancetype)initWithCandidate:(RTCIceCandidate *)candidate {
  if (self = [super initWithType:kARDSignalingMessageTypeCandidate]) {
    _candidate = candidate;
  }
  return self;
}

// Serialization is delegated to the RTCIceCandidate+JSON category.
- (NSData *)JSONData {
  return [_candidate JSONData];
}

@end
@implementation ARDICECandidateRemovalMessage

@synthesize candidates = _candidates;

// |candidates| must be non-empty (debug-asserted).
- (instancetype)initWithRemovedCandidates:(
    NSArray<RTCIceCandidate *> *)candidates {
  NSParameterAssert(candidates.count);
  if (self = [super initWithType:kARDSignalingMessageTypeCandidateRemoval]) {
    _candidates = candidates;
  }
  return self;
}

// Serializes the batch with the protocol's "remove-candidates" type tag.
- (NSData *)JSONData {
  return
      [RTCIceCandidate JSONDataForIceCandidates:_candidates
                                       withType:kARDTypeValueRemoveCandidates];
}

@end
@implementation ARDSessionDescriptionMessage

@synthesize sessionDescription = _sessionDescription;

// Maps the SDP type to the message type. PrAnswer is unsupported: it trips
// an assert in debug builds and — note — falls through with the offer type
// in release builds.
- (instancetype)initWithDescription:(RTCSessionDescription *)description {
  ARDSignalingMessageType messageType = kARDSignalingMessageTypeOffer;
  RTCSdpType sdpType = description.type;
  switch (sdpType) {
    case RTCSdpTypeOffer:
      messageType = kARDSignalingMessageTypeOffer;
      break;
    case RTCSdpTypeAnswer:
      messageType = kARDSignalingMessageTypeAnswer;
      break;
    case RTCSdpTypePrAnswer:
      NSAssert(NO, @"Unexpected type: %@",
          [RTCSessionDescription stringForType:sdpType]);
      break;
  }
  if (self = [super initWithType:messageType]) {
    _sessionDescription = description;
  }
  return self;
}

// Serialization is delegated to the RTCSessionDescription+JSON category.
- (NSData *)JSONData {
  return [_sessionDescription JSONData];
}

@end
@implementation ARDByeMessage

- (instancetype)init {
  return [super initWithType:kARDSignalingMessageTypeBye];
}

// Fixed payload: {"type": "bye"}.
- (NSData *)JSONData {
  NSDictionary *message = @{
    @"type": @"bye"
  };
  return [NSJSONSerialization dataWithJSONObject:message
                                         options:NSJSONWritingPrettyPrinted
                                           error:NULL];
}

@end

View File

@ -0,0 +1,29 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
@class RTCLegacyStatsReport;
/** Class used to accumulate stats information into a single displayable string.
*/
/** Class used to accumulate stats information into a single displayable
 * string. Feed it legacy stats reports; read |statsString| for display.
 */
@interface ARDStatsBuilder : NSObject

/** String that represents the accumulated stats reports passed into this
 * class.
 */
@property(nonatomic, readonly) NSString *statsString;

/** Parses the information in the stats report into an appropriate internal
 * format used to generate the stats string.
 */
- (void)parseStatsReport:(RTCLegacyStatsReport *)statsReport;

@end

View File

@ -0,0 +1,325 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDStatsBuilder.h"
#import "WebRTC/RTCLegacyStatsReport.h"
#import "ARDBitrateTracker.h"
#import "ARDUtilities.h"
@implementation ARDStatsBuilder {
  // All NSString ivars below are display-ready values copied out of the
  // stats reports; they are nil until the first matching report is parsed.
  // Connection stats.
  NSString *_connRecvBitrate;
  NSString *_connRtt;
  NSString *_connSendBitrate;
  NSString *_localCandType;
  NSString *_remoteCandType;
  NSString *_transportType;
  // BWE stats.
  NSString *_actualEncBitrate;
  NSString *_availableRecvBw;
  NSString *_availableSendBw;
  NSString *_targetEncBitrate;
  // Video send stats.
  NSString *_videoEncodeMs;
  NSString *_videoInputFps;
  NSString *_videoInputHeight;
  NSString *_videoInputWidth;
  NSString *_videoSendCodec;
  NSString *_videoSendBitrate;
  NSString *_videoSendFps;
  NSString *_videoSendHeight;
  NSString *_videoSendWidth;
  // QP stats. The "_old" pair holds the previous report's values so the
  // average QP can be computed over the delta (see -calculateAvgQP).
  int _videoQPSum;
  int _framesEncoded;
  int _oldVideoQPSum;
  int _oldFramesEncoded;
  // Video receive stats.
  NSString *_videoDecodeMs;
  NSString *_videoDecodedFps;
  NSString *_videoOutputFps;
  NSString *_videoRecvBitrate;
  NSString *_videoRecvFps;
  NSString *_videoRecvHeight;
  NSString *_videoRecvWidth;
  // Audio send stats.
  NSString *_audioSendBitrate;
  NSString *_audioSendCodec;
  // Audio receive stats.
  NSString *_audioCurrentDelay;
  NSString *_audioExpandRate;
  NSString *_audioRecvBitrate;
  NSString *_audioRecvCodec;
  // Bitrate trackers convert cumulative byte counts into rate strings.
  ARDBitrateTracker *_audioRecvBitrateTracker;
  ARDBitrateTracker *_audioSendBitrateTracker;
  ARDBitrateTracker *_connRecvBitrateTracker;
  ARDBitrateTracker *_connSendBitrateTracker;
  ARDBitrateTracker *_videoRecvBitrateTracker;
  ARDBitrateTracker *_videoSendBitrateTracker;
}
// Creates the builder with fresh bitrate trackers for every monitored
// stream. Scalar ivars (_videoQPSum, _framesEncoded, and the "_old" pair)
// are already zero-filled by +alloc, so explicit zeroing is unnecessary
// and has been removed.
- (instancetype)init {
  if (self = [super init]) {
    _audioSendBitrateTracker = [[ARDBitrateTracker alloc] init];
    _audioRecvBitrateTracker = [[ARDBitrateTracker alloc] init];
    _connSendBitrateTracker = [[ARDBitrateTracker alloc] init];
    _connRecvBitrateTracker = [[ARDBitrateTracker alloc] init];
    _videoSendBitrateTracker = [[ARDBitrateTracker alloc] init];
    _videoRecvBitrateTracker = [[ARDBitrateTracker alloc] init];
  }
  return self;
}
// Assembles the display string from all accumulated stats: one section each
// for CPU, connection, video send, video receive, audio send, and audio
// receive. Any ivar still nil renders as "(null)" in the format output.
- (NSString *)statsString {
  NSMutableString *result = [NSMutableString string];
  NSString *systemStatsFormat = @"(cpu)%ld%%\n";
  [result appendString:[NSString stringWithFormat:systemStatsFormat,
      (long)ARDGetCpuUsagePercentage()]];

  // Connection stats.
  NSString *connStatsFormat = @"CN %@ms | %@->%@/%@ | (s)%@ | (r)%@\n";
  [result appendString:[NSString stringWithFormat:connStatsFormat,
      _connRtt,
      _localCandType, _remoteCandType, _transportType,
      _connSendBitrate, _connRecvBitrate]];

  // Video send stats.
  NSString *videoSendFormat = @"VS (input) %@x%@@%@fps | (sent) %@x%@@%@fps\n"
                               "VS (enc) %@/%@ | (sent) %@/%@ | %@ms | %@\n"
                               "AvgQP (past %d encoded frames) = %d\n ";
  int avgqp = [self calculateAvgQP];
  [result appendString:[NSString stringWithFormat:videoSendFormat,
      _videoInputWidth, _videoInputHeight, _videoInputFps,
      _videoSendWidth, _videoSendHeight, _videoSendFps,
      _actualEncBitrate, _targetEncBitrate,
      _videoSendBitrate, _availableSendBw,
      _videoEncodeMs,
      _videoSendCodec,
      _framesEncoded - _oldFramesEncoded, avgqp]];

  // Video receive stats.
  NSString *videoReceiveFormat =
      @"VR (recv) %@x%@@%@fps | (decoded)%@ | (output)%@fps | %@/%@ | %@ms\n";
  [result appendString:[NSString stringWithFormat:videoReceiveFormat,
      _videoRecvWidth, _videoRecvHeight, _videoRecvFps,
      _videoDecodedFps,
      _videoOutputFps,
      _videoRecvBitrate, _availableRecvBw,
      _videoDecodeMs]];

  // Audio send stats.
  NSString *audioSendFormat = @"AS %@ | %@\n";
  [result appendString:[NSString stringWithFormat:audioSendFormat,
      _audioSendBitrate, _audioSendCodec]];

  // Audio receive stats.
  NSString *audioReceiveFormat = @"AR %@ | %@ | %@ms | (expandrate)%@";
  [result appendString:[NSString stringWithFormat:audioReceiveFormat,
      _audioRecvBitrate, _audioRecvCodec, _audioCurrentDelay,
      _audioExpandRate]];
  return result;
}
// Routes a legacy stats report to the matching parser based on its type and
// report id. An ssrc report may be parsed as send and/or receive depending
// on which substrings its id contains.
- (void)parseStatsReport:(RTCLegacyStatsReport *)statsReport {
  NSString *reportType = statsReport.type;
  NSString *reportId = statsReport.reportId;
  BOOL isSsrcReport = [reportType isEqualToString:@"ssrc"] &&
      [reportId rangeOfString:@"ssrc"].location != NSNotFound;
  if (isSsrcReport) {
    if ([reportId rangeOfString:@"send"].location != NSNotFound) {
      [self parseSendSsrcStatsReport:statsReport];
    }
    if ([reportId rangeOfString:@"recv"].location != NSNotFound) {
      [self parseRecvSsrcStatsReport:statsReport];
    }
    return;
  }
  if ([reportType isEqualToString:@"VideoBwe"]) {
    [self parseBweStatsReport:statsReport];
  } else if ([reportType isEqualToString:@"googCandidatePair"]) {
    [self parseConnectionStatsReport:statsReport];
  }
}
#pragma mark - Private
// Average QP over the frames encoded since the previous qpSum/framesEncoded
// update; 0 when no new frames were encoded (avoids division by zero).
- (int)calculateAvgQP {
  int newFrames = _framesEncoded - _oldFramesEncoded;
  if (newFrames == 0) {
    return 0;
  }
  return (_videoQPSum - _oldVideoQPSum) / newFrames;
}
// Extracts bandwidth-estimation values and converts them to display-ready
// bitrate strings. The block runs synchronously during enumeration, so
// capturing self via the ivar accesses does not create a retain cycle.
- (void)parseBweStatsReport:(RTCLegacyStatsReport *)statsReport {
  [statsReport.values enumerateKeysAndObjectsUsingBlock:^(
      NSString *key, NSString *value, BOOL *stop) {
    if ([key isEqualToString:@"googAvailableSendBandwidth"]) {
      _availableSendBw =
          [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
    } else if ([key isEqualToString:@"googAvailableReceiveBandwidth"]) {
      _availableRecvBw =
          [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
    } else if ([key isEqualToString:@"googActualEncBitrate"]) {
      _actualEncBitrate =
          [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
    } else if ([key isEqualToString:@"googTargetEncBitrate"]) {
      _targetEncBitrate =
          [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
    }
  }];
}
// Extracts candidate-pair stats, but only for the active connection;
// reports for inactive pairs are ignored. Byte counters are fed to the
// bitrate trackers to derive send/receive rates.
- (void)parseConnectionStatsReport:(RTCLegacyStatsReport *)statsReport {
  NSString *activeConnection = statsReport.values[@"googActiveConnection"];
  if (![activeConnection isEqualToString:@"true"]) {
    return;
  }
  [statsReport.values enumerateKeysAndObjectsUsingBlock:^(
      NSString *key, NSString *value, BOOL *stop) {
    if ([key isEqualToString:@"googRtt"]) {
      _connRtt = value;
    } else if ([key isEqualToString:@"googLocalCandidateType"]) {
      _localCandType = value;
    } else if ([key isEqualToString:@"googRemoteCandidateType"]) {
      _remoteCandType = value;
    } else if ([key isEqualToString:@"googTransportType"]) {
      _transportType = value;
    } else if ([key isEqualToString:@"bytesReceived"]) {
      NSInteger byteCount = value.integerValue;
      [_connRecvBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
      _connRecvBitrate = _connRecvBitrateTracker.bitrateString;
    } else if ([key isEqualToString:@"bytesSent"]) {
      NSInteger byteCount = value.integerValue;
      [_connSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
      _connSendBitrate = _connSendBitrateTracker.bitrateString;
    }
  }];
}
// Dispatches a send-side ssrc report to the video or audio parser,
// distinguishing the track kind by which marker key is present.
- (void)parseSendSsrcStatsReport:(RTCLegacyStatsReport *)statsReport {
  NSDictionary *values = statsReport.values;
  BOOL isVideoTrack = (values[@"googFrameRateSent"] != nil);
  BOOL isAudioTrack = (values[@"audioInputLevel"] != nil);
  if (isVideoTrack) {
    [self parseVideoSendStatsReport:statsReport];
  } else if (isAudioTrack) {
    [self parseAudioSendStatsReport:statsReport];
  }
}
// Extracts the outbound audio codec name and derives the send bitrate from
// the cumulative bytesSent counter.
- (void)parseAudioSendStatsReport:(RTCLegacyStatsReport *)statsReport {
  [statsReport.values enumerateKeysAndObjectsUsingBlock:^(
      NSString *key, NSString *value, BOOL *stop) {
    if ([key isEqualToString:@"googCodecName"]) {
      _audioSendCodec = value;
    } else if ([key isEqualToString:@"bytesSent"]) {
      NSInteger byteCount = value.integerValue;
      [_audioSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
      _audioSendBitrate = _audioSendBitrateTracker.bitrateString;
    }
  }];
}
// Extracts outbound-video stats. qpSum/framesEncoded are cumulative, so the
// previous values are stashed in the "_old" ivars before updating; the
// deltas feed -calculateAvgQP.
- (void)parseVideoSendStatsReport:(RTCLegacyStatsReport *)statsReport {
  [statsReport.values enumerateKeysAndObjectsUsingBlock:^(
      NSString *key, NSString *value, BOOL *stop) {
    if ([key isEqualToString:@"googCodecName"]) {
      _videoSendCodec = value;
    } else if ([key isEqualToString:@"googFrameHeightInput"]) {
      _videoInputHeight = value;
    } else if ([key isEqualToString:@"googFrameWidthInput"]) {
      _videoInputWidth = value;
    } else if ([key isEqualToString:@"googFrameRateInput"]) {
      _videoInputFps = value;
    } else if ([key isEqualToString:@"googFrameHeightSent"]) {
      _videoSendHeight = value;
    } else if ([key isEqualToString:@"googFrameWidthSent"]) {
      _videoSendWidth = value;
    } else if ([key isEqualToString:@"googFrameRateSent"]) {
      _videoSendFps = value;
    } else if ([key isEqualToString:@"googAvgEncodeMs"]) {
      _videoEncodeMs = value;
    } else if ([key isEqualToString:@"bytesSent"]) {
      NSInteger byteCount = value.integerValue;
      [_videoSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
      _videoSendBitrate = _videoSendBitrateTracker.bitrateString;
    } else if ([key isEqualToString:@"qpSum"]) {
      _oldVideoQPSum = _videoQPSum;
      // NOTE(review): NSInteger narrowed to int here; assumes the cumulative
      // QP sum stays within int range during a session — TODO confirm.
      _videoQPSum = value.integerValue;
    } else if ([key isEqualToString:@"framesEncoded"]) {
      _oldFramesEncoded = _framesEncoded;
      _framesEncoded = value.integerValue;
    }
  }];
}
// Dispatches a receive-side ssrc report to the video or audio parser,
// distinguishing the track kind by which marker key is present.
- (void)parseRecvSsrcStatsReport:(RTCLegacyStatsReport *)statsReport {
  NSDictionary *values = statsReport.values;
  BOOL isVideoTrack = (values[@"googFrameWidthReceived"] != nil);
  BOOL isAudioTrack = (values[@"audioOutputLevel"] != nil);
  if (isVideoTrack) {
    [self parseVideoRecvStatsReport:statsReport];
  } else if (isAudioTrack) {
    [self parseAudioRecvStatsReport:statsReport];
  }
}
// Extracts inbound audio stats: codec, bitrate (derived from the cumulative
// bytesReceived counter), speech expand rate, and current delay.
- (void)parseAudioRecvStatsReport:(RTCLegacyStatsReport *)statsReport {
  [statsReport.values enumerateKeysAndObjectsUsingBlock:^(
      NSString *key, NSString *value, BOOL *stop) {
    if ([key isEqualToString:@"googCodecName"]) {
      _audioRecvCodec = value;
    } else if ([key isEqualToString:@"bytesReceived"]) {
      NSInteger byteCount = value.integerValue;
      [_audioRecvBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
      _audioRecvBitrate = _audioRecvBitrateTracker.bitrateString;
    } else if ([key isEqualToString:@"googSpeechExpandRate"]) {
      _audioExpandRate = value;
    } else if ([key isEqualToString:@"googCurrentDelayMs"]) {
      _audioCurrentDelay = value;
    }
  }];
}
// Extracts inbound video stats: frame geometry, receive/decode/output frame
// rates, decode time, and bitrate derived from the bytesReceived counter.
- (void)parseVideoRecvStatsReport:(RTCLegacyStatsReport *)statsReport {
  [statsReport.values enumerateKeysAndObjectsUsingBlock:^(
      NSString *key, NSString *value, BOOL *stop) {
    if ([key isEqualToString:@"googFrameHeightReceived"]) {
      _videoRecvHeight = value;
    } else if ([key isEqualToString:@"googFrameWidthReceived"]) {
      _videoRecvWidth = value;
    } else if ([key isEqualToString:@"googFrameRateReceived"]) {
      _videoRecvFps = value;
    } else if ([key isEqualToString:@"googFrameRateDecoded"]) {
      _videoDecodedFps = value;
    } else if ([key isEqualToString:@"googFrameRateOutput"]) {
      _videoOutputFps = value;
    } else if ([key isEqualToString:@"googDecodeMs"]) {
      _videoDecodeMs = value;
    } else if ([key isEqualToString:@"bytesReceived"]) {
      NSInteger byteCount = value.integerValue;
      [_videoRecvBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
      _videoRecvBitrate = _videoRecvBitrateTracker.bitrateString;
    }
  }];
}
@end

View File

@ -0,0 +1,17 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDTURNClient.h"
// Concrete ARDTURNClient backed by an HTTP ICE-server-provisioning endpoint.
@interface ARDTURNClient : NSObject <ARDTURNClient>

// |url| points at the provisioning endpoint; must be a non-empty URL.
- (instancetype)initWithURL:(NSURL *)url;

@end

View File

@ -0,0 +1,22 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
@class RTCIceServer;

// Abstraction for fetching the TURN/ICE server configuration.
@protocol ARDTURNClient <NSObject>

// Returns TURN server urls if successful: on success |turnServers| holds
// RTCIceServer objects and |error| is nil; on failure |turnServers| is nil.
- (void)requestServersWithCompletionHandler:
    (void (^)(NSArray *turnServers,
              NSError *error))completionHandler;

@end

View File

@ -0,0 +1,86 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDTURNClient+Internal.h"
#import "ARDUtilities.h"
#import "RTCIceServer+JSON.h"
// TODO(tkchin): move this to a configuration object.
// Declared `NSString * const` so the pointers themselves are immutable;
// the previous declarations left the pointers reassignable. Matches the
// constant style used elsewhere (e.g. ARDSignalingMessage.m).
static NSString * const kTURNRefererURLString = @"https://appr.tc";
static NSString * const kARDTURNClientErrorDomain = @"ARDTURNClient";
static const NSInteger kARDTURNClientErrorBadResponse = -1;
@implementation ARDTURNClient {
  NSURL *_url;  // ICE-server-provisioning endpoint.
}

// |url| must be a non-empty URL (debug-asserted).
- (instancetype)initWithURL:(NSURL *)url {
  NSParameterAssert([url absoluteString].length);
  if (self = [super init]) {
    _url = url;
  }
  return self;
}

// Fetches the provisioning document at |_url|, extracts "ice_server_url",
// then requests the actual ICE server list from that URL. A missing or
// unparsable "ice_server_url" is reported as a bad-response error instead
// of being passed to -[NSURL URLWithString:] (which rejects nil).
- (void)requestServersWithCompletionHandler:
    (void (^)(NSArray *turnServers, NSError *error))completionHandler {
  NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:_url];
  [NSURLConnection sendAsyncRequest:request
                  completionHandler:^(NSURLResponse *response, NSData *data, NSError *error) {
    if (error) {
      completionHandler(nil, error);
      return;
    }
    NSDictionary *responseDict = [NSDictionary dictionaryWithJSONData:data];
    NSString *iceServerUrl = responseDict[@"ice_server_url"];
    if (!iceServerUrl.length) {
      completionHandler(nil, [ARDTURNClient badResponseError]);
      return;
    }
    [self makeTurnServerRequestToURL:[NSURL URLWithString:iceServerUrl]
               withCompletionHandler:completionHandler];
  }];
}

#pragma mark - Private

// Error used to signal a malformed response from either endpoint.
+ (NSError *)badResponseError {
  return [[NSError alloc] initWithDomain:kARDTURNClientErrorDomain
                                    code:kARDTURNClientErrorBadResponse
                                userInfo:@{
    NSLocalizedDescriptionKey: @"Bad TURN response.",
  }];
}

// POSTs to the ICE server endpoint and converts the JSON "iceServers" array
// into RTCIceServer objects. Note: the internal selector keyword was
// renamed to lower-case `withCompletionHandler:` per Cocoa naming.
- (void)makeTurnServerRequestToURL:(NSURL *)url
             withCompletionHandler:(void (^)(NSArray *turnServers,
                                             NSError *error))completionHandler {
  NSMutableURLRequest *iceServerRequest = [NSMutableURLRequest requestWithURL:url];
  iceServerRequest.HTTPMethod = @"POST";
  [iceServerRequest addValue:kTURNRefererURLString forHTTPHeaderField:@"referer"];
  [NSURLConnection sendAsyncRequest:iceServerRequest
                  completionHandler:^(NSURLResponse *response,
                                      NSData *data,
                                      NSError *error) {
    if (error) {
      completionHandler(nil, error);
      return;
    }
    NSDictionary *turnResponseDict = [NSDictionary dictionaryWithJSONData:data];
    // BUG FIX: the original checked `if (!turnServers)` below, but
    // +[NSMutableArray array] never returns nil, so a malformed response
    // was silently reported as an empty server list. Validate the parsed
    // response dictionary instead.
    if (!turnResponseDict) {
      completionHandler(nil, [ARDTURNClient badResponseError]);
      return;
    }
    NSMutableArray *turnServers = [NSMutableArray array];
    [turnResponseDict[@"iceServers"] enumerateObjectsUsingBlock:
        ^(NSDictionary *obj, NSUInteger idx, BOOL *stop){
      [turnServers addObject:[RTCIceServer serverFromJSONDictionary:obj]];
    }];
    completionHandler(turnServers, nil);
  }];
}

@end

View File

@ -0,0 +1,41 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import "ARDSignalingChannel.h"
// Wraps a WebSocket connection to the AppRTC WebSocket server.
@interface ARDWebSocketChannel : NSObject <ARDSignalingChannel>

// |url| is the WebSocket endpoint; |restURL| is the REST endpoint used to
// POST messages before registration completes.
- (instancetype)initWithURL:(NSURL *)url
                    restURL:(NSURL *)restURL
                   delegate:(id<ARDSignalingChannelDelegate>)delegate;

// Registers with the WebSocket server for the given room and client id once
// the web socket connection is open.
- (void)registerForRoomId:(NSString *)roomId
                 clientId:(NSString *)clientId;

// Sends message over the WebSocket connection if registered, otherwise POSTs to
// the web socket server instead.
- (void)sendMessage:(ARDSignalingMessage *)message;

@end

// Loopback mode is used to cause the client to connect to itself for testing.
// A second web socket connection is established simulating the other client.
// Any messages received are sent back to the WebSocket server after modifying
// them as appropriate.
@interface ARDLoopbackWebSocketChannel : ARDWebSocketChannel

- (instancetype)initWithURL:(NSURL *)url restURL:(NSURL *)restURL;

@end

View File

@ -0,0 +1,251 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDWebSocketChannel.h"
#import "WebRTC/RTCLogging.h"
#import "SRWebSocket.h"
#import "ARDSignalingMessage.h"
#import "ARDUtilities.h"
// TODO(tkchin): move these to a configuration object.
// Fixed const placement: `NSString const *` declared a pointer to a const
// object while leaving the pointer itself reassignable; the idiomatic
// `NSString * const` makes the pointer constant.
static NSString * const kARDWSSMessageErrorKey = @"error";
static NSString * const kARDWSSMessagePayloadKey = @"msg";
@interface ARDWebSocketChannel () <SRWebSocketDelegate>
@end
@implementation ARDWebSocketChannel {
  NSURL *_url;      // WebSocket server URL.
  NSURL *_restURL;  // REST endpoint used for sends before registration.
  SRWebSocket *_socket;
}

@synthesize delegate = _delegate;
@synthesize state = _state;
@synthesize roomId = _roomId;
@synthesize clientId = _clientId;

- (instancetype)initWithURL:(NSURL *)url
                    restURL:(NSURL *)restURL
                   delegate:(id<ARDSignalingChannelDelegate>)delegate {
  if (self = [super init]) {
    _url = url;
    _restURL = restURL;
    _delegate = delegate;
    _socket = [[SRWebSocket alloc] initWithURL:url];
    _socket.delegate = self;
    RTCLog(@"Opening WebSocket.");
    [_socket open];
  }
  return self;
}

- (void)dealloc {
  [self disconnect];
}

// Custom setter notifies the delegate only when the state actually changes.
- (void)setState:(ARDSignalingChannelState)state {
  if (_state == state) {
    return;
  }
  _state = state;
  [_delegate channel:self didChangeState:_state];
}

- (void)registerForRoomId:(NSString *)roomId
                 clientId:(NSString *)clientId {
  NSParameterAssert(roomId.length);
  NSParameterAssert(clientId.length);
  _roomId = roomId;
  _clientId = clientId;
  // If the socket is already open register immediately; otherwise
  // registration happens in -webSocketDidOpen:.
  if (_state == kARDSignalingChannelStateOpen) {
    [self registerWithCollider];
  }
}

- (void)sendMessage:(ARDSignalingMessage *)message {
  NSParameterAssert(_clientId.length);
  NSParameterAssert(_roomId.length);
  NSData *data = [message JSONData];
  if (_state == kARDSignalingChannelStateRegistered) {
    NSString *payload =
        [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
    // |wssMessage| (previously also named |message|) no longer shadows the
    // method parameter.
    NSDictionary *wssMessage = @{
      @"cmd": @"send",
      @"msg": payload,
    };
    NSData *messageJSONObject =
        [NSJSONSerialization dataWithJSONObject:wssMessage
                                        options:NSJSONWritingPrettyPrinted
                                          error:nil];
    NSString *messageString =
        [[NSString alloc] initWithData:messageJSONObject
                              encoding:NSUTF8StringEncoding];
    RTCLog(@"C->WSS: %@", messageString);
    [_socket send:messageString];
  } else {
    // Not yet registered on the WebSocket server; POST to the REST endpoint.
    NSString *dataString =
        [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
    RTCLog(@"C->WSS POST: %@", dataString);
    NSString *urlString =
        [NSString stringWithFormat:@"%@/%@/%@",
            [_restURL absoluteString], _roomId, _clientId];
    NSURL *url = [NSURL URLWithString:urlString];
    [NSURLConnection sendAsyncPostToURL:url
                               withData:data
                      completionHandler:nil];
  }
}

- (void)disconnect {
  if (_state == kARDSignalingChannelStateClosed ||
      _state == kARDSignalingChannelStateError) {
    return;
  }
  [_socket close];
  RTCLog(@"C->WSS DELETE rid:%@ cid:%@", _roomId, _clientId);
  // Tell the server (via REST) that this client has left the room.
  NSString *urlString =
      [NSString stringWithFormat:@"%@/%@/%@",
          [_restURL absoluteString], _roomId, _clientId];
  NSURL *url = [NSURL URLWithString:urlString];
  NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
  request.HTTPMethod = @"DELETE";
  request.HTTPBody = nil;
  [NSURLConnection sendAsyncRequest:request completionHandler:nil];
}

#pragma mark - SRWebSocketDelegate

- (void)webSocketDidOpen:(SRWebSocket *)webSocket {
  RTCLog(@"WebSocket connection opened.");
  self.state = kARDSignalingChannelStateOpen;
  // Complete any registration that was requested before the socket opened.
  if (_roomId.length && _clientId.length) {
    [self registerWithCollider];
  }
}

- (void)webSocket:(SRWebSocket *)webSocket didReceiveMessage:(id)message {
  NSString *messageString = message;
  NSData *messageData = [messageString dataUsingEncoding:NSUTF8StringEncoding];
  id jsonObject = [NSJSONSerialization JSONObjectWithData:messageData
                                                  options:0
                                                    error:nil];
  if (![jsonObject isKindOfClass:[NSDictionary class]]) {
    RTCLogError(@"Unexpected message: %@", jsonObject);
    return;
  }
  NSDictionary *wssMessage = jsonObject;
  NSString *errorString = wssMessage[kARDWSSMessageErrorKey];
  if (errorString.length) {
    RTCLogError(@"WSS error: %@", errorString);
    return;
  }
  NSString *payload = wssMessage[kARDWSSMessagePayloadKey];
  ARDSignalingMessage *signalingMessage =
      [ARDSignalingMessage messageFromJSONString:payload];
  RTCLog(@"WSS->C: %@", payload);
  [_delegate channel:self didReceiveMessage:signalingMessage];
}

- (void)webSocket:(SRWebSocket *)webSocket didFailWithError:(NSError *)error {
  RTCLogError(@"WebSocket error: %@", error);
  self.state = kARDSignalingChannelStateError;
}

- (void)webSocket:(SRWebSocket *)webSocket
    didCloseWithCode:(NSInteger)code
              reason:(NSString *)reason
            wasClean:(BOOL)wasClean {
  RTCLog(@"WebSocket closed with code: %ld reason:%@ wasClean:%d",
         (long)code, reason, wasClean);
  NSParameterAssert(_state != kARDSignalingChannelStateError);
  self.state = kARDSignalingChannelStateClosed;
}

#pragma mark - Private

// Sends the register command for the current room/client ids. No-op when
// already registered.
- (void)registerWithCollider {
  if (_state == kARDSignalingChannelStateRegistered) {
    return;
  }
  NSParameterAssert(_roomId.length);
  NSParameterAssert(_clientId.length);
  NSDictionary *registerMessage = @{
    @"cmd": @"register",
    @"roomid" : _roomId,
    @"clientid" : _clientId,
  };
  NSData *message =
      [NSJSONSerialization dataWithJSONObject:registerMessage
                                      options:NSJSONWritingPrettyPrinted
                                        error:nil];
  NSString *messageString =
      [[NSString alloc] initWithData:message encoding:NSUTF8StringEncoding];
  RTCLog(@"Registering on WSS for rid:%@ cid:%@", _roomId, _clientId);
  // Registration can fail if server rejects it. For example, if the room is
  // full.
  [_socket send:messageString];
  self.state = kARDSignalingChannelStateRegistered;
}

@end
@interface ARDLoopbackWebSocketChannel () <ARDSignalingChannelDelegate>
@end
@implementation ARDLoopbackWebSocketChannel

- (instancetype)initWithURL:(NSURL *)url restURL:(NSURL *)restURL {
  // The loopback channel is its own signaling delegate.
  return [super initWithURL:url restURL:restURL delegate:self];
}

#pragma mark - ARDSignalingChannelDelegate

- (void)channel:(id<ARDSignalingChannel>)channel
    didReceiveMessage:(ARDSignalingMessage *)message {
  if (message.type == kARDSignalingMessageTypeOffer) {
    // Rewrite the received offer into an answer and echo it to the server.
    ARDSessionDescriptionMessage *offerMessage =
        (ARDSessionDescriptionMessage *)message;
    RTCSessionDescription *offerDescription = offerMessage.sessionDescription;
    NSString *answerSdp =
        [offerDescription.sdp stringByReplacingOccurrencesOfString:@"offer"
                                                        withString:@"answer"];
    RTCSessionDescription *answerDescription =
        [[RTCSessionDescription alloc] initWithType:RTCSdpTypeAnswer
                                                sdp:answerSdp];
    ARDSignalingMessage *answerMessage =
        [[ARDSessionDescriptionMessage alloc]
            initWithDescription:answerDescription];
    [self sendMessage:answerMessage];
  } else if (message.type == kARDSignalingMessageTypeCandidate ||
             message.type == kARDSignalingMessageTypeCandidateRemoval) {
    // Candidates are echoed to the server unchanged.
    [self sendMessage:message];
  }
  // Answer messages should not occur in loopback; bye needs no action.
}

- (void)channel:(id<ARDSignalingChannel>)channel
    didChangeState:(ARDSignalingChannelState)state {
}

@end

View File

@ -0,0 +1,22 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCIceCandidate.h"
// JSON (de)serialization helpers for ICE candidates exchanged with the
// AppRTC signaling server.
@interface RTCIceCandidate (JSON)

// Parses a single candidate from its JSON dictionary form.
+ (RTCIceCandidate *)candidateFromJSONDictionary:(NSDictionary *)dictionary;

// Parses a batch of candidates from a batched-candidate JSON dictionary.
+ (NSArray<RTCIceCandidate *> *)candidatesFromJSONDictionary:
    (NSDictionary *)dictionary;

// Serializes |candidates| into one JSON message tagged with |typeValue|.
+ (NSData *)JSONDataForIceCandidates:(NSArray<RTCIceCandidate *> *)candidates
                            withType:(NSString *)typeValue;

// JSON wire representation of this single candidate.
- (NSData *)JSONData;

@end

View File

@ -0,0 +1,100 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCIceCandidate+JSON.h"
#import "WebRTC/RTCLogging.h"
// JSON field names for ICE candidate messages. Declared |NSString * const|
// (constant pointer); the previous |NSString const *| form left the pointers
// themselves mutable.
static NSString * const kRTCICECandidateTypeKey = @"type";
static NSString * const kRTCICECandidateTypeValue = @"candidate";
static NSString * const kRTCICECandidateMidKey = @"id";
static NSString * const kRTCICECandidateMLineIndexKey = @"label";
static NSString * const kRTCICECandidateSdpKey = @"candidate";
static NSString * const kRTCICECandidatesTypeKey = @"candidates";
@implementation RTCIceCandidate (JSON)

// Builds a single RTCIceCandidate from its JSON dictionary form.
+ (RTCIceCandidate *)candidateFromJSONDictionary:(NSDictionary *)dictionary {
  NSNumber *lineIndexNumber = dictionary[kRTCICECandidateMLineIndexKey];
  return [[RTCIceCandidate alloc]
        initWithSdp:dictionary[kRTCICECandidateSdpKey]
      sdpMLineIndex:[lineIndexNumber integerValue]
             sdpMid:dictionary[kRTCICECandidateMidKey]];
}

// Serializes a batch of candidates into one JSON message of the given type.
// Returns nil (and logs) if serialization fails.
+ (NSData *)JSONDataForIceCandidates:(NSArray<RTCIceCandidate *> *)candidates
                            withType:(NSString *)typeValue {
  NSMutableArray *candidateDictionaries =
      [NSMutableArray arrayWithCapacity:candidates.count];
  for (RTCIceCandidate *candidate in candidates) {
    [candidateDictionaries addObject:[candidate JSONDictionary]];
  }
  NSDictionary *json = @{
    kRTCICECandidateTypeKey : typeValue,
    kRTCICECandidatesTypeKey : candidateDictionaries
  };
  NSError *error = nil;
  NSData *data =
      [NSJSONSerialization dataWithJSONObject:json
                                      options:NSJSONWritingPrettyPrinted
                                        error:&error];
  if (error) {
    RTCLogError(@"Error serializing JSON: %@", error);
    return nil;
  }
  return data;
}

// Parses all candidates out of a batched-candidate JSON dictionary.
+ (NSArray<RTCIceCandidate *> *)candidatesFromJSONDictionary:
    (NSDictionary *)dictionary {
  NSArray *candidateDictionaries = dictionary[kRTCICECandidatesTypeKey];
  NSMutableArray<RTCIceCandidate *> *parsedCandidates =
      [NSMutableArray arrayWithCapacity:candidateDictionaries.count];
  for (NSDictionary *candidateDictionary in candidateDictionaries) {
    [parsedCandidates addObject:
        [RTCIceCandidate candidateFromJSONDictionary:candidateDictionary]];
  }
  return parsedCandidates;
}

// JSON wire representation used when sending a single candidate.
// Returns nil (and logs) if serialization fails.
- (NSData *)JSONData {
  NSDictionary *json = @{
    kRTCICECandidateTypeKey : kRTCICECandidateTypeValue,
    kRTCICECandidateMLineIndexKey : @(self.sdpMLineIndex),
    kRTCICECandidateMidKey : self.sdpMid,
    kRTCICECandidateSdpKey : self.sdp
  };
  NSError *error = nil;
  NSData *data =
      [NSJSONSerialization dataWithJSONObject:json
                                      options:NSJSONWritingPrettyPrinted
                                        error:&error];
  if (error) {
    RTCLogError(@"Error serializing JSON: %@", error);
    return nil;
  }
  return data;
}

// Dictionary form of this candidate; used for batched serialization.
- (NSDictionary *)JSONDictionary {
  return @{
    kRTCICECandidateMLineIndexKey : @(self.sdpMLineIndex),
    kRTCICECandidateMidKey : self.sdpMid,
    kRTCICECandidateSdpKey : self.sdp
  };
}

@end

View File

@ -0,0 +1,17 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCIceServer.h"
// JSON parsing helper for ICE server configuration.
@interface RTCIceServer (JSON)

// Builds an RTCIceServer from a dictionary with "urls", "username" and
// "credential" entries; missing credentials default to empty strings.
+ (RTCIceServer *)serverFromJSONDictionary:(NSDictionary *)dictionary;

@end

View File

@ -0,0 +1,24 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCIceServer+JSON.h"
@implementation RTCIceServer (JSON)

// Builds an RTCIceServer from its JSON form. Missing username/credential
// values are replaced with empty strings.
+ (RTCIceServer *)serverFromJSONDictionary:(NSDictionary *)dictionary {
  NSArray *urlStrings = dictionary[@"urls"];
  NSString *userName = dictionary[@"username"];
  NSString *secret = dictionary[@"credential"];
  return [[RTCIceServer alloc] initWithURLStrings:urlStrings
                                         username:(userName ?: @"")
                                       credential:(secret ?: @"")];
}

@end

View File

@ -0,0 +1,19 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCMediaConstraints.h"
// JSON parsing helper for media constraints.
@interface RTCMediaConstraints (JSON)

// Builds constraints from the "mandatory" section of the given JSON
// dictionary. Optional constraints are not parsed yet.
+ (RTCMediaConstraints *)constraintsFromJSONDictionary:
    (NSDictionary *)dictionary;

@end

View File

@ -0,0 +1,34 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCMediaConstraints+JSON.h"
// |NSString * const| (constant pointer); the previous |NSString const *| form
// left the pointer itself mutable.
static NSString * const kRTCMediaConstraintsMandatoryKey = @"mandatory";
@implementation RTCMediaConstraints (JSON)

// Builds an RTCMediaConstraints object from the "mandatory" section of the
// given JSON dictionary. Optional constraints are not parsed yet.
+ (RTCMediaConstraints *)constraintsFromJSONDictionary:
    (NSDictionary *)dictionary {
  NSDictionary *mandatory = dictionary[kRTCMediaConstraintsMandatoryKey];
  // Copy the JSON values into a fresh dictionary. (Local renamed from the
  // misspelled "mandatoryContraints".)
  NSMutableDictionary *mandatoryConstraints =
      [NSMutableDictionary dictionaryWithCapacity:[mandatory count]];
  [mandatory enumerateKeysAndObjectsUsingBlock:^(
      id key, id obj, BOOL *stop) {
    mandatoryConstraints[key] = obj;
  }];
  // TODO(tkchin): figure out json formats for optional constraints.
  RTCMediaConstraints *constraints =
      [[RTCMediaConstraints alloc]
          initWithMandatoryConstraints:mandatoryConstraints
                   optionalConstraints:nil];
  return constraints;
}

@end

View File

@ -0,0 +1,19 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "WebRTC/RTCSessionDescription.h"
// JSON (de)serialization helpers for session descriptions.
@interface RTCSessionDescription (JSON)

// Builds a session description from a JSON dictionary with "type" and "sdp".
+ (RTCSessionDescription *)descriptionFromJSONDictionary:
    (NSDictionary *)dictionary;

// JSON wire representation of this description.
- (NSData *)JSONData;

@end

View File

@ -0,0 +1,35 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCSessionDescription+JSON.h"
// JSON field names for session description messages. |NSString * const|
// (constant pointer); the previous |NSString const *| form left the pointers
// themselves mutable.
static NSString * const kRTCSessionDescriptionTypeKey = @"type";
static NSString * const kRTCSessionDescriptionSdpKey = @"sdp";
@implementation RTCSessionDescription (JSON)

// Builds a session description from a JSON dictionary containing "type" and
// "sdp" entries.
+ (RTCSessionDescription *)descriptionFromJSONDictionary:
    (NSDictionary *)dictionary {
  RTCSdpType type =
      [[self class] typeForString:dictionary[kRTCSessionDescriptionTypeKey]];
  return [[RTCSessionDescription alloc]
      initWithType:type
               sdp:dictionary[kRTCSessionDescriptionSdpKey]];
}

// Serializes this description to the JSON wire format.
- (NSData *)JSONData {
  NSDictionary *json = @{
    kRTCSessionDescriptionTypeKey : [[self class] stringForType:self.type],
    kRTCSessionDescriptionSdpKey : self.sdp
  };
  return [NSJSONSerialization dataWithJSONObject:json options:0 error:nil];
}

@end

View File

@ -0,0 +1,38 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
// NOTE(review): the category name misspells "Utilities"; it must match the
// implementation's category name, so it is left unchanged here.
@interface NSDictionary (ARDUtilites)
// Creates a dictionary with the keys and values in the JSON object.
+ (NSDictionary *)dictionaryWithJSONString:(NSString *)jsonString;
// Creates a dictionary from UTF-8 JSON data.
+ (NSDictionary *)dictionaryWithJSONData:(NSData *)jsonData;
@end
@interface NSURLConnection (ARDUtilities)

// Issues an asynchronous request that calls back on main queue.
+ (void)sendAsyncRequest:(NSURLRequest *)request
       completionHandler:(void (^)(NSURLResponse *response,
                                   NSData *data,
                                   NSError *error))completionHandler;

// Posts data to the specified URL. |succeeded| is NO when the request fails
// or the server returns a non-200 status code.
+ (void)sendAsyncPostToURL:(NSURL *)url
                  withData:(NSData *)data
         completionHandler:(void (^)(BOOL succeeded,
                                     NSData *data))completionHandler;

@end
// Returns total CPU usage of the current task as a percentage, or -1 on
// failure. Declared with (void): in C, empty parentheses mean "unspecified
// arguments", not "no arguments".
NSInteger ARDGetCpuUsagePercentage(void);

View File

@ -0,0 +1,126 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDUtilities.h"
#import <mach/mach.h>
#import "WebRTC/RTCLogging.h"
@implementation NSDictionary (ARDUtilites)

// Parses a UTF-8 JSON string into a dictionary. Returns nil (and logs) on
// parse failure.
+ (NSDictionary *)dictionaryWithJSONString:(NSString *)jsonString {
  NSParameterAssert(jsonString.length > 0);
  // Delegate to the NSData variant instead of duplicating the parse + log
  // logic; behavior (options:0, error logging) is identical.
  NSData *data = [jsonString dataUsingEncoding:NSUTF8StringEncoding];
  return [self dictionaryWithJSONData:data];
}

// Parses JSON data into a dictionary. Returns nil (and logs) on parse
// failure.
+ (NSDictionary *)dictionaryWithJSONData:(NSData *)jsonData {
  NSError *error = nil;
  NSDictionary *dict =
      [NSJSONSerialization JSONObjectWithData:jsonData options:0 error:&error];
  if (error) {
    RTCLogError(@"Error parsing JSON: %@", error.localizedDescription);
  }
  return dict;
}

@end
@implementation NSURLConnection (ARDUtilities)

// Issues |request| asynchronously via the shared NSURLSession and forwards
// the result to |completionHandler| (which may be nil).
// NOTE(review): the shared session invokes its completion handler on a
// background queue, not necessarily the main thread as the comment below
// suggests — confirm callers tolerate this.
+ (void)sendAsyncRequest:(NSURLRequest *)request
       completionHandler:(void (^)(NSURLResponse *response,
                                   NSData *data,
                                   NSError *error))completionHandler {
  // Kick off an async request which will call back on main thread.
  NSURLSession *session = [NSURLSession sharedSession];
  [[session dataTaskWithRequest:request
              completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
    if (completionHandler) {
      completionHandler(response, data, error);
    }
  }] resume];
}

// Posts data to the specified URL.
+ (void)sendAsyncPostToURL:(NSURL *)url
                  withData:(NSData *)data
         completionHandler:(void (^)(BOOL succeeded,
                                     NSData *data))completionHandler {
  NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url];
  request.HTTPMethod = @"POST";
  request.HTTPBody = data;
  [[self class] sendAsyncRequest:request
               completionHandler:^(NSURLResponse *response,
                                   NSData *data,
                                   NSError *error) {
    // Transport-level failure: report NO with whatever data was received.
    if (error) {
      RTCLogError(@"Error posting data: %@", error.localizedDescription);
      if (completionHandler) {
        completionHandler(NO, data);
      }
      return;
    }
    // Any status other than 200 is treated as failure; log the body.
    NSHTTPURLResponse *httpResponse = (NSHTTPURLResponse *)response;
    if (httpResponse.statusCode != 200) {
      NSString *serverResponse = data.length > 0 ?
          [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding] :
          nil;
      RTCLogError(@"Received bad response: %@", serverResponse);
      if (completionHandler) {
        completionHandler(NO, data);
      }
      return;
    }
    if (completionHandler) {
      completionHandler(YES, data);
    }
  }];
}

@end
// Returns the summed CPU usage of every thread in the current task, as a
// percentage rounded to the nearest integer, or -1 if the thread list
// cannot be obtained. Signature uses (void): in C, empty parentheses mean
// "unspecified arguments".
NSInteger ARDGetCpuUsagePercentage(void) {
  // Create an array of thread ports for the current task.
  const task_t task = mach_task_self();
  thread_act_array_t thread_array;
  mach_msg_type_number_t thread_count;
  if (task_threads(task, &thread_array, &thread_count) != KERN_SUCCESS) {
    return -1;
  }
  // Sum cpu usage from all threads.
  float cpu_usage_percentage = 0;
  thread_basic_info_data_t thread_info_data = {};
  mach_msg_type_number_t thread_info_count;
  for (size_t i = 0; i < thread_count; ++i) {
    thread_info_count = THREAD_BASIC_INFO_COUNT;
    kern_return_t ret = thread_info(thread_array[i],
                                    THREAD_BASIC_INFO,
                                    (thread_info_t)&thread_info_data,
                                    &thread_info_count);
    if (ret == KERN_SUCCESS) {
      // cpu_usage is reported in units of TH_USAGE_SCALE; scale to percent.
      cpu_usage_percentage +=
          100.f * (float)thread_info_data.cpu_usage / TH_USAGE_SCALE;
    }
  }
  // Dealloc the created array.
  vm_deallocate(task, (vm_address_t)thread_array,
                sizeof(thread_act_t) * thread_count);
  return lroundf(cpu_usage_percentage);
}

View File

@ -0,0 +1,17 @@
/*
* Copyright 2013 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <UIKit/UIKit.h>
// The main application class of the AppRTCMobile iOS app demonstrating
// interoperability between the Objective C implementation of PeerConnection
// and the appr.tc demo webapp.
// Application delegate; initializes global WebRTC state (field trials, SSL,
// tracing) at launch and tears it down at termination.
@interface ARDAppDelegate : NSObject <UIApplicationDelegate>
@end

View File

@ -0,0 +1,58 @@
/*
* Copyright 2013 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDAppDelegate.h"
#import "WebRTC/RTCFieldTrials.h"
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCSSLAdapter.h"
#import "WebRTC/RTCTracing.h"
#import "ARDMainViewController.h"
@implementation ARDAppDelegate {
  UIWindow *_window;  // Strong reference keeps the key window alive.
}

#pragma mark - UIApplicationDelegate methods

- (BOOL)application:(UIApplication *)application
    didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
  // Enable the H264 high-profile field trial before any other WebRTC use.
  NSDictionary *fieldTrials = @{
    kRTCFieldTrialH264HighProfileKey: kRTCFieldTrialEnabledValue,
  };
  RTCInitFieldTrialDictionary(fieldTrials);
  RTCInitializeSSL();
  RTCSetupInternalTracer();
  _window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
  [_window makeKeyAndVisible];
  // Root UI: main view controller wrapped in an opaque navigation bar.
  ARDMainViewController *viewController = [[ARDMainViewController alloc] init];
  UINavigationController *root =
      [[UINavigationController alloc] initWithRootViewController:viewController];
  root.navigationBar.translucent = NO;
  _window.rootViewController = root;
#if defined(NDEBUG)
  // In debug builds the default level is LS_INFO and in non-debug builds it is
  // disabled. Continue to log to console in non-debug builds, but only
  // warnings and errors.
  RTCSetMinDebugLogLevel(RTCLoggingSeverityWarning);
#endif
  return YES;
}

- (void)applicationWillTerminate:(UIApplication *)application {
  // Mirror-image teardown of the tracing/SSL setup done at launch.
  RTCShutdownInternalTracer();
  RTCCleanupSSL();
}

@end

View File

@ -0,0 +1,30 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <UIKit/UIKit.h>
@class ARDMainView;
// Receives user actions from the main view.
@protocol ARDMainViewDelegate <NSObject>
// Called when the user starts a call with the entered room name.
- (void)mainView:(ARDMainView *)mainView didInputRoom:(NSString *)room isLoopback:(BOOL)isLoopback;
// Called when the user taps the audio-loop toggle button.
- (void)mainViewDidToggleAudioLoop:(ARDMainView *)mainView;
@end
// The main view of AppRTCMobile. It contains an input field for entering a room
// name on apprtc to connect to.
@interface ARDMainView : UIView

// Receives room-input and audio-loop toggle events.
@property(nonatomic, weak) id<ARDMainViewDelegate> delegate;
// Updates the audio loop button as needed.
@property(nonatomic, assign) BOOL isAudioLoopPlaying;

@end

View File

@ -0,0 +1,205 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDMainView.h"
#import "UIImage+ARDUtilities.h"
static CGFloat const kRoomTextFieldHeight = 40;
static CGFloat const kRoomTextFieldMargin = 8;
static CGFloat const kCallControlMargin = 8;
// Helper view that contains a text field and a clear button.
@interface ARDRoomTextField : UIView <UITextFieldDelegate>
// Current contents of the room name field.
@property(nonatomic, readonly) NSString *roomText;
@end
@implementation ARDRoomTextField {
  UITextField *_roomText;
}

- (instancetype)initWithFrame:(CGRect)frame {
  if (self = [super initWithFrame:frame]) {
    _roomText = [[UITextField alloc] initWithFrame:CGRectZero];
    _roomText.borderStyle = UITextBorderStyleNone;
    _roomText.font = [UIFont fontWithName:@"Roboto" size:12];
    _roomText.placeholder = @"Room name";
    // Room names are opaque identifiers; disable auto-correct/capitalize.
    _roomText.autocorrectionType = UITextAutocorrectionTypeNo;
    _roomText.autocapitalizationType = UITextAutocapitalizationTypeNone;
    _roomText.clearButtonMode = UITextFieldViewModeAlways;
    _roomText.delegate = self;
    [self addSubview:_roomText];
    // Give rounded corners and a light gray border.
    self.layer.borderWidth = 1;
    self.layer.borderColor = [[UIColor lightGrayColor] CGColor];
    self.layer.cornerRadius = 2;
  }
  return self;
}

// Insets the text field horizontally within the bordered container.
// NOTE(review): does not call [super layoutSubviews]; consider adding it.
- (void)layoutSubviews {
  _roomText.frame =
      CGRectMake(kRoomTextFieldMargin, 0, CGRectGetWidth(self.bounds) - kRoomTextFieldMargin,
                 kRoomTextFieldHeight);
}

// Fixed height; width is whatever the caller proposes.
- (CGSize)sizeThatFits:(CGSize)size {
  size.height = kRoomTextFieldHeight;
  return size;
}

- (NSString *)roomText {
  return _roomText.text;
}

#pragma mark - UITextFieldDelegate

- (BOOL)textFieldShouldReturn:(UITextField *)textField {
  // There is no other control that can take focus, so manually resign focus
  // when return (Join) is pressed to trigger |textFieldDidEndEditing|.
  [textField resignFirstResponder];
  return YES;
}

@end
@implementation ARDMainView {
  ARDRoomTextField *_roomText;
  UILabel *_callOptionsLabel;
  UISwitch *_loopbackSwitch;
  UILabel *_loopbackLabel;
  UIButton *_startCallButton;
  UIButton *_audioLoopButton;
}

@synthesize delegate = _delegate;
@synthesize isAudioLoopPlaying = _isAudioLoopPlaying;

// Builds the static control hierarchy: room field, call-options label,
// loopback switch + label, audio-loop button and start-call button.
- (instancetype)initWithFrame:(CGRect)frame {
  if (self = [super initWithFrame:frame]) {
    _roomText = [[ARDRoomTextField alloc] initWithFrame:CGRectZero];
    [self addSubview:_roomText];
    UIFont *controlFont = [UIFont fontWithName:@"Roboto" size:20];
    UIColor *controlFontColor = [UIColor colorWithWhite:0 alpha:.6];
    _callOptionsLabel = [[UILabel alloc] initWithFrame:CGRectZero];
    _callOptionsLabel.text = @"Call Options";
    _callOptionsLabel.font = controlFont;
    _callOptionsLabel.textColor = controlFontColor;
    [_callOptionsLabel sizeToFit];
    [self addSubview:_callOptionsLabel];
    _loopbackSwitch = [[UISwitch alloc] initWithFrame:CGRectZero];
    [_loopbackSwitch sizeToFit];
    [self addSubview:_loopbackSwitch];
    _loopbackLabel = [[UILabel alloc] initWithFrame:CGRectZero];
    _loopbackLabel.text = @"Loopback mode";
    _loopbackLabel.font = controlFont;
    _loopbackLabel.textColor = controlFontColor;
    [_loopbackLabel sizeToFit];
    [self addSubview:_loopbackLabel];
    _startCallButton = [UIButton buttonWithType:UIButtonTypeSystem];
    [_startCallButton setTitle:@"Start call"
                      forState:UIControlStateNormal];
    _startCallButton.titleLabel.font = controlFont;
    [_startCallButton sizeToFit];
    [_startCallButton addTarget:self
                         action:@selector(onStartCall:)
               forControlEvents:UIControlEventTouchUpInside];
    [self addSubview:_startCallButton];
    // Used to test what happens to sounds when calls are in progress.
    _audioLoopButton = [UIButton buttonWithType:UIButtonTypeSystem];
    _audioLoopButton.titleLabel.font = controlFont;
    [self updateAudioLoopButton];
    [_audioLoopButton addTarget:self
                         action:@selector(onToggleAudioLoop:)
               forControlEvents:UIControlEventTouchUpInside];
    [self addSubview:_audioLoopButton];
    self.backgroundColor = [UIColor whiteColor];
  }
  return self;
}

// Custom setter keeps the toggle button title in sync with playback state.
- (void)setIsAudioLoopPlaying:(BOOL)isAudioLoopPlaying {
  if (_isAudioLoopPlaying == isAudioLoopPlaying) {
    return;
  }
  _isAudioLoopPlaying = isAudioLoopPlaying;
  [self updateAudioLoopButton];
}

// Manual top-to-bottom layout: room field, options label, loopback row,
// audio-loop button, start-call button.
// NOTE(review): does not call [super layoutSubviews]; consider adding it.
- (void)layoutSubviews {
  CGRect bounds = self.bounds;
  CGFloat roomTextWidth = bounds.size.width - 2 * kRoomTextFieldMargin;
  CGFloat roomTextHeight = [_roomText sizeThatFits:bounds.size].height;
  _roomText.frame =
      CGRectMake(kRoomTextFieldMargin, kRoomTextFieldMargin, roomTextWidth, roomTextHeight);
  CGFloat callOptionsLabelTop =
      CGRectGetMaxY(_roomText.frame) + kCallControlMargin * 4;
  _callOptionsLabel.frame = CGRectMake(kCallControlMargin,
                                       callOptionsLabelTop,
                                       _callOptionsLabel.frame.size.width,
                                       _callOptionsLabel.frame.size.height);
  // Loopback row: switch indented, label vertically centered on the switch.
  CGFloat loopbackModeTop = CGRectGetMaxY(_callOptionsLabel.frame) + kCallControlMargin * 2;
  CGRect loopbackModeRect = CGRectMake(kCallControlMargin * 3,
                                       loopbackModeTop,
                                       _loopbackSwitch.frame.size.width,
                                       _loopbackSwitch.frame.size.height);
  _loopbackSwitch.frame = loopbackModeRect;
  CGFloat loopbackModeLabelCenterX = CGRectGetMaxX(loopbackModeRect) +
      kCallControlMargin + _loopbackLabel.frame.size.width / 2;
  _loopbackLabel.center = CGPointMake(loopbackModeLabelCenterX,
                                      CGRectGetMidY(loopbackModeRect));
  CGFloat audioLoopTop = CGRectGetMaxY(loopbackModeRect) + kCallControlMargin * 3;
  _audioLoopButton.frame = CGRectMake(kCallControlMargin,
                                      audioLoopTop,
                                      _audioLoopButton.frame.size.width,
                                      _audioLoopButton.frame.size.height);
  CGFloat startCallTop =
      CGRectGetMaxY(_audioLoopButton.frame) + kCallControlMargin * 3;
  _startCallButton.frame = CGRectMake(kCallControlMargin,
                                      startCallTop,
                                      _startCallButton.frame.size.width,
                                      _startCallButton.frame.size.height);
}

#pragma mark - Private

// Updates the toggle button title for the current playback state and
// re-sizes it to fit.
- (void)updateAudioLoopButton {
  if (_isAudioLoopPlaying) {
    [_audioLoopButton setTitle:@"Stop sound"
                      forState:UIControlStateNormal];
    [_audioLoopButton sizeToFit];
  } else {
    [_audioLoopButton setTitle:@"Play sound"
                      forState:UIControlStateNormal];
    [_audioLoopButton sizeToFit];
  }
}

- (void)onToggleAudioLoop:(id)sender {
  [_delegate mainViewDidToggleAudioLoop:self];
}

// Forwards the current room text and loopback setting to the delegate.
- (void)onStartCall:(id)sender {
  [_delegate mainView:self didInputRoom:_roomText.roomText isLoopback:_loopbackSwitch.isOn];
}

@end

View File

@ -0,0 +1,14 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <UIKit/UIKit.h>
// Root view controller of AppRTCMobile; hosts the room-entry main view.
@interface ARDMainViewController : UIViewController
@end

View File

@ -0,0 +1,260 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDMainViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "WebRTC/RTCAudioSession.h"
#import "WebRTC/RTCAudioSessionConfiguration.h"
#import "WebRTC/RTCDispatcher.h"
#import "WebRTC/RTCLogging.h"
#import "ARDAppClient.h"
#import "ARDMainView.h"
#import "ARDSettingsModel.h"
#import "ARDSettingsViewController.h"
#import "ARDVideoCallViewController.h"
static NSString *const barButtonImageString = @"ic_settings_black_24dp.png";
// Launch argument to be passed to indicate that the app should start loopback immediately
static NSString *const loopbackLaunchProcessArgument = @"loopback";
@interface ARDMainViewController () <
ARDMainViewDelegate,
ARDVideoCallViewControllerDelegate,
RTCAudioSessionDelegate>
@end
@implementation ARDMainViewController {
  ARDMainView *_mainView;
  AVAudioPlayer *_audioPlayer;
  BOOL _useManualAudio;
}

#pragma mark - View lifecycle

- (void)viewDidLoad {
  [super viewDidLoad];
  // Launching with the "loopback" argument (used by automation) starts a
  // loopback call immediately with a generated room name.
  if ([[[NSProcessInfo processInfo] arguments] containsObject:loopbackLaunchProcessArgument]) {
    [self mainView:nil didInputRoom:@"" isLoopback:YES];
  }
}

- (void)loadView {
  self.title = @"AppRTC Mobile";
  _mainView = [[ARDMainView alloc] initWithFrame:CGRectZero];
  _mainView.delegate = self;
  self.view = _mainView;
  [self addSettingsBarButton];

  // Route WebRTC call audio to the speaker by default.
  RTCAudioSessionConfiguration *webRTCConfig =
      [RTCAudioSessionConfiguration webRTCConfiguration];
  webRTCConfig.categoryOptions = webRTCConfig.categoryOptions |
      AVAudioSessionCategoryOptionDefaultToSpeaker;
  [RTCAudioSessionConfiguration setWebRTCConfiguration:webRTCConfig];

  RTCAudioSession *session = [RTCAudioSession sharedInstance];
  [session addDelegate:self];

  [self configureAudioSession];
  [self setupAudioPlayer];
}

// Installs the settings (gear) button in the navigation bar.
- (void)addSettingsBarButton {
  UIBarButtonItem *settingsButton =
      [[UIBarButtonItem alloc] initWithImage:[UIImage imageNamed:barButtonImageString]
                                       style:UIBarButtonItemStylePlain
                                      target:self
                                      action:@selector(showSettings:)];
  self.navigationItem.rightBarButtonItem = settingsButton;
}

// Returns a random room name suitable for a loopback call.
+ (NSString *)loopbackRoomString {
  NSString *loopbackRoomString =
      [[NSUUID UUID].UUIDString stringByReplacingOccurrencesOfString:@"-" withString:@""];
  return loopbackRoomString;
}

#pragma mark - ARDMainViewDelegate

// Validates the entered room name and, if valid, hands the audio session
// over to WebRTC and presents the video call screen.
- (void)mainView:(ARDMainView *)mainView didInputRoom:(NSString *)room isLoopback:(BOOL)isLoopback {
  if (!room.length) {
    if (isLoopback) {
      // If this is a loopback call, allow a generated room name.
      room = [[self class] loopbackRoomString];
    } else {
      [self showAlertWithMessage:@"Missing room name."];
      return;
    }
  }
  // Trim whitespaces.
  NSCharacterSet *whitespaceSet = [NSCharacterSet whitespaceCharacterSet];
  NSString *trimmedRoom = [room stringByTrimmingCharactersInSet:whitespaceSet];

  // Check that the room name consists solely of word characters.
  NSError *error = nil;
  NSRegularExpressionOptions options = NSRegularExpressionCaseInsensitive;
  NSRegularExpression *regex =
      [NSRegularExpression regularExpressionWithPattern:@"\\w+"
                                                options:options
                                                  error:&error];
  if (error) {
    [self showAlertWithMessage:error.localizedDescription];
    return;
  }
  NSRange matchRange =
      [regex rangeOfFirstMatchInString:trimmedRoom
                               options:0
                                 range:NSMakeRange(0, trimmedRoom.length)];
  if (matchRange.location == NSNotFound ||
      matchRange.length != trimmedRoom.length) {
    [self showAlertWithMessage:@"Invalid room name."];
    return;
  }

  ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];

  RTCAudioSession *session = [RTCAudioSession sharedInstance];
  session.useManualAudio = [settingsModel currentUseManualAudioConfigSettingFromStore];
  session.isAudioEnabled = NO;

  // Kick off the video call.
  ARDVideoCallViewController *videoCallViewController =
      [[ARDVideoCallViewController alloc] initForRoom:trimmedRoom
                                           isLoopback:isLoopback
                                             delegate:self];
  videoCallViewController.modalTransitionStyle =
      UIModalTransitionStyleCrossDissolve;
  [self presentViewController:videoCallViewController
                     animated:YES
                   completion:nil];
}

// Starts/stops the looping test sound and reflects the player's actual
// state back into the view.
- (void)mainViewDidToggleAudioLoop:(ARDMainView *)mainView {
  if (mainView.isAudioLoopPlaying) {
    [_audioPlayer stop];
  } else {
    [_audioPlayer play];
  }
  mainView.isAudioLoopPlaying = _audioPlayer.playing;
}

#pragma mark - ARDVideoCallViewControllerDelegate

// Dismisses the finished call screen and hands audio back to the app.
- (void)viewControllerDidFinish:(ARDVideoCallViewController *)viewController {
  if (![viewController isBeingDismissed]) {
    RTCLog(@"Dismissing VC");
    [self dismissViewControllerAnimated:YES completion:^{
      [self restartAudioPlayerIfNeeded];
    }];
  }
  RTCAudioSession *session = [RTCAudioSession sharedInstance];
  session.isAudioEnabled = NO;
}

#pragma mark - RTCAudioSessionDelegate

- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session {
  // Stop playback on main queue and then configure WebRTC.
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain
                               block:^{
                                 if (_mainView.isAudioLoopPlaying) {
                                   RTCLog(@"Stopping audio loop due to WebRTC start.");
                                   [_audioPlayer stop];
                                 }
                                 RTCLog(@"Setting isAudioEnabled to YES.");
                                 session.isAudioEnabled = YES;
                               }];
}

- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session {
  // WebRTC is done with the audio session. Restart playback.
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain
                               block:^{
                                 RTCLog(@"audioSessionDidStopPlayOrRecord");
                                 [self restartAudioPlayerIfNeeded];
                               }];
}

#pragma mark - Private

// Presents the settings screen wrapped in a navigation controller.
- (void)showSettings:(id)sender {
  ARDSettingsViewController *settingsController =
      [[ARDSettingsViewController alloc] initWithStyle:UITableViewStyleGrouped
                                         settingsModel:[[ARDSettingsModel alloc] init]];
  UINavigationController *navigationController =
      [[UINavigationController alloc] initWithRootViewController:settingsController];
  [self presentViewControllerAsModal:navigationController];
}

- (void)presentViewControllerAsModal:(UIViewController *)viewController {
  [self presentViewController:viewController animated:YES completion:nil];
}

// Configures the shared audio session for ambient playback so the test
// sound can play outside of a call; WebRTC reconfigures it for calls.
- (void)configureAudioSession {
  RTCAudioSessionConfiguration *configuration =
      [[RTCAudioSessionConfiguration alloc] init];
  configuration.category = AVAudioSessionCategoryAmbient;
  configuration.categoryOptions = AVAudioSessionCategoryOptionDuckOthers;
  configuration.mode = AVAudioSessionModeDefault;

  RTCAudioSession *session = [RTCAudioSession sharedInstance];
  [session lockForConfiguration];
  BOOL hasSucceeded = NO;
  NSError *error = nil;
  if (session.isActive) {
    hasSucceeded = [session setConfiguration:configuration error:&error];
  } else {
    hasSucceeded = [session setConfiguration:configuration
                                      active:YES
                                       error:&error];
  }
  if (!hasSucceeded) {
    RTCLogError(@"Error setting configuration: %@", error.localizedDescription);
  }
  [session unlockForConfiguration];
}

// Prepares the looping test-sound player.
- (void)setupAudioPlayer {
  NSString *audioFilePath =
      [[NSBundle mainBundle] pathForResource:@"mozart" ofType:@"mp3"];
  if (!audioFilePath) {
    RTCLogError(@"Missing audio file mozart.mp3 in main bundle.");
    return;
  }
  // Bug fix: use fileURLWithPath: for a filesystem path. URLWithString:
  // expects an already percent-encoded URL string and yields an invalid
  // (or nil) URL for plain paths, e.g. when the bundle path contains spaces.
  NSURL *audioFileURL = [NSURL fileURLWithPath:audioFilePath];
  _audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:audioFileURL
                                                        error:nil];
  _audioPlayer.numberOfLoops = -1;  // Loop indefinitely.
  _audioPlayer.volume = 1.0;
  [_audioPlayer prepareToPlay];
}

// Restores ambient audio and resumes the test sound if the user had it
// playing and no other screen is presented.
- (void)restartAudioPlayerIfNeeded {
  [self configureAudioSession];
  if (_mainView.isAudioLoopPlaying && !self.presentedViewController) {
    RTCLog(@"Starting audio loop due to WebRTC end.");
    [_audioPlayer play];
  }
}

// Presents a simple OK alert showing |message|.
- (void)showAlertWithMessage:(NSString *)message {
  UIAlertController *alert =
      [UIAlertController alertControllerWithTitle:nil
                                          message:message
                                   preferredStyle:UIAlertControllerStyleAlert];
  UIAlertAction *defaultAction = [UIAlertAction actionWithTitle:@"OK"
                                                          style:UIAlertActionStyleDefault
                                                        handler:^(UIAlertAction *action){
                                                        }];
  [alert addAction:defaultAction];
  [self presentViewController:alert animated:YES completion:nil];
}
@end

View File

@ -0,0 +1,37 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <UIKit/UIKit.h>
@class ARDSettingsModel;
NS_ASSUME_NONNULL_BEGIN
/**
 * Displays settings options.
 */
@interface ARDSettingsViewController : UITableViewController

/**
 * Creates a new instance.
 *
 * @param style The table view style that should be used.
 * @param settingsModel Model class backing the user settings.
 */
- (instancetype)initWithStyle:(UITableViewStyle)style
                settingsModel:(ARDSettingsModel *)settingsModel;

#pragma mark - Unavailable

- (instancetype)initWithStyle:(UITableViewStyle)style NS_UNAVAILABLE;
- (instancetype)init NS_UNAVAILABLE;
+ (instancetype) new NS_UNAVAILABLE;

@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,369 @@
/*
* Copyright 2016 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDSettingsViewController.h"
#import "ARDSettingsModel.h"
NS_ASSUME_NONNULL_BEGIN
// Table sections of the settings screen, in display order.
typedef NS_ENUM(int, ARDSettingsSections) {
  ARDSettingsSectionAudioSettings = 0,
  ARDSettingsSectionVideoResolution,
  ARDSettingsSectionVideoCodec,
  ARDSettingsSectionBitRate,
};

// Rows of the audio settings section, in display order. Row index doubles
// as the switch tag in the audio settings cells.
typedef NS_ENUM(int, ARDAudioSettingsOptions) {
  ARDAudioSettingsAudioOnly = 0,
  ARDAudioSettingsCreateAecDump,
  ARDAudioSettingsUseLevelController,
  ARDAudioSettingsUseManualAudioConfig,
};
// Private state: the model backing every switch and list in the table.
@interface ARDSettingsViewController () <UITextFieldDelegate> {
  ARDSettingsModel *_settingsModel;
}
@end
@implementation ARDSettingsViewController

// Designated initializer; stores the backing settings model.
- (instancetype)initWithStyle:(UITableViewStyle)style
                settingsModel:(ARDSettingsModel *)settingsModel {
  self = [super initWithStyle:style];
  if (self) {
    _settingsModel = settingsModel;
  }
  return self;
}

#pragma mark - View lifecycle

- (void)viewDidLoad {
  [super viewDidLoad];
  self.title = @"Settings";
  [self addDoneBarButton];
}

#pragma mark - Data source

// Video resolutions offered by the model; one table row each.
- (NSArray<NSString *> *)videoResolutionArray {
  return [_settingsModel availableVideoResolutions];
}

// Video codecs offered by the model; one table row each.
- (NSArray<NSString *> *)videoCodecArray {
  return [_settingsModel availableVideoCodecs];
}

#pragma mark -

// Installs the "Done" button that dismisses the settings screen.
- (void)addDoneBarButton {
  UIBarButtonItem *barItem =
      [[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemDone
                                                    target:self
                                                    action:@selector(dismissModally:)];
  self.navigationItem.leftBarButtonItem = barItem;
}

#pragma mark - Dismissal of view controller

- (void)dismissModally:(id)sender {
  [self dismissViewControllerAnimated:YES completion:nil];
}

#pragma mark - Table view data source

- (NSInteger)numberOfSectionsInTableView:(UITableView *)tableView {
  // Audio settings, video resolution, video codec, max bitrate.
  return 4;
}

- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section {
  switch (section) {
    case ARDSettingsSectionAudioSettings:
      return 4;
    case ARDSettingsSectionVideoResolution:
      return self.videoResolutionArray.count;
    case ARDSettingsSectionVideoCodec:
      return self.videoCodecArray.count;
    default:
      return 1;
  }
}

#pragma mark - Table view delegate helpers

// Clears the checkmark accessory from every row of |section|.
- (void)removeAllAccessories:(UITableView *)tableView
                   inSection:(int)section {
  for (int i = 0; i < [tableView numberOfRowsInSection:section]; i++) {
    NSIndexPath *rowPath = [NSIndexPath indexPathForRow:i inSection:section];
    UITableViewCell *cell = [tableView cellForRowAtIndexPath:rowPath];
    cell.accessoryType = UITableViewCellAccessoryNone;
  }
}

// Moves the single checkmark of |section| to the row at |indexPath|.
- (void)tableView:(UITableView *)tableView
    updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
                         inSection:(int)section {
  [self removeAllAccessories:tableView inSection:section];
  UITableViewCell *cell = [tableView cellForRowAtIndexPath:indexPath];
  cell.accessoryType = UITableViewCellAccessoryCheckmark;
  [tableView deselectRowAtIndexPath:indexPath animated:YES];
}

#pragma mark - Table view delegate

- (nullable NSString *)tableView:(UITableView *)tableView
         titleForHeaderInSection:(NSInteger)section {
  switch (section) {
    case ARDSettingsSectionAudioSettings:
      return @"Audio";
    case ARDSettingsSectionVideoResolution:
      return @"Video resolution";
    case ARDSettingsSectionVideoCodec:
      return @"Video codec";
    case ARDSettingsSectionBitRate:
      return @"Maximum bitrate";
    default:
      return @"";
  }
}

- (UITableViewCell *)tableView:(UITableView *)tableView
         cellForRowAtIndexPath:(NSIndexPath *)indexPath {
  switch (indexPath.section) {
    case ARDSettingsSectionAudioSettings:
      return [self audioSettingsTableViewCellForTableView:tableView atIndexPath:indexPath];
    case ARDSettingsSectionVideoResolution:
      return [self videoResolutionTableViewCellForTableView:tableView atIndexPath:indexPath];
    case ARDSettingsSectionVideoCodec:
      return [self videoCodecTableViewCellForTableView:tableView atIndexPath:indexPath];
    case ARDSettingsSectionBitRate:
      return [self bitrateTableViewCellForTableView:tableView atIndexPath:indexPath];
    default:
      return [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
                                    reuseIdentifier:@"identifier"];
  }
}

- (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath {
  switch (indexPath.section) {
    case ARDSettingsSectionVideoResolution:
      [self tableView:tableView didSelectVideoResolutionAtIndexPath:indexPath];
      break;
    case ARDSettingsSectionVideoCodec:
      [self tableView:tableView didSelectVideoCodecCellAtIndexPath:indexPath];
      break;
  }
}

#pragma mark - Table view delegate(Video Resolution)

- (UITableViewCell *)videoResolutionTableViewCellForTableView:(UITableView *)tableView
                                                  atIndexPath:(NSIndexPath *)indexPath {
  NSString *dequeueIdentifier = @"ARDSettingsVideoResolutionViewCellIdentifier";
  UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
  if (!cell) {
    cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
                                  reuseIdentifier:dequeueIdentifier];
  }
  NSString *resolution = self.videoResolutionArray[indexPath.row];
  cell.textLabel.text = resolution;
  // Checkmark the resolution currently stored in the model.
  if ([resolution isEqualToString:[_settingsModel currentVideoResolutionSettingFromStore]]) {
    cell.accessoryType = UITableViewCellAccessoryCheckmark;
  } else {
    cell.accessoryType = UITableViewCellAccessoryNone;
  }
  return cell;
}

// Persists the tapped resolution and moves the checkmark.
// (Fixed selector typo: was -tableView:disSelectVideoResolutionAtIndex:.
// Private method; the only call site is in this file.)
- (void)tableView:(UITableView *)tableView
    didSelectVideoResolutionAtIndexPath:(NSIndexPath *)indexPath {
  [self tableView:tableView
      updateListSelectionAtIndexPath:indexPath
                           inSection:ARDSettingsSectionVideoResolution];
  NSString *videoResolution = self.videoResolutionArray[indexPath.row];
  [_settingsModel storeVideoResolutionSetting:videoResolution];
}

#pragma mark - Table view delegate(Video Codec)

- (UITableViewCell *)videoCodecTableViewCellForTableView:(UITableView *)tableView
                                             atIndexPath:(NSIndexPath *)indexPath {
  NSString *dequeueIdentifier = @"ARDSettingsVideoCodecCellIdentifier";
  UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
  if (!cell) {
    cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
                                  reuseIdentifier:dequeueIdentifier];
  }
  NSString *codec = self.videoCodecArray[indexPath.row];
  cell.textLabel.text = codec;
  // Checkmark the codec currently stored in the model.
  if ([codec isEqualToString:[_settingsModel currentVideoCodecSettingFromStore]]) {
    cell.accessoryType = UITableViewCellAccessoryCheckmark;
  } else {
    cell.accessoryType = UITableViewCellAccessoryNone;
  }
  return cell;
}

// Persists the tapped codec and moves the checkmark.
- (void)tableView:(UITableView *)tableView
    didSelectVideoCodecCellAtIndexPath:(NSIndexPath *)indexPath {
  [self tableView:tableView
      updateListSelectionAtIndexPath:indexPath
                           inSection:ARDSettingsSectionVideoCodec];
  NSString *videoCodec = self.videoCodecArray[indexPath.row];
  [_settingsModel storeVideoCodecSetting:videoCodec];
}

#pragma mark - Table view delegate(Bitrate)

- (UITableViewCell *)bitrateTableViewCellForTableView:(UITableView *)tableView
                                          atIndexPath:(NSIndexPath *)indexPath {
  NSString *dequeueIdentifier = @"ARDSettingsBitrateCellIdentifier";
  UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
  if (!cell) {
    cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
                                  reuseIdentifier:dequeueIdentifier];

    UITextField *textField = [[UITextField alloc]
        initWithFrame:CGRectMake(10, 0, cell.bounds.size.width - 20, cell.bounds.size.height)];
    NSString *currentMaxBitrate = [_settingsModel currentMaxBitrateSettingFromStore].stringValue;
    textField.text = currentMaxBitrate;
    textField.placeholder = @"Enter max bit rate (kbps)";
    textField.keyboardType = UIKeyboardTypeNumberPad;
    textField.delegate = self;

    // Numerical keyboards have no return button, we need to add one manually.
    UIToolbar *numberToolbar =
        [[UIToolbar alloc] initWithFrame:CGRectMake(0, 0, self.view.bounds.size.width, 50)];
    numberToolbar.items = @[
      [[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemFlexibleSpace
                                                    target:nil
                                                    action:nil],
      [[UIBarButtonItem alloc] initWithTitle:@"Apply"
                                       style:UIBarButtonItemStyleDone
                                      target:self
                                      action:@selector(numberTextFieldDidEndEditing:)]
    ];
    [numberToolbar sizeToFit];

    textField.inputAccessoryView = numberToolbar;
    // Custom subviews belong in contentView, per UITableViewCell docs
    // (was added directly to the cell).
    [cell.contentView addSubview:textField];
  }
  return cell;
}

// "Apply" button on the number pad toolbar: ends editing, which triggers
// -textFieldDidEndEditing: below.
- (void)numberTextFieldDidEndEditing:(id)sender {
  [self.view endEditing:YES];
}

// UITextFieldDelegate: persists the entered bitrate; an empty field clears
// the stored value.
- (void)textFieldDidEndEditing:(UITextField *)textField {
  NSNumber *bitrateNumber = nil;
  if (textField.text.length != 0) {
    bitrateNumber = [NSNumber numberWithInteger:textField.text.intValue];
  }
  [_settingsModel storeMaxBitrateSetting:bitrateNumber];
}

#pragma mark - Table view delegate(Audio settings)

- (UITableViewCell *)audioSettingsTableViewCellForTableView:(UITableView *)tableView
                                                atIndexPath:(NSIndexPath *)indexPath {
  NSString *dequeueIdentifier = @"ARDSettingsAudioSettingsCellIdentifier";
  UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:dequeueIdentifier];
  if (!cell) {
    cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
                                  reuseIdentifier:dequeueIdentifier];
    cell.selectionStyle = UITableViewCellSelectionStyleNone;
    UISwitch *switchView = [[UISwitch alloc] initWithFrame:CGRectZero];
    [switchView addTarget:self
                   action:@selector(audioSettingSwitchChanged:)
         forControlEvents:UIControlEventValueChanged];
    cell.accessoryView = switchView;
  }
  cell.textLabel.text = [self labelForAudioSettingAtIndexPathRow:indexPath.row];
  UISwitch *switchView = (UISwitch *)cell.accessoryView;
  // Set the tag on every (re)use, not just at creation, so a reused cell
  // reports the row it is currently displaying.
  switchView.tag = indexPath.row;
  switchView.on = [self valueForAudioSettingAtIndexPathRow:indexPath.row];
  return cell;
}

// Display label for an audio settings row.
- (NSString *)labelForAudioSettingAtIndexPathRow:(NSInteger)setting {
  switch (setting) {
    case ARDAudioSettingsAudioOnly:
      return @"Audio only";
    case ARDAudioSettingsCreateAecDump:
      return @"Create AecDump";
    case ARDAudioSettingsUseLevelController:
      return @"Use level controller";
    case ARDAudioSettingsUseManualAudioConfig:
      return @"Use manual audio config";
    default:
      return @"";
  }
}

// Stored switch state for an audio settings row.
- (BOOL)valueForAudioSettingAtIndexPathRow:(NSInteger)setting {
  switch (setting) {
    case ARDAudioSettingsAudioOnly:
      return [_settingsModel currentAudioOnlySettingFromStore];
    case ARDAudioSettingsCreateAecDump:
      return [_settingsModel currentCreateAecDumpSettingFromStore];
    case ARDAudioSettingsUseLevelController:
      return [_settingsModel currentUseLevelControllerSettingFromStore];
    case ARDAudioSettingsUseManualAudioConfig:
      return [_settingsModel currentUseManualAudioConfigSettingFromStore];
    default:
      return NO;
  }
}

// Value-changed handler for the audio switches; the sender's tag is the row.
- (void)audioSettingSwitchChanged:(UISwitch *)sender {
  switch (sender.tag) {
    case ARDAudioSettingsAudioOnly: {
      [_settingsModel storeAudioOnlySetting:sender.isOn];
      break;
    }
    case ARDAudioSettingsCreateAecDump: {
      [_settingsModel storeCreateAecDumpSetting:sender.isOn];
      break;
    }
    case ARDAudioSettingsUseLevelController: {
      [_settingsModel storeUseLevelControllerSetting:sender.isOn];
      break;
    }
    case ARDAudioSettingsUseManualAudioConfig: {
      [_settingsModel storeUseManualAudioConfigSetting:sender.isOn];
      break;
    }
    default:
      break;
  }
}
@end
NS_ASSUME_NONNULL_END

View File

@ -0,0 +1,17 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <UIKit/UIKit.h>
// Overlay view that renders WebRTC call statistics as text.
@interface ARDStatsView : UIView

// Feeds |stats|, an array of RTCLegacyStatsReport, into the view and
// refreshes the displayed text.
- (void)setStats:(NSArray *)stats;

@end

View File

@ -0,0 +1,52 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDStatsView.h"
#import "WebRTC/RTCLegacyStatsReport.h"
#import "ARDStatsBuilder.h"
@implementation ARDStatsView {
  UILabel *_statsLabel;
  ARDStatsBuilder *_statsBuilder;
}

- (instancetype)initWithFrame:(CGRect)frame {
  self = [super initWithFrame:frame];
  if (self) {
    _statsBuilder = [[ARDStatsBuilder alloc] init];

    // Multi-line green label that shrinks to fit the available width.
    UILabel *label = [[UILabel alloc] initWithFrame:CGRectZero];
    label.numberOfLines = 0;
    label.font = [UIFont fontWithName:@"Roboto" size:12];
    label.adjustsFontSizeToFitWidth = YES;
    label.minimumScaleFactor = 0.6;
    label.textColor = [UIColor greenColor];
    _statsLabel = label;
    [self addSubview:_statsLabel];

    // Dark translucent backdrop so the text is readable over video.
    self.backgroundColor = [UIColor colorWithWhite:0 alpha:0.6];
  }
  return self;
}

- (void)setStats:(NSArray *)stats {
  for (RTCLegacyStatsReport *statsReport in stats) {
    [_statsBuilder parseStatsReport:statsReport];
  }
  _statsLabel.text = _statsBuilder.statsString;
}

- (void)layoutSubviews {
  _statsLabel.frame = self.bounds;
}

- (CGSize)sizeThatFits:(CGSize)size {
  return [_statsLabel sizeThatFits:size];
}
@end

View File

@ -0,0 +1,45 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <UIKit/UIKit.h>
#import <WebRTC/RTCCameraPreviewView.h>
#import <WebRTC/RTCVideoRenderer.h>
#import "ARDStatsView.h"
@class ARDVideoCallView;

// Receiver of user actions performed on an ARDVideoCallView.
@protocol ARDVideoCallViewDelegate <NSObject>

// Called when the camera switch button is pressed.
- (void)videoCallViewDidSwitchCamera:(ARDVideoCallView *)view;

// Called when the route change button is pressed.
- (void)videoCallViewDidChangeRoute:(ARDVideoCallView *)view;

// Called when the hangup button is pressed.
- (void)videoCallViewDidHangup:(ARDVideoCallView *)view;

// Called when stats are enabled by triple tapping.
- (void)videoCallViewDidEnableStats:(ARDVideoCallView *)view;

@end

// Video call view that shows local and remote video, provides a label to
// display status, and also a hangup button.
@interface ARDVideoCallView : UIView

@property(nonatomic, readonly) UILabel *statusLabel;
@property(nonatomic, readonly) RTCCameraPreviewView *localVideoView;
@property(nonatomic, readonly) __kindof UIView<RTCVideoRenderer> *remoteVideoView;
@property(nonatomic, readonly) ARDStatsView *statsView;
@property(nonatomic, weak) id<ARDVideoCallViewDelegate> delegate;

@end

View File

@ -0,0 +1,203 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDVideoCallView.h"
#import <AVFoundation/AVFoundation.h>
#import <WebRTC/RTCEAGLVideoView.h>
#import <WebRTC/RTCMTLVideoView.h>
#import "UIImage+ARDUtilities.h"
// Layout metrics, in points.
static CGFloat const kButtonPadding = 16;
static CGFloat const kButtonSize = 48;
static CGFloat const kLocalVideoViewSize = 120;
static CGFloat const kLocalVideoViewPadding = 8;
static CGFloat const kStatusBarHeight = 20;
// Private RTCEAGLVideoViewDelegate conformance, used on the OpenGL rendering
// path to learn the remote video's size.
@interface ARDVideoCallView () <RTCEAGLVideoViewDelegate>
@end

@implementation ARDVideoCallView {
  UIButton *_routeChangeButton;
  UIButton *_cameraSwitchButton;
  UIButton *_hangupButton;
  CGSize _remoteVideoSize;  // Zero until the first size callback arrives.
}

@synthesize statusLabel = _statusLabel;
@synthesize localVideoView = _localVideoView;
@synthesize remoteVideoView = _remoteVideoView;
@synthesize statsView = _statsView;
@synthesize delegate = _delegate;

- (instancetype)initWithFrame:(CGRect)frame {
  if (self = [super initWithFrame:frame]) {
#if defined(RTC_SUPPORTS_METAL)
    _remoteVideoView = [[RTCMTLVideoView alloc] initWithFrame:CGRectZero];
#else
    // On the GL path, register as delegate to receive video size changes.
    RTCEAGLVideoView *remoteView = [[RTCEAGLVideoView alloc] initWithFrame:CGRectZero];
    remoteView.delegate = self;
    _remoteVideoView = remoteView;
#endif
    [self addSubview:_remoteVideoView];

    _localVideoView = [[RTCCameraPreviewView alloc] initWithFrame:CGRectZero];
    [self addSubview:_localVideoView];

    // Hidden by default; toggled via the triple-tap gesture below.
    _statsView = [[ARDStatsView alloc] initWithFrame:CGRectZero];
    _statsView.hidden = YES;
    [self addSubview:_statsView];

    _routeChangeButton = [UIButton buttonWithType:UIButtonTypeCustom];
    _routeChangeButton.backgroundColor = [UIColor whiteColor];
    _routeChangeButton.layer.cornerRadius = kButtonSize / 2;
    _routeChangeButton.layer.masksToBounds = YES;
    UIImage *image = [UIImage imageNamed:@"ic_surround_sound_black_24dp.png"];
    [_routeChangeButton setImage:image forState:UIControlStateNormal];
    [_routeChangeButton addTarget:self
                           action:@selector(onRouteChange:)
                 forControlEvents:UIControlEventTouchUpInside];
    [self addSubview:_routeChangeButton];

    // TODO(tkchin): don't display this if we can't actually do camera switch.
    _cameraSwitchButton = [UIButton buttonWithType:UIButtonTypeCustom];
    _cameraSwitchButton.backgroundColor = [UIColor whiteColor];
    _cameraSwitchButton.layer.cornerRadius = kButtonSize / 2;
    _cameraSwitchButton.layer.masksToBounds = YES;
    image = [UIImage imageNamed:@"ic_switch_video_black_24dp.png"];
    [_cameraSwitchButton setImage:image forState:UIControlStateNormal];
    [_cameraSwitchButton addTarget:self
                            action:@selector(onCameraSwitch:)
                  forControlEvents:UIControlEventTouchUpInside];
    [self addSubview:_cameraSwitchButton];

    _hangupButton = [UIButton buttonWithType:UIButtonTypeCustom];
    _hangupButton.backgroundColor = [UIColor redColor];
    _hangupButton.layer.cornerRadius = kButtonSize / 2;
    _hangupButton.layer.masksToBounds = YES;
    image = [UIImage imageForName:@"ic_call_end_black_24dp.png"
                            color:[UIColor whiteColor]];
    [_hangupButton setImage:image forState:UIControlStateNormal];
    [_hangupButton addTarget:self
                      action:@selector(onHangup:)
            forControlEvents:UIControlEventTouchUpInside];
    [self addSubview:_hangupButton];

    _statusLabel = [[UILabel alloc] initWithFrame:CGRectZero];
    _statusLabel.font = [UIFont fontWithName:@"Roboto" size:16];
    _statusLabel.textColor = [UIColor whiteColor];
    [self addSubview:_statusLabel];

    // Triple tap anywhere asks the delegate to enable the stats overlay.
    UITapGestureRecognizer *tapRecognizer =
        [[UITapGestureRecognizer alloc]
            initWithTarget:self
                    action:@selector(didTripleTap:)];
    tapRecognizer.numberOfTapsRequired = 3;
    [self addGestureRecognizer:tapRecognizer];
  }
  return self;
}

// Manual layout: remote video aspect-fills the view, local preview sits in
// the bottom-right, stats at the top, buttons along the bottom-left.
- (void)layoutSubviews {
  CGRect bounds = self.bounds;
  if (_remoteVideoSize.width > 0 && _remoteVideoSize.height > 0) {
    // Aspect fill remote video into bounds: start from the aspect-fit rect,
    // then scale its short side up to fill and re-center.
    CGRect remoteVideoFrame =
        AVMakeRectWithAspectRatioInsideRect(_remoteVideoSize, bounds);
    CGFloat scale = 1;
    if (remoteVideoFrame.size.width > remoteVideoFrame.size.height) {
      // Scale by height.
      scale = bounds.size.height / remoteVideoFrame.size.height;
    } else {
      // Scale by width.
      scale = bounds.size.width / remoteVideoFrame.size.width;
    }
    remoteVideoFrame.size.height *= scale;
    remoteVideoFrame.size.width *= scale;
    _remoteVideoView.frame = remoteVideoFrame;
    _remoteVideoView.center =
        CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
  } else {
    // No size known yet; just fill the view.
    _remoteVideoView.frame = bounds;
  }

  // Aspect fit local video view into a square box.
  CGRect localVideoFrame =
      CGRectMake(0, 0, kLocalVideoViewSize, kLocalVideoViewSize);
  // Place the view in the bottom right.
  localVideoFrame.origin.x = CGRectGetMaxX(bounds)
      - localVideoFrame.size.width - kLocalVideoViewPadding;
  localVideoFrame.origin.y = CGRectGetMaxY(bounds)
      - localVideoFrame.size.height - kLocalVideoViewPadding;
  _localVideoView.frame = localVideoFrame;

  // Place stats at the top.
  CGSize statsSize = [_statsView sizeThatFits:bounds.size];
  _statsView.frame = CGRectMake(CGRectGetMinX(bounds),
                                CGRectGetMinY(bounds) + kStatusBarHeight,
                                statsSize.width, statsSize.height);

  // Place hangup button in the bottom left.
  _hangupButton.frame =
      CGRectMake(CGRectGetMinX(bounds) + kButtonPadding,
                 CGRectGetMaxY(bounds) - kButtonPadding -
                     kButtonSize,
                 kButtonSize,
                 kButtonSize);

  // Place button to the right of hangup button.
  CGRect cameraSwitchFrame = _hangupButton.frame;
  cameraSwitchFrame.origin.x =
      CGRectGetMaxX(cameraSwitchFrame) + kButtonPadding;
  _cameraSwitchButton.frame = cameraSwitchFrame;

  // Place route button to the right of camera button.
  CGRect routeChangeFrame = _cameraSwitchButton.frame;
  routeChangeFrame.origin.x =
      CGRectGetMaxX(routeChangeFrame) + kButtonPadding;
  _routeChangeButton.frame = routeChangeFrame;

  [_statusLabel sizeToFit];
  _statusLabel.center =
      CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
}

#pragma mark - RTCEAGLVideoViewDelegate

// Remembers the remote video size so layoutSubviews can aspect-fill it.
// NOTE(review): on the Metal path no delegate is set, so this never fires
// there — confirm RTCMTLVideoView handles sizing internally.
- (void)videoView:(RTCEAGLVideoView *)videoView didChangeVideoSize:(CGSize)size {
  if (videoView == _remoteVideoView) {
    _remoteVideoSize = size;
  }
  [self setNeedsLayout];
}

#pragma mark - Private

- (void)onCameraSwitch:(id)sender {
  [_delegate videoCallViewDidSwitchCamera:self];
}

- (void)onRouteChange:(id)sender {
  [_delegate videoCallViewDidChangeRoute:self];
}

- (void)onHangup:(id)sender {
  [_delegate videoCallViewDidHangup:self];
}

- (void)didTripleTap:(UITapGestureRecognizer *)recognizer {
  [_delegate videoCallViewDidEnableStats:self];
}
@end

View File

@ -0,0 +1,28 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <UIKit/UIKit.h>
@class ARDVideoCallViewController;

// Informed when the call screen has finished and should be dismissed.
@protocol ARDVideoCallViewControllerDelegate <NSObject>

- (void)viewControllerDidFinish:(ARDVideoCallViewController *)viewController;

@end

// View controller for an AppRTC video call.
@interface ARDVideoCallViewController : UIViewController

@property(nonatomic, weak) id<ARDVideoCallViewControllerDelegate> delegate;

// Creates the controller and immediately starts connecting to |room|;
// |isLoopback| requests a loopback call.
- (instancetype)initForRoom:(NSString *)room
                 isLoopback:(BOOL)isLoopback
                   delegate:(id<ARDVideoCallViewControllerDelegate>)delegate;

@end

View File

@ -0,0 +1,224 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "ARDVideoCallViewController.h"
#import "WebRTC/RTCAudioSession.h"
#import "ARDAppClient.h"
#import "ARDCaptureController.h"
#import "ARDSettingsModel.h"
#import "ARDVideoCallView.h"
#import "WebRTC/RTCAVFoundationVideoSource.h"
#import "WebRTC/RTCDispatcher.h"
#import "WebRTC/RTCLogging.h"
#import "WebRTC/RTCMediaConstraints.h"
// Private protocol conformances and state.
@interface ARDVideoCallViewController () <ARDAppClientDelegate,
                                          ARDVideoCallViewDelegate,
                                          RTCAudioSessionDelegate>

// Remote video track received from the app client.
@property(nonatomic, strong) RTCVideoTrack *remoteVideoTrack;
// The controller's root view, typed for convenience.
@property(nonatomic, readonly) ARDVideoCallView *videoCallView;
@end
@implementation ARDVideoCallViewController {
ARDAppClient *_client;
RTCVideoTrack *_remoteVideoTrack;
ARDCaptureController *_captureController;
AVAudioSessionPortOverride _portOverride;
}
@synthesize videoCallView = _videoCallView;
@synthesize remoteVideoTrack = _remoteVideoTrack;
@synthesize delegate = _delegate;
// Designated initializer: begins connecting to |room| right away using the
// stored settings.
- (instancetype)initForRoom:(NSString *)room
                 isLoopback:(BOOL)isLoopback
                   delegate:(id<ARDVideoCallViewControllerDelegate>)delegate {
  self = [super init];
  if (self) {
    _delegate = delegate;
    _client = [[ARDAppClient alloc] initWithDelegate:self];
    ARDSettingsModel *settings = [[ARDSettingsModel alloc] init];
    [_client connectToRoomWithId:room settings:settings isLoopback:isLoopback];
  }
  return self;
}
// Builds the call UI and registers for audio session events.
- (void)loadView {
  ARDVideoCallView *callView = [[ARDVideoCallView alloc] initWithFrame:CGRectZero];
  callView.delegate = self;
  callView.statusLabel.text = [self statusTextForState:RTCIceConnectionStateNew];
  _videoCallView = callView;
  self.view = callView;

  [[RTCAudioSession sharedInstance] addDelegate:self];
}
#pragma mark - ARDAppClientDelegate
- (void)appClient:(ARDAppClient *)client
didChangeState:(ARDAppClientState)state {
switch (state) {
case kARDAppClientStateConnected:
RTCLog(@"Client connected.");
break;
case kARDAppClientStateConnecting:
RTCLog(@"Client connecting.");
break;
case kARDAppClientStateDisconnected:
RTCLog(@"Client disconnected.");
[self hangup];
break;
}
}
- (void)appClient:(ARDAppClient *)client
didChangeConnectionState:(RTCIceConnectionState)state {
RTCLog(@"ICE state changed: %ld", (long)state);
__weak ARDVideoCallViewController *weakSelf = self;
dispatch_async(dispatch_get_main_queue(), ^{
ARDVideoCallViewController *strongSelf = weakSelf;
strongSelf.videoCallView.statusLabel.text =
[strongSelf statusTextForState:state];
});
}
- (void)appClient:(ARDAppClient *)client
didCreateLocalCapturer:(RTCCameraVideoCapturer *)localCapturer {
_videoCallView.localVideoView.captureSession = localCapturer.captureSession;
ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
_captureController =
[[ARDCaptureController alloc] initWithCapturer:localCapturer settings:settingsModel];
[_captureController startCapture];
}
- (void)appClient:(ARDAppClient *)client
didReceiveLocalVideoTrack:(RTCVideoTrack *)localVideoTrack {
}
- (void)appClient:(ARDAppClient *)client
didReceiveRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
self.remoteVideoTrack = remoteVideoTrack;
_videoCallView.statusLabel.hidden = YES;
}
- (void)appClient:(ARDAppClient *)client
didGetStats:(NSArray *)stats {
_videoCallView.statsView.stats = stats;
[_videoCallView setNeedsLayout];
}
- (void)appClient:(ARDAppClient *)client
didError:(NSError *)error {
NSString *message =
[NSString stringWithFormat:@"%@", error.localizedDescription];
[self hangup];
[self showAlertWithMessage:message];
}
#pragma mark - ARDVideoCallViewDelegate
- (void)videoCallViewDidHangup:(ARDVideoCallView *)view {
[self hangup];
}
- (void)videoCallViewDidSwitchCamera:(ARDVideoCallView *)view {
// TODO(tkchin): Rate limit this so you can't tap continously on it.
// Probably through an animation.
[_captureController switchCamera];
}
- (void)videoCallViewDidChangeRoute:(ARDVideoCallView *)view {
AVAudioSessionPortOverride override = AVAudioSessionPortOverrideNone;
if (_portOverride == AVAudioSessionPortOverrideNone) {
override = AVAudioSessionPortOverrideSpeaker;
}
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeAudioSession
block:^{
RTCAudioSession *session = [RTCAudioSession sharedInstance];
[session lockForConfiguration];
NSError *error = nil;
if ([session overrideOutputAudioPort:override error:&error]) {
_portOverride = override;
} else {
RTCLogError(@"Error overriding output port: %@",
error.localizedDescription);
}
[session unlockForConfiguration];
}];
}
- (void)videoCallViewDidEnableStats:(ARDVideoCallView *)view {
_client.shouldGetStats = YES;
_videoCallView.statsView.hidden = NO;
}
#pragma mark - RTCAudioSessionDelegate
- (void)audioSession:(RTCAudioSession *)audioSession
didDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches {
RTCLog(@"Audio session detected glitch, total: %lld", totalNumberOfGlitches);
}
#pragma mark - Private
- (void)setRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
if (_remoteVideoTrack == remoteVideoTrack) {
return;
}
[_remoteVideoTrack removeRenderer:_videoCallView.remoteVideoView];
_remoteVideoTrack = nil;
[_videoCallView.remoteVideoView renderFrame:nil];
_remoteVideoTrack = remoteVideoTrack;
[_remoteVideoTrack addRenderer:_videoCallView.remoteVideoView];
}
- (void)hangup {
self.remoteVideoTrack = nil;
_videoCallView.localVideoView.captureSession = nil;
[_captureController stopCapture];
_captureController = nil;
[_client disconnect];
[_delegate viewControllerDidFinish:self];
}
- (NSString *)statusTextForState:(RTCIceConnectionState)state {
switch (state) {
case RTCIceConnectionStateNew:
case RTCIceConnectionStateChecking:
return @"Connecting...";
case RTCIceConnectionStateConnected:
case RTCIceConnectionStateCompleted:
case RTCIceConnectionStateFailed:
case RTCIceConnectionStateDisconnected:
case RTCIceConnectionStateClosed:
case RTCIceConnectionStateCount:
return nil;
}
}
- (void)showAlertWithMessage:(NSString*)message {
UIAlertController *alert =
[UIAlertController alertControllerWithTitle:nil
message:message
preferredStyle:UIAlertControllerStyleAlert];
UIAlertAction *defaultAction = [UIAlertAction actionWithTitle:@"OK"
style:UIAlertActionStyleDefault
handler:^(UIAlertAction *action){
}];
[alert addAction:defaultAction];
[self presentViewController:alert animated:YES completion:nil];
}
@end

View File

@ -0,0 +1,23 @@
/*
* Copyright 2013 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
//
// Prefix header for all source files of the 'AppRTCMobile' target in the
// 'AppRTCMobile' project
//
#import <Availability.h>
#if __IPHONE_OS_VERSION_MIN_REQUIRED < __IPHONE_6_0
#warning "This project uses features only available in iOS SDK 6.0 and later."
#endif
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

View File

@ -0,0 +1,104 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>BuildMachineOSBuild</key>
<string>12E55</string>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleDisplayName</key>
<string>AppRTCMobile</string>
<key>CFBundleExecutable</key>
<string>${EXECUTABLE_NAME}</string>
<key>CFBundleIcons</key>
<dict>
<key>CFBundlePrimaryIcon</key>
<dict>
<key>CFBundleIconFiles</key>
<array>
<string>Icon.png</string>
<string>Icon-120.png</string>
<string>Icon-180.png</string>
</array>
</dict>
</dict>
<key>CFBundleIdentifier</key>
<string>com.google.AppRTCMobile</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>${EXECUTABLE_NAME}</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleSupportedPlatforms</key>
<array>
<string>iPhoneOS</string>
</array>
<key>CFBundleVersion</key>
<string>1.0</string>
<key>UIStatusBarTintParameters</key>
<dict>
<key>UINavigationBar</key>
<dict>
<key>Style</key>
<string>UIBarStyleDefault</string>
<key>Translucent</key>
<false/>
</dict>
</dict>
<key>UISupportedInterfaceOrientations</key>
<array>
<string>UIInterfaceOrientationPortrait</string>
</array>
<key>UIAppFonts</key>
<array>
<string>Roboto-Regular.ttf</string>
</array>
<key>UIBackgroundModes</key>
<array>
<string>audio</string>
<string>voip</string>
</array>
<key>NSCameraUsageDescription</key>
<string>Camera access needed for video calling</string>
<key>NSMicrophoneUsageDescription</key>
<string>Microphone access needed for video calling</string>
<key>UILaunchImages</key>
<array>
<dict>
<key>UILaunchImageMinimumOSVersion</key>
<string>7.0</string>
<key>UILaunchImageName</key>
<string>iPhone5</string>
<key>UILaunchImageOrientation</key>
<string>Portrait</string>
<key>UILaunchImageSize</key>
<string>{320, 568}</string>
</dict>
<dict>
<key>UILaunchImageMinimumOSVersion</key>
<string>8.0</string>
<key>UILaunchImageName</key>
<string>iPhone6</string>
<key>UILaunchImageOrientation</key>
<string>Portrait</string>
<key>UILaunchImageSize</key>
<string>{375, 667}</string>
</dict>
<dict>
<key>UILaunchImageMinimumOSVersion</key>
<string>8.0</string>
<key>UILaunchImageName</key>
<string>iPhone6p</string>
<key>UILaunchImageOrientation</key>
<string>Portrait</string>
<key>UILaunchImageSize</key>
<string>{414, 736}</string>
</dict>
</array>
</dict>
</plist>

View File

@ -0,0 +1,18 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <UIKit/UIKit.h>
@interface UIImage (ARDUtilities)

// Returns a color-tinted version of the named image resource, or nil when
// the resource cannot be loaded.
+ (UIImage *)imageForName:(NSString *)name color:(UIColor *)color;

@end

View File

@ -0,0 +1,31 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "UIImage+ARDUtilities.h"
@implementation UIImage (ARDUtilities)

// Loads the named image and recolors it: the context is filled with |color|
// and the original image is composited back with a destination-in blend so
// only the image's alpha channel survives, yielding a tinted silhouette.
+ (UIImage *)imageForName:(NSString *)name color:(UIColor *)color {
  UIImage *templateImage = [UIImage imageNamed:name];
  if (!templateImage) {
    return nil;
  }
  CGRect drawRect =
      CGRectMake(0, 0, templateImage.size.width, templateImage.size.height);
  UIGraphicsBeginImageContextWithOptions(templateImage.size, NO, 0.0f);
  [color setFill];
  UIRectFill(drawRect);
  [templateImage drawInRect:drawRect
                  blendMode:kCGBlendModeDestinationIn
                      alpha:1.0f];
  UIImage *tintedImage = UIGraphicsGetImageFromCurrentImageContext();
  UIGraphicsEndImageContext();
  return tintedImage;
}

@end

View File

@ -0,0 +1,20 @@
/*
* Copyright 2013 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <UIKit/UIKit.h>
#import "ARDAppDelegate.h"
// App entry point: enters the UIKit run loop with ARDAppDelegate installed
// as the application delegate.
int main(int argc, char* argv[]) {
  @autoreleasepool {
    NSString* delegateClassName = NSStringFromClass([ARDAppDelegate class]);
    return UIApplicationMain(argc, argv, nil, delegateClassName);
  }
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 316 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 479 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 257 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 360 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 322 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 557 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 285 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 570 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 242 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 311 B

Binary file not shown.

View File

@ -0,0 +1,14 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Cocoa/Cocoa.h>
// Application delegate for the macOS AppRTC demo: owns the main window and
// the call view controller.
@interface APPRTCAppDelegate : NSObject<NSApplicationDelegate>
@end

View File

@ -0,0 +1,55 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "APPRTCAppDelegate.h"
#import "APPRTCViewController.h"
#import "WebRTC/RTCSSLAdapter.h"
// Private conformance: the app delegate also acts as the window's delegate
// so it can tear everything down when the window closes.
@interface APPRTCAppDelegate () <NSWindowDelegate>
@end
@implementation APPRTCAppDelegate {
  APPRTCViewController* _viewController;  // Owns the call UI.
  NSWindow* _window;                      // The single application window.
}

#pragma mark - NSApplicationDelegate

// Builds the main window and installs the call view controller. SSL is
// initialized first because WebRTC networking depends on it.
- (void)applicationDidFinishLaunching:(NSNotification*)notification {
  RTCInitializeSSL();
  NSScreen* screen = [NSScreen mainScreen];
  NSRect visibleRect = [screen visibleFrame];
  // Fixed-size window whose origin is the center of the visible screen area.
  NSRect windowRect = NSMakeRect(NSMidX(visibleRect),
                                 NSMidY(visibleRect),
                                 1320,
                                 1140);
  NSUInteger styleMask = NSTitledWindowMask | NSClosableWindowMask;
  _window = [[NSWindow alloc] initWithContentRect:windowRect
                                        styleMask:styleMask
                                          backing:NSBackingStoreBuffered
                                            defer:NO];
  _window.delegate = self;
  [_window makeKeyAndOrderFront:self];
  [_window makeMainWindow];
  _viewController = [[APPRTCViewController alloc] initWithNibName:nil
                                                           bundle:nil];
  [_window setContentView:[_viewController view]];
}

#pragma mark - NSWindow

// Closing the window ends the app: let the view controller disconnect any
// active call, release SSL resources, then terminate.
- (void)windowWillClose:(NSNotification*)notification {
  [_viewController windowWillClose:notification];
  RTCCleanupSSL();
  [NSApp terminate:self];
}

@end

View File

@ -0,0 +1,17 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <AppKit/AppKit.h>
// View controller hosting the macOS AppRTC call UI.
@interface APPRTCViewController : NSViewController

// Called by the app delegate when the hosting window is closing; disconnects
// any active call.
- (void)windowWillClose:(NSNotification*)notification;

@end

View File

@ -0,0 +1,439 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "APPRTCViewController.h"
#import <AVFoundation/AVFoundation.h>
#import "WebRTC/RTCMTLNSVideoView.h"
#import "WebRTC/RTCNSGLVideoView.h"
#import "WebRTC/RTCVideoTrack.h"
#import "ARDAppClient.h"
#import "ARDCaptureController.h"
#import "ARDSettingsModel.h"
// Layout constants (points) shared by the constraint code below.
static NSUInteger const kContentWidth = 900;
static NSUInteger const kRoomFieldWidth = 200;
static NSUInteger const kActionItemHeight = 30;
static NSUInteger const kBottomViewHeight = 200;
@class APPRTCMainView;

// Delegate for user actions originating from the main view.
@protocol APPRTCMainViewDelegate

- (void)appRTCMainView:(APPRTCMainView*)mainView
        didEnterRoomId:(NSString*)roomId
              loopback:(BOOL)isLoopback;

@end

// Container view: remote/local video renderers, a scrolling log, and the
// room-entry controls.
@interface APPRTCMainView : NSView

@property(nonatomic, weak) id<APPRTCMainViewDelegate> delegate;
@property(nonatomic, readonly) NSView<RTCVideoRenderer>* localVideoView;
@property(nonatomic, readonly) NSView<RTCVideoRenderer>* remoteVideoView;

// Appends |message| to the log; safe to call from any thread.
- (void)displayLogMessage:(NSString*)message;

@end

// Private conformances: text-field events and GL video-size callbacks.
@interface APPRTCMainView () <NSTextFieldDelegate, RTCNSGLVideoViewDelegate>
@end
@implementation APPRTCMainView {
  NSScrollView* _scrollView;       // Hosts the log text view.
  NSView* _actionItemsView;        // Container for room field + buttons.
  NSButton* _connectButton;
  NSButton* _loopbackButton;
  NSTextField* _roomField;
  NSTextView* _logView;
  CGSize _localVideoSize;          // Last sizes reported by the GL views.
  CGSize _remoteVideoSize;
}

@synthesize delegate = _delegate;
@synthesize localVideoView = _localVideoView;
@synthesize remoteVideoView = _remoteVideoView;

// Appends |message| to the log and scrolls it into view, hopping to the main
// queue so callers may invoke this from any thread.
// NOTE(review): rebuilds the entire log string per message (quadratic over
// the session); acceptable for demo-sized logs.
- (void)displayLogMessage:(NSString *)message {
  dispatch_async(dispatch_get_main_queue(), ^{
    _logView.string =
        [NSString stringWithFormat:@"%@%@\n", _logView.string, message];
    NSRange range = NSMakeRange(_logView.string.length, 0);
    [_logView scrollRangeToVisible:range];
  });
}

#pragma mark - Private

- (instancetype)initWithFrame:(NSRect)frame {
  if (self = [super initWithFrame:frame]) {
    [self setupViews];
  }
  return self;
}

+ (BOOL)requiresConstraintBasedLayout {
  return YES;
}

// Rebuilds all layout constraints from scratch using the visual format
// language; sizes derive from the remote video's 16:9 target size.
- (void)updateConstraints {
  NSParameterAssert(
      _roomField != nil &&
      _scrollView != nil &&
      _remoteVideoView != nil &&
      _localVideoView != nil &&
      _actionItemsView!= nil &&
      _connectButton != nil &&
      _loopbackButton != nil);
  [self removeConstraints:[self constraints]];
  NSDictionary* viewsDictionary =
      NSDictionaryOfVariableBindings(_roomField,
                                     _scrollView,
                                     _remoteVideoView,
                                     _localVideoView,
                                     _actionItemsView,
                                     _connectButton,
                                     _loopbackButton);
  NSSize remoteViewSize = [self remoteVideoViewSize];
  NSDictionary* metrics = @{
    @"remoteViewWidth" : @(remoteViewSize.width),
    @"remoteViewHeight" : @(remoteViewSize.height),
    @"kBottomViewHeight" : @(kBottomViewHeight),
    @"localViewHeight" : @(remoteViewSize.height / 3),
    @"localViewWidth" : @(remoteViewSize.width / 3),
    @"kRoomFieldWidth" : @(kRoomFieldWidth),
    @"kActionItemHeight" : @(kActionItemHeight)
  };
  // Declare this separately to avoid compiler warning about splitting string
  // within an NSArray expression.
  NSString* verticalConstraintLeft =
      @"V:|-[_remoteVideoView(remoteViewHeight)]-[_scrollView(kBottomViewHeight)]-|";
  NSString* verticalConstraintRight =
      @"V:|-[_remoteVideoView(remoteViewHeight)]-[_actionItemsView(kBottomViewHeight)]-|";
  NSArray* constraintFormats = @[
      verticalConstraintLeft,
      verticalConstraintRight,
      @"H:|-[_remoteVideoView(remoteViewWidth)]-|",
      @"V:|-[_localVideoView(localViewHeight)]",
      @"H:|-[_localVideoView(localViewWidth)]",
      @"H:|-[_scrollView(==_actionItemsView)]-[_actionItemsView]-|"
  ];
  NSArray* actionItemsConstraints = @[
      @"H:|-[_roomField(kRoomFieldWidth)]-[_loopbackButton(kRoomFieldWidth)]",
      @"H:|-[_connectButton(kRoomFieldWidth)]",
      @"V:|-[_roomField(kActionItemHeight)]-[_connectButton(kActionItemHeight)]",
      @"V:|-[_loopbackButton(kActionItemHeight)]",
  ];
  [APPRTCMainView addConstraints:constraintFormats
                          toView:self
                 viewsDictionary:viewsDictionary
                         metrics:metrics];
  [APPRTCMainView addConstraints:actionItemsConstraints
                          toView:_actionItemsView
                 viewsDictionary:viewsDictionary
                         metrics:metrics];
  [super updateConstraints];
}

#pragma mark - Constraints helper

// Expands each VFL format string in |constraints| and adds the resulting
// NSLayoutConstraints to |view|.
+ (void)addConstraints:(NSArray*)constraints toView:(NSView*)view
       viewsDictionary:(NSDictionary*)viewsDictionary
               metrics:(NSDictionary*)metrics {
  for (NSString* constraintFormat in constraints) {
    NSArray* constraints =
        [NSLayoutConstraint constraintsWithVisualFormat:constraintFormat
                                                options:0
                                                metrics:metrics
                                                  views:viewsDictionary];
    for (NSLayoutConstraint* constraint in constraints) {
      [view addConstraint:constraint];
    }
  }
}

#pragma mark - Control actions

// "Start call" button action: forwards the (possibly auto-generated) room id
// to the delegate.
- (void)startCall:(id)sender {
  NSString* roomString = _roomField.stringValue;
  // Generate room id for loopback options.
  if (_loopbackButton.intValue && [roomString isEqualToString:@""]) {
    roomString = [NSUUID UUID].UUIDString;
    roomString = [roomString stringByReplacingOccurrencesOfString:@"-" withString:@""];
  }
  [self.delegate appRTCMainView:self
                 didEnterRoomId:roomString
                       loopback:_loopbackButton.intValue];
  [self setNeedsUpdateConstraints:YES];
}

#pragma mark - RTCNSGLVideoViewDelegate

// Records the new video dimensions and triggers a constraint rebuild.
- (void)videoView:(RTCNSGLVideoView*)videoView
    didChangeVideoSize:(NSSize)size {
  if (videoView == _remoteVideoView) {
    _remoteVideoSize = size;
  } else if (videoView == _localVideoView) {
    _localVideoSize = size;
  } else {
    return;
  }
  [self setNeedsUpdateConstraints:YES];
}

#pragma mark - Private

// Builds the subview hierarchy: log + scroll view, action items, and the
// video renderers (Metal when available at runtime, else OpenGL).
- (void)setupViews {
  NSParameterAssert([[self subviews] count] == 0);
  _logView = [[NSTextView alloc] initWithFrame:NSZeroRect];
  [_logView setMinSize:NSMakeSize(0, kBottomViewHeight)];
  [_logView setMaxSize:NSMakeSize(FLT_MAX, FLT_MAX)];
  [_logView setVerticallyResizable:YES];
  [_logView setAutoresizingMask:NSViewWidthSizable];
  NSTextContainer* textContainer = [_logView textContainer];
  NSSize containerSize = NSMakeSize(kContentWidth, FLT_MAX);
  [textContainer setContainerSize:containerSize];
  [textContainer setWidthTracksTextView:YES];
  [_logView setEditable:NO];
  [self setupActionItemsView];
  _scrollView = [[NSScrollView alloc] initWithFrame:NSZeroRect];
  [_scrollView setTranslatesAutoresizingMaskIntoConstraints:NO];
  [_scrollView setHasVerticalScroller:YES];
  [_scrollView setDocumentView:_logView];
  [self addSubview:_scrollView];
  // NOTE (daniela): Ignoring Clang diagnostic here.
  // We're performing run time check to make sure class is available on runtime.
  // If not we're providing sensible default.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpartial-availability"
  if ([RTCMTLNSVideoView class] && [RTCMTLNSVideoView isMetalAvailable]) {
    _remoteVideoView = [[RTCMTLNSVideoView alloc] initWithFrame:NSZeroRect];
    _localVideoView = [[RTCMTLNSVideoView alloc] initWithFrame:NSZeroRect];
  }
#pragma clang diagnostic pop
  if (_remoteVideoView == nil) {
    // Metal unavailable: fall back to OpenGL 3.2 core-profile views.
    NSOpenGLPixelFormatAttribute attributes[] = {
      NSOpenGLPFADoubleBuffer,
      NSOpenGLPFADepthSize, 24,
      NSOpenGLPFAOpenGLProfile,
      NSOpenGLProfileVersion3_2Core,
      0
    };
    NSOpenGLPixelFormat* pixelFormat =
        [[NSOpenGLPixelFormat alloc] initWithAttributes:attributes];
    RTCNSGLVideoView* remote =
        [[RTCNSGLVideoView alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat];
    remote.delegate = self;
    _remoteVideoView = remote;
    RTCNSGLVideoView* local =
        [[RTCNSGLVideoView alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat];
    local.delegate = self;
    _localVideoView = local;
  }
  [_remoteVideoView setTranslatesAutoresizingMaskIntoConstraints:NO];
  [self addSubview:_remoteVideoView];
  [_localVideoView setTranslatesAutoresizingMaskIntoConstraints:NO];
  [self addSubview:_localVideoView];
}

// Creates the room field, Start-call button, and Loopback checkbox.
- (void)setupActionItemsView {
  _actionItemsView = [[NSView alloc] initWithFrame:NSZeroRect];
  [_actionItemsView setTranslatesAutoresizingMaskIntoConstraints:NO];
  [self addSubview:_actionItemsView];
  _roomField = [[NSTextField alloc] initWithFrame:NSZeroRect];
  [_roomField setTranslatesAutoresizingMaskIntoConstraints:NO];
  [[_roomField cell] setPlaceholderString: @"Enter AppRTC room id"];
  [_actionItemsView addSubview:_roomField];
  [_roomField setEditable:YES];
  _connectButton = [[NSButton alloc] initWithFrame:NSZeroRect];
  [_connectButton setTranslatesAutoresizingMaskIntoConstraints:NO];
  _connectButton.title = @"Start call";
  _connectButton.bezelStyle = NSRoundedBezelStyle;
  _connectButton.target = self;
  _connectButton.action = @selector(startCall:);
  [_actionItemsView addSubview:_connectButton];
  _loopbackButton = [[NSButton alloc] initWithFrame:NSZeroRect];
  [_loopbackButton setTranslatesAutoresizingMaskIntoConstraints:NO];
  _loopbackButton.title = @"Loopback";
  [_loopbackButton setButtonType:NSSwitchButton];
  [_actionItemsView addSubview:_loopbackButton];
}

// 16:9 target size for the remote view, at least kContentWidth wide; height
// is 0 until the view has a width.
- (NSSize)remoteVideoViewSize {
  if (!_remoteVideoView.bounds.size.width) {
    return NSMakeSize(kContentWidth, 0);
  }
  NSInteger width = MAX(_remoteVideoView.bounds.size.width, kContentWidth);
  NSInteger height = (width/16) * 9;
  return NSMakeSize(width, height);
}

@end
// Private conformances: receives app-client callbacks and main-view actions.
@interface APPRTCViewController ()
    <ARDAppClientDelegate, APPRTCMainViewDelegate>
// The root view, typed as APPRTCMainView for convenience.
@property(nonatomic, readonly) APPRTCMainView* mainView;
@end
@implementation APPRTCViewController {
  ARDAppClient* _client;                     // Active call client; nil when idle.
  RTCVideoTrack* _localVideoTrack;
  RTCVideoTrack* _remoteVideoTrack;
  ARDCaptureController* _captureController;  // Drives the local camera.
}

- (void)dealloc {
  [self disconnect];
}

- (void)viewDidAppear {
  [super viewDidAppear];
  [self displayUsageInstructions];
}

- (void)loadView {
  APPRTCMainView* view = [[APPRTCMainView alloc] initWithFrame:NSZeroRect];
  [view setTranslatesAutoresizingMaskIntoConstraints:NO];
  view.delegate = self;
  self.view = view;
}

// Invoked by the app delegate when the hosting window closes.
- (void)windowWillClose:(NSNotification*)notification {
  [self disconnect];
}

#pragma mark - Usage

// Prints short usage instructions into the on-screen log.
- (void)displayUsageInstructions {
  [self.mainView displayLogMessage:
      @"To start call:\n"
      @"• Enter AppRTC room id (not neccessary for loopback)\n"
      @"• Start call"];
}

#pragma mark - ARDAppClientDelegate

- (void)appClient:(ARDAppClient *)client
    didChangeState:(ARDAppClientState)state {
  switch (state) {
    case kARDAppClientStateConnected:
      [self.mainView displayLogMessage:@"Client connected."];
      break;
    case kARDAppClientStateConnecting:
      [self.mainView displayLogMessage:@"Client connecting."];
      break;
    case kARDAppClientStateDisconnected:
      [self.mainView displayLogMessage:@"Client disconnected."];
      // Drop UI state and the client once the room connection ends.
      [self resetUI];
      _client = nil;
      break;
  }
}

- (void)appClient:(ARDAppClient *)client
    didChangeConnectionState:(RTCIceConnectionState)state {
}

// Starts local capture with default settings once the capturer exists.
- (void)appClient:(ARDAppClient*)client
    didCreateLocalCapturer:(RTCCameraVideoCapturer*)localCapturer {
  _captureController =
      [[ARDCaptureController alloc] initWithCapturer:localCapturer
                                            settings:[[ARDSettingsModel alloc] init]];
  [_captureController startCapture];
}

- (void)appClient:(ARDAppClient *)client
    didReceiveLocalVideoTrack:(RTCVideoTrack *)localVideoTrack {
  _localVideoTrack = localVideoTrack;
  [_localVideoTrack addRenderer:self.mainView.localVideoView];
}

- (void)appClient:(ARDAppClient *)client
    didReceiveRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
  _remoteVideoTrack = remoteVideoTrack;
  [_remoteVideoTrack addRenderer:self.mainView.remoteVideoView];
}

// Any client error aborts the call and shows the error to the user.
- (void)appClient:(ARDAppClient *)client
         didError:(NSError *)error {
  [self showAlertWithMessage:[NSString stringWithFormat:@"%@", error]];
  [self disconnect];
}

- (void)appClient:(ARDAppClient *)client
      didGetStats:(NSArray *)stats {
}

#pragma mark - APPRTCMainViewDelegate

// "Start call" action: tear down any previous call, then connect to |roomId|.
- (void)appRTCMainView:(APPRTCMainView*)mainView
        didEnterRoomId:(NSString*)roomId
              loopback:(BOOL)isLoopback {
  if ([roomId isEqualToString:@""]) {
    [self.mainView displayLogMessage:@"Missing room id"];
    return;
  }
  [self disconnect];
  ARDAppClient* client = [[ARDAppClient alloc] initWithDelegate:self];
  [client connectToRoomWithId:roomId
                     settings:[[ARDSettingsModel alloc] init]  // Use default settings.
                   isLoopback:isLoopback];
  _client = client;
}

#pragma mark - Private

- (APPRTCMainView*)mainView {
  return (APPRTCMainView*)self.view;
}

// Shows a modal alert; dispatched to the main queue since delegate callbacks
// may arrive on other threads.
- (void)showAlertWithMessage:(NSString*)message {
  dispatch_async(dispatch_get_main_queue(), ^{
    NSAlert* alert = [[NSAlert alloc] init];
    [alert setMessageText:message];
    [alert runModal];
  });
}

// Detaches both renderers and clears the last rendered frames.
- (void)resetUI {
  [_remoteVideoTrack removeRenderer:self.mainView.remoteVideoView];
  [_localVideoTrack removeRenderer:self.mainView.localVideoView];
  _remoteVideoTrack = nil;
  _localVideoTrack = nil;
  [self.mainView.remoteVideoView renderFrame:nil];
  [self.mainView.localVideoView renderFrame:nil];
}

// Stops capture and disconnects from the room; safe to call when idle.
- (void)disconnect {
  [self resetUI];
  [_captureController stopCapture];
  _captureController = nil;
  [_client disconnect];
}

@end

View File

@ -0,0 +1,29 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleDisplayName</key>
<string>${PRODUCT_NAME}</string>
<key>CFBundleExecutable</key>
<string>${EXECUTABLE_NAME}</string>
<key>CFBundleIdentifier</key>
<string>com.Google.${PRODUCT_NAME:rfc1034identifier}</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>${PRODUCT_NAME}</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>1.0</string>
<key>LSMinimumSystemVersion</key>
<string>${MACOSX_DEPLOYMENT_TARGET}</string>
<key>NSPrincipalClass</key>
<string>NSApplication</string>
</dict>
</plist>

View File

@ -0,0 +1,22 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <AppKit/AppKit.h>
#import "APPRTCAppDelegate.h"
// Entry point for the macOS demo: creates the shared NSApplication, installs
// the APPRTC delegate, and enters the AppKit run loop.
int main(int argc, char* argv[]) {
  @autoreleasepool {
    NSApplication* application = [NSApplication sharedApplication];
    APPRTCAppDelegate* delegate = [[APPRTCAppDelegate alloc] init];
    [application setDelegate:delegate];
    [application run];
  }
}

View File

@ -0,0 +1,265 @@
/*
* Copyright 2014 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import <OCMock/OCMock.h>
#import <QuartzCore/CoreAnimation.h>
#import <XCTest/XCTest.h>
#include "webrtc/rtc_base/ssladapter.h"
#import "WebRTC/RTCMediaConstraints.h"
#import "WebRTC/RTCPeerConnectionFactory.h"
#import "ARDAppClient+Internal.h"
#import "ARDJoinResponse+Internal.h"
#import "ARDMessageResponse+Internal.h"
#import "ARDSDPUtils.h"
#import "ARDSettingsModel.h"
// Unit tests exercising ARDAppClient against fully mocked room-server,
// signaling, and TURN components.
@interface ARDAppClientTest : XCTestCase
@end
@implementation ARDAppClientTest
#pragma mark - Mock helpers
// Builds an ARDRoomServerClient mock that always succeeds:
//  - join completes with an ARDJoinResponse assembled from the arguments;
//  - sendMessage: relays each message to |messageHandler| then reports success;
//  - leave simply invokes its completion handler.
// NSInvocation argument indices start at 2 (0 = self, 1 = _cmd).
- (id)mockRoomServerClientForRoomId:(NSString *)roomId
                           clientId:(NSString *)clientId
                        isInitiator:(BOOL)isInitiator
                           messages:(NSArray *)messages
                     messageHandler:
    (void (^)(ARDSignalingMessage *))messageHandler {
  id mockRoomServerClient =
      [OCMockObject mockForProtocol:@protocol(ARDRoomServerClient)];

  // Successful join response.
  ARDJoinResponse *joinResponse = [[ARDJoinResponse alloc] init];
  joinResponse.result = kARDJoinResultTypeSuccess;
  joinResponse.roomId = roomId;
  joinResponse.clientId = clientId;
  joinResponse.isInitiator = isInitiator;
  joinResponse.messages = messages;

  // Successful message response.
  ARDMessageResponse *messageResponse = [[ARDMessageResponse alloc] init];
  messageResponse.result = kARDMessageResultTypeSuccess;

  // Return join response from above on join.
  [[[mockRoomServerClient stub] andDo:^(NSInvocation *invocation) {
    // completionHandler is argument index 4: (roomId:2, isLoopback:3, handler:4).
    __unsafe_unretained void (^completionHandler)(ARDJoinResponse *response,
                                                  NSError *error);
    [invocation getArgument:&completionHandler atIndex:4];
    completionHandler(joinResponse, nil);
  }] joinRoomWithRoomId:roomId isLoopback:NO completionHandler:[OCMArg any]];

  // Relay the outgoing message and return the successful message response.
  [[[mockRoomServerClient stub] andDo:^(NSInvocation *invocation) {
    __unsafe_unretained ARDSignalingMessage *message;
    __unsafe_unretained void (^completionHandler)(ARDMessageResponse *response,
                                                  NSError *error);
    [invocation getArgument:&message atIndex:2];
    [invocation getArgument:&completionHandler atIndex:5];
    messageHandler(message);
    completionHandler(messageResponse, nil);
  }] sendMessage:[OCMArg any]
          forRoomId:roomId
           clientId:clientId
  completionHandler:[OCMArg any]];

  // Do nothing on leave.
  [[[mockRoomServerClient stub] andDo:^(NSInvocation *invocation) {
    __unsafe_unretained void (^completionHandler)(NSError *error);
    [invocation getArgument:&completionHandler atIndex:4];
    if (completionHandler) {
      completionHandler(nil);
    }
  }] leaveRoomWithRoomId:roomId
                clientId:clientId
       completionHandler:[OCMArg any]];
  return mockRoomServerClient;
}
// Builds an ARDSignalingChannel mock that accepts registration and relays
// every sent message straight to |messageHandler|.
- (id)mockSignalingChannelForRoomId:(NSString *)roomId
                           clientId:(NSString *)clientId
                     messageHandler:
    (void (^)(ARDSignalingMessage *message))messageHandler {
  id mockSignalingChannel =
      [OCMockObject niceMockForProtocol:@protocol(ARDSignalingChannel)];
  [[mockSignalingChannel stub] registerForRoomId:roomId clientId:clientId];
  [[[mockSignalingChannel stub] andDo:^(NSInvocation *invocation) {
    // Argument index 2 is the first real argument (0 = self, 1 = _cmd).
    __unsafe_unretained ARDSignalingMessage *message;
    [invocation getArgument:&message atIndex:2];
    messageHandler(message);
  }] sendMessage:[OCMArg any]];
  return mockSignalingChannel;
}
// Builds an ARDTURNClient mock whose server request completes immediately
// with an empty server list, so tests use direct connections only.
- (id)mockTURNClient {
  id mockTURNClient =
      [OCMockObject mockForProtocol:@protocol(ARDTURNClient)];
  [[[mockTURNClient stub] andDo:^(NSInvocation *invocation) {
    // Don't return anything in TURN response.
    __unsafe_unretained void (^completionHandler)(NSArray *turnServers,
                                                  NSError *error);
    [invocation getArgument:&completionHandler atIndex:2];
    completionHandler([NSArray array], nil);
  }] requestServersWithCompletionHandler:[OCMArg any]];
  return mockTURNClient;
}
// Returns an ARDSettingsModel whose availableVideoResolutions is stubbed to a
// fixed list, so tests do not depend on the device's capture capabilities.
- (id)mockSettingsModel {
  ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
  NSArray *stubbedResolutions = @[ @"640x480", @"960x540", @"1280x720" ];
  id partialMock = [OCMockObject partialMockForObject:settingsModel];
  [[[partialMock stub] andReturn:stubbedResolutions] availableVideoResolutions];
  return settingsModel;
}
// Builds an ARDAppClient wired entirely to mocks: room server, signaling
// channel, and TURN client are all stubbed. A mock delegate invokes
// |connectedHandler| when the ICE connection reaches the connected state and
// |localVideoTrackHandler| when a local video track is delivered.
- (ARDAppClient *)createAppClientForRoomId:(NSString *)roomId
                                  clientId:(NSString *)clientId
                               isInitiator:(BOOL)isInitiator
                                  messages:(NSArray *)messages
                            messageHandler:
    (void (^)(ARDSignalingMessage *message))messageHandler
                          connectedHandler:(void (^)(void))connectedHandler
                    localVideoTrackHandler:(void (^)(void))localVideoTrackHandler {
  id roomServerClient =
      [self mockRoomServerClientForRoomId:roomId
                                 clientId:clientId
                              isInitiator:isInitiator
                                 messages:messages
                           messageHandler:messageHandler];
  id signalingChannel = [self mockSignalingChannelForRoomId:roomId
                                                   clientId:clientId
                                             messageHandler:messageHandler];
  id turnClient = [self mockTURNClient];
  id delegate =
      [OCMockObject niceMockForProtocol:@protocol(ARDAppClientDelegate)];
  // Report ICE connection establishment to the test.
  [[[delegate stub] andDo:^(NSInvocation *invocation) {
    connectedHandler();
  }] appClient:[OCMArg any]
      didChangeConnectionState:RTCIceConnectionStateConnected];
  // Report local video track delivery to the test.
  [[[delegate stub] andDo:^(NSInvocation *invocation) {
    localVideoTrackHandler();
  }] appClient:[OCMArg any]
      didReceiveLocalVideoTrack:[OCMArg any]];
  return [[ARDAppClient alloc] initWithRoomServerClient:roomServerClient
                                       signalingChannel:signalingChannel
                                             turnClient:turnClient
                                               delegate:delegate];
}
// Tests that an ICE connection is established between two ARDAppClient objects
// where one is set up as a caller and the other the answerer. Network
// components are mocked out and messages are relayed directly from object to
// object. It's expected that both clients reach the
// RTCIceConnectionStateConnected state within a reasonable amount of time.
- (void)testSession {
  // Need __block storage here because the relay callbacks are set up before
  // the clients they forward to exist.
  ARDAppClient *caller = nil;
  ARDAppClient *answerer = nil;
  // Weak references so the relay blocks do not retain the clients.
  __block __weak ARDAppClient *weakCaller = nil;
  __block __weak ARDAppClient *weakAnswerer = nil;
  NSString *roomId = @"testRoom";
  NSString *callerId = @"testCallerId";
  NSString *answererId = @"testAnswererId";
  XCTestExpectation *callerConnectionExpectation =
      [self expectationWithDescription:@"Caller PC connected."];
  XCTestExpectation *answererConnectionExpectation =
      [self expectationWithDescription:@"Answerer PC connected."];
  // Caller: outgoing signaling messages are delivered straight to the
  // answerer's channel-delegate method.
  caller = [self createAppClientForRoomId:roomId
                                 clientId:callerId
                              isInitiator:YES
                                 messages:[NSArray array]
                           messageHandler:^(ARDSignalingMessage *message) {
    ARDAppClient *strongAnswerer = weakAnswerer;
    [strongAnswerer channel:strongAnswerer.channel didReceiveMessage:message];
  } connectedHandler:^{
    [callerConnectionExpectation fulfill];
  } localVideoTrackHandler:^{
  }];
  // TODO(tkchin): Figure out why DTLS-SRTP constraint causes thread assertion
  // crash in Debug.
  caller.defaultPeerConnectionConstraints =
      [[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil
                                            optionalConstraints:nil];
  weakCaller = caller;
  // Answerer: outgoing signaling messages are delivered straight to the
  // caller's channel-delegate method.
  answerer = [self createAppClientForRoomId:roomId
                                   clientId:answererId
                                isInitiator:NO
                                   messages:[NSArray array]
                             messageHandler:^(ARDSignalingMessage *message) {
    ARDAppClient *strongCaller = weakCaller;
    [strongCaller channel:strongCaller.channel didReceiveMessage:message];
  } connectedHandler:^{
    [answererConnectionExpectation fulfill];
  } localVideoTrackHandler:^{
  }];
  // TODO(tkchin): Figure out why DTLS-SRTP constraint causes thread assertion
  // crash in Debug.
  answerer.defaultPeerConnectionConstraints =
      [[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil
                                            optionalConstraints:nil];
  weakAnswerer = answerer;
  // Kick off connection.
  [caller connectToRoomWithId:roomId settings:[self mockSettingsModel] isLoopback:NO];
  [answerer connectToRoomWithId:roomId settings:[self mockSettingsModel] isLoopback:NO];
  [self waitForExpectationsWithTimeout:20 handler:^(NSError *error) {
    if (error) {
      XCTFail(@"Expectation failed with error %@.", error);
    }
  }];
}
// Tests that the caller's delegate receives the local-video-track callback.
// Note this will currently pass even when no camera is connected, as a local
// video track is created regardless (perhaps there should be a test for that...).
#if !TARGET_IPHONE_SIMULATOR // Expect to fail on simulator due to no camera support
- (void)testSessionShouldGetLocalVideoTrackCallback {
  ARDAppClient *caller = nil;
  NSString *roomId = @"testRoom";
  NSString *callerId = @"testCallerId";
  XCTestExpectation *localVideoTrackExpectation =
      [self expectationWithDescription:@"Caller got local video."];
  caller = [self createAppClientForRoomId:roomId
                                 clientId:callerId
                              isInitiator:YES
                                 messages:[NSArray array]
                           messageHandler:^(ARDSignalingMessage *message) {}
                         connectedHandler:^{}
                   localVideoTrackHandler:^{ [localVideoTrackExpectation fulfill]; }];
  caller.defaultPeerConnectionConstraints =
      [[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil
                                            optionalConstraints:nil];
  // Kick off connection.
  [caller connectToRoomWithId:roomId
                     settings:[self mockSettingsModel]
                   isLoopback:NO];
  [self waitForExpectationsWithTimeout:20 handler:^(NSError *error) {
    if (error) {
      // Fix: XCTFail takes an NSString format (@"..."), not a C string
      // literal; this matches the usage in testSession above.
      XCTFail(@"Expectation timed out with error: %@.", error);
    }
  }];
}
#endif
@end

View File

@ -0,0 +1,64 @@
/*
* Copyright 2017 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import <XCTest/XCTest.h>
#import "WebRTC/RTCSessionDescription.h"
#import "ARDSDPUtils.h"
@interface ARDSDPUtilsTest : XCTestCase
@end

@implementation ARDSDPUtilsTest

// Both H264 payload types (120, 97) should move to the front of the m-line,
// preserving the relative order of the remaining payload types.
- (void)testPreferVideoCodecH264 {
  NSString *sdp = @("m=video 9 RTP/SAVPF 100 116 117 96 120 97\r\n"
                    "a=rtpmap:120 H264/90000\r\n"
                    "a=rtpmap:97 H264/90000\r\n");
  NSString *expectedSdp = @("m=video 9 RTP/SAVPF 120 97 100 116 117 96\r\n"
                            "a=rtpmap:120 H264/90000\r\n"
                            "a=rtpmap:97 H264/90000\r\n");
  [self preferVideoCodec:@"H264" sdp:sdp expected:expectedSdp];
}

// The single VP8 payload type (116) should move to the front of the m-line.
- (void)testPreferVideoCodecVP8 {
  NSString *sdp = @("m=video 9 RTP/SAVPF 100 116 117 96 120 97\r\n"
                    "a=rtpmap:116 VP8/90000\r\n");
  NSString *expectedSdp = @("m=video 9 RTP/SAVPF 116 100 117 96 120 97\r\n"
                            "a=rtpmap:116 VP8/90000\r\n");
  [self preferVideoCodec:@"VP8" sdp:sdp expected:expectedSdp];
}

// SDP without a video m-line should pass through unchanged.
- (void)testNoMLine {
  NSString *sdp = @("a=rtpmap:116 VP8/90000\r\n");
  [self preferVideoCodec:@"VP8" sdp:sdp expected:sdp];
}

// Preferring a codec that is not present should leave the SDP unchanged.
- (void)testMissingCodec {
  NSString *sdp = @("m=video 9 RTP/SAVPF 100 116 117 96 120 97\r\n"
                    "a=rtpmap:116 VP8/90000\r\n");
  [self preferVideoCodec:@"foo" sdp:sdp expected:sdp];
}

#pragma mark - Helpers

// Rewrites |sdp| via ARDSDPUtils to prefer |codec| and asserts that the
// resulting description contains |expectedSdp|.
- (void)preferVideoCodec:(NSString *)codec
                     sdp:(NSString *)sdp
                expected:(NSString *)expectedSdp {
  RTCSessionDescription *inputDesc =
      [[RTCSessionDescription alloc] initWithType:RTCSdpTypeOffer sdp:sdp];
  RTCSessionDescription *outputDesc =
      [ARDSDPUtils descriptionForDescription:inputDesc
                         preferredVideoCodec:codec];
  NSRange expectedRange = [outputDesc.description rangeOfString:expectedSdp];
  XCTAssertTrue(expectedRange.location != NSNotFound);
}

@end

View File

@ -0,0 +1,96 @@
/*
* Copyright 2016 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import <OCMock/OCMock.h>
#import <XCTest/XCTest.h>
#import "WebRTC/RTCMediaConstraints.h"
#import "ARDSettingsModel+Private.h"
#import "ARDSettingsStore.h"
// Tests for ARDSettingsModel, backed by a mocked ARDSettingsStore so that no
// real persisted settings are read or written.
@interface ARDSettingsModelTests : XCTestCase {
  ARDSettingsModel *_model;
}
@end

@implementation ARDSettingsModelTests

// Stubs the model's settings store and its list of available video
// resolutions, making tests independent of device capture capabilities.
// Returns the store mock so individual tests can stub/expect calls on it.
- (id)setupMockStore {
  id storeMock = [OCMockObject mockForClass:[ARDSettingsStore class]];

  id partialMock = [OCMockObject partialMockForObject:_model];
  [[[partialMock stub] andReturn:storeMock] settingsStore];
  [[[partialMock stub] andReturn:@[ @"640x480", @"960x540", @"1280x720" ]]
      availableVideoResolutions];

  return storeMock;
}

- (void)setUp {
  // Fix: XCTestCase lifecycle overrides must call through to super.
  [super setUp];
  _model = [[ARDSettingsModel alloc] init];
}

// The model should return exactly the resolution string held by the store.
- (void)testRetrievingSetting {
  id storeMock = [self setupMockStore];
  [[[storeMock expect] andReturn:@"640x480"] videoResolution];

  NSString *string = [_model currentVideoResolutionSettingFromStore];

  XCTAssertEqualObjects(string, @"640x480");
  // Verify the expectation so the test fails if the store was never consulted.
  [storeMock verify];
}

// Storing a resolution outside availableVideoResolutions must be rejected.
- (void)testStoringInvalidConstraintReturnsNo {
  id storeMock = [self setupMockStore];
  [[[storeMock stub] andReturn:@"960x480"] videoResolution];
  XCTAssertFalse([_model storeVideoResolutionSetting:@"960x480"]);
}

// Width is parsed from the "<width>x<height>" string in the store.
- (void)testWidthConstraintFromStore {
  id storeMock = [self setupMockStore];
  [[[storeMock stub] andReturn:@"1270x480"] videoResolution];
  int width = [_model currentVideoResolutionWidthFromStore];
  XCTAssertEqual(width, 1270);
}

// Height is parsed from the "<width>x<height>" string in the store.
- (void)testHeightConstraintFromStore {
  id storeMock = [self setupMockStore];
  [[[storeMock stub] andReturn:@"960x540"] videoResolution];
  int height = [_model currentVideoResolutionHeightFromStore];
  XCTAssertEqual(height, 540);
}

// An unparsable resolution string yields 0 for its numeric components.
- (void)testConstraintComponentIsNilWhenInvalidConstraintString {
  id storeMock = [self setupMockStore];
  [[[storeMock stub] andReturn:@"invalid"] videoResolution];
  int width = [_model currentVideoResolutionWidthFromStore];
  XCTAssertEqual(width, 0);
}

// Storing the audio-only flag must be forwarded to the store.
- (void)testStoringAudioSetting {
  id storeMock = [self setupMockStore];
  [[storeMock expect] setAudioOnly:YES];

  [_model storeAudioOnlySetting:YES];
  [storeMock verify];
}

// The manual-audio-config setting reflects the stored value.
- (void)testReturningDefaultCallOption {
  id storeMock = [self setupMockStore];
  [[[storeMock stub] andReturnValue:@YES] useManualAudioConfig];
  XCTAssertTrue([_model currentUseManualAudioConfigSettingFromStore]);
}

@end

View File

@ -0,0 +1,15 @@
Copyright 2012 Square Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -0,0 +1,132 @@
//
// Copyright 2012 Square Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#import <Foundation/Foundation.h>
#import <Security/SecCertificate.h>
// Connection lifecycle states, mirroring the readyState attribute of the
// W3C WebSocket API.
typedef enum {
    SR_CONNECTING = 0,
    SR_OPEN = 1,
    SR_CLOSING = 2,
    SR_CLOSED = 3,
} SRReadyState;

// WebSocket close status codes (RFC 6455, section 7.4.1).
typedef enum SRStatusCode : NSInteger {
    SRStatusCodeNormal = 1000,
    SRStatusCodeGoingAway = 1001,
    SRStatusCodeProtocolError = 1002,
    SRStatusCodeUnhandledType = 1003,
    // 1004 is reserved and intentionally has no constant.
    SRStatusNoStatusReceived = 1005,
    // 1006 is reserved and intentionally has no constant.
    SRStatusCodeInvalidUTF8 = 1007,
    SRStatusCodePolicyViolated = 1008,
    SRStatusCodeMessageTooBig = 1009,
} SRStatusCode;

@class SRWebSocket;

// Error domain and userInfo key used for NSErrors reported by SRWebSocket.
extern NSString *const SRWebSocketErrorDomain;
extern NSString *const SRHTTPResponseErrorKey;

#pragma mark - SRWebSocketDelegate

@protocol SRWebSocketDelegate;
#pragma mark - SRWebSocket

// A client WebSocket implementation built on NSStream. Instances are
// single-use: create one, call -open exactly once, then discard after close.
@interface SRWebSocket : NSObject <NSStreamDelegate>

// Receives open/close/message/error callbacks; see SRWebSocketDelegate below.
@property (nonatomic, weak) id <SRWebSocketDelegate> delegate;

// Current connection state; see SRReadyState above.
@property (nonatomic, readonly) SRReadyState readyState;
@property (nonatomic, readonly, retain) NSURL *url;

// This returns the negotiated protocol.
// It will be nil until after the handshake completes.
@property (nonatomic, readonly, copy) NSString *protocol;

// Protocols should be an array of strings that turn into Sec-WebSocket-Protocol.
- (id)initWithURLRequest:(NSURLRequest *)request protocols:(NSArray *)protocols;
- (id)initWithURLRequest:(NSURLRequest *)request;

// Some helper constructors.
- (id)initWithURL:(NSURL *)url protocols:(NSArray *)protocols;
- (id)initWithURL:(NSURL *)url;

// Delegate queue will be dispatch_main_queue by default.
// You cannot set both OperationQueue and dispatch_queue.
- (void)setDelegateOperationQueue:(NSOperationQueue*) queue;
- (void)setDelegateDispatchQueue:(dispatch_queue_t) queue;

// By default, it will schedule itself on +[NSRunLoop SR_networkRunLoop] using defaultModes.
- (void)scheduleInRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode;
- (void)unscheduleFromRunLoop:(NSRunLoop *)aRunLoop forMode:(NSString *)mode;

// SRWebSockets are intended for one-time-use only. Open should be called once and only once.
- (void)open;

- (void)close;
- (void)closeWithCode:(NSInteger)code reason:(NSString *)reason;

// Send a UTF8 String or Data.
- (void)send:(id)data;

// Send Data (can be nil) in a ping message.
- (void)sendPing:(NSData *)data;

@end
#pragma mark - SRWebSocketDelegate

// Callbacks from SRWebSocket, delivered on the delegate queue or run loop
// configured on the socket (main queue by default).
@protocol SRWebSocketDelegate <NSObject>

// message will either be an NSString if the server is using text
// or NSData if the server is using binary.
- (void)webSocket:(SRWebSocket *)webSocket didReceiveMessage:(id)message;

@optional

- (void)webSocketDidOpen:(SRWebSocket *)webSocket;
- (void)webSocket:(SRWebSocket *)webSocket didFailWithError:(NSError *)error;
- (void)webSocket:(SRWebSocket *)webSocket didCloseWithCode:(NSInteger)code reason:(NSString *)reason wasClean:(BOOL)wasClean;
- (void)webSocket:(SRWebSocket *)webSocket didReceivePong:(NSData *)pongPayload;

@end

#pragma mark - NSURLRequest (CertificateAdditions)

// Read-only access to the SSL certificates pinned on a request.
@interface NSURLRequest (CertificateAdditions)

@property (nonatomic, retain, readonly) NSArray *SR_SSLPinnedCertificates;

@end

#pragma mark - NSMutableURLRequest (CertificateAdditions)

// Mutable counterpart: set certificates here to enable SSL pinning.
@interface NSMutableURLRequest (CertificateAdditions)

@property (nonatomic, retain) NSArray *SR_SSLPinnedCertificates;

@end

#pragma mark - NSRunLoop (SRWebSocket)

// The dedicated run loop SRWebSocket uses for its network I/O by default.
@interface NSRunLoop (SRWebSocket)

+ (NSRunLoop *)SR_networkRunLoop;

@end

File diff suppressed because it is too large Load Diff

BIN
examples/objc/Icon-120.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.9 KiB

BIN
examples/objc/Icon-180.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 12 KiB

BIN
examples/objc/Icon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 61 KiB

3
examples/objc/README Normal file
View File

@ -0,0 +1,3 @@
This directory contains sample iOS and mac clients for http://apprtc.appspot.com
See ../../app/webrtc/objc/README for information on how to use it.