Wrap WebRTC OBJC API types with RTC_OBJC_TYPE.

This CL introduces two new macros that affect the WebRTC OBJC API symbols:

- RTC_OBJC_TYPE_PREFIX:
  Macro used to prepend a prefix to the API types that are exported with
  RTC_OBJC_EXPORT.

  Clients can patch the definition of this macro locally and build
  WebRTC.framework with their own prefix in case symbol clashing is a
  problem.

  This macro must only be defined by changing the value in
  sdk/objc/base/RTCMacros.h and not via a compiler flag, to ensure
  it has a unique value (see the sketch just below).
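
  As an illustrative sketch (the authoritative definition lives in
  sdk/objc/base/RTCMacros.h, and "MyApp" here is a hypothetical
  prefix, not a value shipped by WebRTC), a client build would patch
  only this one line:

    // sdk/objc/base/RTCMacros.h, patched locally by the client.
    // Upstream ships the prefix empty, so symbols are unchanged by
    // default.
    #define RTC_OBJC_TYPE_PREFIX MyApp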

- RTC_OBJC_TYPE:
  Macro used internally to reference API types. An API type declared
  without this macro is not included in the set of types affected by
  the configurable RTC_OBJC_TYPE_PREFIX (see the sketch below).
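
  A minimal sketch of the token pasting this implies (assumed
  definitions; see sdk/objc/base/RTCMacros.h for the real ones):

    // Two-step concatenation so RTC_OBJC_TYPE_PREFIX is expanded
    // before being pasted onto the type name.
    #define RTC_SYMBOL_CONCAT_HELPER(a, b) a##b
    #define RTC_SYMBOL_CONCAT(a, b) RTC_SYMBOL_CONCAT_HELPER(a, b)
    #define RTC_OBJC_TYPE(type_name) \
      RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name)

    // With RTC_OBJC_TYPE_PREFIX patched to MyApp, this declaration
    @interface RTC_OBJC_TYPE(RTCPeerConnectionFactory) : NSObject
    @end
    // compiles as MyAppRTCPeerConnectionFactory; with the default
    // empty prefix it stays RTCPeerConnectionFactory.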

Manual changes:
https://webrtc-review.googlesource.com/c/src/+/173781/5..10

The auto-generated changes in PS#5 were made with:
https://webrtc-review.googlesource.com/c/src/+/174061.

Bug: None
Change-Id: I0d54ca94db764fb3b6cb4365873f79e14cd879b8
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/173781
Commit-Queue: Mirko Bonadei <mbonadei@webrtc.org>
Reviewed-by: Karl Wiberg <kwiberg@webrtc.org>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#31153}
Author: Mirko Bonadei
Date: 2020-05-04 16:14:32 +02:00
Committed by: Commit Bot
Parent: ce1320cc4d
Commit: a81e9c82fc
303 changed files with 2534 additions and 2189 deletions

View File

@@ -16,9 +16,9 @@
#import "ARDSignalingChannel.h"
#import "ARDTURNClient.h"
@class RTCPeerConnectionFactory;
@class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
@interface ARDAppClient () <ARDSignalingChannelDelegate, RTCPeerConnectionDelegate>
@interface ARDAppClient () <ARDSignalingChannelDelegate, RTC_OBJC_TYPE (RTCPeerConnectionDelegate)>
// All properties should only be mutated from the main queue.
@property(nonatomic, strong) id<ARDRoomServerClient> roomServerClient;
@@ -26,8 +26,8 @@
@property(nonatomic, strong) id<ARDSignalingChannel> loopbackChannel;
@property(nonatomic, strong) id<ARDTURNClient> turnClient;
@property(nonatomic, strong) RTCPeerConnection *peerConnection;
@property(nonatomic, strong) RTCPeerConnectionFactory *factory;
@property(nonatomic, strong) RTC_OBJC_TYPE(RTCPeerConnection) * peerConnection;
@property(nonatomic, strong) RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory;
@property(nonatomic, strong) NSMutableArray *messageQueue;
@property(nonatomic, assign) BOOL isTurnComplete;
@@ -42,7 +42,7 @@
@property(nonatomic, strong) NSURL *webSocketRestURL;
@property(nonatomic, readonly) BOOL isLoopback;
@property(nonatomic, strong) RTCMediaConstraints *defaultPeerConnectionConstraints;
@property(nonatomic, strong) RTC_OBJC_TYPE(RTCMediaConstraints) * defaultPeerConnectionConstraints;
- (instancetype)initWithRoomServerClient:(id<ARDRoomServerClient>)rsClient
signalingChannel:(id<ARDSignalingChannel>)channel

View File

@@ -24,9 +24,9 @@ typedef NS_ENUM(NSInteger, ARDAppClientState) {
@class ARDAppClient;
@class ARDSettingsModel;
@class ARDExternalSampleCapturer;
@class RTCMediaConstraints;
@class RTCCameraVideoCapturer;
@class RTCFileVideoCapturer;
@class RTC_OBJC_TYPE(RTCMediaConstraints);
@class RTC_OBJC_TYPE(RTCCameraVideoCapturer);
@class RTC_OBJC_TYPE(RTCFileVideoCapturer);
// The delegate is informed of pertinent events and will be called on the
// main queue.
@@ -37,12 +37,13 @@ typedef NS_ENUM(NSInteger, ARDAppClientState) {
- (void)appClient:(ARDAppClient *)client didChangeConnectionState:(RTCIceConnectionState)state;
- (void)appClient:(ARDAppClient *)client
didCreateLocalCapturer:(RTCCameraVideoCapturer *)localCapturer;
- (void)appClient:(ARDAppClient *)client didReceiveLocalVideoTrack:(RTCVideoTrack *)localVideoTrack;
didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer;
- (void)appClient:(ARDAppClient *)client
didReceiveRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack;
didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack;
- (void)appClient:(ARDAppClient *)client
didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack;
- (void)appClient:(ARDAppClient *)client didError:(NSError *)error;
@@ -50,7 +51,7 @@ typedef NS_ENUM(NSInteger, ARDAppClientState) {
@optional
- (void)appClient:(ARDAppClient *)client
didCreateLocalFileCapturer:(RTCFileVideoCapturer *)fileCapturer;
didCreateLocalFileCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer;
- (void)appClient:(ARDAppClient *)client
didCreateLocalExternalSampleCapturer:(ARDExternalSampleCapturer *)externalSampleCapturer;

View File

@@ -105,10 +105,10 @@ static int const kKbpsMultiplier = 1000;
@end
@implementation ARDAppClient {
RTCFileLogger *_fileLogger;
RTC_OBJC_TYPE(RTCFileLogger) * _fileLogger;
ARDTimerProxy *_statsTimer;
ARDSettingsModel *_settings;
RTCVideoTrack *_localVideoTrack;
RTC_OBJC_TYPE(RTCVideoTrack) * _localVideoTrack;
}
@synthesize shouldGetStats = _shouldGetStats;
@@ -172,7 +172,7 @@ static int const kKbpsMultiplier = 1000;
- (void)configure {
_messageQueue = [NSMutableArray array];
_iceServers = [NSMutableArray array];
_fileLogger = [[RTCFileLogger alloc] init];
_fileLogger = [[RTC_OBJC_TYPE(RTCFileLogger) alloc] init];
[_fileLogger start];
}
@@ -224,11 +224,14 @@
_isLoopback = isLoopback;
self.state = kARDAppClientStateConnecting;
RTCDefaultVideoDecoderFactory *decoderFactory = [[RTCDefaultVideoDecoderFactory alloc] init];
RTCDefaultVideoEncoderFactory *encoderFactory = [[RTCDefaultVideoEncoderFactory alloc] init];
RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) *decoderFactory =
[[RTC_OBJC_TYPE(RTCDefaultVideoDecoderFactory) alloc] init];
RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) *encoderFactory =
[[RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) alloc] init];
encoderFactory.preferredCodec = [settings currentVideoCodecSettingFromStore];
_factory = [[RTCPeerConnectionFactory alloc] initWithEncoderFactory:encoderFactory
decoderFactory:decoderFactory];
_factory =
[[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] initWithEncoderFactory:encoderFactory
decoderFactory:decoderFactory];
#if defined(WEBRTC_IOS)
if (kARDAppClientEnableTracing) {
@@ -365,38 +368,38 @@ static int const kKbpsMultiplier = 1000;
}
}
#pragma mark - RTCPeerConnectionDelegate
#pragma mark - RTC_OBJC_TYPE(RTCPeerConnectionDelegate)
// Callbacks for this delegate occur on non-main thread and need to be
// dispatched back to main queue as needed.
- (void)peerConnection:(RTCPeerConnection *)peerConnection
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
didChangeSignalingState:(RTCSignalingState)stateChanged {
RTCLog(@"Signaling state changed: %ld", (long)stateChanged);
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didAddStream:(RTCMediaStream *)stream {
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
didAddStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream {
RTCLog(@"Stream with %lu video tracks and %lu audio tracks was added.",
(unsigned long)stream.videoTracks.count,
(unsigned long)stream.audioTracks.count);
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didStartReceivingOnTransceiver:(RTCRtpTransceiver *)transceiver {
RTCMediaStreamTrack *track = transceiver.receiver.track;
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
didStartReceivingOnTransceiver:(RTC_OBJC_TYPE(RTCRtpTransceiver) *)transceiver {
RTC_OBJC_TYPE(RTCMediaStreamTrack) *track = transceiver.receiver.track;
RTCLog(@"Now receiving %@ on track %@.", track.kind, track.trackId);
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didRemoveStream:(RTCMediaStream *)stream {
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
didRemoveStream:(RTC_OBJC_TYPE(RTCMediaStream) *)stream {
RTCLog(@"Stream was removed.");
}
- (void)peerConnectionShouldNegotiate:(RTCPeerConnection *)peerConnection {
- (void)peerConnectionShouldNegotiate:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection {
RTCLog(@"WARNING: Renegotiation needed but unimplemented.");
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
didChangeIceConnectionState:(RTCIceConnectionState)newState {
RTCLog(@"ICE state changed: %ld", (long)newState);
dispatch_async(dispatch_get_main_queue(), ^{
@@ -404,18 +407,18 @@ static int const kKbpsMultiplier = 1000;
});
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
didChangeConnectionState:(RTCPeerConnectionState)newState {
RTCLog(@"ICE+DTLS state changed: %ld", (long)newState);
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
didChangeIceGatheringState:(RTCIceGatheringState)newState {
RTCLog(@"ICE gathering state changed: %ld", (long)newState);
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didGenerateIceCandidate:(RTCIceCandidate *)candidate {
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
didGenerateIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate {
dispatch_async(dispatch_get_main_queue(), ^{
ARDICECandidateMessage *message =
[[ARDICECandidateMessage alloc] initWithCandidate:candidate];
@@ -423,8 +426,8 @@ static int const kKbpsMultiplier = 1000;
});
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didRemoveIceCandidates:(NSArray<RTCIceCandidate *> *)candidates {
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
didRemoveIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates {
dispatch_async(dispatch_get_main_queue(), ^{
ARDICECandidateRemovalMessage *message =
[[ARDICECandidateRemovalMessage alloc]
@@ -433,24 +436,24 @@ static int const kKbpsMultiplier = 1000;
});
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didChangeLocalCandidate:(RTCIceCandidate *)local
didChangeRemoteCandidate:(RTCIceCandidate *)remote
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
didChangeLocalCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)local
didChangeRemoteCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)remote
lastReceivedMs:(int)lastDataReceivedMs
didHaveReason:(NSString *)reason {
RTCLog(@"ICE candidate pair changed because: %@", reason);
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didOpenDataChannel:(RTCDataChannel *)dataChannel {
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
didOpenDataChannel:(RTC_OBJC_TYPE(RTCDataChannel) *)dataChannel {
}
#pragma mark - RTCSessionDescriptionDelegate
// Callbacks for this delegate occur on non-main thread and need to be
// dispatched back to main queue as needed.
- (void)peerConnection:(RTCPeerConnection *)peerConnection
didCreateSessionDescription:(RTCSessionDescription *)sdp
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
didCreateSessionDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)sdp
error:(NSError *)error {
dispatch_async(dispatch_get_main_queue(), ^{
if (error) {
@@ -480,7 +483,7 @@ static int const kKbpsMultiplier = 1000;
});
}
- (void)peerConnection:(RTCPeerConnection *)peerConnection
- (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection
didSetSessionDescriptionWithError:(NSError *)error {
dispatch_async(dispatch_get_main_queue(), ^{
if (error) {
@@ -499,15 +502,16 @@ static int const kKbpsMultiplier = 1000;
// If we're answering and we've just set the remote offer we need to create
// an answer and set the local description.
if (!self.isInitiator && !self.peerConnection.localDescription) {
RTCMediaConstraints *constraints = [self defaultAnswerConstraints];
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultAnswerConstraints];
__weak ARDAppClient *weakSelf = self;
[self.peerConnection answerForConstraints:constraints
completionHandler:^(RTCSessionDescription *sdp, NSError *error) {
ARDAppClient *strongSelf = weakSelf;
[strongSelf peerConnection:strongSelf.peerConnection
didCreateSessionDescription:sdp
error:error];
}];
[self.peerConnection
answerForConstraints:constraints
completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp, NSError * error) {
ARDAppClient *strongSelf = weakSelf;
[strongSelf peerConnection:strongSelf.peerConnection
didCreateSessionDescription:sdp
error:error];
}];
}
});
}
@@ -544,12 +548,10 @@
self.state = kARDAppClientStateConnected;
// Create peer connection.
RTCMediaConstraints *constraints = [self defaultPeerConnectionConstraints];
RTCConfiguration *config = [[RTCConfiguration alloc] init];
RTCCertificate *pcert = [RTCCertificate generateCertificateWithParams:@{
@"expires" : @100000,
@"name" : @"RSASSA-PKCS1-v1_5"
}];
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultPeerConnectionConstraints];
RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init];
RTC_OBJC_TYPE(RTCCertificate) *pcert = [RTC_OBJC_TYPE(RTCCertificate)
generateCertificateWithParams:@{@"expires" : @100000, @"name" : @"RSASSA-PKCS1-v1_5"}];
config.iceServers = _iceServers;
config.sdpSemantics = RTCSdpSemanticsUnifiedPlan;
config.certificate = pcert;
@@ -562,14 +564,14 @@
if (_isInitiator) {
// Send offer.
__weak ARDAppClient *weakSelf = self;
[_peerConnection offerForConstraints:[self defaultOfferConstraints]
completionHandler:^(RTCSessionDescription *sdp,
NSError *error) {
ARDAppClient *strongSelf = weakSelf;
[strongSelf peerConnection:strongSelf.peerConnection
didCreateSessionDescription:sdp
error:error];
}];
[_peerConnection
offerForConstraints:[self defaultOfferConstraints]
completionHandler:^(RTC_OBJC_TYPE(RTCSessionDescription) * sdp, NSError * error) {
ARDAppClient *strongSelf = weakSelf;
[strongSelf peerConnection:strongSelf.peerConnection
didCreateSessionDescription:sdp
error:error];
}];
} else {
// Check if we've received an offer.
[self drainMessageQueueIfReady];
@@ -619,7 +621,7 @@ static int const kKbpsMultiplier = 1000;
case kARDSignalingMessageTypeAnswer: {
ARDSessionDescriptionMessage *sdpMessage =
(ARDSessionDescriptionMessage *)message;
RTCSessionDescription *description = sdpMessage.sessionDescription;
RTC_OBJC_TYPE(RTCSessionDescription) *description = sdpMessage.sessionDescription;
__weak ARDAppClient *weakSelf = self;
[_peerConnection setRemoteDescription:description
completionHandler:^(NSError *error) {
@@ -679,7 +681,7 @@ static int const kKbpsMultiplier = 1000;
}
- (void)setMaxBitrateForPeerConnectionVideoSender {
for (RTCRtpSender *sender in _peerConnection.senders) {
for (RTC_OBJC_TYPE(RTCRtpSender) * sender in _peerConnection.senders) {
if (sender.track != nil) {
if ([sender.track.kind isEqualToString:kARDVideoTrackKind]) {
[self setMaxBitrate:[_settings currentMaxBitrateSettingFromStore] forVideoSender:sender];
@@ -688,20 +690,20 @@ static int const kKbpsMultiplier = 1000;
}
}
- (void)setMaxBitrate:(NSNumber *)maxBitrate forVideoSender:(RTCRtpSender *)sender {
- (void)setMaxBitrate:(NSNumber *)maxBitrate forVideoSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender {
if (maxBitrate.intValue <= 0) {
return;
}
RTCRtpParameters *parametersToModify = sender.parameters;
for (RTCRtpEncodingParameters *encoding in parametersToModify.encodings) {
RTC_OBJC_TYPE(RTCRtpParameters) *parametersToModify = sender.parameters;
for (RTC_OBJC_TYPE(RTCRtpEncodingParameters) * encoding in parametersToModify.encodings) {
encoding.maxBitrateBps = @(maxBitrate.intValue * kKbpsMultiplier);
}
[sender setParameters:parametersToModify];
}
- (RTCRtpTransceiver *)videoTransceiver {
for (RTCRtpTransceiver *transceiver in _peerConnection.transceivers) {
- (RTC_OBJC_TYPE(RTCRtpTransceiver) *)videoTransceiver {
for (RTC_OBJC_TYPE(RTCRtpTransceiver) * transceiver in _peerConnection.transceivers) {
if (transceiver.mediaType == RTCRtpMediaTypeVideo) {
return transceiver;
}
@@ -710,29 +712,30 @@ static int const kKbpsMultiplier = 1000;
}
- (void)createMediaSenders {
RTCMediaConstraints *constraints = [self defaultMediaAudioConstraints];
RTCAudioSource *source = [_factory audioSourceWithConstraints:constraints];
RTCAudioTrack *track = [_factory audioTrackWithSource:source
trackId:kARDAudioTrackId];
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [self defaultMediaAudioConstraints];
RTC_OBJC_TYPE(RTCAudioSource) *source = [_factory audioSourceWithConstraints:constraints];
RTC_OBJC_TYPE(RTCAudioTrack) *track = [_factory audioTrackWithSource:source
trackId:kARDAudioTrackId];
[_peerConnection addTrack:track streamIds:@[ kARDMediaStreamId ]];
_localVideoTrack = [self createLocalVideoTrack];
if (_localVideoTrack) {
[_peerConnection addTrack:_localVideoTrack streamIds:@[ kARDMediaStreamId ]];
[_delegate appClient:self didReceiveLocalVideoTrack:_localVideoTrack];
// We can set up rendering for the remote track right away since the transceiver already has an
// RTCRtpReceiver with a track. The track will automatically get unmuted and produce frames
// once RTP is received.
RTCVideoTrack *track = (RTCVideoTrack *)([self videoTransceiver].receiver.track);
// RTC_OBJC_TYPE(RTCRtpReceiver) with a track. The track will automatically get unmuted and
// produce frames once RTP is received.
RTC_OBJC_TYPE(RTCVideoTrack) *track =
(RTC_OBJC_TYPE(RTCVideoTrack) *)([self videoTransceiver].receiver.track);
[_delegate appClient:self didReceiveRemoteVideoTrack:track];
}
}
- (RTCVideoTrack *)createLocalVideoTrack {
- (RTC_OBJC_TYPE(RTCVideoTrack) *)createLocalVideoTrack {
if ([_settings currentAudioOnlySettingFromStore]) {
return nil;
}
RTCVideoSource *source = [_factory videoSource];
RTC_OBJC_TYPE(RTCVideoSource) *source = [_factory videoSource];
#if !TARGET_IPHONE_SIMULATOR
if (self.isBroadcast) {
@@ -740,13 +743,15 @@ static int const kKbpsMultiplier = 1000;
[[ARDExternalSampleCapturer alloc] initWithDelegate:source];
[_delegate appClient:self didCreateLocalExternalSampleCapturer:capturer];
} else {
RTCCameraVideoCapturer *capturer = [[RTCCameraVideoCapturer alloc] initWithDelegate:source];
RTC_OBJC_TYPE(RTCCameraVideoCapturer) *capturer =
[[RTC_OBJC_TYPE(RTCCameraVideoCapturer) alloc] initWithDelegate:source];
[_delegate appClient:self didCreateLocalCapturer:capturer];
}
#else
#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
if (@available(iOS 10, *)) {
RTCFileVideoCapturer *fileCapturer = [[RTCFileVideoCapturer alloc] initWithDelegate:source];
RTC_OBJC_TYPE(RTCFileVideoCapturer) *fileCapturer =
[[RTC_OBJC_TYPE(RTCFileVideoCapturer) alloc] initWithDelegate:source];
[_delegate appClient:self didCreateLocalFileCapturer:fileCapturer];
}
#endif
@@ -781,40 +786,38 @@ static int const kKbpsMultiplier = 1000;
#pragma mark - Defaults
- (RTCMediaConstraints *)defaultMediaAudioConstraints {
NSDictionary *mandatoryConstraints = @{};
RTCMediaConstraints *constraints =
[[RTCMediaConstraints alloc] initWithMandatoryConstraints:mandatoryConstraints
optionalConstraints:nil];
return constraints;
- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultMediaAudioConstraints {
NSDictionary *mandatoryConstraints = @{};
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatoryConstraints
optionalConstraints:nil];
return constraints;
}
- (RTCMediaConstraints *)defaultAnswerConstraints {
- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultAnswerConstraints {
return [self defaultOfferConstraints];
}
- (RTCMediaConstraints *)defaultOfferConstraints {
- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultOfferConstraints {
NSDictionary *mandatoryConstraints = @{
@"OfferToReceiveAudio" : @"true",
@"OfferToReceiveVideo" : @"true"
};
RTCMediaConstraints* constraints =
[[RTCMediaConstraints alloc]
initWithMandatoryConstraints:mandatoryConstraints
optionalConstraints:nil];
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:mandatoryConstraints
optionalConstraints:nil];
return constraints;
}
- (RTCMediaConstraints *)defaultPeerConnectionConstraints {
- (RTC_OBJC_TYPE(RTCMediaConstraints) *)defaultPeerConnectionConstraints {
if (_defaultPeerConnectionConstraints) {
return _defaultPeerConnectionConstraints;
}
NSString *value = _isLoopback ? @"false" : @"true";
NSDictionary *optionalConstraints = @{ @"DtlsSrtpKeyAgreement" : value };
RTCMediaConstraints* constraints =
[[RTCMediaConstraints alloc]
initWithMandatoryConstraints:nil
optionalConstraints:optionalConstraints];
RTC_OBJC_TYPE(RTCMediaConstraints) *constraints =
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
optionalConstraints:optionalConstraints];
return constraints;
}

View File

@@ -15,7 +15,7 @@
// Controls the camera. Handles starting the capture, switching cameras etc.
@interface ARDCaptureController : NSObject
- (instancetype)initWithCapturer:(RTCCameraVideoCapturer *)capturer
- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer
settings:(ARDSettingsModel *)settings;
- (void)startCapture;
- (void)stopCapture;

View File

@@ -17,12 +17,12 @@
const Float64 kFramerateLimit = 30.0;
@implementation ARDCaptureController {
RTCCameraVideoCapturer *_capturer;
RTC_OBJC_TYPE(RTCCameraVideoCapturer) * _capturer;
ARDSettingsModel *_settings;
BOOL _usingFrontCamera;
}
- (instancetype)initWithCapturer:(RTCCameraVideoCapturer *)capturer
- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)capturer
settings:(ARDSettingsModel *)settings {
if (self = [super init]) {
_capturer = capturer;
@@ -63,7 +63,8 @@ const Float64 kFramerateLimit = 30.0;
#pragma mark - Private
- (AVCaptureDevice *)findDeviceForPosition:(AVCaptureDevicePosition)position {
NSArray<AVCaptureDevice *> *captureDevices = [RTCCameraVideoCapturer captureDevices];
NSArray<AVCaptureDevice *> *captureDevices =
[RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices];
for (AVCaptureDevice *device in captureDevices) {
if (device.position == position) {
return device;
@@ -74,7 +75,7 @@ const Float64 kFramerateLimit = 30.0;
- (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device {
NSArray<AVCaptureDeviceFormat *> *formats =
[RTCCameraVideoCapturer supportedFormatsForDevice:device];
[RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device];
int targetWidth = [_settings currentVideoResolutionWidthFromStore];
int targetHeight = [_settings currentVideoResolutionHeightFromStore];
AVCaptureDeviceFormat *selectedFormat = nil;

View File

@@ -14,5 +14,5 @@
- (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer;
@end
@interface ARDExternalSampleCapturer : RTCVideoCapturer <ARDExternalSampleDelegate>
@end
@interface ARDExternalSampleCapturer : RTC_OBJC_TYPE
(RTCVideoCapturer)<ARDExternalSampleDelegate> @end

View File

@@ -15,7 +15,7 @@
@implementation ARDExternalSampleCapturer
- (instancetype)initWithDelegate:(__weak id<RTCVideoCapturerDelegate>)delegate {
- (instancetype)initWithDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)delegate {
return [super initWithDelegate:delegate];
}
@@ -32,12 +32,14 @@
return;
}
RTCCVPixelBuffer *rtcPixelBuffer = [[RTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer];
RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
[[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
int64_t timeStampNs =
CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * NSEC_PER_SEC;
RTCVideoFrame *videoFrame = [[RTCVideoFrame alloc] initWithBuffer:rtcPixelBuffer
rotation:RTCVideoRotation_0
timeStampNs:timeStampNs];
RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
[[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
rotation:RTCVideoRotation_0
timeStampNs:timeStampNs];
[self.delegate capturer:self didCaptureVideoFrame:videoFrame];
}

View File

@@ -53,12 +53,12 @@ NS_ASSUME_NONNULL_BEGIN
/**
* Returns array of available video codecs.
*/
- (NSArray<RTCVideoCodecInfo *> *)availableVideoCodecs;
- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)availableVideoCodecs;
/**
* Returns current video codec setting from store if present or default (H264) otherwise.
*/
- (RTCVideoCodecInfo *)currentVideoCodecSettingFromStore;
- (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)currentVideoCodecSettingFromStore;
/**
* Stores the provided video codec setting into the store.
@@ -68,7 +68,7 @@ NS_ASSUME_NONNULL_BEGIN
* @param video codec settings the string to be stored.
* @return YES/NO depending on success.
*/
- (BOOL)storeVideoCodecSetting:(RTCVideoCodecInfo *)videoCodec;
- (BOOL)storeVideoCodecSetting:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodec;
/**
* Returns current max bitrate setting from store if present.

View File

@@ -27,9 +27,9 @@ NS_ASSUME_NONNULL_BEGIN
- (NSArray<NSString *> *)availableVideoResolutions {
NSMutableSet<NSArray<NSNumber *> *> *resolutions =
[[NSMutableSet<NSArray<NSNumber *> *> alloc] init];
for (AVCaptureDevice *device in [RTCCameraVideoCapturer captureDevices]) {
for (AVCaptureDevice *device in [RTC_OBJC_TYPE(RTCCameraVideoCapturer) captureDevices]) {
for (AVCaptureDeviceFormat *format in
[RTCCameraVideoCapturer supportedFormatsForDevice:device]) {
[RTC_OBJC_TYPE(RTCCameraVideoCapturer) supportedFormatsForDevice:device]) {
CMVideoDimensions resolution =
CMVideoFormatDescriptionGetDimensions(format.formatDescription);
NSArray<NSNumber *> *resolutionObject = @[ @(resolution.width), @(resolution.height) ];
@@ -70,17 +70,17 @@ NS_ASSUME_NONNULL_BEGIN
return YES;
}
- (NSArray<RTCVideoCodecInfo *> *)availableVideoCodecs {
return [RTCDefaultVideoEncoderFactory supportedCodecs];
- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)availableVideoCodecs {
return [RTC_OBJC_TYPE(RTCDefaultVideoEncoderFactory) supportedCodecs];
}
- (RTCVideoCodecInfo *)currentVideoCodecSettingFromStore {
- (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)currentVideoCodecSettingFromStore {
[self registerStoreDefaults];
NSData *codecData = [[self settingsStore] videoCodec];
return [NSKeyedUnarchiver unarchiveObjectWithData:codecData];
}
- (BOOL)storeVideoCodecSetting:(RTCVideoCodecInfo *)videoCodec {
- (BOOL)storeVideoCodecSetting:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodec {
if (![[self availableVideoCodecs] containsObject:videoCodec]) {
return NO;
}
@@ -149,7 +149,7 @@ NS_ASSUME_NONNULL_BEGIN
return [self availableVideoResolutions].firstObject;
}
- (RTCVideoCodecInfo *)defaultVideoCodecSetting {
- (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)defaultVideoCodecSetting {
return [self availableVideoCodecs].firstObject;
}

View File

@@ -32,25 +32,25 @@ typedef enum {
@interface ARDICECandidateMessage : ARDSignalingMessage
@property(nonatomic, readonly) RTCIceCandidate *candidate;
@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCIceCandidate) * candidate;
- (instancetype)initWithCandidate:(RTCIceCandidate *)candidate;
- (instancetype)initWithCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate;
@end
@interface ARDICECandidateRemovalMessage : ARDSignalingMessage
@property(nonatomic, readonly) NSArray<RTCIceCandidate *> *candidates;
@property(nonatomic, readonly) NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *candidates;
- (instancetype)initWithRemovedCandidates:(NSArray<RTCIceCandidate *> *)candidates;
- (instancetype)initWithRemovedCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates;
@end
@interface ARDSessionDescriptionMessage : ARDSignalingMessage
@property(nonatomic, readonly) RTCSessionDescription *sessionDescription;
@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCSessionDescription) * sessionDescription;
- (instancetype)initWithDescription:(RTCSessionDescription *)description;
- (instancetype)initWithDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)description;
@end

View File

@@ -45,19 +45,19 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
NSString *typeString = values[kARDSignalingMessageTypeKey];
ARDSignalingMessage *message = nil;
if ([typeString isEqualToString:@"candidate"]) {
RTCIceCandidate *candidate =
[RTCIceCandidate candidateFromJSONDictionary:values];
RTC_OBJC_TYPE(RTCIceCandidate) *candidate =
[RTC_OBJC_TYPE(RTCIceCandidate) candidateFromJSONDictionary:values];
message = [[ARDICECandidateMessage alloc] initWithCandidate:candidate];
} else if ([typeString isEqualToString:kARDTypeValueRemoveCandidates]) {
RTCLogInfo(@"Received remove-candidates message");
NSArray<RTCIceCandidate *> *candidates =
[RTCIceCandidate candidatesFromJSONDictionary:values];
NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *candidates =
[RTC_OBJC_TYPE(RTCIceCandidate) candidatesFromJSONDictionary:values];
message = [[ARDICECandidateRemovalMessage alloc]
initWithRemovedCandidates:candidates];
} else if ([typeString isEqualToString:@"offer"] ||
[typeString isEqualToString:@"answer"]) {
RTCSessionDescription *description =
[RTCSessionDescription descriptionFromJSONDictionary:values];
RTC_OBJC_TYPE(RTCSessionDescription) *description =
[RTC_OBJC_TYPE(RTCSessionDescription) descriptionFromJSONDictionary:values];
message =
[[ARDSessionDescriptionMessage alloc] initWithDescription:description];
} else if ([typeString isEqualToString:@"bye"]) {
@@ -78,7 +78,7 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
@synthesize candidate = _candidate;
- (instancetype)initWithCandidate:(RTCIceCandidate *)candidate {
- (instancetype)initWithCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate {
if (self = [super initWithType:kARDSignalingMessageTypeCandidate]) {
_candidate = candidate;
}
@@ -95,8 +95,7 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
@synthesize candidates = _candidates;
- (instancetype)initWithRemovedCandidates:(
NSArray<RTCIceCandidate *> *)candidates {
- (instancetype)initWithRemovedCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates {
NSParameterAssert(candidates.count);
if (self = [super initWithType:kARDSignalingMessageTypeCandidateRemoval]) {
_candidates = candidates;
@@ -105,9 +104,8 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
}
- (NSData *)JSONData {
return
[RTCIceCandidate JSONDataForIceCandidates:_candidates
withType:kARDTypeValueRemoveCandidates];
return [RTC_OBJC_TYPE(RTCIceCandidate) JSONDataForIceCandidates:_candidates
withType:kARDTypeValueRemoveCandidates];
}
@end
@@ -116,7 +114,7 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
@synthesize sessionDescription = _sessionDescription;
- (instancetype)initWithDescription:(RTCSessionDescription *)description {
- (instancetype)initWithDescription:(RTC_OBJC_TYPE(RTCSessionDescription) *)description {
ARDSignalingMessageType messageType = kARDSignalingMessageTypeOffer;
RTCSdpType sdpType = description.type;
switch (sdpType) {
@@ -127,8 +125,8 @@ static NSString * const kARDTypeValueRemoveCandidates = @"remove-candidates";
messageType = kARDSignalingMessageTypeAnswer;
break;
case RTCSdpTypePrAnswer:
NSAssert(NO, @"Unexpected type: %@",
[RTCSessionDescription stringForType:sdpType]);
NSAssert(
NO, @"Unexpected type: %@", [RTC_OBJC_TYPE(RTCSessionDescription) stringForType:sdpType]);
break;
}
if (self = [super initWithType:messageType]) {

View File

@@ -10,7 +10,9 @@
#import <Foundation/Foundation.h>
@class RTCLegacyStatsReport;
#import <WebRTC/RTCMacros.h>
@class RTC_OBJC_TYPE(RTCLegacyStatsReport);
/** Class used to accumulate stats information into a single displayable string.
*/
@@ -24,6 +26,6 @@
/** Parses the information in the stats report into an appropriate internal
* format used to generate the stats string.
*/
- (void)parseStatsReport:(RTCLegacyStatsReport *)statsReport;
- (void)parseStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport;
@end

View File

@@ -11,6 +11,7 @@
#import "ARDStatsBuilder.h"
#import <WebRTC/RTCLegacyStatsReport.h>
#import <WebRTC/RTCMacros.h>
#import "ARDBitrateTracker.h"
#import "ARDUtilities.h"
@@ -141,7 +142,7 @@
return result;
}
- (void)parseStatsReport:(RTCLegacyStatsReport *)statsReport {
- (void)parseStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
NSString *reportType = statsReport.type;
if ([reportType isEqualToString:@"ssrc"] &&
[statsReport.reportId rangeOfString:@"ssrc"].location != NSNotFound) {
@@ -179,7 +180,7 @@
}
}
- (void)parseBweStatsReport:(RTCLegacyStatsReport *)statsReport {
- (void)parseBweStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
[statsReport.values
enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
[self updateBweStatOfKey:key value:value];
@@ -206,7 +207,7 @@
}
}
- (void)parseConnectionStatsReport:(RTCLegacyStatsReport *)statsReport {
- (void)parseConnectionStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
NSString *activeConnection = statsReport.values[@"googActiveConnection"];
if (![activeConnection isEqualToString:@"true"]) {
return;
@@ -217,7 +218,7 @@
}];
}
- (void)parseSendSsrcStatsReport:(RTCLegacyStatsReport *)statsReport {
- (void)parseSendSsrcStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
NSDictionary *values = statsReport.values;
if ([values objectForKey:@"googFrameRateSent"]) {
// Video track.
@@ -238,7 +239,7 @@
}
}
- (void)parseAudioSendStatsReport:(RTCLegacyStatsReport *)statsReport {
- (void)parseAudioSendStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
[statsReport.values
enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
[self updateAudioSendStatOfKey:key value:value];
@@ -275,14 +276,14 @@
}
}
- (void)parseVideoSendStatsReport:(RTCLegacyStatsReport *)statsReport {
- (void)parseVideoSendStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
[statsReport.values
enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
[self updateVideoSendStatOfKey:key value:value];
}];
}
- (void)parseRecvSsrcStatsReport:(RTCLegacyStatsReport *)statsReport {
- (void)parseRecvSsrcStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
NSDictionary *values = statsReport.values;
if ([values objectForKey:@"googFrameWidthReceived"]) {
// Video track.
@@ -307,7 +308,7 @@
}
}
- (void)parseAudioRecvStatsReport:(RTCLegacyStatsReport *)statsReport {
- (void)parseAudioRecvStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
[statsReport.values
enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
[self updateAudioRecvStatOfKey:key value:value];
@@ -334,7 +335,7 @@
}
}
- (void)parseVideoRecvStatsReport:(RTCLegacyStatsReport *)statsReport {
- (void)parseVideoRecvStatsReport:(RTC_OBJC_TYPE(RTCLegacyStatsReport) *)statsReport {
[statsReport.values
enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
[self updateVideoRecvStatOfKey:key value:value];

View File

@@ -10,7 +10,9 @@
#import <Foundation/Foundation.h>
@class RTCIceServer;
#import <WebRTC/RTCMacros.h>
@class RTC_OBJC_TYPE(RTCIceServer);
@protocol ARDTURNClient <NSObject>

View File

@@ -65,10 +65,10 @@ static NSInteger kARDTURNClientErrorBadResponse = -1;
}
NSDictionary *turnResponseDict = [NSDictionary dictionaryWithJSONData:data];
NSMutableArray *turnServers = [NSMutableArray array];
[turnResponseDict[@"iceServers"] enumerateObjectsUsingBlock:
^(NSDictionary *obj, NSUInteger idx, BOOL *stop){
[turnServers addObject:[RTCIceServer serverFromJSONDictionary:obj]];
}];
[turnResponseDict[@"iceServers"]
enumerateObjectsUsingBlock:^(NSDictionary *obj, NSUInteger idx, BOOL *stop) {
[turnServers addObject:[RTC_OBJC_TYPE(RTCIceServer) serverFromJSONDictionary:obj]];
}];
if (!turnServers) {
NSError *responseError =
[[NSError alloc] initWithDomain:kARDTURNClientErrorDomain

View File

@@ -217,12 +217,12 @@ static NSString const *kARDWSSMessagePayloadKey = @"msg";
// Change message to answer, send back to server.
ARDSessionDescriptionMessage *sdpMessage =
(ARDSessionDescriptionMessage *)message;
RTCSessionDescription *description = sdpMessage.sessionDescription;
RTC_OBJC_TYPE(RTCSessionDescription) *description = sdpMessage.sessionDescription;
NSString *dsc = description.sdp;
dsc = [dsc stringByReplacingOccurrencesOfString:@"offer"
withString:@"answer"];
RTCSessionDescription *answerDescription =
[[RTCSessionDescription alloc] initWithType:RTCSdpTypeAnswer sdp:dsc];
RTC_OBJC_TYPE(RTCSessionDescription) *answerDescription =
[[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:RTCSdpTypeAnswer sdp:dsc];
ARDSignalingMessage *answer =
[[ARDSessionDescriptionMessage alloc]
initWithDescription:answerDescription];

View File

@@ -10,11 +10,13 @@
#import <WebRTC/RTCIceCandidate.h>
@interface RTCIceCandidate (JSON)
@interface RTC_OBJC_TYPE (RTCIceCandidate)
(JSON)
+ (RTCIceCandidate *)candidateFromJSONDictionary:(NSDictionary *)dictionary;
+ (NSArray<RTCIceCandidate *> *)candidatesFromJSONDictionary:(NSDictionary *)dictionary;
+ (NSData *)JSONDataForIceCandidates:(NSArray<RTCIceCandidate *> *)candidates
+ (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary : (NSDictionary *)dictionary;
+ (NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidatesFromJSONDictionary:
(NSDictionary *)dictionary;
+ (NSData *)JSONDataForIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates
withType:(NSString *)typeValue;
- (NSData *)JSONData;

View File

@@ -19,24 +19,24 @@ static NSString const *kRTCICECandidateMLineIndexKey = @"label";
static NSString const *kRTCICECandidateSdpKey = @"candidate";
static NSString const *kRTCICECandidatesTypeKey = @"candidates";
@implementation RTC_OBJC_TYPE (RTCIceCandidate)
(JSON)
@implementation RTCIceCandidate (JSON)
+ (RTCIceCandidate *)candidateFromJSONDictionary:(NSDictionary *)dictionary {
+ (RTC_OBJC_TYPE(RTCIceCandidate) *)candidateFromJSONDictionary : (NSDictionary *)dictionary {
NSString *mid = dictionary[kRTCICECandidateMidKey];
NSString *sdp = dictionary[kRTCICECandidateSdpKey];
NSNumber *num = dictionary[kRTCICECandidateMLineIndexKey];
NSInteger mLineIndex = [num integerValue];
return [[RTCIceCandidate alloc] initWithSdp:sdp
sdpMLineIndex:mLineIndex
sdpMid:mid];
return [[RTC_OBJC_TYPE(RTCIceCandidate) alloc] initWithSdp:sdp
sdpMLineIndex:mLineIndex
sdpMid:mid];
}
+ (NSData *)JSONDataForIceCandidates:(NSArray<RTCIceCandidate *> *)candidates
+ (NSData *)JSONDataForIceCandidates:(NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidates
withType:(NSString *)typeValue {
NSMutableArray *jsonCandidates =
[NSMutableArray arrayWithCapacity:candidates.count];
for (RTCIceCandidate *candidate in candidates) {
for (RTC_OBJC_TYPE(RTCIceCandidate) * candidate in candidates) {
NSDictionary *jsonCandidate = [candidate JSONDictionary];
[jsonCandidates addObject:jsonCandidate];
}
@@ -56,14 +56,14 @@ static NSString const *kRTCICECandidatesTypeKey = @"candidates";
return data;
}
+ (NSArray<RTCIceCandidate *> *)candidatesFromJSONDictionary:
+ (NSArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *)candidatesFromJSONDictionary:
(NSDictionary *)dictionary {
NSArray *jsonCandidates = dictionary[kRTCICECandidatesTypeKey];
NSMutableArray<RTCIceCandidate *> *candidates =
NSMutableArray<RTC_OBJC_TYPE(RTCIceCandidate) *> *candidates =
[NSMutableArray arrayWithCapacity:jsonCandidates.count];
for (NSDictionary *jsonCandidate in jsonCandidates) {
RTCIceCandidate *candidate =
[RTCIceCandidate candidateFromJSONDictionary:jsonCandidate];
RTC_OBJC_TYPE(RTCIceCandidate) *candidate =
[RTC_OBJC_TYPE(RTCIceCandidate) candidateFromJSONDictionary:jsonCandidate];
[candidates addObject:candidate];
}
return candidates;

View File

@@ -10,8 +10,9 @@
#import <WebRTC/RTCIceServer.h>
@interface RTCIceServer (JSON)
@interface RTC_OBJC_TYPE (RTCIceServer)
(JSON)
+ (RTCIceServer *)serverFromJSONDictionary:(NSDictionary *)dictionary;
+ (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary : (NSDictionary *)dictionary;
@end

View File

@@ -10,15 +10,16 @@
#import "RTCIceServer+JSON.h"
@implementation RTCIceServer (JSON)
@implementation RTC_OBJC_TYPE (RTCIceServer)
(JSON)
+ (RTCIceServer *)serverFromJSONDictionary:(NSDictionary *)dictionary {
+ (RTC_OBJC_TYPE(RTCIceServer) *)serverFromJSONDictionary : (NSDictionary *)dictionary {
NSArray *turnUrls = dictionary[@"urls"];
NSString *username = dictionary[@"username"] ?: @"";
NSString *credential = dictionary[@"credential"] ?: @"";
return [[RTCIceServer alloc] initWithURLStrings:turnUrls
username:username
credential:credential];
return [[RTC_OBJC_TYPE(RTCIceServer) alloc] initWithURLStrings:turnUrls
username:username
credential:credential];
}
@end

View File

@@ -10,9 +10,11 @@
#import <WebRTC/RTCSessionDescription.h>
@interface RTCSessionDescription (JSON)
@interface RTC_OBJC_TYPE (RTCSessionDescription)
(JSON)
+ (RTCSessionDescription *)descriptionFromJSONDictionary:(NSDictionary *)dictionary;
+ (RTC_OBJC_TYPE(RTCSessionDescription) *)descriptionFromJSONDictionary
: (NSDictionary *)dictionary;
- (NSData *)JSONData;
@end

View File

@@ -13,14 +13,15 @@
static NSString const *kRTCSessionDescriptionTypeKey = @"type";
static NSString const *kRTCSessionDescriptionSdpKey = @"sdp";
@implementation RTCSessionDescription (JSON)
@implementation RTC_OBJC_TYPE (RTCSessionDescription)
(JSON)
+ (RTCSessionDescription *)descriptionFromJSONDictionary:
(NSDictionary *)dictionary {
+ (RTC_OBJC_TYPE(RTCSessionDescription) *)descriptionFromJSONDictionary
: (NSDictionary *)dictionary {
NSString *typeString = dictionary[kRTCSessionDescriptionTypeKey];
RTCSdpType type = [[self class] typeForString:typeString];
NSString *sdp = dictionary[kRTCSessionDescriptionSdpKey];
return [[RTCSessionDescription alloc] initWithType:type sdp:sdp];
return [[RTC_OBJC_TYPE(RTCSessionDescription) alloc] initWithType:type sdp:sdp];
}
- (NSData *)JSONData {

View File

@@ -10,7 +10,9 @@
#import <Foundation/Foundation.h>
@class RTCFileVideoCapturer;
#import <WebRTC/RTCMacros.h>
@class RTC_OBJC_TYPE(RTCFileVideoCapturer);
/**
* Controls a file capturer.
@@ -23,7 +25,7 @@ NS_CLASS_AVAILABLE_IOS(10)
*
* @param capturer The capturer to be controlled.
*/
- (instancetype)initWithCapturer:(RTCFileVideoCapturer *)capturer;
- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer;
/**
* Starts the file capturer.

View File

@@ -14,14 +14,14 @@
@interface ARDFileCaptureController ()
@property(nonatomic, strong) RTCFileVideoCapturer *fileCapturer;
@property(nonatomic, strong) RTC_OBJC_TYPE(RTCFileVideoCapturer) * fileCapturer;
@end
@implementation ARDFileCaptureController
@synthesize fileCapturer = _fileCapturer;
- (instancetype)initWithCapturer:(RTCFileVideoCapturer *)capturer {
- (instancetype)initWithCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)capturer {
if (self = [super init]) {
_fileCapturer = capturer;
}

View File

@@ -28,10 +28,9 @@ static NSString *const barButtonImageString = @"ic_settings_black_24dp.png";
// Launch argument to be passed to indicate that the app should start loopback immediatly
static NSString *const loopbackLaunchProcessArgument = @"loopback";
@interface ARDMainViewController () <
ARDMainViewDelegate,
ARDVideoCallViewControllerDelegate,
RTCAudioSessionDelegate>
@interface ARDMainViewController () <ARDMainViewDelegate,
ARDVideoCallViewControllerDelegate,
RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
@property(nonatomic, strong) ARDMainView *mainView;
@property(nonatomic, strong) AVAudioPlayer *audioPlayer;
@end
@@ -57,13 +56,13 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
self.view = _mainView;
[self addSettingsBarButton];
RTCAudioSessionConfiguration *webRTCConfig =
[RTCAudioSessionConfiguration webRTCConfiguration];
RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *webRTCConfig =
[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) webRTCConfiguration];
webRTCConfig.categoryOptions = webRTCConfig.categoryOptions |
AVAudioSessionCategoryOptionDefaultToSpeaker;
[RTCAudioSessionConfiguration setWebRTCConfiguration:webRTCConfig];
[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) setWebRTCConfiguration:webRTCConfig];
RTCAudioSession *session = [RTCAudioSession sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session addDelegate:self];
[self configureAudioSession];
@@ -124,7 +123,7 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
RTCAudioSession *session = [RTCAudioSession sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
session.useManualAudio = [settingsModel currentUseManualAudioConfigSettingFromStore];
session.isAudioEnabled = NO;
@@ -158,32 +157,33 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
[self restartAudioPlayerIfNeeded];
}];
}
RTCAudioSession *session = [RTCAudioSession sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
session.isAudioEnabled = NO;
}
#pragma mark - RTCAudioSessionDelegate
#pragma mark - RTC_OBJC_TYPE(RTCAudioSessionDelegate)
- (void)audioSessionDidStartPlayOrRecord:(RTCAudioSession *)session {
- (void)audioSessionDidStartPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
// Stop playback on main queue and then configure WebRTC.
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain
block:^{
if (self.mainView.isAudioLoopPlaying) {
RTCLog(@"Stopping audio loop due to WebRTC start.");
[self.audioPlayer stop];
}
RTCLog(@"Setting isAudioEnabled to YES.");
session.isAudioEnabled = YES;
}];
[RTC_OBJC_TYPE(RTCDispatcher)
dispatchAsyncOnType:RTCDispatcherTypeMain
block:^{
if (self.mainView.isAudioLoopPlaying) {
RTCLog(@"Stopping audio loop due to WebRTC start.");
[self.audioPlayer stop];
}
RTCLog(@"Setting isAudioEnabled to YES.");
session.isAudioEnabled = YES;
}];
}
- (void)audioSessionDidStopPlayOrRecord:(RTCAudioSession *)session {
- (void)audioSessionDidStopPlayOrRecord:(RTC_OBJC_TYPE(RTCAudioSession) *)session {
// WebRTC is done with the audio session. Restart playback.
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain
block:^{
RTCLog(@"audioSessionDidStopPlayOrRecord");
[self restartAudioPlayerIfNeeded];
}];
[RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeMain
block:^{
RTCLog(@"audioSessionDidStopPlayOrRecord");
[self restartAudioPlayerIfNeeded];
}];
}
#pragma mark - Private
@@ -202,13 +202,13 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
}
- (void)configureAudioSession {
RTCAudioSessionConfiguration *configuration =
[[RTCAudioSessionConfiguration alloc] init];
RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *configuration =
[[RTC_OBJC_TYPE(RTCAudioSessionConfiguration) alloc] init];
configuration.category = AVAudioSessionCategoryAmbient;
configuration.categoryOptions = AVAudioSessionCategoryOptionDuckOthers;
configuration.mode = AVAudioSessionModeDefault;
RTCAudioSession *session = [RTCAudioSession sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session lockForConfiguration];
BOOL hasSucceeded = NO;
NSError *error = nil;

View File

@@ -62,7 +62,7 @@ typedef NS_ENUM(int, ARDAudioSettingsOptions) {
return [_settingsModel availableVideoResolutions];
}
- (NSArray<RTCVideoCodecInfo *> *)videoCodecArray {
- (NSArray<RTC_OBJC_TYPE(RTCVideoCodecInfo) *> *)videoCodecArray {
return [_settingsModel availableVideoCodecs];
}
@@ -214,7 +214,7 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
cell = [[UITableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
reuseIdentifier:dequeueIdentifier];
}
RTCVideoCodecInfo *codec = self.videoCodecArray[indexPath.row];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *codec = self.videoCodecArray[indexPath.row];
cell.textLabel.text = [codec humanReadableDescription];
if ([codec isEqualToCodecInfo:[_settingsModel currentVideoCodecSettingFromStore]]) {
cell.accessoryType = UITableViewCellAccessoryCheckmark;
@@ -231,7 +231,7 @@ updateListSelectionAtIndexPath:(NSIndexPath *)indexPath
updateListSelectionAtIndexPath:indexPath
inSection:ARDSettingsSectionVideoCodec];
RTCVideoCodecInfo *videoCodec = self.videoCodecArray[indexPath.row];
RTC_OBJC_TYPE(RTCVideoCodecInfo) *videoCodec = self.videoCodecArray[indexPath.row];
[_settingsModel storeVideoCodecSetting:videoCodec];
}

View File

@@ -35,7 +35,7 @@
}
- (void)setStats:(NSArray *)stats {
for (RTCLegacyStatsReport *report in stats) {
for (RTC_OBJC_TYPE(RTCLegacyStatsReport) * report in stats) {
[_statsBuilder parseStatsReport:report];
}
_statsLabel.text = _statsBuilder.statsString;

View File

@@ -37,8 +37,8 @@
@interface ARDVideoCallView : UIView
@property(nonatomic, readonly) UILabel *statusLabel;
@property(nonatomic, readonly) RTCCameraPreviewView *localVideoView;
@property(nonatomic, readonly) __kindof UIView<RTCVideoRenderer> *remoteVideoView;
@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCCameraPreviewView) * localVideoView;
@property(nonatomic, readonly) __kindof UIView<RTC_OBJC_TYPE(RTCVideoRenderer)> *remoteVideoView;
@property(nonatomic, readonly) ARDStatsView *statsView;
@property(nonatomic, weak) id<ARDVideoCallViewDelegate> delegate;

View File

@@ -25,7 +25,7 @@ static CGFloat const kLocalVideoViewSize = 120;
static CGFloat const kLocalVideoViewPadding = 8;
static CGFloat const kStatusBarHeight = 20;
@interface ARDVideoCallView () <RTCVideoViewDelegate>
@interface ARDVideoCallView () <RTC_OBJC_TYPE (RTCVideoViewDelegate)>
@end
@implementation ARDVideoCallView {
@@ -45,16 +45,17 @@ static CGFloat const kStatusBarHeight = 20;
if (self = [super initWithFrame:frame]) {
#if defined(RTC_SUPPORTS_METAL)
_remoteVideoView = [[RTCMTLVideoView alloc] initWithFrame:CGRectZero];
_remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLVideoView) alloc] initWithFrame:CGRectZero];
#else
RTCEAGLVideoView *remoteView = [[RTCEAGLVideoView alloc] initWithFrame:CGRectZero];
RTC_OBJC_TYPE(RTCEAGLVideoView) *remoteView =
[[RTC_OBJC_TYPE(RTCEAGLVideoView) alloc] initWithFrame:CGRectZero];
remoteView.delegate = self;
_remoteVideoView = remoteView;
#endif
[self addSubview:_remoteVideoView];
_localVideoView = [[RTCCameraPreviewView alloc] initWithFrame:CGRectZero];
_localVideoView = [[RTC_OBJC_TYPE(RTCCameraPreviewView) alloc] initWithFrame:CGRectZero];
[self addSubview:_localVideoView];
_statsView = [[ARDStatsView alloc] initWithFrame:CGRectZero];
@@ -175,9 +176,9 @@ static CGFloat const kStatusBarHeight = 20;
CGPointMake(CGRectGetMidX(bounds), CGRectGetMidY(bounds));
}
#pragma mark - RTCVideoViewDelegate
#pragma mark - RTC_OBJC_TYPE(RTCVideoViewDelegate)
- (void)videoView:(id<RTCVideoRenderer>)videoView didChangeVideoSize:(CGSize)size {
- (void)videoView:(id<RTC_OBJC_TYPE(RTCVideoRenderer)>)videoView didChangeVideoSize:(CGSize)size {
if (videoView == _remoteVideoView) {
_remoteVideoSize = size;
}

View File

@@ -24,15 +24,15 @@
@interface ARDVideoCallViewController () <ARDAppClientDelegate,
ARDVideoCallViewDelegate,
RTCAudioSessionDelegate>
@property(nonatomic, strong) RTCVideoTrack *remoteVideoTrack;
RTC_OBJC_TYPE (RTCAudioSessionDelegate)>
@property(nonatomic, strong) RTC_OBJC_TYPE(RTCVideoTrack) * remoteVideoTrack;
@property(nonatomic, readonly) ARDVideoCallView *videoCallView;
@property(nonatomic, assign) AVAudioSessionPortOverride portOverride;
@end
@implementation ARDVideoCallViewController {
ARDAppClient *_client;
RTCVideoTrack *_remoteVideoTrack;
RTC_OBJC_TYPE(RTCVideoTrack) * _remoteVideoTrack;
ARDCaptureController *_captureController;
ARDFileCaptureController *_fileCaptureController NS_AVAILABLE_IOS(10);
}
@@ -62,7 +62,7 @@
[self statusTextForState:RTCIceConnectionStateNew];
self.view = _videoCallView;
RTCAudioSession *session = [RTCAudioSession sharedInstance];
RTC_OBJC_TYPE(RTCAudioSession) *session = [RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session addDelegate:self];
}
@@ -100,7 +100,7 @@
}
- (void)appClient:(ARDAppClient *)client
didCreateLocalCapturer:(RTCCameraVideoCapturer *)localCapturer {
didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
_videoCallView.localVideoView.captureSession = localCapturer.captureSession;
ARDSettingsModel *settingsModel = [[ARDSettingsModel alloc] init];
_captureController =
@@ -109,7 +109,7 @@
}
- (void)appClient:(ARDAppClient *)client
didCreateLocalFileCapturer:(RTCFileVideoCapturer *)fileCapturer {
didCreateLocalFileCapturer:(RTC_OBJC_TYPE(RTCFileVideoCapturer) *)fileCapturer {
#if defined(__IPHONE_11_0) && (__IPHONE_OS_VERSION_MAX_ALLOWED >= __IPHONE_11_0)
if (@available(iOS 10, *)) {
_fileCaptureController = [[ARDFileCaptureController alloc] initWithCapturer:fileCapturer];
@@ -119,11 +119,11 @@
}
- (void)appClient:(ARDAppClient *)client
didReceiveLocalVideoTrack:(RTCVideoTrack *)localVideoTrack {
didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack {
}
- (void)appClient:(ARDAppClient *)client
didReceiveRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
self.remoteVideoTrack = remoteVideoTrack;
__weak ARDVideoCallViewController *weakSelf = self;
dispatch_async(dispatch_get_main_queue(), ^{
@@ -163,19 +163,21 @@
if (_portOverride == AVAudioSessionPortOverrideNone) {
override = AVAudioSessionPortOverrideSpeaker;
}
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeAudioSession
block:^{
RTCAudioSession *session = [RTCAudioSession sharedInstance];
[session lockForConfiguration];
NSError *error = nil;
if ([session overrideOutputAudioPort:override error:&error]) {
self.portOverride = override;
} else {
RTCLogError(@"Error overriding output port: %@",
error.localizedDescription);
}
[session unlockForConfiguration];
}];
[RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeAudioSession
block:^{
RTC_OBJC_TYPE(RTCAudioSession) *session =
[RTC_OBJC_TYPE(RTCAudioSession) sharedInstance];
[session lockForConfiguration];
NSError *error = nil;
if ([session overrideOutputAudioPort:override
error:&error]) {
self.portOverride = override;
} else {
RTCLogError(@"Error overriding output port: %@",
error.localizedDescription);
}
[session unlockForConfiguration];
}];
}
- (void)videoCallViewDidEnableStats:(ARDVideoCallView *)view {
@@ -183,16 +185,16 @@
_videoCallView.statsView.hidden = NO;
}
#pragma mark - RTCAudioSessionDelegate
#pragma mark - RTC_OBJC_TYPE(RTCAudioSessionDelegate)
- (void)audioSession:(RTCAudioSession *)audioSession
- (void)audioSession:(RTC_OBJC_TYPE(RTCAudioSession) *)audioSession
didDetectPlayoutGlitch:(int64_t)totalNumberOfGlitches {
RTCLog(@"Audio session detected glitch, total: %lld", totalNumberOfGlitches);
}
#pragma mark - Private
- (void)setRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
- (void)setRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
if (_remoteVideoTrack == remoteVideoTrack) {
return;
}

View File

@@ -10,8 +10,9 @@
#import <WebRTC/RTCVideoCodecInfo.h>
@interface RTCVideoCodecInfo (HumanReadable)
@interface RTC_OBJC_TYPE (RTCVideoCodecInfo)
(HumanReadable)
- (NSString *)humanReadableDescription;
- (NSString *)humanReadableDescription;
@end

View File

@@ -12,13 +12,14 @@
#import <WebRTC/RTCH264ProfileLevelId.h>
@implementation RTCVideoCodecInfo (HumanReadable)
@implementation RTC_OBJC_TYPE (RTCVideoCodecInfo)
(HumanReadable)
- (NSString *)humanReadableDescription {
- (NSString *)humanReadableDescription {
if ([self.name isEqualToString:@"H264"]) {
NSString *profileId = self.parameters[@"profile-level-id"];
RTCH264ProfileLevelId *profileLevelId =
[[RTCH264ProfileLevelId alloc] initWithHexString:profileId];
RTC_OBJC_TYPE(RTCH264ProfileLevelId) *profileLevelId =
[[RTC_OBJC_TYPE(RTCH264ProfileLevelId) alloc] initWithHexString:profileId];
if (profileLevelId.profile == RTCH264ProfileConstrainedHigh ||
profileLevelId.profile == RTCH264ProfileHigh) {
return @"H264 (High)";

View File

@@ -20,14 +20,14 @@
@implementation ARDBroadcastSampleHandler {
ARDAppClient *_client;
RTCCallbackLogger *_callbackLogger;
RTC_OBJC_TYPE(RTCCallbackLogger) * _callbackLogger;
}
@synthesize capturer = _capturer;
- (instancetype)init {
if (self = [super init]) {
_callbackLogger = [[RTCCallbackLogger alloc] init];
_callbackLogger = [[RTC_OBJC_TYPE(RTCCallbackLogger) alloc] init];
os_log_t rtc_os_log = os_log_create("com.google.AppRTCMobile", "RTCLog");
[_callbackLogger start:^(NSString *logMessage) {
os_log(rtc_os_log, "%{public}s", [logMessage cStringUsingEncoding:NSUTF8StringEncoding]);
@@ -104,7 +104,7 @@
}
- (void)appClient:(ARDAppClient *)client
didCreateLocalCapturer:(RTCCameraVideoCapturer *)localCapturer {
didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
}
- (void)appClient:(ARDAppClient *)client
@@ -113,11 +113,11 @@
}
- (void)appClient:(ARDAppClient *)client
didReceiveLocalVideoTrack:(RTCVideoTrack *)localVideoTrack {
didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack {
}
- (void)appClient:(ARDAppClient *)client
didReceiveRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
}
- (void)appClient:(ARDAppClient *)client didGetStats:(NSArray *)stats {

View File

@@ -37,15 +37,15 @@ static NSUInteger const kBottomViewHeight = 200;
@interface APPRTCMainView : NSView
@property(nonatomic, weak) id<APPRTCMainViewDelegate> delegate;
@property(nonatomic, readonly) NSView<RTCVideoRenderer>* localVideoView;
@property(nonatomic, readonly) NSView<RTCVideoRenderer>* remoteVideoView;
@property(nonatomic, readonly) NSView<RTC_OBJC_TYPE(RTCVideoRenderer)>* localVideoView;
@property(nonatomic, readonly) NSView<RTC_OBJC_TYPE(RTCVideoRenderer)>* remoteVideoView;
@property(nonatomic, readonly) NSTextView* logView;
- (void)displayLogMessage:(NSString*)message;
@end
@interface APPRTCMainView () <NSTextFieldDelegate, RTCNSGLVideoViewDelegate>
@interface APPRTCMainView () <NSTextFieldDelegate, RTC_OBJC_TYPE (RTCNSGLVideoViewDelegate)>
@end
@implementation APPRTCMainView {
NSScrollView* _scrollView;
@@ -178,10 +178,9 @@ static NSUInteger const kBottomViewHeight = 200;
[self setNeedsUpdateConstraints:YES];
}
#pragma mark - RTCNSGLVideoViewDelegate
#pragma mark - RTC_OBJC_TYPE(RTCNSGLVideoViewDelegate)
- (void)videoView:(RTCNSGLVideoView*)videoView
didChangeVideoSize:(NSSize)size {
- (void)videoView:(RTC_OBJC_TYPE(RTCNSGLVideoView) *)videoView didChangeVideoSize:(NSSize)size {
if (videoView == _remoteVideoView) {
_remoteVideoSize = size;
} else if (videoView == _localVideoView) {
@@ -222,9 +221,10 @@ static NSUInteger const kBottomViewHeight = 200;
// If not we're providing sensible default.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wpartial-availability"
if ([RTCMTLNSVideoView class] && [RTCMTLNSVideoView isMetalAvailable]) {
_remoteVideoView = [[RTCMTLNSVideoView alloc] initWithFrame:NSZeroRect];
_localVideoView = [[RTCMTLNSVideoView alloc] initWithFrame:NSZeroRect];
if ([RTC_OBJC_TYPE(RTCMTLNSVideoView) class] &&
[RTC_OBJC_TYPE(RTCMTLNSVideoView) isMetalAvailable]) {
_remoteVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
_localVideoView = [[RTC_OBJC_TYPE(RTCMTLNSVideoView) alloc] initWithFrame:NSZeroRect];
}
#pragma clang diagnostic pop
if (_remoteVideoView == nil) {
@@ -238,13 +238,13 @@ static NSUInteger const kBottomViewHeight = 200;
NSOpenGLPixelFormat* pixelFormat =
[[NSOpenGLPixelFormat alloc] initWithAttributes:attributes];
RTCNSGLVideoView* remote =
[[RTCNSGLVideoView alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat];
RTC_OBJC_TYPE(RTCNSGLVideoView)* remote =
[[RTC_OBJC_TYPE(RTCNSGLVideoView) alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat];
remote.delegate = self;
_remoteVideoView = remote;
RTCNSGLVideoView* local =
[[RTCNSGLVideoView alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat];
RTC_OBJC_TYPE(RTCNSGLVideoView)* local =
[[RTC_OBJC_TYPE(RTCNSGLVideoView) alloc] initWithFrame:NSZeroRect pixelFormat:pixelFormat];
local.delegate = self;
_localVideoView = local;
}
@@ -299,8 +299,8 @@ static NSUInteger const kBottomViewHeight = 200;
@implementation APPRTCViewController {
ARDAppClient* _client;
RTCVideoTrack* _localVideoTrack;
RTCVideoTrack* _remoteVideoTrack;
RTC_OBJC_TYPE(RTCVideoTrack) * _localVideoTrack;
RTC_OBJC_TYPE(RTCVideoTrack) * _remoteVideoTrack;
ARDCaptureController* _captureController;
}
@@ -357,21 +357,21 @@ static NSUInteger const kBottomViewHeight = 200;
}
- (void)appClient:(ARDAppClient*)client
didCreateLocalCapturer:(RTCCameraVideoCapturer*)localCapturer {
didCreateLocalCapturer:(RTC_OBJC_TYPE(RTCCameraVideoCapturer) *)localCapturer {
_captureController =
[[ARDCaptureController alloc] initWithCapturer:localCapturer
settings:[[ARDSettingsModel alloc] init]];
[_captureController startCapture];
}
- (void)appClient:(ARDAppClient *)client
didReceiveLocalVideoTrack:(RTCVideoTrack *)localVideoTrack {
- (void)appClient:(ARDAppClient*)client
didReceiveLocalVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)localVideoTrack {
_localVideoTrack = localVideoTrack;
[_localVideoTrack addRenderer:self.mainView.localVideoView];
}
- (void)appClient:(ARDAppClient *)client
didReceiveRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
- (void)appClient:(ARDAppClient*)client
didReceiveRemoteVideoTrack:(RTC_OBJC_TYPE(RTCVideoTrack) *)remoteVideoTrack {
_remoteVideoTrack = remoteVideoTrack;
[_remoteVideoTrack addRenderer:self.mainView.remoteVideoView];
}

View File

@@ -196,8 +196,8 @@
// TODO(tkchin): Figure out why DTLS-SRTP constraint causes thread assertion
// crash in Debug.
caller.defaultPeerConnectionConstraints =
[[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil
optionalConstraints:nil];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
optionalConstraints:nil];
weakCaller = caller;
answerer = [self createAppClientForRoomId:roomId
@@ -214,8 +214,8 @@
// TODO(tkchin): Figure out why DTLS-SRTP constraint causes thread assertion
// crash in Debug.
answerer.defaultPeerConnectionConstraints =
[[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil
optionalConstraints:nil];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
optionalConstraints:nil];
weakAnswerer = answerer;
// Kick off connection.
@@ -248,8 +248,8 @@
connectedHandler:^{}
localVideoTrackHandler:^{ [localVideoTrackExpectation fulfill]; }];
caller.defaultPeerConnectionConstraints =
[[RTCMediaConstraints alloc] initWithMandatoryConstraints:nil
optionalConstraints:nil];
[[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:nil
optionalConstraints:nil];
// Kick off connection.
[caller connectToRoomWithId:roomId

View File

@@ -31,7 +31,7 @@ NS_CLASS_AVAILABLE_IOS(10)
- (void)setUp {
[super setUp];
self.fileCapturerMock = OCMClassMock([RTCFileVideoCapturer class]);
self.fileCapturerMock = OCMClassMock([RTC_OBJC_TYPE(RTCFileVideoCapturer) class]);
self.fileCaptureController =
[[ARDFileCaptureController alloc] initWithCapturer:self.fileCapturerMock];
}