Explicitly retain self in objc blocks to avoid compiler warning.

Implicitly retaining the self pointer from a block triggers the `-Wimplicit-retain-self` compiler warning, which asks us to mention self explicitly when the capture is intended behavior. Since these captures are intended, make them explicit.
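
A minimal sketch of the two shapes used in this CL (lines taken from the
diff below). An ivar reference inside a block is an implicit strong
capture of self and now warns:

    dispatch_async(dispatch_get_main_queue(), ^{
      [_delegate appClient:self didChangeConnectionState:newState];  // warns
    });

Making the capture explicit through self silences it:

    dispatch_async(dispatch_get_main_queue(), ^{
      [self.delegate appClient:self didChangeConnectionState:newState];
    });

Where a strong capture is not wanted, the existing weak/strong dance
already satisfies the warning, as long as the block body only touches
strongSelf:

    __weak ARDAppClient *weakSelf = self;
    [self.peerConnection setLocalDescription:sdp
               completionHandler:^(NSError *error) {
                 ARDAppClient *strongSelf = weakSelf;
                 // ... message strongSelf here, never bare ivars ...
               }];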

Bug: webrtc:9971
Change-Id: If77a67168d8a65ced78d5119b9a7332391d20bc9
Reviewed-on: https://webrtc-review.googlesource.com/c/109641
Commit-Queue: Jiawei Ou <ouj@fb.com>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Reviewed-by: Tommi <tommi@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#25609}
Jiawei Ou <ouj@fb.com>, 2018-11-09 13:55:45 -08:00 (committed by Commit Bot)
parent 0c32e33b48
commit 4aeb35b6d0

10 changed files with 248 additions and 208 deletions


@@ -400,7 +400,7 @@ static int const kKbpsMultiplier = 1000;
     didChangeIceConnectionState:(RTCIceConnectionState)newState {
   RTCLog(@"ICE state changed: %ld", (long)newState);
   dispatch_async(dispatch_get_main_queue(), ^{
-    [_delegate appClient:self didChangeConnectionState:newState];
+    [self.delegate appClient:self didChangeConnectionState:newState];
  });
 }
@@ -450,11 +450,11 @@ static int const kKbpsMultiplier = 1000;
         [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
                                    code:kARDAppClientErrorCreateSDP
                                userInfo:userInfo];
-    [_delegate appClient:self didError:sdpError];
+    [self.delegate appClient:self didError:sdpError];
     return;
   }
   __weak ARDAppClient *weakSelf = self;
-  [_peerConnection setLocalDescription:sdp
+  [self.peerConnection setLocalDescription:sdp
              completionHandler:^(NSError *error) {
                ARDAppClient *strongSelf = weakSelf;
                [strongSelf peerConnection:strongSelf.peerConnection
@@ -480,17 +480,16 @@ static int const kKbpsMultiplier = 1000;
         [[NSError alloc] initWithDomain:kARDAppClientErrorDomain
                                    code:kARDAppClientErrorSetSDP
                                userInfo:userInfo];
-    [_delegate appClient:self didError:sdpError];
+    [self.delegate appClient:self didError:sdpError];
     return;
   }
   // If we're answering and we've just set the remote offer we need to create
   // an answer and set the local description.
-  if (!_isInitiator && !_peerConnection.localDescription) {
+  if (!self.isInitiator && !self.peerConnection.localDescription) {
     RTCMediaConstraints *constraints = [self defaultAnswerConstraints];
     __weak ARDAppClient *weakSelf = self;
-    [_peerConnection answerForConstraints:constraints
-                        completionHandler:^(RTCSessionDescription *sdp,
-                                            NSError *error) {
+    [self.peerConnection answerForConstraints:constraints
+                 completionHandler:^(RTCSessionDescription *sdp, NSError *error) {
                           ARDAppClient *strongSelf = weakSelf;
                           [strongSelf peerConnection:strongSelf.peerConnection
                               didCreateSessionDescription:sdp


@@ -167,32 +167,26 @@
   return deltaFramesEncoded != 0 ? deltaQPSum / deltaFramesEncoded : 0;
 }

-- (void)parseBweStatsReport:(RTCLegacyStatsReport *)statsReport {
-  [statsReport.values enumerateKeysAndObjectsUsingBlock:^(
-                          NSString *key, NSString *value, BOOL *stop) {
+- (void)updateBweStatOfKey:(NSString *)key value:(NSString *)value {
   if ([key isEqualToString:@"googAvailableSendBandwidth"]) {
-    _availableSendBw =
-        [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
+    _availableSendBw = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
   } else if ([key isEqualToString:@"googAvailableReceiveBandwidth"]) {
-    _availableRecvBw =
-        [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
+    _availableRecvBw = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
   } else if ([key isEqualToString:@"googActualEncBitrate"]) {
-    _actualEncBitrate =
-        [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
+    _actualEncBitrate = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
   } else if ([key isEqualToString:@"googTargetEncBitrate"]) {
-    _targetEncBitrate =
-        [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
+    _targetEncBitrate = [ARDBitrateTracker bitrateStringForBitrate:value.doubleValue];
   }
+}
+
+- (void)parseBweStatsReport:(RTCLegacyStatsReport *)statsReport {
+  [statsReport.values
+      enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
+        [self updateBweStatOfKey:key value:value];
       }];
 }

-- (void)parseConnectionStatsReport:(RTCLegacyStatsReport *)statsReport {
-  NSString *activeConnection = statsReport.values[@"googActiveConnection"];
-  if (![activeConnection isEqualToString:@"true"]) {
-    return;
-  }
-  [statsReport.values enumerateKeysAndObjectsUsingBlock:^(
-                          NSString *key, NSString *value, BOOL *stop) {
+- (void)updateConnectionStatOfKey:(NSString *)key value:(NSString *)value {
   if ([key isEqualToString:@"googRtt"]) {
     _connRtt = value;
   } else if ([key isEqualToString:@"googLocalCandidateType"]) {
@@ -210,6 +204,16 @@
     [_connSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
     _connSendBitrate = _connSendBitrateTracker.bitrateString;
   }
+}
+
+- (void)parseConnectionStatsReport:(RTCLegacyStatsReport *)statsReport {
+  NSString *activeConnection = statsReport.values[@"googActiveConnection"];
+  if (![activeConnection isEqualToString:@"true"]) {
+    return;
+  }
+  [statsReport.values
+      enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
+        [self updateConnectionStatOfKey:key value:value];
       }];
 }
@@ -224,9 +228,7 @@
   }
 }

-- (void)parseAudioSendStatsReport:(RTCLegacyStatsReport *)statsReport {
-  [statsReport.values enumerateKeysAndObjectsUsingBlock:^(
-                          NSString *key, NSString *value, BOOL *stop) {
+- (void)updateAudioSendStatOfKey:(NSString *)key value:(NSString *)value {
   if ([key isEqualToString:@"googCodecName"]) {
     _audioSendCodec = value;
   } else if ([key isEqualToString:@"bytesSent"]) {
@@ -234,12 +236,16 @@
     [_audioSendBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
     _audioSendBitrate = _audioSendBitrateTracker.bitrateString;
   }
+}
+
+- (void)parseAudioSendStatsReport:(RTCLegacyStatsReport *)statsReport {
+  [statsReport.values
+      enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
+        [self updateAudioSendStatOfKey:key value:value];
       }];
 }

-- (void)parseVideoSendStatsReport:(RTCLegacyStatsReport *)statsReport {
-  [statsReport.values enumerateKeysAndObjectsUsingBlock:^(
-                          NSString *key, NSString *value, BOOL *stop) {
+- (void)updateVideoSendStatOfKey:(NSString *)key value:(NSString *)value {
   if ([key isEqualToString:@"googCodecName"]) {
     _videoSendCodec = value;
   } else if ([key isEqualToString:@"googFrameHeightInput"]) {
@@ -267,6 +273,12 @@
     _oldFramesEncoded = _framesEncoded;
     _framesEncoded = value.integerValue;
   }
+}
+
+- (void)parseVideoSendStatsReport:(RTCLegacyStatsReport *)statsReport {
+  [statsReport.values
+      enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
+        [self updateVideoSendStatOfKey:key value:value];
       }];
 }
@@ -281,9 +293,7 @@
   }
 }

-- (void)parseAudioRecvStatsReport:(RTCLegacyStatsReport *)statsReport {
-  [statsReport.values enumerateKeysAndObjectsUsingBlock:^(
-                          NSString *key, NSString *value, BOOL *stop) {
+- (void)updateAudioRecvStatOfKey:(NSString *)key value:(NSString *)value {
   if ([key isEqualToString:@"googCodecName"]) {
     _audioRecvCodec = value;
   } else if ([key isEqualToString:@"bytesReceived"]) {
@@ -295,12 +305,16 @@
   } else if ([key isEqualToString:@"googCurrentDelayMs"]) {
     _audioCurrentDelay = value;
   }
+}
+
+- (void)parseAudioRecvStatsReport:(RTCLegacyStatsReport *)statsReport {
+  [statsReport.values
+      enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
+        [self updateAudioRecvStatOfKey:key value:value];
       }];
 }

-- (void)parseVideoRecvStatsReport:(RTCLegacyStatsReport *)statsReport {
-  [statsReport.values enumerateKeysAndObjectsUsingBlock:^(
-                          NSString *key, NSString *value, BOOL *stop) {
+- (void)updateVideoRecvStatOfKey:(NSString *)key value:(NSString *)value {
   if ([key isEqualToString:@"googFrameHeightReceived"]) {
     _videoRecvHeight = value;
   } else if ([key isEqualToString:@"googFrameWidthReceived"]) {
@@ -318,6 +332,12 @@
     [_videoRecvBitrateTracker updateBitrateWithCurrentByteCount:byteCount];
     _videoRecvBitrate = _videoRecvBitrateTracker.bitrateString;
   }
+}
+
+- (void)parseVideoRecvStatsReport:(RTCLegacyStatsReport *)statsReport {
+  [statsReport.values
+      enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
+        [self updateVideoRecvStatOfKey:key value:value];
       }];
 }
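
The ARDStatsBuilder hunks above use a third shape: hoist the block body
into an ordinary method, so the only capture left in the block is the
explicit [self ...] send. A minimal sketch of that pattern (Foo is a
placeholder, not a name from this CL):

    // Per-key ivar updates live outside any block, so no implicit
    // capture of self is involved.
    - (void)updateFooStatOfKey:(NSString *)key value:(NSString *)value {
      // ... update ivars based on key/value ...
    }

    - (void)parseFooStatsReport:(RTCLegacyStatsReport *)statsReport {
      [statsReport.values
          enumerateKeysAndObjectsUsingBlock:^(NSString *key, NSString *value, BOOL *stop) {
            [self updateFooStatOfKey:key value:value];  // explicit self
          }];
    }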


@@ -32,14 +32,17 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
                                       ARDMainViewDelegate,
                                       ARDVideoCallViewControllerDelegate,
                                       RTCAudioSessionDelegate>
+@property(nonatomic, strong) ARDMainView *mainView;
+@property(nonatomic, strong) AVAudioPlayer *audioPlayer;
 @end

 @implementation ARDMainViewController {
-  ARDMainView *_mainView;
-  AVAudioPlayer *_audioPlayer;
   BOOL _useManualAudio;
 }

+@synthesize mainView = _mainView;
+@synthesize audioPlayer = _audioPlayer;
+
 - (void)viewDidLoad {
   [super viewDidLoad];
   if ([[[NSProcessInfo processInfo] arguments] containsObject:loopbackLaunchProcessArgument]) {
@@ -165,9 +168,9 @@ static NSString *const loopbackLaunchProcessArgument = @"loopback";
   // Stop playback on main queue and then configure WebRTC.
   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain
                                block:^{
-                                 if (_mainView.isAudioLoopPlaying) {
+                                 if (self.mainView.isAudioLoopPlaying) {
                                    RTCLog(@"Stopping audio loop due to WebRTC start.");
-                                   [_audioPlayer stop];
+                                   [self.audioPlayer stop];
                                  }
                                  RTCLog(@"Setting isAudioEnabled to YES.");
                                  session.isAudioEnabled = YES;
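
The view-controller hunks rely on one more recurring recipe: ivars that
blocks touch are promoted to properties, with an explicit @synthesize
keeping the same backing storage, so block code can reach them through
self. Sketched with the names from the hunk above:

    @property(nonatomic, strong) ARDMainView *mainView;  // was bare ivar _mainView
    ...
    @synthesize mainView = _mainView;  // same storage, explicit accessor
    ...
    // inside blocks: self.mainView instead of the bare _mainView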


@@ -27,6 +27,7 @@
                                                RTCAudioSessionDelegate>
 @property(nonatomic, strong) RTCVideoTrack *remoteVideoTrack;
 @property(nonatomic, readonly) ARDVideoCallView *videoCallView;
+@property(nonatomic, assign) AVAudioSessionPortOverride portOverride;
 @end

 @implementation ARDVideoCallViewController {
@@ -34,12 +35,12 @@
   RTCVideoTrack *_remoteVideoTrack;
   ARDCaptureController *_captureController;
   ARDFileCaptureController *_fileCaptureController NS_AVAILABLE_IOS(10);
-  AVAudioSessionPortOverride _portOverride;
 }

 @synthesize videoCallView = _videoCallView;
 @synthesize remoteVideoTrack = _remoteVideoTrack;
 @synthesize delegate = _delegate;
+@synthesize portOverride = _portOverride;

 - (instancetype)initForRoom:(NSString *)room
                  isLoopback:(BOOL)isLoopback
@@ -168,7 +169,7 @@
   [session lockForConfiguration];
   NSError *error = nil;
   if ([session overrideOutputAudioPort:override error:&error]) {
-    _portOverride = override;
+    self.portOverride = override;
   } else {
     RTCLogError(@"Error overriding output port: %@",
                 error.localizedDescription);


@@ -39,6 +39,7 @@ static NSUInteger const kBottomViewHeight = 200;
 @property(nonatomic, weak) id<APPRTCMainViewDelegate> delegate;
 @property(nonatomic, readonly) NSView<RTCVideoRenderer>* localVideoView;
 @property(nonatomic, readonly) NSView<RTCVideoRenderer>* remoteVideoView;
+@property(nonatomic, readonly) NSTextView* logView;

 - (void)displayLogMessage:(NSString*)message;
@@ -52,7 +53,6 @@ static NSUInteger const kBottomViewHeight = 200;
   NSButton* _connectButton;
   NSButton* _loopbackButton;
   NSTextField* _roomField;
-  NSTextView* _logView;
   CGSize _localVideoSize;
   CGSize _remoteVideoSize;
 }
@@ -60,14 +60,13 @@ static NSUInteger const kBottomViewHeight = 200;
 @synthesize delegate = _delegate;
 @synthesize localVideoView = _localVideoView;
 @synthesize remoteVideoView = _remoteVideoView;
+@synthesize logView = _logView;

 - (void)displayLogMessage:(NSString *)message {
   dispatch_async(dispatch_get_main_queue(), ^{
-    _logView.string =
-        [NSString stringWithFormat:@"%@%@\n", _logView.string, message];
-    NSRange range = NSMakeRange(_logView.string.length, 0);
-    [_logView scrollRangeToVisible:range];
+    self.logView.string = [NSString stringWithFormat:@"%@%@\n", self.logView.string, message];
+    NSRange range = NSMakeRange(self.logView.string.length, 0);
+    [self.logView scrollRangeToVisible:range];
   });
 }


@@ -40,6 +40,7 @@ if (is_ios || is_mac) {
     "objc/Framework/Headers",  # TODO(bugs.webrtc.org/9627): Remove this.
   ]
   cflags = [
+    "-Wimplicit-retain-self",
     "-Wstrict-overflow",
     "-Wmissing-field-initializers",
   ]
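
With the code warning-clean, the CL turns the warning on for these
Objective-C targets by adding it to cflags above. For reference, clang's
diagnostic for a violation reads roughly as follows (exact wording may
vary by clang version):

    warning: block implicitly retains 'self'; explicitly mention 'self'
    to indicate this is intended behavior [-Wimplicit-retain-self]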


@@ -26,18 +26,18 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 @interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate>
 @property(nonatomic, readonly) dispatch_queue_t frameQueue;
+@property(nonatomic, strong) AVCaptureDevice *currentDevice;
+@property(nonatomic, assign) BOOL hasRetriedOnFatalError;
+@property(nonatomic, assign) BOOL isRunning;
+// Will the session be running once all asynchronous operations have been completed?
+@property(nonatomic, assign) BOOL willBeRunning;
 @end

 @implementation RTCCameraVideoCapturer {
   AVCaptureVideoDataOutput *_videoDataOutput;
   AVCaptureSession *_captureSession;
-  AVCaptureDevice *_currentDevice;
   FourCharCode _preferredOutputPixelFormat;
   FourCharCode _outputPixelFormat;
-  BOOL _hasRetriedOnFatalError;
-  BOOL _isRunning;
-  // Will the session be running once all asynchronous operations have been completed?
-  BOOL _willBeRunning;
   RTCVideoRotation _rotation;
 #if TARGET_OS_IPHONE
   UIDeviceOrientation _orientation;
@@ -46,6 +46,10 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 @synthesize frameQueue = _frameQueue;
 @synthesize captureSession = _captureSession;
+@synthesize currentDevice = _currentDevice;
+@synthesize hasRetriedOnFatalError = _hasRetriedOnFatalError;
+@synthesize isRunning = _isRunning;
+@synthesize willBeRunning = _willBeRunning;

 - (instancetype)init {
   return [self initWithDelegate:nil captureSession:[[AVCaptureSession alloc] init]];
@@ -157,25 +161,26 @@ const int64_t kNanosecondsPerSecond = 1000000000;
                  [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
 #endif
-                 _currentDevice = device;
+                 self.currentDevice = device;
                  NSError *error = nil;
-                 if (![_currentDevice lockForConfiguration:&error]) {
-                   RTCLogError(
-                       @"Failed to lock device %@. Error: %@", _currentDevice, error.userInfo);
+                 if (![self.currentDevice lockForConfiguration:&error]) {
+                   RTCLogError(@"Failed to lock device %@. Error: %@",
+                               self.currentDevice,
+                               error.userInfo);
                    if (completionHandler) {
                      completionHandler(error);
                    }
-                   _willBeRunning = NO;
+                   self.willBeRunning = NO;
                    return;
                  }
                  [self reconfigureCaptureSessionInput];
                  [self updateOrientation];
                  [self updateDeviceCaptureFormat:format fps:fps];
                  [self updateVideoDataOutputPixelFormat:format];
-                 [_captureSession startRunning];
-                 [_currentDevice unlockForConfiguration];
-                 _isRunning = YES;
+                 [self.captureSession startRunning];
+                 [self.currentDevice unlockForConfiguration];
+                 self.isRunning = YES;
                  if (completionHandler) {
                    completionHandler(nil);
                  }
@@ -188,16 +193,16 @@ const int64_t kNanosecondsPerSecond = 1000000000;
       dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                     block:^{
                       RTCLogInfo("Stop");
-                      _currentDevice = nil;
-                      for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
-                        [_captureSession removeInput:oldInput];
+                      self.currentDevice = nil;
+                      for (AVCaptureDeviceInput *oldInput in [self.captureSession.inputs copy]) {
+                        [self.captureSession removeInput:oldInput];
                       }
-                      [_captureSession stopRunning];
+                      [self.captureSession stopRunning];
 #if TARGET_OS_IPHONE
                       [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
 #endif
-                      _isRunning = NO;
+                      self.isRunning = NO;
                       if (completionHandler) {
                         completionHandler();
                       }
@@ -340,7 +345,7 @@ const int64_t kNanosecondsPerSecond = 1000000000;
                     block:^{
                       // If we successfully restarted after an unknown error,
                       // allow future retries on fatal errors.
-                      _hasRetriedOnFatalError = NO;
+                      self.hasRetriedOnFatalError = NO;
                     }];
 }
@@ -352,10 +357,10 @@ const int64_t kNanosecondsPerSecond = 1000000000;
   [RTCDispatcher
       dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                     block:^{
-                      if (!_hasRetriedOnFatalError) {
+                      if (!self.hasRetriedOnFatalError) {
                         RTCLogWarning(@"Attempting to recover from fatal capture error.");
                         [self handleNonFatalError];
-                        _hasRetriedOnFatalError = YES;
+                        self.hasRetriedOnFatalError = YES;
                       } else {
                         RTCLogError(@"Previous fatal error recovery failed.");
                       }
@@ -366,8 +371,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                block:^{
                                  RTCLog(@"Restarting capture session after error.");
-                                 if (_isRunning) {
-                                   [_captureSession startRunning];
+                                 if (self.isRunning) {
+                                   [self.captureSession startRunning];
                                  }
                                }];
 }
@@ -379,9 +384,9 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 - (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                block:^{
-                                 if (_isRunning && !_captureSession.isRunning) {
+                                 if (self.isRunning && !self.captureSession.isRunning) {
                                    RTCLog(@"Restarting capture session on active.");
-                                   [_captureSession startRunning];
+                                   [self.captureSession startRunning];
                                  }
                                }];
 }


@@ -27,15 +27,21 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
   RTCFileVideoCapturerStatusStopped
 };

+@interface RTCFileVideoCapturer ()
+@property(nonatomic, assign) CMTime lastPresentationTime;
+@property(nonatomic, strong) NSURL *fileURL;
+@end
+
 @implementation RTCFileVideoCapturer {
   AVAssetReader *_reader;
   AVAssetReaderTrackOutput *_outTrack;
   RTCFileVideoCapturerStatus _status;
-  CMTime _lastPresentationTime;
   dispatch_queue_t _frameQueue;
-  NSURL *_fileURL;
 }

+@synthesize lastPresentationTime = _lastPresentationTime;
+@synthesize fileURL = _fileURL;
+
 - (void)startCapturingFromFileNamed:(NSString *)nameOfFile
                             onError:(RTCFileVideoCapturerErrorBlock)errorBlock {
   if (_status == RTCFileVideoCapturerStatusStarted) {
@@ -62,9 +68,9 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
       return;
     }
-    _lastPresentationTime = CMTimeMake(0, 0);
-    _fileURL = [NSURL fileURLWithPath:pathForFile];
+    self.lastPresentationTime = CMTimeMake(0, 0);
+    self.fileURL = [NSURL fileURLWithPath:pathForFile];
     [self setupReaderOnError:errorBlock];
   });
 }


@@ -47,14 +47,17 @@
   if (_captureSession == captureSession) {
     return;
   }
-  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain
+  [RTCDispatcher
+      dispatchAsyncOnType:RTCDispatcherTypeMain
                     block:^{
-                      _captureSession = captureSession;
+                      self.captureSession = captureSession;
                       AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
-                      [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                      [RTCDispatcher
+                          dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                         block:^{
                                           previewLayer.session = captureSession;
-                                          [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain
+                                          [RTCDispatcher
+                                              dispatchAsyncOnType:RTCDispatcherTypeMain
                                                             block:^{
                                                               [self setCorrectVideoOrientation];
                                                             }];


@@ -128,17 +128,20 @@ static const NSUInteger kFullDuplexTimeInSec = 10;
 static const NSUInteger kNumIgnoreFirstCallbacks = 50;

 @interface RTCAudioDeviceModuleTests : XCTestCase {
   rtc::scoped_refptr<webrtc::AudioDeviceModule> audioDeviceModule;
-  webrtc::AudioParameters playoutParameters;
-  webrtc::AudioParameters recordParameters;
   MockAudioTransport mock;
 }

+@property(nonatomic, assign) webrtc::AudioParameters playoutParameters;
+@property(nonatomic, assign) webrtc::AudioParameters recordParameters;
 @end

 @implementation RTCAudioDeviceModuleTests

+@synthesize playoutParameters;
+@synthesize recordParameters;
+
 - (void)setUp {
   [super setUp];
   audioDeviceModule = webrtc::CreateAudioDeviceModule();
@@ -254,10 +257,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
                           int64_t *elapsed_time_ms,
                           int64_t *ntp_time_ms) {
         nSamplesOut = nSamples;
-        XCTAssertEqual(nSamples, playoutParameters.frames_per_10ms_buffer());
+        XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
         XCTAssertEqual(nBytesPerSample, kBytesPerSample);
-        XCTAssertEqual(nChannels, playoutParameters.channels());
-        XCTAssertEqual((int) samplesPerSec, playoutParameters.sample_rate());
+        XCTAssertEqual(nChannels, self.playoutParameters.channels());
+        XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
         XCTAssertNotEqual((void*)NULL, audioSamples);
         return 0;
@@ -291,10 +294,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
                           int64_t *elapsed_time_ms,
                           int64_t *ntp_time_ms) {
         nSamplesOut = nSamples;
-        XCTAssertEqual(nSamples, playoutParameters.frames_per_10ms_buffer());
+        XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
         XCTAssertEqual(nBytesPerSample, kBytesPerSample);
-        XCTAssertEqual(nChannels, playoutParameters.channels());
-        XCTAssertEqual((int) samplesPerSec, playoutParameters.sample_rate());
+        XCTAssertEqual(nChannels, self.playoutParameters.channels());
+        XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
         XCTAssertNotEqual((void*)NULL, audioSamples);
         if (++num_callbacks == kNumCallbacks) {
           [playoutExpectation fulfill];
@@ -330,10 +333,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
                           int64_t *elapsed_time_ms,
                           int64_t *ntp_time_ms) {
         nSamplesOut = nSamples;
-        XCTAssertEqual(nSamples, playoutParameters.frames_per_10ms_buffer());
+        XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
         XCTAssertEqual(nBytesPerSample, kBytesPerSample);
-        XCTAssertEqual(nChannels, playoutParameters.channels());
-        XCTAssertEqual((int) samplesPerSec, playoutParameters.sample_rate());
+        XCTAssertEqual(nChannels, self.playoutParameters.channels());
+        XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
         XCTAssertNotEqual((void*)NULL, audioSamples);
         if (++num_callbacks == kNumCallbacks) {
           [playoutExpectation fulfill];
@@ -366,10 +369,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
                          const bool keyPressed,
                          uint32_t& newMicLevel) {
        XCTAssertNotEqual((void*)NULL, audioSamples);
-       XCTAssertEqual(nSamples, recordParameters.frames_per_10ms_buffer());
+       XCTAssertEqual(nSamples, self.recordParameters.frames_per_10ms_buffer());
        XCTAssertEqual(nBytesPerSample, kBytesPerSample);
-       XCTAssertEqual(nChannels, recordParameters.channels());
-       XCTAssertEqual((int) samplesPerSec, recordParameters.sample_rate());
+       XCTAssertEqual(nChannels, self.recordParameters.channels());
+       XCTAssertEqual((int)samplesPerSec, self.recordParameters.sample_rate());
        XCTAssertEqual(0, clockDrift);
        XCTAssertEqual(0u, currentMicLevel);
        XCTAssertFalse(keyPressed);
@@ -405,10 +408,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
                           int64_t *elapsed_time_ms,
                           int64_t *ntp_time_ms) {
         nSamplesOut = nSamples;
-        XCTAssertEqual(nSamples, playoutParameters.frames_per_10ms_buffer());
+        XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
         XCTAssertEqual(nBytesPerSample, kBytesPerSample);
-        XCTAssertEqual(nChannels, playoutParameters.channels());
-        XCTAssertEqual((int) samplesPerSec, playoutParameters.sample_rate());
+        XCTAssertEqual(nChannels, self.playoutParameters.channels());
+        XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
         XCTAssertNotEqual((void*)NULL, audioSamples);
         if (callbackCount++ >= kNumCallbacks) {
           [playoutExpectation fulfill];
@@ -428,10 +431,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
                          const bool keyPressed,
                          uint32_t& newMicLevel) {
        XCTAssertNotEqual((void*)NULL, audioSamples);
-       XCTAssertEqual(nSamples, recordParameters.frames_per_10ms_buffer());
+       XCTAssertEqual(nSamples, self.recordParameters.frames_per_10ms_buffer());
        XCTAssertEqual(nBytesPerSample, kBytesPerSample);
-       XCTAssertEqual(nChannels, recordParameters.channels());
-       XCTAssertEqual((int) samplesPerSec, recordParameters.sample_rate());
+       XCTAssertEqual(nChannels, self.recordParameters.channels());
+       XCTAssertEqual((int)samplesPerSec, self.recordParameters.sample_rate());
        XCTAssertEqual(0, clockDrift);
        XCTAssertEqual(0u, currentMicLevel);
        XCTAssertFalse(keyPressed);