Explicitly retain self in objc blocks to avoid compiler warning.
Implicitly retaining the self pointer inside an Objective-C block (even where that is the intended behavior) triggers the `-Wimplicit-retain-self` compiler warning. Retain self explicitly instead, by going through `self.<property>` accessors rather than bare ivars inside blocks.

Bug: webrtc:9971
Change-Id: If77a67168d8a65ced78d5119b9a7332391d20bc9
Reviewed-on: https://webrtc-review.googlesource.com/c/109641
Commit-Queue: Jiawei Ou <ouj@fb.com>
Reviewed-by: Kári Helgason <kthelgason@webrtc.org>
Reviewed-by: Tommi <tommi@webrtc.org>
Cr-Commit-Position: refs/heads/master@{#25609}
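For readers unfamiliar with the diagnostic, the pattern being changed boils down to the sketch below. This is a standalone illustration, not code from the patch; the class and property names are invented.

// illustration.m — minimal sketch; compile with: clang -fobjc-arc -Wimplicit-retain-self -c illustration.m
#import <Foundation/Foundation.h>

@interface Camera : NSObject
@property(nonatomic, assign) BOOL isRunning;
- (void)start;
@end

@implementation Camera {
  BOOL _isRunning;  // explicit backing ivar, also reachable through the property
}

@synthesize isRunning = _isRunning;

- (void)start {
  dispatch_async(dispatch_get_main_queue(), ^{
    // Touching the bare ivar would capture (and retain) `self` implicitly,
    // which is exactly what -Wimplicit-retain-self flags:
    //   _isRunning = YES;
    // Going through the property spells the same strong capture out explicitly
    // and keeps the compiler quiet:
    self.isRunning = YES;
  });
}

@end

The retain itself is the same in both spellings; the warning only objects to the capture of self being invisible at the call site.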
@@ -26,18 +26,18 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 
 @interface RTCCameraVideoCapturer ()<AVCaptureVideoDataOutputSampleBufferDelegate>
 @property(nonatomic, readonly) dispatch_queue_t frameQueue;
+@property(nonatomic, strong) AVCaptureDevice *currentDevice;
+@property(nonatomic, assign) BOOL hasRetriedOnFatalError;
+@property(nonatomic, assign) BOOL isRunning;
+// Will the session be running once all asynchronous operations have been completed?
+@property(nonatomic, assign) BOOL willBeRunning;
 @end
 
 @implementation RTCCameraVideoCapturer {
   AVCaptureVideoDataOutput *_videoDataOutput;
   AVCaptureSession *_captureSession;
-  AVCaptureDevice *_currentDevice;
   FourCharCode _preferredOutputPixelFormat;
   FourCharCode _outputPixelFormat;
-  BOOL _hasRetriedOnFatalError;
-  BOOL _isRunning;
-  // Will the session be running once all asynchronous operations have been completed?
-  BOOL _willBeRunning;
   RTCVideoRotation _rotation;
 #if TARGET_OS_IPHONE
   UIDeviceOrientation _orientation;

@@ -46,6 +46,10 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 
 @synthesize frameQueue = _frameQueue;
 @synthesize captureSession = _captureSession;
+@synthesize currentDevice = _currentDevice;
+@synthesize hasRetriedOnFatalError = _hasRetriedOnFatalError;
+@synthesize isRunning = _isRunning;
+@synthesize willBeRunning = _willBeRunning;
 
 - (instancetype)init {
   return [self initWithDelegate:nil captureSession:[[AVCaptureSession alloc] init]];

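The `@synthesize name = _name;` lines added above pin each new property to a backing ivar with the underscore spelling, presumably so that code outside the blocks can keep using the `_`-prefixed ivars directly while the block bodies switch to `self.`. A small sketch of that equivalence, with invented names:

#import <Foundation/Foundation.h>

@interface Counter : NSObject
@property(nonatomic, assign) NSUInteger frameCount;
@end

@implementation Counter {
  NSUInteger _frameCount;
}

// Explicit @synthesize keeps the property and the pre-existing ivar pointing
// at the same storage.
@synthesize frameCount = _frameCount;

- (void)bump {
  _frameCount += 1;  // direct ivar access, fine outside a block
  dispatch_async(dispatch_get_main_queue(), ^{
    self.frameCount += 1;  // explicit self inside the block
  });
}

@end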
@@ -157,25 +161,26 @@ const int64_t kNanosecondsPerSecond = 1000000000;
     [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
 #endif
 
-    _currentDevice = device;
+    self.currentDevice = device;
 
     NSError *error = nil;
-    if (![_currentDevice lockForConfiguration:&error]) {
-      RTCLogError(
-          @"Failed to lock device %@. Error: %@", _currentDevice, error.userInfo);
+    if (![self.currentDevice lockForConfiguration:&error]) {
+      RTCLogError(@"Failed to lock device %@. Error: %@",
+                  self.currentDevice,
+                  error.userInfo);
       if (completionHandler) {
         completionHandler(error);
       }
-      _willBeRunning = NO;
+      self.willBeRunning = NO;
       return;
     }
     [self reconfigureCaptureSessionInput];
     [self updateOrientation];
     [self updateDeviceCaptureFormat:format fps:fps];
     [self updateVideoDataOutputPixelFormat:format];
-    [_captureSession startRunning];
-    [_currentDevice unlockForConfiguration];
-    _isRunning = YES;
+    [self.captureSession startRunning];
+    [self.currentDevice unlockForConfiguration];
+    self.isRunning = YES;
     if (completionHandler) {
       completionHandler(nil);
     }

@@ -188,16 +193,16 @@ const int64_t kNanosecondsPerSecond = 1000000000;
       dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                     block:^{
                       RTCLogInfo("Stop");
-                      _currentDevice = nil;
-                      for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) {
-                        [_captureSession removeInput:oldInput];
+                      self.currentDevice = nil;
+                      for (AVCaptureDeviceInput *oldInput in [self.captureSession.inputs copy]) {
+                        [self.captureSession removeInput:oldInput];
                       }
-                      [_captureSession stopRunning];
+                      [self.captureSession stopRunning];
 
 #if TARGET_OS_IPHONE
                       [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
 #endif
-                      _isRunning = NO;
+                      self.isRunning = NO;
                       if (completionHandler) {
                         completionHandler();
                       }

@@ -340,7 +345,7 @@ const int64_t kNanosecondsPerSecond = 1000000000;
                     block:^{
                       // If we successfully restarted after an unknown error,
                       // allow future retries on fatal errors.
-                      _hasRetriedOnFatalError = NO;
+                      self.hasRetriedOnFatalError = NO;
                     }];
 }
 
@@ -352,10 +357,10 @@ const int64_t kNanosecondsPerSecond = 1000000000;
   [RTCDispatcher
       dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                     block:^{
-                      if (!_hasRetriedOnFatalError) {
+                      if (!self.hasRetriedOnFatalError) {
                         RTCLogWarning(@"Attempting to recover from fatal capture error.");
                         [self handleNonFatalError];
-                        _hasRetriedOnFatalError = YES;
+                        self.hasRetriedOnFatalError = YES;
                       } else {
                         RTCLogError(@"Previous fatal error recovery failed.");
                       }

@@ -366,8 +371,8 @@ const int64_t kNanosecondsPerSecond = 1000000000;
   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                block:^{
                                  RTCLog(@"Restarting capture session after error.");
-                                 if (_isRunning) {
-                                   [_captureSession startRunning];
+                                 if (self.isRunning) {
+                                   [self.captureSession startRunning];
                                  }
                                }];
 }

@@ -379,9 +384,9 @@ const int64_t kNanosecondsPerSecond = 1000000000;
 - (void)handleApplicationDidBecomeActive:(NSNotification *)notification {
   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                block:^{
-                                 if (_isRunning && !_captureSession.isRunning) {
+                                 if (self.isRunning && !self.captureSession.isRunning) {
                                    RTCLog(@"Restarting capture session on active.");
-                                   [_captureSession startRunning];
+                                   [self.captureSession startRunning];
                                  }
                                }];
 }

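All of the RTCCameraVideoCapturer hunks above keep the strong capture of self; per the commit message the retention is intended, and the change only makes it visible. For contrast, where keeping the object alive for the lifetime of a block is not wanted, the usual alternative is a weak/strong dance such as the sketch below (invented class, not part of this change):

#import <Foundation/Foundation.h>

@interface Worker : NSObject
@property(nonatomic, assign) BOOL isRunning;
- (void)startLater;
@end

@implementation Worker

- (void)startLater {
  // Capture weakly, then promote to a strong reference inside the block so the
  // object cannot disappear halfway through the block body. Requires ARC.
  __weak Worker *weakSelf = self;
  dispatch_async(dispatch_get_main_queue(), ^{
    Worker *strongSelf = weakSelf;
    if (!strongSelf) {
      return;  // the worker was deallocated before the block ran
    }
    strongSelf.isRunning = YES;
  });
}

@end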
@@ -27,15 +27,21 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
   RTCFileVideoCapturerStatusStopped
 };
 
+@interface RTCFileVideoCapturer ()
+@property(nonatomic, assign) CMTime lastPresentationTime;
+@property(nonatomic, strong) NSURL *fileURL;
+@end
+
 @implementation RTCFileVideoCapturer {
   AVAssetReader *_reader;
   AVAssetReaderTrackOutput *_outTrack;
   RTCFileVideoCapturerStatus _status;
-  CMTime _lastPresentationTime;
   dispatch_queue_t _frameQueue;
-  NSURL *_fileURL;
 }
+
+@synthesize lastPresentationTime = _lastPresentationTime;
+@synthesize fileURL = _fileURL;
 
 - (void)startCapturingFromFileNamed:(NSString *)nameOfFile
                             onError:(RTCFileVideoCapturerErrorBlock)errorBlock {
   if (_status == RTCFileVideoCapturerStatusStarted) {

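The RTCFileVideoCapturer hunk above uses the class-extension idiom: the new properties live in the `@interface RTCFileVideoCapturer ()` block inside the .m file, so promoting the two ivars to properties does not widen the public header. A generic sketch of the same shape, with invented names:

#import <Foundation/Foundation.h>

// Public surface: no storage details exposed.
@interface FileSource : NSObject
- (void)open;
@end

// Private class extension, normally placed at the top of the .m file.
@interface FileSource ()
@property(nonatomic, strong) NSURL *fileURL;
@end

@implementation FileSource

- (void)open {
  dispatch_async(dispatch_get_global_queue(QOS_CLASS_DEFAULT, 0), ^{
    // Explicit self, as in the patch, keeps -Wimplicit-retain-self quiet.
    self.fileURL = [NSURL fileURLWithPath:@"/tmp/example.bin"];
  });
}

@end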
@@ -62,9 +68,9 @@ typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
       return;
     }
 
-    _lastPresentationTime = CMTimeMake(0, 0);
+    self.lastPresentationTime = CMTimeMake(0, 0);
 
-    _fileURL = [NSURL fileURLWithPath:pathForFile];
+    self.fileURL = [NSURL fileURLWithPath:pathForFile];
     [self setupReaderOnError:errorBlock];
   });
 }

@@ -47,19 +47,22 @@
   if (_captureSession == captureSession) {
     return;
   }
-  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain
-                               block:^{
-                                 _captureSession = captureSession;
-                                 AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
-                                 [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
-                                                              block:^{
-                                                                previewLayer.session = captureSession;
-                                                                [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeMain
-                                                                                             block:^{
-                                                                                               [self setCorrectVideoOrientation];
-                                                                                             }];
-                                                              }];
-                               }];
+  [RTCDispatcher
+      dispatchAsyncOnType:RTCDispatcherTypeMain
+                    block:^{
+                      self.captureSession = captureSession;
+                      AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
+                      [RTCDispatcher
+                          dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                                        block:^{
+                                          previewLayer.session = captureSession;
+                                          [RTCDispatcher
+                                              dispatchAsyncOnType:RTCDispatcherTypeMain
+                                                            block:^{
+                                                              [self setCorrectVideoOrientation];
+                                                            }];
+                                        }];
+                    }];
 }
 
 - (void)layoutSubviews {

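In the RTCCameraPreviewView hunk the substantive edit is `_captureSession` becoming `self.captureSession` inside the outermost block; the heavier re-wrapping of the nested dispatch calls looks like formatter fallout from the longer expressions. Worth noting for nested blocks in general: every block that mentions `self` takes its own strong reference, so the object stays alive until the innermost block has run. A compact sketch with invented names:

#import <Foundation/Foundation.h>

@interface PreviewBinder : NSObject
@property(nonatomic, copy) NSString *stage;
- (void)bind;
@end

@implementation PreviewBinder

- (void)bind {
  dispatch_async(dispatch_get_main_queue(), ^{
    self.stage = @"outer hop";  // outer block retains self until it finishes
    dispatch_async(dispatch_get_global_queue(QOS_CLASS_DEFAULT, 0), ^{
      self.stage = @"inner hop";  // inner block retains self again until it runs
    });
  });
}

@end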
@@ -128,17 +128,20 @@ static const NSUInteger kFullDuplexTimeInSec = 10;
 static const NSUInteger kNumIgnoreFirstCallbacks = 50;
 
 @interface RTCAudioDeviceModuleTests : XCTestCase {
-
   rtc::scoped_refptr<webrtc::AudioDeviceModule> audioDeviceModule;
-  webrtc::AudioParameters playoutParameters;
-  webrtc::AudioParameters recordParameters;
   MockAudioTransport mock;
 }
 
+@property(nonatomic, assign) webrtc::AudioParameters playoutParameters;
+@property(nonatomic, assign) webrtc::AudioParameters recordParameters;
+
 @end
 
 @implementation RTCAudioDeviceModuleTests
 
+@synthesize playoutParameters;
+@synthesize recordParameters;
+
 - (void)setUp {
   [super setUp];
   audioDeviceModule = webrtc::CreateAudioDeviceModule();

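Unlike the capturer files, the test fixture synthesizes its properties without an explicit ivar name. `@synthesize playoutParameters;` backs the property with an ivar literally called playoutParameters, not _playoutParameters, which is why the two bare ivar declarations could simply be deleted: the storage name seen by the rest of the test file does not change. A minimal sketch of that rule, with invented names:

#import <Foundation/Foundation.h>

@interface Meter : NSObject
@property(nonatomic, assign) double value;
@end

@implementation Meter

// With no "= _value", the synthesized backing ivar is named `value`.
@synthesize value;

- (double)doubled {
  return value * 2;  // direct access to the ivar named `value`, no underscore
}

@end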
@@ -254,10 +257,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
                       int64_t *elapsed_time_ms,
                       int64_t *ntp_time_ms) {
     nSamplesOut = nSamples;
-    XCTAssertEqual(nSamples, playoutParameters.frames_per_10ms_buffer());
+    XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
     XCTAssertEqual(nBytesPerSample, kBytesPerSample);
-    XCTAssertEqual(nChannels, playoutParameters.channels());
-    XCTAssertEqual((int) samplesPerSec, playoutParameters.sample_rate());
+    XCTAssertEqual(nChannels, self.playoutParameters.channels());
+    XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
     XCTAssertNotEqual((void*)NULL, audioSamples);
 
     return 0;

@@ -291,10 +294,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
                       int64_t *elapsed_time_ms,
                       int64_t *ntp_time_ms) {
     nSamplesOut = nSamples;
-    XCTAssertEqual(nSamples, playoutParameters.frames_per_10ms_buffer());
+    XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
     XCTAssertEqual(nBytesPerSample, kBytesPerSample);
-    XCTAssertEqual(nChannels, playoutParameters.channels());
-    XCTAssertEqual((int) samplesPerSec, playoutParameters.sample_rate());
+    XCTAssertEqual(nChannels, self.playoutParameters.channels());
+    XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
     XCTAssertNotEqual((void*)NULL, audioSamples);
     if (++num_callbacks == kNumCallbacks) {
       [playoutExpectation fulfill];

@@ -330,10 +333,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
                       int64_t *elapsed_time_ms,
                       int64_t *ntp_time_ms) {
     nSamplesOut = nSamples;
-    XCTAssertEqual(nSamples, playoutParameters.frames_per_10ms_buffer());
+    XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
     XCTAssertEqual(nBytesPerSample, kBytesPerSample);
-    XCTAssertEqual(nChannels, playoutParameters.channels());
-    XCTAssertEqual((int) samplesPerSec, playoutParameters.sample_rate());
+    XCTAssertEqual(nChannels, self.playoutParameters.channels());
+    XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
     XCTAssertNotEqual((void*)NULL, audioSamples);
     if (++num_callbacks == kNumCallbacks) {
       [playoutExpectation fulfill];

@@ -366,10 +369,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
                      const bool keyPressed,
                      uint32_t& newMicLevel) {
     XCTAssertNotEqual((void*)NULL, audioSamples);
-    XCTAssertEqual(nSamples, recordParameters.frames_per_10ms_buffer());
+    XCTAssertEqual(nSamples, self.recordParameters.frames_per_10ms_buffer());
     XCTAssertEqual(nBytesPerSample, kBytesPerSample);
-    XCTAssertEqual(nChannels, recordParameters.channels());
-    XCTAssertEqual((int) samplesPerSec, recordParameters.sample_rate());
+    XCTAssertEqual(nChannels, self.recordParameters.channels());
+    XCTAssertEqual((int)samplesPerSec, self.recordParameters.sample_rate());
     XCTAssertEqual(0, clockDrift);
     XCTAssertEqual(0u, currentMicLevel);
     XCTAssertFalse(keyPressed);

@@ -405,10 +408,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
                       int64_t *elapsed_time_ms,
                       int64_t *ntp_time_ms) {
     nSamplesOut = nSamples;
-    XCTAssertEqual(nSamples, playoutParameters.frames_per_10ms_buffer());
+    XCTAssertEqual(nSamples, self.playoutParameters.frames_per_10ms_buffer());
     XCTAssertEqual(nBytesPerSample, kBytesPerSample);
-    XCTAssertEqual(nChannels, playoutParameters.channels());
-    XCTAssertEqual((int) samplesPerSec, playoutParameters.sample_rate());
+    XCTAssertEqual(nChannels, self.playoutParameters.channels());
+    XCTAssertEqual((int)samplesPerSec, self.playoutParameters.sample_rate());
     XCTAssertNotEqual((void*)NULL, audioSamples);
     if (callbackCount++ >= kNumCallbacks) {
       [playoutExpectation fulfill];

@@ -428,10 +431,10 @@ static const NSUInteger kNumIgnoreFirstCallbacks = 50;
                      const bool keyPressed,
                      uint32_t& newMicLevel) {
     XCTAssertNotEqual((void*)NULL, audioSamples);
-    XCTAssertEqual(nSamples, recordParameters.frames_per_10ms_buffer());
+    XCTAssertEqual(nSamples, self.recordParameters.frames_per_10ms_buffer());
     XCTAssertEqual(nBytesPerSample, kBytesPerSample);
-    XCTAssertEqual(nChannels, recordParameters.channels());
-    XCTAssertEqual((int) samplesPerSec, recordParameters.sample_rate());
+    XCTAssertEqual(nChannels, self.recordParameters.channels());
+    XCTAssertEqual((int)samplesPerSec, self.recordParameters.sample_rate());
     XCTAssertEqual(0, clockDrift);
     XCTAssertEqual(0u, currentMicLevel);
     XCTAssertFalse(keyPressed);

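The warning being silenced here is not on by default in clang, so reproducing the diagnostic locally means opting in explicitly. A hedged example invocation (the file name is a placeholder):

clang -fobjc-arc -Wimplicit-retain-self -c SomeCapturer.m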