Add AVFoundation video capture support to Mac objc SDK (based on iOS)

The AppRTCDemo app on Mac OS X does not show or send local video streams,
because the AVFoundation capture session is not compiled in or implemented
in the appropriate places. This is the first part of a two-part patch
that implements local capture on the Mac for AppRTCDemo.

P.S. This is my first patch to WebRTC. I didn't see any relevant tests, but I could write some if you can point me at a location. Also, I don't think I have access to the automated tests.

BUG=webrtc:3417

Review-Url: https://codereview.webrtc.org/2046863004
Cr-Commit-Position: refs/heads/master@{#13080}
Author: adam.fedor
Date: 2016-06-08 17:24:37 -07:00
Committed by: Commit bot
Parent: f2a1c89241
Commit: fc22e03eb8
4 changed files with 47 additions and 19 deletions
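
In the diffs below, the first change drops the WEBRTC_IOS guard around the AVFoundation video source in the peer connection factory so the source can be created on OS X, and the second wraps the capturer's UIKit/UIDevice-dependent paths in TARGET_OS_IPHONE so the rest of the file still compiles there. A minimal sketch of the compile-time guard pattern the diff applies (illustrative only, not the exact change):

  #import <AVFoundation/AVFoundation.h>  // Capture itself is shared with OS X.
  #if TARGET_OS_IPHONE
  #import <UIKit/UIKit.h>  // UIDevice orientation handling is iOS-only.
  #endif

  #if TARGET_OS_IPHONE
    // iOS path: orientation notifications, interruption handling,
    // position-based (front/back) camera lookup.
  #else
    // OS X path: skip orientation handling, use the default video device.
  #endif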


@@ -11,9 +11,7 @@
#import "RTCPeerConnectionFactory+Private.h"
#import "NSString+StdString.h"
#if defined(WEBRTC_IOS)
#import "RTCAVFoundationVideoSource+Private.h"
#endif
#import "RTCAudioTrack+Private.h"
#import "RTCMediaStream+Private.h"
#import "RTCPeerConnection+Private.h"
@@ -54,12 +52,8 @@
- (RTCAVFoundationVideoSource *)avFoundationVideoSourceWithConstraints:
(nullable RTCMediaConstraints *)constraints {
#if defined(WEBRTC_IOS)
return [[RTCAVFoundationVideoSource alloc] initWithFactory:self
constraints:constraints];
#else
return nil;
#endif
}
- (RTCAudioTrack *)audioTrackWithTrackId:(NSString *)trackId {
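
With that guard gone, the factory method above compiles and returns a real source on OS X instead of nil, which is what lets AppRTCDemo build a local video track there. A hedged usage sketch (the call site and track id are illustrative; it assumes the factory's videoTrackWithSource:trackId: helper and an RTCMediaStream created elsewhere in the same SDK):

  RTCPeerConnectionFactory *factory = [[RTCPeerConnectionFactory alloc] init];
  RTCAVFoundationVideoSource *source =
      [factory avFoundationVideoSourceWithConstraints:nil];
  RTCVideoTrack *localVideoTrack =
      [factory videoTrackWithSource:source trackId:@"video0"];
  [localStream addVideoTrack:localVideoTrack];  // localStream: an existing RTCMediaStream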


@@ -12,7 +12,9 @@
#import <AVFoundation/AVFoundation.h>
#import <Foundation/Foundation.h>
#if TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#endif
#import "RTCDispatcher+Private.h"
#import "WebRTC/RTCLogging.h"
@@ -88,6 +90,7 @@ static cricket::VideoFormat const kDefaultFormat =
return nil;
}
NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
#if TARGET_OS_IPHONE
[center addObserver:self
selector:@selector(deviceOrientationDidChange:)
name:UIDeviceOrientationDidChangeNotification
@@ -100,6 +103,7 @@ static cricket::VideoFormat const kDefaultFormat =
selector:@selector(handleCaptureSessionInterruptionEnded:)
name:AVCaptureSessionInterruptionEndedNotification
object:_captureSession];
#endif
[center addObserver:self
selector:@selector(handleCaptureSessionRuntimeError:)
name:AVCaptureSessionRuntimeErrorNotification
@@ -188,7 +192,9 @@ static cricket::VideoFormat const kDefaultFormat =
block:^{
_orientationHasChanged = NO;
[self updateOrientation];
#if TARGET_OS_IPHONE
[[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
#endif
AVCaptureSession *captureSession = self.captureSession;
[captureSession startRunning];
}];
@@ -207,12 +213,15 @@ static cricket::VideoFormat const kDefaultFormat =
block:^{
[_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
[_captureSession stopRunning];
#if TARGET_OS_IPHONE
[[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
#endif
}];
}
#pragma mark iOS notifications
#if TARGET_OS_IPHONE
- (void)deviceOrientationDidChange:(NSNotification *)notification {
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
@@ -220,6 +229,7 @@ static cricket::VideoFormat const kDefaultFormat =
[self updateOrientation];
}];
}
#endif
#pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
@@ -273,16 +283,21 @@ static cricket::VideoFormat const kDefaultFormat =
}
- (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
NSError *error =
[notification.userInfo objectForKey:AVCaptureSessionErrorKey];
RTCLogError(@"Capture session runtime error: %@", error.localizedDescription);
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
#if TARGET_OS_IPHONE
if (error.code == AVErrorMediaServicesWereReset) {
[self handleNonFatalError];
} else {
[self handleFatalError];
}
#else
[self handleFatalError];
#endif
}];
}
@@ -402,8 +417,13 @@ static cricket::VideoFormat const kDefaultFormat =
- (AVCaptureDeviceInput *)frontCameraInput {
if (!_frontCameraInput) {
#if TARGET_OS_IPHONE
AVCaptureDevice *frontCameraDevice =
[self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
#else
AVCaptureDevice *frontCameraDevice =
[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
#endif
if (!frontCameraDevice) {
RTCLogWarning(@"Failed to find front capture device.");
return nil;
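
On OS X there is no front/back AVCaptureDevicePosition to key off, so the fallback above simply takes the default video device. If a Mac app wanted to choose among several attached cameras instead, a sketch along these lines would do it (generic AVFoundation of this era, not part of this patch; PickVideoDevice is an illustrative helper name):

  #import <AVFoundation/AVFoundation.h>

  static AVCaptureDevice *PickVideoDevice(void) {
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices) {
      NSLog(@"Capture device: %@ (%@)", device.localizedName, device.uniqueID);
    }
    return devices.firstObject;  // or match on localizedName/uniqueID instead
  }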
@@ -452,6 +472,7 @@ static cricket::VideoFormat const kDefaultFormat =
// TODO(tkchin): set rotation bit on frames.
return;
}
#if TARGET_OS_IPHONE
AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
switch ([UIDevice currentDevice].orientation) {
case UIDeviceOrientationPortrait:
@@ -475,6 +496,7 @@ static cricket::VideoFormat const kDefaultFormat =
return;
}
connection.videoOrientation = orientation;
#endif
}
// Update the current session input to match what's stored in _useBackCamera.
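
For context, on both platforms the capturer ultimately drives the standard AVFoundation pipeline: device -> AVCaptureDeviceInput -> AVCaptureSession -> AVCaptureVideoDataOutput -> sample buffer delegate. A minimal stand-alone sketch of that pipeline (generic AVFoundation; the StartCapture name, delegate, queue, and pixel format are illustrative, not the capturer's exact configuration):

  #import <AVFoundation/AVFoundation.h>
  #import <CoreVideo/CoreVideo.h>

  // 'delegate' is any object adopting AVCaptureVideoDataOutputSampleBufferDelegate.
  static AVCaptureSession *StartCapture(
      id<AVCaptureVideoDataOutputSampleBufferDelegate> delegate) {
    NSError *error = nil;
    AVCaptureDevice *device =
        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *input =
        [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    if (!input || ![session canAddInput:input]) {
      NSLog(@"Failed to add capture input: %@", error.localizedDescription);
      return nil;
    }
    [session addInput:input];

    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    output.videoSettings = @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey :
          @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
    };
    dispatch_queue_t queue =
        dispatch_queue_create("capture.output", DISPATCH_QUEUE_SERIAL);
    [output setSampleBufferDelegate:delegate queue:queue];
    if ([session canAddOutput:output]) {
      [session addOutput:output];
    }
    [session startRunning];
    return session;
  }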