Add AVFoundation video capture support to Mac objc SDK (based on iOS)
The AppRTCDemo app on Mac OS X does not show or send local video streams, because the AVFoundation capture session is neither compiled in nor implemented in the appropriate places. This is the first part of a two-part patch that implements local capture on the Mac for AppRTCDemo.

P.S. This is my first patch to WebRTC. I didn't see any relevant tests, but I could write some if you can point me at a location. Also, I don't think I have access to the automated tests.

BUG=webrtc:3417
Review-Url: https://codereview.webrtc.org/2046863004
Cr-Commit-Position: refs/heads/master@{#13080}
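Note for readers trying the patch: the sketch below shows roughly how an application obtains a local video track from the AVFoundation-backed source once this lands. RTCAVFoundationVideoSource and avFoundationVideoSourceWithConstraints: appear in the RTCPeerConnectionFactory hunks below; the videoTrackWithSource:trackId: call, the umbrella-framework imports, and the track id are assumed from the existing ObjC SDK and are illustrative only, not code added by this patch.

#import <WebRTC/RTCAVFoundationVideoSource.h>
#import <WebRTC/RTCPeerConnectionFactory.h>
#import <WebRTC/RTCVideoTrack.h>

// Illustrative only: build a local video track from the AVFoundation source.
// Before this patch, the source comes back nil on OS X because the capturer
// is not compiled into the Mac build.
static RTCVideoTrack *CreateLocalVideoTrack(RTCPeerConnectionFactory *factory) {
  // Constraints are nullable; pass nil to accept the capturer's defaults.
  RTCAVFoundationVideoSource *source =
      [factory avFoundationVideoSourceWithConstraints:nil];
  if (!source) {
    return nil;
  }
  // videoTrackWithSource:trackId: is assumed from the existing factory API;
  // the track id string is arbitrary.
  return [factory videoTrackWithSource:source trackId:@"video0"];
}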
@@ -69,6 +69,8 @@ if (is_ios || (is_mac && mac_deployment_target == "10.7")) {
 
   source_set("rtc_sdk_peerconnection_objc") {
     sources = [
+      "objc/Framework/Classes/RTCAVFoundationVideoSource+Private.h",
+      "objc/Framework/Classes/RTCAVFoundationVideoSource.mm",
       "objc/Framework/Classes/RTCAudioTrack+Private.h",
       "objc/Framework/Classes/RTCAudioTrack.mm",
       "objc/Framework/Classes/RTCConfiguration+Private.h",
@@ -118,6 +120,9 @@ if (is_ios || (is_mac && mac_deployment_target == "10.7")) {
       "objc/Framework/Classes/RTCVideoSource.mm",
       "objc/Framework/Classes/RTCVideoTrack+Private.h",
       "objc/Framework/Classes/RTCVideoTrack.mm",
+      "objc/Framework/Classes/avfoundationvideocapturer.h",
+      "objc/Framework/Classes/avfoundationvideocapturer.mm",
+      "objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h",
       "objc/Framework/Headers/WebRTC/RTCAudioTrack.h",
       "objc/Framework/Headers/WebRTC/RTCConfiguration.h",
       "objc/Framework/Headers/WebRTC/RTCDataChannel.h",
@@ -144,12 +149,7 @@ if (is_ios || (is_mac && mac_deployment_target == "10.7")) {
 
     if (is_ios) {
       sources += [
-        "objc/Framework/Classes/RTCAVFoundationVideoSource+Private.h",
-        "objc/Framework/Classes/RTCAVFoundationVideoSource.mm",
         "objc/Framework/Classes/RTCEAGLVideoView.m",
-        "objc/Framework/Classes/avfoundationvideocapturer.h",
-        "objc/Framework/Classes/avfoundationvideocapturer.mm",
-        "objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h",
         "objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h",
       ]
       libs = [
@@ -165,7 +165,10 @@ if (is_ios || (is_mac && mac_deployment_target == "10.7")) {
         "objc/Framework/Classes/RTCNSGLVideoView.m",
         "objc/Framework/Headers/WebRTC/RTCNSGLVideoView.h",
       ]
-      libs = [ "OpenGL.framework" ]
+      libs = [
+        "CoreMedia.framework",
+        "OpenGL.framework",
+      ]
     }
 
     configs += [
@@ -181,7 +184,10 @@ if (is_ios || (is_mac && mac_deployment_target == "10.7")) {
       configs -= [ "//build/config/clang:find_bad_constructs" ]
     }
 
-    libs += [ "stdc++" ]
+    libs += [
+      "AVFoundation.framework",
+      "stdc++",
+    ]
 
     deps = [
      ":rtc_sdk_common_objc",
@@ -11,9 +11,7 @@
 #import "RTCPeerConnectionFactory+Private.h"
 
 #import "NSString+StdString.h"
-#if defined(WEBRTC_IOS)
 #import "RTCAVFoundationVideoSource+Private.h"
-#endif
 #import "RTCAudioTrack+Private.h"
 #import "RTCMediaStream+Private.h"
 #import "RTCPeerConnection+Private.h"
@@ -54,12 +52,8 @@
 
 - (RTCAVFoundationVideoSource *)avFoundationVideoSourceWithConstraints:
     (nullable RTCMediaConstraints *)constraints {
-#if defined(WEBRTC_IOS)
   return [[RTCAVFoundationVideoSource alloc] initWithFactory:self
                                                  constraints:constraints];
-#else
-  return nil;
-#endif
 }
 
 - (RTCAudioTrack *)audioTrackWithTrackId:(NSString *)trackId {
@@ -12,7 +12,9 @@
 
 #import <AVFoundation/AVFoundation.h>
 #import <Foundation/Foundation.h>
+#if TARGET_OS_IPHONE
 #import <UIKit/UIKit.h>
+#endif
 
 #import "RTCDispatcher+Private.h"
 #import "WebRTC/RTCLogging.h"
@@ -88,6 +90,7 @@ static cricket::VideoFormat const kDefaultFormat =
       return nil;
     }
     NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
+#if TARGET_OS_IPHONE
     [center addObserver:self
                selector:@selector(deviceOrientationDidChange:)
                    name:UIDeviceOrientationDidChangeNotification
@@ -100,6 +103,7 @@ static cricket::VideoFormat const kDefaultFormat =
               selector:@selector(handleCaptureSessionInterruptionEnded:)
                   name:AVCaptureSessionInterruptionEndedNotification
                 object:_captureSession];
+#endif
     [center addObserver:self
                selector:@selector(handleCaptureSessionRuntimeError:)
                    name:AVCaptureSessionRuntimeErrorNotification
@@ -188,7 +192,9 @@ static cricket::VideoFormat const kDefaultFormat =
                                block:^{
     _orientationHasChanged = NO;
     [self updateOrientation];
+#if TARGET_OS_IPHONE
     [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
+#endif
     AVCaptureSession *captureSession = self.captureSession;
     [captureSession startRunning];
   }];
@@ -207,12 +213,15 @@ static cricket::VideoFormat const kDefaultFormat =
                                block:^{
     [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
     [_captureSession stopRunning];
+#if TARGET_OS_IPHONE
     [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
+#endif
   }];
 }
 
 #pragma mark iOS notifications
 
+#if TARGET_OS_IPHONE
 - (void)deviceOrientationDidChange:(NSNotification *)notification {
   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                block:^{
@@ -220,6 +229,7 @@ static cricket::VideoFormat const kDefaultFormat =
     [self updateOrientation];
   }];
 }
+#endif
 
 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate
 
@@ -273,16 +283,21 @@ static cricket::VideoFormat const kDefaultFormat =
 }
 
 - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
-  NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
+  NSError *error =
+      [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
   RTCLogError(@"Capture session runtime error: %@", error.localizedDescription);
 
   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                block:^{
+#if TARGET_OS_IPHONE
     if (error.code == AVErrorMediaServicesWereReset) {
       [self handleNonFatalError];
     } else {
       [self handleFatalError];
     }
+#else
+    [self handleFatalError];
+#endif
   }];
 }
 
@@ -402,8 +417,13 @@ static cricket::VideoFormat const kDefaultFormat =
 
 - (AVCaptureDeviceInput *)frontCameraInput {
   if (!_frontCameraInput) {
+#if TARGET_OS_IPHONE
     AVCaptureDevice *frontCameraDevice =
        [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
+#else
+    AVCaptureDevice *frontCameraDevice =
+        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
+#endif
     if (!frontCameraDevice) {
       RTCLogWarning(@"Failed to find front capture device.");
       return nil;
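Note on the hunk above: OS X has no notion of a front or back camera position, so the Mac path simply takes the system default video device. If a particular camera were wanted (which this patch does not attempt), selection could be sketched along these lines with the AVCaptureDevice enumeration API; the function name and fallback are illustrative only.

#import <AVFoundation/AVFoundation.h>

// Illustrative sketch, not part of the patch: pick a capture device by name
// on OS X, falling back to the default device exactly as the hunk above does.
static AVCaptureDeviceInput *CaptureInputForDeviceNamed(NSString *name) {
  AVCaptureDevice *chosen = nil;
  for (AVCaptureDevice *device in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    if ([device.localizedName isEqualToString:name]) {
      chosen = device;
      break;
    }
  }
  if (!chosen) {
    chosen = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
  }
  NSError *error = nil;
  AVCaptureDeviceInput *input =
      [AVCaptureDeviceInput deviceInputWithDevice:chosen error:&error];
  if (!input) {
    NSLog(@"Failed to create capture input: %@", error.localizedDescription);
  }
  return input;
}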
@@ -452,6 +472,7 @@ static cricket::VideoFormat const kDefaultFormat =
     // TODO(tkchin): set rotation bit on frames.
     return;
   }
+#if TARGET_OS_IPHONE
   AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
   switch ([UIDevice currentDevice].orientation) {
     case UIDeviceOrientationPortrait:
@@ -475,6 +496,7 @@ static cricket::VideoFormat const kDefaultFormat =
       return;
   }
   connection.videoOrientation = orientation;
+#endif
 }
 
 // Update the current session input to match what's stored in _useBackCamera.
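To summarize the capturer hunks above: the frame path itself is the same on both platforms — an AVCaptureSession feeds an AVCaptureVideoDataOutput whose sample buffer delegate hands frames to the C++ capturer — and only the UIKit orientation handling and session interruption notifications stay behind TARGET_OS_IPHONE. A minimal standalone version of that pipeline, with illustrative names and none of WebRTC's dispatcher or error handling, looks like this:

#import <AVFoundation/AVFoundation.h>

// Minimal sketch of the capture pipeline the capturer builds on both
// platforms: session -> device input -> video data output -> delegate.
// This is not the capturer's exact code; names here are illustrative only.
@interface SimpleFrameReceiver : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
@end

@implementation SimpleFrameReceiver
- (void)captureOutput:(AVCaptureOutput *)captureOutput
    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
           fromConnection:(AVCaptureConnection *)connection {
  // The WebRTC capturer forwards the pixel buffer inside this sample buffer
  // to the C++ layer; here we just log the frame dimensions.
  CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
  NSLog(@"Captured frame %zux%zu",
        CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer));
}
@end

static AVCaptureSession *StartSimpleCapture(SimpleFrameReceiver *receiver) {
  AVCaptureSession *session = [[AVCaptureSession alloc] init];
  AVCaptureDevice *device =
      [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
  NSError *error = nil;
  AVCaptureDeviceInput *input =
      [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
  if (!input || ![session canAddInput:input]) {
    return nil;
  }
  [session addInput:input];

  AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
  // The delegate queue must be serial per AVFoundation's requirements.
  dispatch_queue_t queue =
      dispatch_queue_create("org.example.videoframes", DISPATCH_QUEUE_SERIAL);
  [output setSampleBufferDelegate:receiver queue:queue];
  if (![session canAddOutput:output]) {
    return nil;
  }
  [session addOutput:output];
  [session startRunning];
  return session;
}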
@@ -94,6 +94,11 @@
         ],
       },
       'link_settings': {
+        'xcode_settings': {
+          'OTHER_LDFLAGS': [
+            '-framework AVFoundation',
+          ],
+        },
         'libraries': [
           '-lstdc++',
         ],
@@ -101,6 +106,8 @@
       'sources': [
         'objc/Framework/Classes/RTCAudioTrack+Private.h',
         'objc/Framework/Classes/RTCAudioTrack.mm',
+        'objc/Framework/Classes/RTCAVFoundationVideoSource+Private.h',
+        'objc/Framework/Classes/RTCAVFoundationVideoSource.mm',
         'objc/Framework/Classes/RTCConfiguration+Private.h',
         'objc/Framework/Classes/RTCConfiguration.mm',
         'objc/Framework/Classes/RTCDataChannel+Private.h',
@@ -148,7 +155,10 @@
         'objc/Framework/Classes/RTCVideoSource.mm',
         'objc/Framework/Classes/RTCVideoTrack+Private.h',
         'objc/Framework/Classes/RTCVideoTrack.mm',
+        'objc/Framework/Classes/avfoundationvideocapturer.h',
+        'objc/Framework/Classes/avfoundationvideocapturer.mm',
         'objc/Framework/Headers/WebRTC/RTCAudioTrack.h',
+        'objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h',
         'objc/Framework/Headers/WebRTC/RTCConfiguration.h',
         'objc/Framework/Headers/WebRTC/RTCDataChannel.h',
         'objc/Framework/Headers/WebRTC/RTCDataChannelConfiguration.h',
@@ -174,12 +184,7 @@
       'conditions': [
         ['OS=="ios"', {
           'sources': [
-            'objc/Framework/Classes/RTCAVFoundationVideoSource+Private.h',
-            'objc/Framework/Classes/RTCAVFoundationVideoSource.mm',
             'objc/Framework/Classes/RTCEAGLVideoView.m',
-            'objc/Framework/Classes/avfoundationvideocapturer.h',
-            'objc/Framework/Classes/avfoundationvideocapturer.mm',
-            'objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h',
             'objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h',
           ],
           'link_settings': {
@@ -201,6 +206,7 @@
           'link_settings': {
             'xcode_settings': {
               'OTHER_LDFLAGS': [
+                '-framework CoreMedia',
                 '-framework OpenGL',
               ],
             },