Add AVFoundation video capture support to Mac objc SDK (based on iOS)
The AppRTCDemo app on Mac OS X does not show or send local video streams, because the AVFoundation capture session is neither compiled in nor wired up in the appropriate places. This is the first part of a two-part patch that implements local capture on the Mac for AppRTCDemo.

P.S. This is my first patch to WebRTC. I didn't see any relevant tests, but I could write some if you can point me at a location. Also, I don't think I have access to the automated tests.

BUG=webrtc:3417
Review-Url: https://codereview.webrtc.org/2046863004
Cr-Commit-Position: refs/heads/master@{#13080}
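For context, a minimal sketch of what this unblocks for an app such as AppRTCDemo on the Mac: creating a local AVFoundation-backed video source through the Objective-C SDK. Only avFoundationVideoSourceWithConstraints: is touched by this patch; videoTrackWithSource:trackId: and the track id are assumed here for illustration and are not part of this change.

// Hedged usage sketch (not part of this patch). Assumes the existing
// RTCPeerConnectionFactory Objective-C API; the track id is arbitrary.
#import <WebRTC/RTCAVFoundationVideoSource.h>
#import <WebRTC/RTCPeerConnectionFactory.h>
#import <WebRTC/RTCVideoTrack.h>

static RTCVideoTrack *CreateLocalVideoTrack(RTCPeerConnectionFactory *factory) {
  // With this patch the source is also created on OS X; previously the
  // factory method was compiled out behind #if defined(WEBRTC_IOS) and
  // returned nil on Mac.
  RTCAVFoundationVideoSource *source =
      [factory avFoundationVideoSourceWithConstraints:nil];
  // videoTrackWithSource:trackId: is assumed here for illustration.
  return [factory videoTrackWithSource:source trackId:@"video0"];
}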
@@ -69,6 +69,8 @@ if (is_ios || (is_mac && mac_deployment_target == "10.7")) {
   source_set("rtc_sdk_peerconnection_objc") {
     sources = [
+      "objc/Framework/Classes/RTCAVFoundationVideoSource+Private.h",
+      "objc/Framework/Classes/RTCAVFoundationVideoSource.mm",
       "objc/Framework/Classes/RTCAudioTrack+Private.h",
       "objc/Framework/Classes/RTCAudioTrack.mm",
       "objc/Framework/Classes/RTCConfiguration+Private.h",
@@ -118,6 +120,9 @@ if (is_ios || (is_mac && mac_deployment_target == "10.7")) {
       "objc/Framework/Classes/RTCVideoSource.mm",
       "objc/Framework/Classes/RTCVideoTrack+Private.h",
       "objc/Framework/Classes/RTCVideoTrack.mm",
+      "objc/Framework/Classes/avfoundationvideocapturer.h",
+      "objc/Framework/Classes/avfoundationvideocapturer.mm",
+      "objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h",
       "objc/Framework/Headers/WebRTC/RTCAudioTrack.h",
       "objc/Framework/Headers/WebRTC/RTCConfiguration.h",
       "objc/Framework/Headers/WebRTC/RTCDataChannel.h",
@@ -144,12 +149,7 @@ if (is_ios || (is_mac && mac_deployment_target == "10.7")) {

     if (is_ios) {
       sources += [
-        "objc/Framework/Classes/RTCAVFoundationVideoSource+Private.h",
-        "objc/Framework/Classes/RTCAVFoundationVideoSource.mm",
         "objc/Framework/Classes/RTCEAGLVideoView.m",
-        "objc/Framework/Classes/avfoundationvideocapturer.h",
-        "objc/Framework/Classes/avfoundationvideocapturer.mm",
-        "objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h",
         "objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h",
       ]
       libs = [
@@ -165,7 +165,10 @@ if (is_ios || (is_mac && mac_deployment_target == "10.7")) {
         "objc/Framework/Classes/RTCNSGLVideoView.m",
         "objc/Framework/Headers/WebRTC/RTCNSGLVideoView.h",
       ]
-      libs = [ "OpenGL.framework" ]
+      libs = [
+        "CoreMedia.framework",
+        "OpenGL.framework",
+      ]
     }

     configs += [
@@ -181,7 +184,10 @@ if (is_ios || (is_mac && mac_deployment_target == "10.7")) {
       configs -= [ "//build/config/clang:find_bad_constructs" ]
     }

-    libs += [ "stdc++" ]
+    libs += [
+      "AVFoundation.framework",
+      "stdc++",
+    ]

     deps = [
       ":rtc_sdk_common_objc",
@@ -11,9 +11,7 @@
 #import "RTCPeerConnectionFactory+Private.h"

 #import "NSString+StdString.h"
-#if defined(WEBRTC_IOS)
 #import "RTCAVFoundationVideoSource+Private.h"
-#endif
 #import "RTCAudioTrack+Private.h"
 #import "RTCMediaStream+Private.h"
 #import "RTCPeerConnection+Private.h"
@@ -54,12 +52,8 @@

 - (RTCAVFoundationVideoSource *)avFoundationVideoSourceWithConstraints:
     (nullable RTCMediaConstraints *)constraints {
-#if defined(WEBRTC_IOS)
   return [[RTCAVFoundationVideoSource alloc] initWithFactory:self
                                                   constraints:constraints];
-#else
-  return nil;
-#endif
 }

 - (RTCAudioTrack *)audioTrackWithTrackId:(NSString *)trackId {
@@ -12,7 +12,9 @@

 #import <AVFoundation/AVFoundation.h>
 #import <Foundation/Foundation.h>
+#if TARGET_OS_IPHONE
 #import <UIKit/UIKit.h>
+#endif

 #import "RTCDispatcher+Private.h"
 #import "WebRTC/RTCLogging.h"
@@ -88,6 +90,7 @@ static cricket::VideoFormat const kDefaultFormat =
       return nil;
     }
     NSNotificationCenter *center = [NSNotificationCenter defaultCenter];
+#if TARGET_OS_IPHONE
     [center addObserver:self
                selector:@selector(deviceOrientationDidChange:)
                    name:UIDeviceOrientationDidChangeNotification
@@ -100,6 +103,7 @@ static cricket::VideoFormat const kDefaultFormat =
                selector:@selector(handleCaptureSessionInterruptionEnded:)
                    name:AVCaptureSessionInterruptionEndedNotification
                  object:_captureSession];
+#endif
     [center addObserver:self
                selector:@selector(handleCaptureSessionRuntimeError:)
                    name:AVCaptureSessionRuntimeErrorNotification
@@ -188,7 +192,9 @@ static cricket::VideoFormat const kDefaultFormat =
                                block:^{
     _orientationHasChanged = NO;
     [self updateOrientation];
+#if TARGET_OS_IPHONE
     [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
+#endif
     AVCaptureSession *captureSession = self.captureSession;
     [captureSession startRunning];
   }];
@@ -207,12 +213,15 @@ static cricket::VideoFormat const kDefaultFormat =
                                block:^{
     [_videoDataOutput setSampleBufferDelegate:nil queue:nullptr];
     [_captureSession stopRunning];
+#if TARGET_OS_IPHONE
     [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
+#endif
   }];
 }

 #pragma mark iOS notifications

+#if TARGET_OS_IPHONE
 - (void)deviceOrientationDidChange:(NSNotification *)notification {
   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                block:^{
@@ -220,6 +229,7 @@ static cricket::VideoFormat const kDefaultFormat =
     [self updateOrientation];
   }];
 }
+#endif

 #pragma mark AVCaptureVideoDataOutputSampleBufferDelegate

@@ -273,16 +283,21 @@ static cricket::VideoFormat const kDefaultFormat =
 }

 - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification {
-  NSError *error = notification.userInfo[AVCaptureSessionErrorKey];
+  NSError *error =
+      [notification.userInfo objectForKey:AVCaptureSessionErrorKey];
   RTCLogError(@"Capture session runtime error: %@", error.localizedDescription);

   [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                                block:^{
+#if TARGET_OS_IPHONE
     if (error.code == AVErrorMediaServicesWereReset) {
       [self handleNonFatalError];
     } else {
       [self handleFatalError];
     }
+#else
+    [self handleFatalError];
+#endif
   }];
 }

@@ -402,8 +417,13 @@ static cricket::VideoFormat const kDefaultFormat =

 - (AVCaptureDeviceInput *)frontCameraInput {
   if (!_frontCameraInput) {
+#if TARGET_OS_IPHONE
     AVCaptureDevice *frontCameraDevice =
         [self videoCaptureDeviceForPosition:AVCaptureDevicePositionFront];
+#else
+    AVCaptureDevice *frontCameraDevice =
+        [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
+#endif
     if (!frontCameraDevice) {
       RTCLogWarning(@"Failed to find front capture device.");
       return nil;
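A note on the Mac fallback in the hunk above: the patch simply takes the default video device, since AVCaptureDevicePositionFront has no meaning on OS X. An application that wants a particular camera could enumerate devices instead; the sketch below is illustrative only and not part of this change (the "FaceTime" match is an arbitrary example).

// Hedged sketch, not part of this patch: pick a specific camera on OS X by
// enumerating video devices rather than taking the default one.
#import <AVFoundation/AVFoundation.h>

static AVCaptureDevice *PreferredMacCamera(void) {
  for (AVCaptureDevice *device in
       [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
    // localizedName is e.g. "FaceTime HD Camera"; match whatever the app wants.
    if ([device.localizedName containsString:@"FaceTime"]) {
      return device;
    }
  }
  return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
}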
@@ -452,6 +472,7 @@ static cricket::VideoFormat const kDefaultFormat =
     // TODO(tkchin): set rotation bit on frames.
     return;
   }
+#if TARGET_OS_IPHONE
   AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
   switch ([UIDevice currentDevice].orientation) {
     case UIDeviceOrientationPortrait:
||||||
@ -475,6 +496,7 @@ static cricket::VideoFormat const kDefaultFormat =
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
connection.videoOrientation = orientation;
|
connection.videoOrientation = orientation;
|
||||||
|
#endif
|
||||||
}
|
}
|
||||||
|
|
||||||
// Update the current session input to match what's stored in _useBackCamera.
|
// Update the current session input to match what's stored in _useBackCamera.
|
||||||
|
|||||||
@@ -94,6 +94,11 @@
         ],
       },
       'link_settings': {
+        'xcode_settings': {
+          'OTHER_LDFLAGS': [
+            '-framework AVFoundation',
+          ],
+        },
         'libraries': [
           '-lstdc++',
         ],
@@ -101,6 +106,8 @@
       'sources': [
         'objc/Framework/Classes/RTCAudioTrack+Private.h',
         'objc/Framework/Classes/RTCAudioTrack.mm',
+        'objc/Framework/Classes/RTCAVFoundationVideoSource+Private.h',
+        'objc/Framework/Classes/RTCAVFoundationVideoSource.mm',
         'objc/Framework/Classes/RTCConfiguration+Private.h',
         'objc/Framework/Classes/RTCConfiguration.mm',
         'objc/Framework/Classes/RTCDataChannel+Private.h',
@@ -148,7 +155,10 @@
         'objc/Framework/Classes/RTCVideoSource.mm',
         'objc/Framework/Classes/RTCVideoTrack+Private.h',
         'objc/Framework/Classes/RTCVideoTrack.mm',
+        'objc/Framework/Classes/avfoundationvideocapturer.h',
+        'objc/Framework/Classes/avfoundationvideocapturer.mm',
         'objc/Framework/Headers/WebRTC/RTCAudioTrack.h',
+        'objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h',
         'objc/Framework/Headers/WebRTC/RTCConfiguration.h',
         'objc/Framework/Headers/WebRTC/RTCDataChannel.h',
         'objc/Framework/Headers/WebRTC/RTCDataChannelConfiguration.h',
@@ -174,12 +184,7 @@
       'conditions': [
         ['OS=="ios"', {
           'sources': [
-            'objc/Framework/Classes/RTCAVFoundationVideoSource+Private.h',
-            'objc/Framework/Classes/RTCAVFoundationVideoSource.mm',
             'objc/Framework/Classes/RTCEAGLVideoView.m',
-            'objc/Framework/Classes/avfoundationvideocapturer.h',
-            'objc/Framework/Classes/avfoundationvideocapturer.mm',
-            'objc/Framework/Headers/WebRTC/RTCAVFoundationVideoSource.h',
             'objc/Framework/Headers/WebRTC/RTCEAGLVideoView.h',
           ],
           'link_settings': {
@@ -201,6 +206,7 @@
           'link_settings': {
             'xcode_settings': {
               'OTHER_LDFLAGS': [
+                '-framework CoreMedia',
                 '-framework OpenGL',
               ],
             },