Add new view that renders local video using AVCaptureVideoPreviewLayer.

BUG=

Review URL: https://codereview.webrtc.org/1497393002

Cr-Commit-Position: refs/heads/master@{#10940}
haysc
2015-12-08 11:08:39 -08:00
committed by Commit bot
parent 70f9903e57
commit edd8fefa9b
13 changed files with 198 additions and 77 deletions

View File

@@ -405,7 +405,6 @@ def _CommonChecks(input_api, output_api):
input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeTodoHasOwner(
input_api, output_api))
results.extend(_CheckApprovedFilesLintClean(input_api, output_api))
results.extend(_CheckNativeApiHeaderChanges(input_api, output_api))
results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
results.extend(_CheckNoFRIEND_TEST(input_api, output_api))

View File

@@ -33,6 +33,8 @@
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import "webrtc/base/objc/RTCDispatcher.h"
// TODO(tkchin): support other formats.
static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480;
static cricket::VideoFormat const kDefaultFormat =
@@ -41,11 +43,6 @@ static cricket::VideoFormat const kDefaultFormat =
cricket::VideoFormat::FpsToInterval(30),
cricket::FOURCC_NV12);
// This queue is used to start and stop the capturer without blocking the
// calling thread. -[AVCaptureSession startRunning] blocks until the camera is
// running.
static dispatch_queue_t kBackgroundQueue = nil;
// This class is used to capture frames using AVFoundation APIs on iOS. It is
// meant to be owned by an instance of AVFoundationVideoCapturer. The reason for
// this is that other webrtc objects own cricket::VideoCapturer, which is not
@@ -80,15 +77,6 @@ static dispatch_queue_t kBackgroundQueue = nil;
@synthesize useBackCamera = _useBackCamera;
@synthesize isRunning = _isRunning;
+ (void)initialize {
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
kBackgroundQueue = dispatch_queue_create(
"com.google.webrtc.RTCAVFoundationCapturerBackground",
DISPATCH_QUEUE_SERIAL);
});
}
- (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer {
NSParameterAssert(capturer);
if (self = [super init]) {
@@ -132,9 +120,10 @@ static dispatch_queue_t kBackgroundQueue = nil;
_orientationHasChanged = NO;
[[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
AVCaptureSession* session = _captureSession;
dispatch_async(kBackgroundQueue, ^{
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
[session startRunning];
});
}];
_isRunning = YES;
}
@@ -144,9 +133,10 @@ static dispatch_queue_t kBackgroundQueue = nil;
}
[_videoOutput setSampleBufferDelegate:nil queue:nullptr];
AVCaptureSession* session = _captureSession;
dispatch_async(kBackgroundQueue, ^{
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
[session stopRunning];
});
}];
[[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
_isRunning = NO;
}

View File

@@ -368,6 +368,9 @@
'app/webrtc/objc/public/RTCEAGLVideoView.h',
'app/webrtc/objc/public/RTCAVFoundationVideoSource.h',
],
'dependencies': [
'<(webrtc_root)/base/base.gyp:rtc_base_objc',
],
'link_settings': {
'xcode_settings': {
'OTHER_LDFLAGS': [

View File

@@ -616,6 +616,10 @@ if (is_ios) {
public_configs = [ "..:common_inherited_config" ]
sources = [
"objc/RTCCameraPreviewView.h",
"objc/RTCCameraPreviewView.m",
"objc/RTCDispatcher.h",
"objc/RTCDispatcher.m",
"objc/RTCLogging.h",
"objc/RTCLogging.mm",
]

View File

@@ -33,8 +33,12 @@
'rtc_base',
],
'sources': [
'objc/RTCCameraPreviewView.h',
'objc/RTCCameraPreviewView.m',
'objc/RTCDispatcher.h',
'objc/RTCDispatcher.m',
'objc/RTCLogging.h',
'objc/RTCLogging.mm'
'objc/RTCLogging.mm',
],
'xcode_settings': {
'CLANG_ENABLE_OBJC_ARC': 'YES',

View File

@@ -0,0 +1,28 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
@class AVCaptureSession;
@class RTCAVFoundationVideoSource;
/** RTCCameraPreviewView is a view that renders local video from an
* AVCaptureSession.
*/
@interface RTCCameraPreviewView : UIView
/** The capture session being rendered in the view. The capture session
 * is assigned to the AVCaptureVideoPreviewLayer asynchronously on the
 * same queue that the AVCaptureSession is started and stopped on.
 */
@property(nonatomic, strong) AVCaptureSession *captureSession;
@end
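
A minimal usage sketch (hedged: the view controller and the |source|
variable are illustrative, not part of this change; the |captureSession|
accessor on RTCAVFoundationVideoSource is the one used later in this CL):

#import "webrtc/base/objc/RTCCameraPreviewView.h"

// In a view controller that already owns an RTCAVFoundationVideoSource
// named |source|:
RTCCameraPreviewView *previewView =
    [[RTCCameraPreviewView alloc] initWithFrame:CGRectZero];
[self.view addSubview:previewView];
// Hand the source's AVCaptureSession to the view. The assignment to the
// underlying AVCaptureVideoPreviewLayer happens asynchronously on the
// capture session queue.
previewView.captureSession = source.captureSession;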

View File

@@ -0,0 +1,47 @@
/*
* Copyright 2015 The WebRTC Project Authors. All rights reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#if !defined(__has_feature) || !__has_feature(objc_arc)
#error "This file requires ARC support."
#endif
#import "webrtc/base/objc/RTCCameraPreviewView.h"
#import <AVFoundation/AVFoundation.h>
#import "webrtc/base/objc/RTCDispatcher.h"
@implementation RTCCameraPreviewView
@synthesize captureSession = _captureSession;
+ (Class)layerClass {
return [AVCaptureVideoPreviewLayer class];
}
- (void)setCaptureSession:(AVCaptureSession *)captureSession {
if (_captureSession == captureSession) {
return;
}
_captureSession = captureSession;
AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
block:^{
previewLayer.session = captureSession;
}];
}
#pragma mark - Private
- (AVCaptureVideoPreviewLayer *)previewLayer {
return (AVCaptureVideoPreviewLayer *)self.layer;
}
@end
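
Design note: overriding +layerClass makes AVCaptureVideoPreviewLayer the
view's backing layer, so the preview tracks the view's bounds without a
separate renderer or frame callbacks; the session is attached on the
capture session queue so the assignment cannot race -startRunning and
-stopRunning.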

View File

@@ -0,0 +1,35 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import <Foundation/Foundation.h>
typedef NS_ENUM(NSInteger, RTCDispatcherQueueType) {
// Main dispatcher queue.
RTCDispatcherTypeMain,
// Used for starting/stopping AVCaptureSession, and assigning
// capture session to AVCaptureVideoPreviewLayer.
RTCDispatcherTypeCaptureSession,
};
/** Dispatcher that asynchronously dispatches blocks to a specific
* shared dispatch queue.
*/
@interface RTCDispatcher : NSObject
- (instancetype)init NS_UNAVAILABLE;
/** Dispatch the block asynchronously on the queue for dispatchType.
* @param dispatchType The queue type to dispatch on.
* @param block The block to dispatch asynchronously.
*/
+ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType
block:(dispatch_block_t)block;
@end
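
A short usage sketch: this mirrors how the capturer in this CL starts the
session off the calling thread, since -[AVCaptureSession startRunning]
blocks until the camera is running (the |session| local is illustrative):

#import <AVFoundation/AVFoundation.h>
#import "webrtc/base/objc/RTCDispatcher.h"

// Start the session on the shared capture session queue so the calling
// thread is not blocked.
AVCaptureSession *session = _captureSession;
[RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                             block:^{
                               [session startRunning];
                             }];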

View File

@@ -0,0 +1,46 @@
/*
* Copyright 2015 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#import "RTCDispatcher.h"
static dispatch_queue_t kCaptureSessionQueue = nil;
@implementation RTCDispatcher {
dispatch_queue_t _captureSessionQueue;
}
+ (void)initialize {
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
kCaptureSessionQueue = dispatch_queue_create(
"org.webrtc.RTCDispatcherCaptureSession",
DISPATCH_QUEUE_SERIAL);
});
}
+ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType
block:(dispatch_block_t)block {
dispatch_queue_t queue = [self dispatchQueueForType:dispatchType];
dispatch_async(queue, block);
}
#pragma mark - Private
+ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType {
switch (dispatchType) {
case RTCDispatcherTypeMain:
return dispatch_get_main_queue();
case RTCDispatcherTypeCaptureSession:
return kCaptureSessionQueue;
}
}
@end

View File

@@ -21,15 +21,8 @@ static CGFloat const kRoomTextFieldMargin = 8;
static CGFloat const kCallControlMargin = 8;
static CGFloat const kAppLabelHeight = 20;
@class ARDRoomTextField;
@protocol ARDRoomTextFieldDelegate <NSObject>
- (void)roomTextField:(ARDRoomTextField *)roomTextField
didInputRoom:(NSString *)room;
@end
// Helper view that contains a text field and a clear button.
@interface ARDRoomTextField : UIView <UITextFieldDelegate>
@property(nonatomic, weak) id<ARDRoomTextFieldDelegate> delegate;
@property(nonatomic, readonly) NSString *roomText;
@end
@@ -38,14 +31,14 @@ static CGFloat const kAppLabelHeight = 20;
UIButton *_clearButton;
}
@synthesize delegate = _delegate;
- (instancetype)initWithFrame:(CGRect)frame {
if (self = [super initWithFrame:frame]) {
_roomText = [[UITextField alloc] initWithFrame:CGRectZero];
_roomText.borderStyle = UITextBorderStyleNone;
_roomText.font = [UIFont fontWithName:@"Roboto" size:12];
_roomText.placeholder = @"Room name";
_roomText.autocorrectionType = UITextAutocorrectionTypeNo;
_roomText.autocapitalizationType = UITextAutocapitalizationTypeNone;
_roomText.delegate = self;
[_roomText addTarget:self
action:@selector(textFieldDidChange:)
@@ -96,10 +89,6 @@ static CGFloat const kAppLabelHeight = 20;
#pragma mark - UITextFieldDelegate
- (void)textFieldDidEndEditing:(UITextField *)textField {
[_delegate roomTextField:self didInputRoom:textField.text];
}
- (BOOL)textFieldShouldReturn:(UITextField *)textField {
// There is no other control that can take focus, so manually resign focus
// when return (Join) is pressed to trigger |textFieldDidEndEditing|.
@@ -125,9 +114,6 @@ static CGFloat const kAppLabelHeight = 20;
@end
@interface ARDMainView () <ARDRoomTextFieldDelegate>
@end
@implementation ARDMainView {
UILabel *_appLabel;
ARDRoomTextField *_roomText;
@@ -151,7 +137,6 @@ static CGFloat const kAppLabelHeight = 20;
[self addSubview:_appLabel];
_roomText = [[ARDRoomTextField alloc] initWithFrame:CGRectZero];
_roomText.delegate = self;
[self addSubview:_roomText];
UIFont *controlFont = [UIFont fontWithName:@"Roboto" size:20];
@@ -260,16 +245,6 @@ static CGFloat const kAppLabelHeight = 20;
_startCallButton.frame.size.height);
}
#pragma mark - ARDRoomTextFieldDelegate
- (void)roomTextField:(ARDRoomTextField *)roomTextField
didInputRoom:(NSString *)room {
[_delegate mainView:self
didInputRoom:room
isLoopback:NO
isAudioOnly:_audioOnlySwitch.isOn];
}
#pragma mark - Private
- (void)onStartCall:(id)sender {

View File

@@ -10,6 +10,7 @@
#import <UIKit/UIKit.h>
#import "webrtc/base/objc/RTCCameraPreviewView.h"
#import "RTCEAGLVideoView.h"
#import "ARDStatsView.h"
@@ -33,7 +34,7 @@
@interface ARDVideoCallView : UIView
@property(nonatomic, readonly) UILabel *statusLabel;
@property(nonatomic, readonly) RTCEAGLVideoView *localVideoView;
@property(nonatomic, readonly) RTCCameraPreviewView *localVideoView;
@property(nonatomic, readonly) RTCEAGLVideoView *remoteVideoView;
@property(nonatomic, readonly) ARDStatsView *statsView;
@property(nonatomic, weak) id<ARDVideoCallViewDelegate> delegate;

View File

@@ -25,7 +25,6 @@ static CGFloat const kStatusBarHeight = 20;
@implementation ARDVideoCallView {
UIButton *_cameraSwitchButton;
UIButton *_hangupButton;
CGSize _localVideoSize;
CGSize _remoteVideoSize;
BOOL _useRearCamera;
}
@@ -42,10 +41,7 @@ static CGFloat const kStatusBarHeight = 20;
_remoteVideoView.delegate = self;
[self addSubview:_remoteVideoView];
// TODO(tkchin): replace this with a view that renders layer from
// AVCaptureSession.
_localVideoView = [[RTCEAGLVideoView alloc] initWithFrame:CGRectZero];
_localVideoView.delegate = self;
_localVideoView = [[RTCCameraPreviewView alloc] initWithFrame:CGRectZero];
[self addSubview:_localVideoView];
_statsView = [[ARDStatsView alloc] initWithFrame:CGRectZero];
@@ -114,22 +110,15 @@ static CGFloat const kStatusBarHeight = 20;
_remoteVideoView.frame = bounds;
}
if (_localVideoSize.width && _localVideoSize.height > 0) {
// Aspect fit local video view into a square box.
CGRect localVideoFrame =
CGRectMake(0, 0, kLocalVideoViewSize, kLocalVideoViewSize);
localVideoFrame =
AVMakeRectWithAspectRatioInsideRect(_localVideoSize, localVideoFrame);
// Place the view in the bottom right.
localVideoFrame.origin.x = CGRectGetMaxX(bounds)
- localVideoFrame.size.width - kLocalVideoViewPadding;
localVideoFrame.origin.y = CGRectGetMaxY(bounds)
- localVideoFrame.size.height - kLocalVideoViewPadding;
_localVideoView.frame = localVideoFrame;
} else {
_localVideoView.frame = bounds;
}
// Place stats at the top.
CGSize statsSize = [_statsView sizeThatFits:bounds.size];
@@ -159,10 +148,7 @@ static CGFloat const kStatusBarHeight = 20;
#pragma mark - RTCEAGLVideoViewDelegate
- (void)videoView:(RTCEAGLVideoView*)videoView didChangeVideoSize:(CGSize)size {
if (videoView == _localVideoView) {
_localVideoSize = size;
_localVideoView.hidden = CGSizeEqualToSize(CGSizeZero, _localVideoSize);
} else if (videoView == _remoteVideoView) {
if (videoView == _remoteVideoView) {
_remoteVideoSize = size;
}
[self setNeedsLayout];

View File

@@ -128,18 +128,21 @@
if (_localVideoTrack == localVideoTrack) {
return;
}
[_localVideoTrack removeRenderer:_videoCallView.localVideoView];
_localVideoTrack = nil;
[_videoCallView.localVideoView renderFrame:nil];
_localVideoTrack = localVideoTrack;
[_localVideoTrack addRenderer:_videoCallView.localVideoView];
RTCAVFoundationVideoSource *source = nil;
if ([localVideoTrack.source
isKindOfClass:[RTCAVFoundationVideoSource class]]) {
source = (RTCAVFoundationVideoSource*)localVideoTrack.source;
}
_videoCallView.localVideoView.captureSession = source.captureSession;
}
- (void)setRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
if (_remoteVideoTrack == remoteVideoTrack) {
return;
}
[_remoteVideoTrack removeRenderer:_videoCallView.localVideoView];
[_remoteVideoTrack removeRenderer:_videoCallView.remoteVideoView];
_remoteVideoTrack = nil;
[_videoCallView.remoteVideoView renderFrame:nil];
_remoteVideoTrack = remoteVideoTrack;