Add new view that renders local video using AVCaptureVideoPreviewLayer.
BUG=
Review URL: https://codereview.webrtc.org/1497393002
Cr-Commit-Position: refs/heads/master@{#10940}
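As an illustration of how the new view is meant to be wired up (a minimal sketch based on the ARDVideoCallViewController change below; the helper function is hypothetical, not part of this CL):

#import "RTCAVFoundationVideoSource.h"
#import "webrtc/base/objc/RTCCameraPreviewView.h"

// Hypothetical helper: hand the camera's AVCaptureSession to the preview view.
// RTCCameraPreviewView renders the session via AVCaptureVideoPreviewLayer, so
// the local RTCVideoTrack no longer needs a renderer attached for self-view.
static void AttachLocalPreview(RTCCameraPreviewView *previewView,
                               RTCAVFoundationVideoSource *source) {
  previewView.captureSession = source.captureSession;
}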
@@ -405,7 +405,6 @@ def _CommonChecks(input_api, output_api):
       input_api, output_api))
   results.extend(input_api.canned_checks.CheckChangeTodoHasOwner(
       input_api, output_api))
-  results.extend(_CheckApprovedFilesLintClean(input_api, output_api))
   results.extend(_CheckNativeApiHeaderChanges(input_api, output_api))
   results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
   results.extend(_CheckNoFRIEND_TEST(input_api, output_api))
@@ -33,6 +33,8 @@
 #import <Foundation/Foundation.h>
 #import <UIKit/UIKit.h>
 
+#import "webrtc/base/objc/RTCDispatcher.h"
+
 // TODO(tkchin): support other formats.
 static NSString* const kDefaultPreset = AVCaptureSessionPreset640x480;
 static cricket::VideoFormat const kDefaultFormat =
@@ -41,11 +43,6 @@ static cricket::VideoFormat const kDefaultFormat =
                    cricket::VideoFormat::FpsToInterval(30),
                    cricket::FOURCC_NV12);
 
-// This queue is used to start and stop the capturer without blocking the
-// calling thread. -[AVCaptureSession startRunning] blocks until the camera is
-// running.
-static dispatch_queue_t kBackgroundQueue = nil;
-
 // This class used to capture frames using AVFoundation APIs on iOS. It is meant
 // to be owned by an instance of AVFoundationVideoCapturer. The reason for this
 // because other webrtc objects own cricket::VideoCapturer, which is not
@@ -80,15 +77,6 @@ static dispatch_queue_t kBackgroundQueue = nil;
 @synthesize useBackCamera = _useBackCamera;
 @synthesize isRunning = _isRunning;
 
-+ (void)initialize {
-  static dispatch_once_t onceToken;
-  dispatch_once(&onceToken, ^{
-    kBackgroundQueue = dispatch_queue_create(
-        "com.google.webrtc.RTCAVFoundationCapturerBackground",
-        DISPATCH_QUEUE_SERIAL);
-  });
-}
-
 - (instancetype)initWithCapturer:(webrtc::AVFoundationVideoCapturer*)capturer {
   NSParameterAssert(capturer);
   if (self = [super init]) {
@@ -132,9 +120,10 @@ static dispatch_queue_t kBackgroundQueue = nil;
   _orientationHasChanged = NO;
   [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications];
   AVCaptureSession* session = _captureSession;
-  dispatch_async(kBackgroundQueue, ^{
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
     [session startRunning];
-  });
+  }];
   _isRunning = YES;
 }
 
@@ -144,9 +133,10 @@ static dispatch_queue_t kBackgroundQueue = nil;
   }
   [_videoOutput setSampleBufferDelegate:nil queue:nullptr];
   AVCaptureSession* session = _captureSession;
-  dispatch_async(kBackgroundQueue, ^{
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
     [session stopRunning];
-  });
+  }];
   [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications];
   _isRunning = NO;
 }
@@ -368,6 +368,9 @@
   'app/webrtc/objc/public/RTCEAGLVideoView.h',
   'app/webrtc/objc/public/RTCAVFoundationVideoSource.h',
 ],
+'dependencies': [
+  '<(webrtc_root)/base/base.gyp:rtc_base_objc',
+],
 'link_settings': {
   'xcode_settings': {
     'OTHER_LDFLAGS': [
@@ -616,6 +616,10 @@ if (is_ios) {
     public_configs = [ "..:common_inherited_config" ]
 
     sources = [
+      "objc/RTCCameraPreviewView.h",
+      "objc/RTCCameraPreviewView.m",
+      "objc/RTCDispatcher.h",
+      "objc/RTCDispatcher.m",
       "objc/RTCLogging.h",
       "objc/RTCLogging.mm",
     ]
@@ -33,8 +33,12 @@
       'rtc_base',
     ],
     'sources': [
+      'objc/RTCCameraPreviewView.h',
+      'objc/RTCCameraPreviewView.m',
+      'objc/RTCDispatcher.h',
+      'objc/RTCDispatcher.m',
       'objc/RTCLogging.h',
-      'objc/RTCLogging.mm'
+      'objc/RTCLogging.mm',
     ],
     'xcode_settings': {
       'CLANG_ENABLE_OBJC_ARC': 'YES',
webrtc/base/objc/RTCCameraPreviewView.h (new file, 28 lines)
@@ -0,0 +1,28 @@
+/*
+ *  Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS. All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+#import <UIKit/UIKit.h>
+
+@class AVCaptureSession;
+@class RTCAVFoundationVideoSource;
+
+/** RTCCameraPreviewView is a view that renders local video from an
+ *  AVCaptureSession.
+ */
+@interface RTCCameraPreviewView : UIView
+
+/** The capture session being rendered in the view. Capture session
+ *  is assigned to AVCaptureVideoPreviewLayer async in the same
+ *  queue that the AVCaptureSession is started/stopped.
+ */
+@property(nonatomic, strong) AVCaptureSession *captureSession;
+
+@end
webrtc/base/objc/RTCCameraPreviewView.m (new file, 47 lines)
@@ -0,0 +1,47 @@
+/*
+ *  Copyright 2015 The WebRTC Project Authors. All rights reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS. All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if !defined(__has_feature) || !__has_feature(objc_arc)
+#error "This file requires ARC support."
+#endif
+
+#import "webrtc/base/objc/RTCCameraPreviewView.h"
+
+#import <AVFoundation/AVFoundation.h>
+
+#import "webrtc/base/objc/RTCDispatcher.h"
+
+@implementation RTCCameraPreviewView
+
+@synthesize captureSession = _captureSession;
+
++ (Class)layerClass {
+  return [AVCaptureVideoPreviewLayer class];
+}
+
+- (void)setCaptureSession:(AVCaptureSession *)captureSession {
+  if (_captureSession == captureSession) {
+    return;
+  }
+  _captureSession = captureSession;
+  AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer];
+  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
+                               block:^{
+    previewLayer.session = captureSession;
+  }];
+}
+
+#pragma mark - Private
+
+- (AVCaptureVideoPreviewLayer *)previewLayer {
+  return (AVCaptureVideoPreviewLayer *)self.layer;
+}
+
+@end
webrtc/base/objc/RTCDispatcher.h (new file, 35 lines)
@@ -0,0 +1,35 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS. All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Foundation/Foundation.h>
+
+typedef NS_ENUM(NSInteger, RTCDispatcherQueueType) {
+  // Main dispatcher queue.
+  RTCDispatcherTypeMain,
+  // Used for starting/stopping AVCaptureSession, and assigning
+  // capture session to AVCaptureVideoPreviewLayer.
+  RTCDispatcherTypeCaptureSession,
+};
+
+/** Dispatcher that asynchronously dispatches blocks to a specific
+ *  shared dispatch queue.
+ */
+@interface RTCDispatcher : NSObject
+
+- (instancetype)init NS_UNAVAILABLE;
+
+/** Dispatch the block asynchronously on the queue for dispatchType.
+ *  @param dispatchType The queue type to dispatch on.
+ *  @param block The block to dispatch asynchronously.
+ */
++ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType
+                      block:(dispatch_block_t)block;
+
+@end
webrtc/base/objc/RTCDispatcher.m (new file, 46 lines)
@@ -0,0 +1,46 @@
+/*
+ *  Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS. All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "RTCDispatcher.h"
+
+static dispatch_queue_t kCaptureSessionQueue = nil;
+
+@implementation RTCDispatcher {
+  dispatch_queue_t _captureSessionQueue;
+}
+
++ (void)initialize {
+  static dispatch_once_t onceToken;
+  dispatch_once(&onceToken, ^{
+    kCaptureSessionQueue = dispatch_queue_create(
+        "org.webrtc.RTCDispatcherCaptureSession",
+        DISPATCH_QUEUE_SERIAL);
+  });
+}
+
++ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType
+                      block:(dispatch_block_t)block {
+  dispatch_queue_t queue = [self dispatchQueueForType:dispatchType];
+  dispatch_async(queue, block);
+}
+
+#pragma mark - Private
+
++ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType {
+  switch (dispatchType) {
+    case RTCDispatcherTypeMain:
+      return dispatch_get_main_queue();
+    case RTCDispatcherTypeCaptureSession:
+      return kCaptureSessionQueue;
+  }
+}
+
+@end
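For reference, a minimal sketch of how RTCDispatcher is consumed elsewhere in this change (the helper function is hypothetical; the pattern mirrors the avfoundationvideocapturer.mm hunks above):

#import <AVFoundation/AVFoundation.h>

#import "webrtc/base/objc/RTCDispatcher.h"

// Hypothetical helper: start a capture session off the calling thread.
// -[AVCaptureSession startRunning] blocks until the camera is running, so it
// is dispatched to the shared capture-session queue instead.
static void StartCaptureSessionAsync(AVCaptureSession *session) {
  [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession
                               block:^{
                                 [session startRunning];
                               }];
}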
@@ -21,15 +21,8 @@ static CGFloat const kRoomTextFieldMargin = 8;
 static CGFloat const kCallControlMargin = 8;
 static CGFloat const kAppLabelHeight = 20;
 
-@class ARDRoomTextField;
-@protocol ARDRoomTextFieldDelegate <NSObject>
-- (void)roomTextField:(ARDRoomTextField *)roomTextField
-         didInputRoom:(NSString *)room;
-@end
-
 // Helper view that contains a text field and a clear button.
 @interface ARDRoomTextField : UIView <UITextFieldDelegate>
-@property(nonatomic, weak) id<ARDRoomTextFieldDelegate> delegate;
 @property(nonatomic, readonly) NSString *roomText;
 @end
 
@@ -38,14 +31,14 @@ static CGFloat const kAppLabelHeight = 20;
   UIButton *_clearButton;
 }
 
-@synthesize delegate = _delegate;
-
 - (instancetype)initWithFrame:(CGRect)frame {
   if (self = [super initWithFrame:frame]) {
     _roomText = [[UITextField alloc] initWithFrame:CGRectZero];
     _roomText.borderStyle = UITextBorderStyleNone;
     _roomText.font = [UIFont fontWithName:@"Roboto" size:12];
     _roomText.placeholder = @"Room name";
+    _roomText.autocorrectionType = UITextAutocorrectionTypeNo;
+    _roomText.autocapitalizationType = UITextAutocapitalizationTypeNone;
     _roomText.delegate = self;
     [_roomText addTarget:self
                   action:@selector(textFieldDidChange:)
@@ -96,10 +89,6 @@ static CGFloat const kAppLabelHeight = 20;
 
 #pragma mark - UITextFieldDelegate
 
-- (void)textFieldDidEndEditing:(UITextField *)textField {
-  [_delegate roomTextField:self didInputRoom:textField.text];
-}
-
 - (BOOL)textFieldShouldReturn:(UITextField *)textField {
   // There is no other control that can take focus, so manually resign focus
   // when return (Join) is pressed to trigger |textFieldDidEndEditing|.
@@ -125,9 +114,6 @@ static CGFloat const kAppLabelHeight = 20;
 
 @end
 
-@interface ARDMainView () <ARDRoomTextFieldDelegate>
-@end
-
 @implementation ARDMainView {
   UILabel *_appLabel;
   ARDRoomTextField *_roomText;
@@ -151,7 +137,6 @@ static CGFloat const kAppLabelHeight = 20;
     [self addSubview:_appLabel];
 
     _roomText = [[ARDRoomTextField alloc] initWithFrame:CGRectZero];
-    _roomText.delegate = self;
     [self addSubview:_roomText];
 
     UIFont *controlFont = [UIFont fontWithName:@"Roboto" size:20];
@@ -260,16 +245,6 @@ static CGFloat const kAppLabelHeight = 20;
                                         _startCallButton.frame.size.height);
 }
 
-#pragma mark - ARDRoomTextFieldDelegate
-
-- (void)roomTextField:(ARDRoomTextField *)roomTextField
-         didInputRoom:(NSString *)room {
-  [_delegate mainView:self
-         didInputRoom:room
-           isLoopback:NO
-          isAudioOnly:_audioOnlySwitch.isOn];
-}
-
 #pragma mark - Private
 
 - (void)onStartCall:(id)sender {
@@ -10,6 +10,7 @@
 
 #import <UIKit/UIKit.h>
 
+#import "webrtc/base/objc/RTCCameraPreviewView.h"
 #import "RTCEAGLVideoView.h"
 
 #import "ARDStatsView.h"
@@ -33,7 +34,7 @@
 @interface ARDVideoCallView : UIView
 
 @property(nonatomic, readonly) UILabel *statusLabel;
-@property(nonatomic, readonly) RTCEAGLVideoView *localVideoView;
+@property(nonatomic, readonly) RTCCameraPreviewView *localVideoView;
 @property(nonatomic, readonly) RTCEAGLVideoView *remoteVideoView;
 @property(nonatomic, readonly) ARDStatsView *statsView;
 @property(nonatomic, weak) id<ARDVideoCallViewDelegate> delegate;
|
@ -25,7 +25,6 @@ static CGFloat const kStatusBarHeight = 20;
|
|||||||
@implementation ARDVideoCallView {
|
@implementation ARDVideoCallView {
|
||||||
UIButton *_cameraSwitchButton;
|
UIButton *_cameraSwitchButton;
|
||||||
UIButton *_hangupButton;
|
UIButton *_hangupButton;
|
||||||
CGSize _localVideoSize;
|
|
||||||
CGSize _remoteVideoSize;
|
CGSize _remoteVideoSize;
|
||||||
BOOL _useRearCamera;
|
BOOL _useRearCamera;
|
||||||
}
|
}
|
||||||
@@ -42,10 +41,7 @@ static CGFloat const kStatusBarHeight = 20;
     _remoteVideoView.delegate = self;
     [self addSubview:_remoteVideoView];
 
-    // TODO(tkchin): replace this with a view that renders layer from
-    // AVCaptureSession.
-    _localVideoView = [[RTCEAGLVideoView alloc] initWithFrame:CGRectZero];
-    _localVideoView.delegate = self;
+    _localVideoView = [[RTCCameraPreviewView alloc] initWithFrame:CGRectZero];
     [self addSubview:_localVideoView];
 
     _statsView = [[ARDStatsView alloc] initWithFrame:CGRectZero];
@@ -114,22 +110,15 @@ static CGFloat const kStatusBarHeight = 20;
     _remoteVideoView.frame = bounds;
   }
 
-  if (_localVideoSize.width && _localVideoSize.height > 0) {
-    // Aspect fit local video view into a square box.
-    CGRect localVideoFrame =
-        CGRectMake(0, 0, kLocalVideoViewSize, kLocalVideoViewSize);
-    localVideoFrame =
-        AVMakeRectWithAspectRatioInsideRect(_localVideoSize, localVideoFrame);
-
-    // Place the view in the bottom right.
-    localVideoFrame.origin.x = CGRectGetMaxX(bounds)
-        - localVideoFrame.size.width - kLocalVideoViewPadding;
-    localVideoFrame.origin.y = CGRectGetMaxY(bounds)
-        - localVideoFrame.size.height - kLocalVideoViewPadding;
-    _localVideoView.frame = localVideoFrame;
-  } else {
-    _localVideoView.frame = bounds;
-  }
+  // Aspect fit local video view into a square box.
+  CGRect localVideoFrame =
+      CGRectMake(0, 0, kLocalVideoViewSize, kLocalVideoViewSize);
+  // Place the view in the bottom right.
+  localVideoFrame.origin.x = CGRectGetMaxX(bounds)
+      - localVideoFrame.size.width - kLocalVideoViewPadding;
+  localVideoFrame.origin.y = CGRectGetMaxY(bounds)
+      - localVideoFrame.size.height - kLocalVideoViewPadding;
+  _localVideoView.frame = localVideoFrame;
 
   // Place stats at the top.
   CGSize statsSize = [_statsView sizeThatFits:bounds.size];
||||||
@ -159,10 +148,7 @@ static CGFloat const kStatusBarHeight = 20;
|
|||||||
#pragma mark - RTCEAGLVideoViewDelegate
|
#pragma mark - RTCEAGLVideoViewDelegate
|
||||||
|
|
||||||
- (void)videoView:(RTCEAGLVideoView*)videoView didChangeVideoSize:(CGSize)size {
|
- (void)videoView:(RTCEAGLVideoView*)videoView didChangeVideoSize:(CGSize)size {
|
||||||
if (videoView == _localVideoView) {
|
if (videoView == _remoteVideoView) {
|
||||||
_localVideoSize = size;
|
|
||||||
_localVideoView.hidden = CGSizeEqualToSize(CGSizeZero, _localVideoSize);
|
|
||||||
} else if (videoView == _remoteVideoView) {
|
|
||||||
_remoteVideoSize = size;
|
_remoteVideoSize = size;
|
||||||
}
|
}
|
||||||
[self setNeedsLayout];
|
[self setNeedsLayout];
|
||||||
|
@@ -128,18 +128,21 @@
   if (_localVideoTrack == localVideoTrack) {
     return;
   }
-  [_localVideoTrack removeRenderer:_videoCallView.localVideoView];
   _localVideoTrack = nil;
-  [_videoCallView.localVideoView renderFrame:nil];
   _localVideoTrack = localVideoTrack;
-  [_localVideoTrack addRenderer:_videoCallView.localVideoView];
+  RTCAVFoundationVideoSource *source = nil;
+  if ([localVideoTrack.source
+          isKindOfClass:[RTCAVFoundationVideoSource class]]) {
+    source = (RTCAVFoundationVideoSource*)localVideoTrack.source;
+  }
+  _videoCallView.localVideoView.captureSession = source.captureSession;
 }
 
 - (void)setRemoteVideoTrack:(RTCVideoTrack *)remoteVideoTrack {
   if (_remoteVideoTrack == remoteVideoTrack) {
     return;
   }
-  [_remoteVideoTrack removeRenderer:_videoCallView.localVideoView];
+  [_remoteVideoTrack removeRenderer:_videoCallView.remoteVideoView];
   _remoteVideoTrack = nil;
   [_videoCallView.remoteVideoView renderFrame:nil];
   _remoteVideoTrack = remoteVideoTrack;